Environment (Please complete the following information):
OS: Windows 10
Browser: Chrome
Docker Desktop
Docker Engine: v20.10.6
Kubernetes: v1.19.7
Description
Running the activepower clustering demo from the tutorial raises `AttributeError: 'EmbeddingColumn' object has no attribute 'key'`.
The demo is the activepower_clustering tutorial. I set up the MySQL database locally with the SQL script, so I have the same data as the demo.
Then I ran:
%%sqlflow SELECT * FROM sql_flow_test.activepower_train TO TRAIN sqlflow_models.DeepEmbeddingClusterModel WITH model.n_clusters=3, model.pretrain_epochs=10, model.train_max_iters=800, model.train_lr=0.01, model.pretrain_lr=1, train.batch_size=256 COLUMN m1,m2,m3,m4,m5,m6,m7,m8,m9,m10,m11,m12,m13,m14,m15,m16,m17,m18,m19,m20,m21,m22,m23,m24,m25,m26,m27,m28,m29,m30,m31,m32,m33,m34,m35,m36,m37,m38,m39,m40,m41,m42,m43,m44,m45,m46,m47,m48 INTO sqlflow_models.my_activepower_train_model;
====
2021/05/20 10:02:13 SQLFlow Step Execute:
SELECT * FROM sql_flow_test.activepower_train
TO TRAIN sqlflow_models.DeepEmbeddingClusterModel
WITH
model.n_clusters=3,
model.pretrain_epochs=10,
model.train_max_iters=800,
model.train_lr=0.01,
model.pretrain_lr=1,
train.batch_size=256
COLUMN m1,m2,m3,m4,m5,m6,m7,m8,m9,m10,m11,m12,m13,m14,m15,m16,m17,m18,m19,m20,m21,m22,m23,m24,m25,m26,m27,m28,m29,m30,m31,m32,m33,m34,m35,m36,m37,m38,m39,m40,m41,m42,m43,m44,m45,m46,m47,m48
INTO sqlflow_models.my_activepower_train_model;
Start training using keras model...
2021-05-20 10:02:38.341380 Start pre_train.
2021-05-20 10:02:38.341461 Start preparing training dataset to save into memory.
workflow step failed: runSQLProgram error: failed: exit status 1
==========Generated Code:==========
# -*- coding: utf-8 -*-
import copy
import traceback
import tensorflow as tf
import runtime
from runtime.tensorflow.train import train
from runtime.tensorflow.get_tf_version import tf_is_version2
from tensorflow.estimator import (DNNClassifier,
                                  DNNRegressor,
                                  LinearClassifier,
                                  LinearRegressor,
                                  BoostedTreesClassifier,
                                  BoostedTreesRegressor,
                                  DNNLinearCombinedClassifier,
                                  DNNLinearCombinedRegressor)
if tf_is_version2():
    from tensorflow.keras.optimizers import Adadelta, Adagrad, Adam, Adamax, Ftrl, Nadam, RMSprop, SGD
    from tensorflow.keras.losses import BinaryCrossentropy, CategoricalCrossentropy, CategoricalHinge, CosineSimilarity, Hinge, Huber, KLDivergence, LogCosh, MeanAbsoluteError, MeanAbsolutePercentageError, MeanSquaredError, MeanSquaredLogarithmicError, Poisson, SparseCategoricalCrossentropy, SquaredHinge
else:
    from tensorflow.train import AdadeltaOptimizer, AdagradOptimizer, AdamOptimizer, FtrlOptimizer, RMSPropOptimizer, GradientDescentOptimizer, MomentumOptimizer
    from tensorflow.keras.losses import BinaryCrossentropy, CategoricalCrossentropy, CategoricalHinge, CosineSimilarity, Hinge, Huber, KLDivergence, LogCosh, MeanAbsoluteError, MeanAbsolutePercentageError, MeanSquaredError, MeanSquaredLogarithmicError, Poisson, SparseCategoricalCrossentropy, SquaredHinge
try:
    import sqlflow_models
except Exception as e:
    print("failed to import sqlflow_models: %s", e)
    traceback.print_exc()
feature_column_names = [
"dates",
"m1",
"m2",
"m3",
"m4",
"m5",
"m6",
"m7",
"m8",
"m9",
"m10",
"m11",
"m12",
"m13",
"m14",
"m15",
"m16",
"m17",
"m18",
"m19",
"m20",
"m21",
"m22",
"m23",
"m24",
"m25",
"m26",
"m27",
"m28",
"m29",
"m30",
"m31",
"m32",
"m33",
"m34",
"m35",
"m36",
"m37",
"m38",
"m39",
"m40",
"m41",
"m42",
"m43",
"m44",
"m45",
"m46",
"m47",
"m48",
"class",
]
# feature_column_names_map is used to determine the order of feature columns of each target:
# e.g. when using DNNLinearCombinedClassifer.
# feature_column_names_map will be saved to a single file when using PAI.
feature_column_names_map = dict()
feature_column_names_map["feature_columns"] = ["dates","m1","m2","m3","m4","m5","m6","m7","m8","m9","m10","m11","m12","m13","m14","m15","m16","m17","m18","m19","m20","m21","m22","m23","m24","m25","m26","m27","m28","m29","m30","m31","m32","m33","m34","m35","m36","m37","m38","m39","m40","m41","m42","m43","m44","m45","m46","m47","m48","class",]
feature_metas = dict()
feature_metas["dates"] = {
}
feature_metas["m1"] = {
}
feature_metas["m2"] = {
}
feature_metas["m3"] = {
}
feature_metas["m4"] = {
}
feature_metas["m5"] = {
}
feature_metas["m6"] = {
}
feature_metas["m7"] = {
}
feature_metas["m8"] = {
}
feature_metas["m9"] = {
}
feature_metas["m10"] = {
}
feature_metas["m11"] = {
}
feature_metas["m12"] = {
}
feature_metas["m13"] = {
}
feature_metas["m14"] = {
}
feature_metas["m15"] = {
}
feature_metas["m16"] = {
}
feature_metas["m17"] = {
}
feature_metas["m18"] = {
}
feature_metas["m19"] = {
}
feature_metas["m20"] = {
}
feature_metas["m21"] = {
}
feature_metas["m22"] = {
}
feature_metas["m23"] = {
}
feature_metas["m24"] = {
}
feature_metas["m25"] = {
}
feature_metas["m26"] = {
}
feature_metas["m27"] = {
}
feature_metas["m28"] = {
}
feature_metas["m29"] = {
}
feature_metas["m30"] = {
}
feature_metas["m31"] = {
}
feature_metas["m32"] = {
}
feature_metas["m33"] = {
}
feature_metas["m34"] = {
}
feature_metas["m35"] = {
}
feature_metas["m36"] = {
}
feature_metas["m37"] = {
}
feature_metas["m38"] = {
}
feature_metas["m39"] = {
}
feature_metas["m40"] = {
}
feature_metas["m41"] = {
}
feature_metas["m42"] = {
}
feature_metas["m43"] = {
}
feature_metas["m44"] = {
}
feature_metas["m45"] = {
}
feature_metas["m46"] = {
}
feature_metas["m47"] = {
}
feature_metas["m48"] = {
}
feature_metas["class"] = {
}
label_meta = {
}
model_params=dict()
model_params["n_clusters"]=3
model_params["pretrain_epochs"]=10
model_params["pretrain_lr"]=1
model_params["train_lr"]=0.010000
model_params["train_max_iters"]=800
# Construct optimizer objects to pass to model initializer.
# The original model_params is serializable (do not have tf.xxx objects).
model_params_constructed = copy.deepcopy(model_params)
for optimizer_arg in ["optimizer", "dnn_optimizer", "linear_optimizer"]:
    if optimizer_arg in model_params_constructed:
        model_params_constructed[optimizer_arg] = eval(model_params_constructed[optimizer_arg])
if "loss" in model_params_constructed:
    model_params_constructed["loss"] = eval(model_params_constructed["loss"])
# feature_columns_code will be used to save the training informations together
# with the saved model.
feature_columns_code = """{"feature_columns": [tf.feature_column.embedding_column(tf.feature_column.categorical_column_with_vocabulary_list(key="dates", vocabulary_list=["2/26","3/5","3/17","3/23","4/10","5/5","1/13","1/20","2/24","4/24","6/23","1/6","1/9","6/3","2/10","3/16","2/18","4/11","4/22","5/3","6/22","1/10","1/30","6/4","6/11","6/17","6/26","1/28","5/20","5/21","2/11","3/1","3/8","4/26","5/6","5/9","1/21","2/4","5/17","4/21","4/25","5/29","6/27","2/23","3/3","4/18","4/23","5/12","2/2","3/14","3/22","4/2","4/20","5/8","5/23","5/25","1/26","1/27","6/1","4/13","1/22","3/13","4/6","4/19","2/7","3/27","4/28","6/2","6/8","1/4","1/25","3/9","3/12","3/21","4/5","6/6","2/15","3/6","2/13","3/29","2/8","2/12","2/28","3/2","3/31","4/16","5/14","1/8","1/17","5/7","5/24","2/21","2/25","3/18","4/3","6/7","6/10","6/29","2/16","3/7","6/14","6/16","6/24","1/2","3/20","4/12","5/31","2/5","4/1","5/22","6/9","6/15","6/19","6/25","6/30","1/16","3/24","3/30","6/28","1/15","1/23","1/31","2/17","3/25","4/7","1/7","1/14","5/30","6/12","3/11","3/28","5/10","1/1","1/3","2/27","3/19","4/14","4/30","5/28","6/18","1/12","1/18","6/20","4/9","5/4","5/11","5/13","5/16","1/29","2/1","2/9","2/20","5/18","5/19","1/5","1/24","5/26","6/21","3/15","5/15","4/8","4/15","5/27","1/11","4/4","2/19","3/4","3/26","4/27","4/29","6/13","1/19","2/6","3/10","4/17","5/2","2/14","2/22","6/5","2/3","5/1"]), dimension=128, combiner="sum"),
tf.feature_column.numeric_column("m1", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m2", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m3", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m4", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m5", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m6", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m7", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m8", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m9", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m10", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m11", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m12", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m13", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m14", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m15", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m16", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m17", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m18", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m19", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m20", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m21", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m22", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m23", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m24", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m25", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m26", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m27", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m28", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m29", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m30", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m31", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m32", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m33", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m34", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m35", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m36", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m37", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m38", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m39", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m40", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m41", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m42", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m43", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m44", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m45", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m46", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m47", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("m48", shape=[1], dtype=tf.dtypes.float32),
tf.feature_column.numeric_column("class", shape=[1], dtype=tf.dtypes.int64)]}"""
feature_columns = eval(feature_columns_code)
train_max_steps = 0
train_max_steps = None if train_max_steps == 0 else train_max_steps
train(datasource="mysql://yuepf:yuepf123456@tcp(192.195.253.130:3306)/?maxAllowedPacket=0",
      estimator_string="""sqlflow_models.DeepEmbeddingClusterModel""",
      select="""
SELECT * FROM sql_flow_test.activepower_train
""",
      validation_select="""""",
      feature_columns=feature_columns,
      feature_column_names=feature_column_names,
      feature_metas=feature_metas,
      label_meta=label_meta,
      model_params=model_params_constructed,
      validation_metrics="Accuracy".split(","),
      save="model_save",
      batch_size=256,
      epoch=1,
      validation_steps=1,
      verbose=0,
      max_steps=train_max_steps,
      validation_start_delay_secs=0,
      validation_throttle_secs=0,
      save_checkpoints_steps=100,
      log_every_n_iter=10,
      load_pretrained_model="false" == "true",
      is_pai="false" == "true",
      pai_table="",
      pai_val_table="",
      feature_columns_code=feature_columns_code,
      model_params_code_map=model_params,
      model_repo_image="",
      original_sql='''
SELECT * FROM sql_flow_test.activepower_train
TO TRAIN sqlflow_models.DeepEmbeddingClusterModel
WITH
  model.n_clusters=3,
  model.pretrain_epochs=10,
  model.train_max_iters=800,
  model.train_lr=0.01,
  model.pretrain_lr=1,
  train.batch_size=256
COLUMN m1,m2,m3,m4,m5,m6,m7,m8,m9,m10,m11,m12,m13,m14,m15,m16,m17,m18,m19,m20,m21,m22,m23,m24,m25,m26,m27,m28,m29,m30,m31,m32,m33,m34,m35,m36,m37,m38,m39,m40,m41,m42,m43,m44,m45,m46,m47,m48
INTO sqlflow_models.my_activepower_train_model;
''',
      feature_column_names_map=feature_column_names_map)
==========Output==========
Traceback (most recent call last):
  File "", line 823, in
  File "/opt/sqlflow/python/runtime/tensorflow/train.py", line 116, in train
    load_pretrained_model, model_meta, is_pai)
  File "/opt/sqlflow/python/runtime/tensorflow/train_keras.py", line 144, in keras_train_and_save_legacy
    validation_steps, has_none_optimizer)
  File "/opt/sqlflow/python/runtime/tensorflow/train_keras.py", line 155, in keras_train_compiled
    classifier.sqlflow_train_loop(train_dataset)
  File "/usr/lib/python3.6/site-packages/sqlflow_models/deep_embedding_cluster.py", line 246, in sqlflow_train_loop
    self.pre_train(x)
  File "/usr/lib/python3.6/site-packages/sqlflow_models/deep_embedding_cluster.py", line 153, in pre_train
    y = x.cache().map(map_func=_concate_generate)
  File "/usr/lib/python3.6/site-packages/tensorflow_core/python/data/ops/dataset_ops.py", line 1900, in map
    MapDataset(self, map_func, preserve_cardinality=False))
  File "/usr/lib/python3.6/site-packages/tensorflow_core/python/data/ops/dataset_ops.py", line 3416, in __init__
    use_legacy_function=use_legacy_function)
  File "/usr/lib/python3.6/site-packages/tensorflow_core/python/data/ops/dataset_ops.py", line 2695, in __init__
    self._function = wrapper_fn._get_concrete_function_internal()
  File "/usr/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 1854, in _get_concrete_function_internal
    *args, **kwargs)
  File "/usr/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 1848, in _get_concrete_function_internal_garbage_collected
    graph_function, _, _ = self._maybe_define_function(args, kwargs)
  File "/usr/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2150, in _maybe_define_function
    graph_function = self._create_graph_function(args, kwargs)
  File "/usr/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2041, in _create_graph_function
    capture_by_value=self._capture_by_value),
  File "/usr/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 915, in func_graph_from_py_func
    func_outputs = python_func(*func_args, **func_kwargs)
  File "/usr/lib/python3.6/site-packages/tensorflow_core/python/data/ops/dataset_ops.py", line 2689, in wrapper_fn
    ret = _wrapper_helper(*args)
  File "/usr/lib/python3.6/site-packages/tensorflow_core/python/data/ops/dataset_ops.py", line 2634, in _wrapper_helper
    ret = autograph.tf_convert(func, ag_ctx)(*nested_args)
  File "/usr/lib/python3.6/site-packages/tensorflow_core/python/autograph/impl/api.py", line 237, in wrapper
    raise e.ag_error_metadata.to_exception(e)
AttributeError: in converted code:

    /usr/lib/python3.6/site-packages/sqlflow_models/deep_embedding_cluster.py:150 _concate_generate *
        concate_y = tf.stack([dataset_element[feature.key] for feature in self._feature_columns], axis=1)

    AttributeError: 'EmbeddingColumn' object has no attribute 'key'
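For context, the failure comes from `_concate_generate` assuming every feature column has a `.key` attribute. In the TensorFlow feature-column API, `numeric_column` does expose `.key`, but `embedding_column` does not; the key lives on the wrapped categorical column. A minimal standalone sketch (my own illustration, not from the tutorial):

```python
# Standalone illustration (TensorFlow 1.15/2.0 feature_column API):
# numeric_column exposes `.key`; embedding_column does not.
import tensorflow as tf

num = tf.feature_column.numeric_column("m1")
emb = tf.feature_column.embedding_column(
    tf.feature_column.categorical_column_with_vocabulary_list(
        "dates", vocabulary_list=["1/1", "1/2"]),
    dimension=128)

print(num.key)                     # "m1"
print(emb.name)                    # "dates_embedding"
print(emb.categorical_column.key)  # "dates" -- the key lives on the wrapped column
# emb.key                          # AttributeError: 'EmbeddingColumn' object has no attribute 'key'
```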
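Two things that might help, both untested assumptions on my part rather than confirmed fixes. First, since `SELECT *` also pulls in the string `dates` column, for which SQLFlow derives the embedding column, selecting only the numeric m1 through m48 columns should avoid creating an `EmbeddingColumn` at all. Second, `_concate_generate` in deep_embedding_cluster.py could resolve the dataset key defensively:

```python
# Hedged sketch of a defensive key lookup for _concate_generate in
# deep_embedding_cluster.py; `_column_key` is a hypothetical helper,
# not upstream code.
def _column_key(feature_column):
    # Dense columns such as NumericColumn carry `.key` directly.
    if hasattr(feature_column, "key"):
        return feature_column.key
    # EmbeddingColumn wraps a categorical column; the key lives one level down.
    return feature_column.categorical_column.key

concate_y = tf.stack(
    [dataset_element[_column_key(fc)] for fc in self._feature_columns], axis=1)
```

Note this only removes the AttributeError: stacking the raw string `dates` values together with float features would still fail on the dtype mismatch, so excluding the non-numeric column from the SELECT is probably the safer route.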