Я пытаюсь создать условный вариационный автоэнкодер с Keras. Я могу скомпилировать вариационный автоэнкодер без ошибок, однако я получаю следующую ошибку, когда пытаюсь обучить модель:
/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py:571 train_function *
outputs = self.distribute_strategy.run(
/opt/conda/lib/python3.7/site-packages/tensorflow/python/distribute/distribute_lib.py:951 run **
return self._extended.call_for_each_replica(fn, args=args, kwargs=kwargs)
/opt/conda/lib/python3.7/site-packages/tensorflow/python/distribute/distribute_lib.py:2290 call_for_each_replica
return self._call_for_each_replica(fn, args, kwargs)
/opt/conda/lib/python3.7/site-packages/tensorflow/python/distribute/distribute_lib.py:2649 _call_for_each_replica
return fn(*args, **kwargs)
/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py:531 train_step **
y_pred = self(x, training=True)
/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/base_layer.py:927 __call__
outputs = call_fn(cast_inputs, *args, **kwargs)
/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/network.py:719 call
convert_kwargs_to_constants=base_layer_utils.call_context().saving)
/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/network.py:888 _run_internal_graph
output_tensors = layer(computed_tensors, **kwargs)
/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/base_layer.py:927 __call__
outputs = call_fn(cast_inputs, *args, **kwargs)
/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/network.py:719 call
convert_kwargs_to_constants=base_layer_utils.call_context().saving)
/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/network.py:888 _run_internal_graph
output_tensors = layer(computed_tensors, **kwargs)
/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/base_layer.py:927 __call__
outputs = call_fn(cast_inputs, *args, **kwargs)
/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/layers/core.py:888 call
result = self.function(inputs, **kwargs)
<ipython-input-19-c0d327a2f369>:5 sampling
eps = K.random_normal(shape=(batch, dim))
/opt/conda/lib/python3.7/site-packages/keras/backend/tensorflow_backend.py:4329 random_normal
shape, mean=mean, stddev=stddev, dtype=dtype, seed=seed)
/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/backend.py:5662 random_normal
shape, mean=mean, stddev=stddev, dtype=dtype, seed=seed)
/opt/conda/lib/python3.7/site-packages/tensorflow/python/ops/random_ops.py:87 random_normal
shape_tensor = tensor_util.shape_tensor(shape)
/opt/conda/lib/python3.7/site-packages/tensorflow/python/framework/tensor_util.py:1015 shape_tensor
return ops.convert_to_tensor(shape, dtype=dtype, name="shape")
/opt/conda/lib/python3.7/site-packages/tensorflow/python/framework/ops.py:1341 convert_to_tensor
ret = conversion_func(value, dtype=dtype, name=name, as_ref=as_ref)
/opt/conda/lib/python3.7/site-packages/tensorflow/python/ops/array_ops.py:1455 _autopacking_conversion_function
return _autopacking_helper(v, dtype, name or "packed")
/opt/conda/lib/python3.7/site-packages/tensorflow/python/ops/array_ops.py:1391 _autopacking_helper
return gen_array_ops.pack(elems_as_tensors, name=scope)
/opt/conda/lib/python3.7/site-packages/tensorflow/python/ops/gen_array_ops.py:6333 pack
values, axis=axis, name=name, ctx=_ctx)
/opt/conda/lib/python3.7/site-packages/tensorflow/python/ops/gen_array_ops.py:6375 pack_eager_fallback
ctx=ctx, name=name)
/opt/conda/lib/python3.7/site-packages/tensorflow/python/eager/execute.py:75 quick_execute
raise e
/opt/conda/lib/python3.7/site-packages/tensorflow/python/eager/execute.py:60 quick_execute
inputs, attrs, num_outputs)
TypeError: An op outside of the function building code is being passed
a "Graph" tensor. It is possible to have Graph tensors
leak out of the function building context by including a
tf.init_scope in your function building code.
For example, the following function will fail:
@tf.function
def has_init_scope():
my_constant = tf.constant(1.)
with tf.init_scope():
added = my_constant * 2
The graph tensor has name: CVAE/encoder/latent_space/strided_slice:0
Вся модель кодировщика показана ниже
def build_encoder(image, label):
    """Build the conditional encoder: (image, label) -> (mu, sigma, z).

    Returns the encoder ``Model`` together with the ``mu`` and ``sigma``
    tensors so the caller can construct the KL-divergence loss term.
    ``latent_size`` is read from the enclosing scope.
    """
    # Condition the encoder by concatenating the label onto the image input.
    merged = Concatenate()([image, label])
    features = Dense(625, activation='relu')(merged)
    features = Reshape((25, 25, 1))(features)

    # Stack of locally-connected feature extractors, each followed by LeakyReLU.
    for kernel in ((5, 5), (5, 5), (3, 3), (3, 3)):
        features = LocallyConnected2D(8, kernel, padding='valid')(features)
        features = LeakyReLU()(features)

    features = AveragePooling2D((2, 2))(features)
    features = Flatten()(features)

    # Variational parameters of the approximate posterior q(z | image, label).
    mu = Dense(latent_size, name='mu')(features)
    sigma = Dense(latent_size, name='sigma')(features)

    # Reparameterized sample from the latent distribution.
    latent_space = Lambda(sampling, output_shape=(latent_size,),
                          name='latent_space')([mu, sigma])

    encoder = Model(inputs=[image, label],
                    outputs=[mu, sigma, latent_space],
                    name='encoder')
    return encoder, mu, sigma
Я предполагаю, что это связано с моим слоем сэмплирования, но я не совсем уверен.
def sampling(args):
    """Reparameterization trick: draw z = mu + exp(sigma / 2) * eps.

    Args:
        args: pair ``(mu, sigma)`` of tensors of shape ``(batch, latent_dim)``,
            where ``sigma`` is interpreted as a log-variance.

    Returns:
        A tensor of shape ``(batch, latent_dim)`` sampled from
        ``N(mu, exp(sigma))``.
    """
    # FIX: the traceback shows the standalone-keras backend
    # (keras/backend/tensorflow_backend.py) being invoked from inside a
    # tf.keras model. Mixing the two Keras implementations is what leaks the
    # Graph tensor ("CVAE/encoder/latent_space/strided_slice:0") out of the
    # function-building context. Bind K to tf.keras' backend locally so the
    # sampling op is built in the same graph as the rest of the model.
    from tensorflow.keras import backend as K

    mu, sigma = args
    batch = K.shape(mu)[0]        # dynamic batch size (a tensor)
    dim = K.int_shape(mu)[1]      # static latent dimension (a Python int)
    eps = K.random_normal(shape=(batch, dim))
    return mu + K.exp(sigma / 2) * eps