Я использую TensorFlow 2.1.0
Я пытаюсь запустить следующий фрагмент:
model = BetaVAE((64, 64, 1), latent_dim=FLAGS.latent_dim, loss_type="bce")
model.vae.fit_generator(train_loader, steps_per_epoch=len(train_loader), epochs=FLAGS.epochs, verbose=1)
Однако я сразу получаю следующую ошибку:
Exception has occurred: _SymbolicException
Inputs to eager execution function cannot be Keras symbolic tensors, but found [<tf.Tensor 'dense_3/Identity:0' shape=(None, 32) dtype=float32>, <tf.Tensor 'dense_2/Identity:0' shape=(None, 32) dtype=float32>]
Я подозреваю, что проблема заключается в пользовательской функции потери. У меня есть следующая модель:
class BetaVAE:
def __init__(self, input_shape, latent_dim=32, loss_type="mse", learning_rate=0.0005):
self.latent_dim = latent_dim
self.C = 0
self.gamma = 100
channels = input_shape[2]
# create encoder
encoder_input = Input(shape=input_shape)
X = Conv(32, 4)(encoder_input)
X = Conv(32, 4)(X)
X = Conv(32, 4)(X)
X = Conv(32, 4)(X)
X = Flatten()(X)
X = Dense(256, activation="relu")(X)
X = Dense(256, activation="relu")(X)
Z_mu = Dense(self.latent_dim)(X)
Z_logvar = Dense(self.latent_dim, activation="relu")(X)
Z = Reparameterize()([Z_mu, Z_logvar])
# create decoder
output_activation = "sigmoid" if channels == 1 else None
decoder_input = Input(shape=(self.latent_dim,))
X = Dense(256, activation="relu")(decoder_input)
X = Dense(256, activation="relu")(X)
X = Dense(512, activation="relu")(X)
X = Reshape((4, 4, 32))(X)
X = Deconv(32, 4)(X)
X = Deconv(32, 4)(X)
X = Deconv(32, 4)(X)
decoder_output = Deconv(channels, 4, activation=output_activation)(X)
# define vae losses
def reconstruction_loss(X, X_pred):
if loss_type == "bce":
bce = tf.keras.losses.BinaryCrossentropy()
return bce(X, X_pred) * np.prod(input_shape)
elif loss_type == "mse":
mse = tf.losses.MeanSquaredError()
return mse(X, X_pred) * np.prod(input_shape)
else:
raise ValueError("Unknown reconstruction loss type. Try 'bce' or 'mse'")
def kl_divergence(X, X_pred):
self.C += (1/1440) # TODO use correct scalar
self.C = min(self.C, 35) # TODO make variable
kl = -0.5 * tf.reduce_mean(1 + Z_logvar - Z_mu**2 - tf.math.exp(Z_logvar))
return self.gamma * tf.math.abs(kl - self.C)
def loss(X, X_pred):
return reconstruction_loss(X, X_pred) + kl_divergence(X, X_pred)
# create models
self.encoder = Model(encoder_input, [Z_mu, Z_logvar, Z])
self.decoder = Model(decoder_input, decoder_output)
self.vae = Model(encoder_input, self.decoder(Z))
self.vae.compile(optimizer='adam', loss=loss, metrics=[reconstruction_loss, kl_divergence])
Что я делаю не так? Откуда взялись тензоры Symboli c? Я уже указал input_shape или я что-то упустил?