As the title says, I get an error when compiling my model while trying to pick y_true and y_pred myself. The traceback complains about using a `tf.Tensor` as a Python `bool`, but I never did any boolean test; I only passed the model's own input and output tensors to the loss. What I want is for the loss function to be computed between 'main_input' and 'main_output', as shown in the uploaded picture.
Thanks for any help.
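To make the intent concrete, here is a stripped-down sketch of the loss wiring I am after, with toy layers standing in for my real graph; the `add_loss` call is only my guess at how a loss between the model's own input and output could be attached, and I have not verified it:
import tensorflow as tf
from tensorflow.keras.layers import Input, Dense
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam

# Toy stand-ins for main_input / main_output, just to show the coupling I want
x_in = Input(shape=(8,), name='main_input')
x_out = Dense(8, activation='linear', name='main_output')(x_in)
toy = Model(inputs=x_in, outputs=x_out)

# What I do in my real code, and what raises the error shown below:
# toy.compile(optimizer=Adam(learning_rate=0.01),
#             loss=tf.keras.losses.KLD(x_in, x_out))

# The alternative I am considering (not tested): attach the KL divergence
# between the model's own input and output as a model loss, so that
# compile() needs no loss= argument
toy.add_loss(tf.reduce_mean(tf.keras.losses.kld(x_in, x_out)))
toy.compile(optimizer=Adam(learning_rate=0.01))
My actual model definition and the full traceback follow.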
import tensorflow as tf
from tensorflow.keras.layers import Input, Dense, Lambda, BatchNormalization
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam
# N_c and the helper functions z_padding, ffting, complex_to_real, real_to_complex,
# iffting, conv_channel and noising are defined earlier in my notebook (omitted here).

#Data Input
main_input=Input(shape=(2*N_c),name='main_input')
encoding_x=Dense(2*N_c,activation='relu',name='input_layer')(main_input)
#Channel Input
channel_input=Input(shape=(4,),dtype='complex64',name='channel_input')
padded_channel = Lambda(z_padding,name='ppading_layerddddd')(channel_input)
ffted_channel = Lambda(ffting,name='ffting_channel')(padded_channel)
realed_ffted_channel = Lambda(complex_to_real,name='c_to_r')(ffted_channel)
realed_ffted_channel1 = Dense(2*N_c,activation='relu',name='channel_layer')(realed_ffted_channel)
#Precoding Encoder
precoded_data = Lambda(lambda x: tf.concat([x[0],x[1]],1),name='precoding_layer')([encoding_x,realed_ffted_channel1])
# encoder_data = Dense(2*N_c,activation='relu',name='prencoder_layer1')(precoded_dataasdasd)
# encoder_data_1 = Dense(4*N_c,activation='relu',name='prencoder_layer2')(encoder_data)
encoder_data1 = Dense(4*N_c,activation='relu',name='prencoder_layer3')(precoded_data)
encoder_data2= Dense(2*N_c,activation='linear',name='prencoder_layer4')(encoder_data1)
encoder_data3 = Lambda(real_to_complex,name='r_to_c')(encoder_data2)
encoder_data4 = BatchNormalization()(encoder_data3)  # all BatchNormalization arguments are left at their defaults
iffted_encoded_data = Lambda(iffting,name='iffting_layer')(encoder_data4)
#Channel
conved_data = Lambda(lambda x:conv_channel(x[0],x[1]),name='convolution_layer')([iffted_encoded_data,channel_input])
noised_data = Lambda(noising,name='adding_noise_layer')(conved_data)
#Decoder
ffted_data = Lambda(ffting, name='ffting')(noised_data)
ffted_data2 = Lambda(lambda x : tf.reshape(tf.reshape(x,(67,))[0:64:],(1,64)),name='removing_delay')(ffted_data)
#single_tap
single_tap_equalizer= Lambda(lambda x:tf.math.divide(x[0],x[1]),name='dividing')([ffted_data2,ffted_channel])
realed_received_data = Lambda(complex_to_real,name='c_to_r2')(ffted_data2)
decoder_y=Dense(2*N_c,activation='relu',name='decoder_y')(realed_received_data)
decoder_y1=Dense(4*N_c,activation='relu',name='decoder_y1')(decoder_y)
decoder_y2=Dense(4*N_c,activation='relu',name='decoder_y2')(decoder_y1)
main_output=Dense(2*N_c,activation='linear',name='main_output')(decoder_y1)
autoencoder = Model(inputs=[main_input,channel_input],outputs=[main_output])
autoencoder.compile(optimizer=Adam(lr=0.01),loss=tf.keras.losses.KLD(main_input,main_output))
The error I get:
---------------------------------------------------------------------------
OperatorNotAllowedInGraphError Traceback (most recent call last)
<ipython-input-24-e5499c17d360> in <module>()
41 main_output=Dense(2*N_c,activation='linear',name='main_output')(decoder_y1)
42 autoencoder = Model(inputs=[main_input,channel_input],outputs=[main_output])
---> 43 autoencoder.compile(optimizer=Adam(lr=0.01),loss=tf.keras.losses.KLD(main_input,main_output))
4 frames
/usr/local/lib/python3.6/dist-packages/tensorflow_core/python/framework/ops.py in _disallow_in_graph_mode(self, task)
515 raise errors.OperatorNotAllowedInGraphError(
516 "{} is not allowed in Graph execution. Use Eager execution or decorate"
--> 517 " this function with @tf.function.".format(task))
518
519 def _disallow_bool_casting(self):
OperatorNotAllowedInGraphError: using a `tf.Tensor` as a Python `bool` is not allowed in Graph execution. Use Eager execution or decorate this function with @tf.function.
functional API model