Keras Seq2Seq NMT с вниманием выдает «AttributeError: объект NoneType» не имеет атрибута «_inbound_nodes» »для сети Seq2Seq NMT с вниманием - PullRequest
0 голосов
/ 07 ноября 2018

Я строю модель Seq2Seq NMT в Керасе и вижу следующую ошибку при попытке построить модель:

AttributeError: у объекта 'NoneType' нет атрибута '_inbound_nodes'

Я не могу понять, на какой объект NoneType он ссылается. Вот фрагмент кода. Что я делаю не так, и как я могу это исправить?

#Encoder Specification
encoder_inputs = Input(shape=(None,))
encoder_embedding = Embedding(max_words, latent_dim, weights=[embedding_matrix], trainable=False, mask_zero=True)

embedding_output = encoder_embedding(encoder_inputs)

encoder1 =LSTM(latent_dim, return_state=True,return_sequences=True) #orig
encoder2 =LSTM(latent_dim, return_state=True,return_sequences=True) 
encoder3 =LSTM(latent_dim, return_state=True,return_sequences=True)
encoder_outputs1, state_h1, state_c1 = encoder1(embedding_output)#orig
encoder_outputs2, state_h2, state_c2 = encoder2(encoder_outputs1)
encoder_outputs3, state_h3, state_c3 = encoder3(encoder_outputs2)

encoder_states1 = [state_h1, state_c1]
encoder_states2 = [state_h2, state_c2]
encoder_states3 = [state_h3, state_c3]
encoder_states = [state_h1, state_c1,state_h2, state_c2,state_h3, state_c3]

#Attention layers specification

from keras.activations import tanh, softmax
dec_units = 1024
W1 = Dense(dec_units)
W2 = Dense(dec_units)
V =  Dense(1)
from keras.backend import sum, expand_dims
from keras.layers import Activation, Permute, Multiply

hidden_with_time_axis = expand_dims(state_h3, 1)
print("hidden_with_time_axis:", hidden_with_time_axis)

w1_encoder_outputs = W1(encoder_outputs3)
w2_state_h_outputs = W2(hidden_with_time_axis)
score = Activation('tanh')(w1_encoder_outputs + w2_state_h_outputs )
print("score:", score.shape)

v_score = V(score)
print("v_score", v_score.shape)

attention_weights = Activation('softmax')(v_score)

print("attention_weights:", attention_weights)

context_vector = Multiply()([attention_weights, encoder_outputs3])
print("context_vector:", context_vector.shape)

#DECOCER specification
from keras.layers import Concatenate, RepeatVector, Add

decoder_inputs = Input(shape=(None,))

decoder_embedding = Embedding(max_words, latent_dim, weights=[embedding_matrix], trainable=False, mask_zero=True)
dec_embed_out = decoder_embedding(decoder_inputs)

decoder_lstm1 = LSTM(latent_dim, return_sequences=True, return_state=True)
decoder_lstm2 = LSTM(latent_dim, return_sequences=True, return_state=True)
decoder_lstm3 = LSTM(latent_dim, return_sequences=True, return_state=True)


print("context_vector:", context_vector.shape)
print("dec_embed_out:", dec_embed_out.shape)
concat_layer = Concatenate(axis=-1)

context_embed = concat_layer([context_vector,dec_embed_out])


print("conetxt_embed:", context_embed.shape)

decoder_outputs1, _, _ = decoder_lstm1(context_embed, initial_state=encoder_states[0:2])
decoder_outputs2, _, _ = decoder_lstm2(decoder_outputs1, initial_state=encoder_states[2:4])
decoder_outputs3, _, _ = decoder_lstm3(decoder_outputs2, initial_state=encoder_states[4:6])

model = Model([encoder_inputs, decoder_inputs], context_embed)

Сообщение об ошибке:

/usr/local/lib/python3.6/dist-packages/keras/engine/network.py in build_map(tensor, finished_nodes, nodes_in_progress, layer, node_index, tensor_index)
   1323             ValueError: if a cycle is detected.
   1324         """
-> 1325         node = layer._inbound_nodes[node_index]
   1326 
   1327         # Prevent cycles.

AttributeError: 'NoneType' object has no attribute '_inbound_nodes'
...