import numpy as np
import tensorflow as tf
from keras import backend as K
from keras.layers import Conv2D, BatchNormalization, Activation, Add
from keras.initializers import glorot_uniform

def convolutional_block(X, f, filters, stage, block, s=2):
    # Layer naming scheme for this stage/block
    conv_name_base = 'res' + str(stage) + block + '_branch'
    bn_name_base = 'bn' + str(stage) + block + '_branch'
    F1, F2, F3 = filters

    # Save the input for the shortcut path
    X_shortcut = X

    # Main path: 1x1 conv (stride s) -> f x f conv -> 1x1 conv
    X = Conv2D(F1, (1, 1), strides=(s, s), name=conv_name_base + '2a', padding='valid', kernel_initializer=glorot_uniform(seed=0))(X)
    X = BatchNormalization(axis=3, name=bn_name_base + '2a')(X)
    X = Activation('relu')(X)

    X = Conv2D(F2, (f, f), strides=(1, 1), name=conv_name_base + '2b', padding='same', kernel_initializer=glorot_uniform(seed=0))(X)
    X = BatchNormalization(axis=3, name=bn_name_base + '2b')(X)
    X = Activation('relu')(X)

    X = Conv2D(F3, (1, 1), strides=(1, 1), name=conv_name_base + '2c', padding='valid', kernel_initializer=glorot_uniform(seed=0))(X)
    X = BatchNormalization(axis=3, name=bn_name_base + '2c')(X)  # <<< X = BatchNormalization(axis=3, name=bn_name_base + '2b')(X) also works!

    # Shortcut path: 1x1 conv (stride s) so the shapes match for the addition
    X_shortcut = Conv2D(F3, (1, 1), strides=(s, s), name=conv_name_base + '1', padding='valid', kernel_initializer=glorot_uniform(seed=0))(X_shortcut)
    X_shortcut = BatchNormalization(axis=3, name=bn_name_base + '1')(X_shortcut)

    # Add the shortcut to the main path and apply the final ReLU
    X = Add()([X, X_shortcut])
    X = Activation('relu')(X)
    return X
tf.reset_default_graph()

with tf.Session() as test:
    np.random.seed(1)
    A_prev = tf.placeholder("float", [3, 4, 4, 6])
    X = np.random.randn(3, 4, 4, 6)
    A = convolutional_block(A_prev, f=2, filters=[2, 4, 6], stage=1, block='a')
    test.run(tf.global_variables_initializer())
    out = test.run([A], feed_dict={A_prev: X, K.learning_phase(): 0})
    print("out = " + str(out[0][1][1][0]))
    print(tf.global_variables())
I am amazed by the fact that the line X = BatchNormalization(axis = 3, name = bn_name_base + '2c')(X) can be replaced with X = BatchNormalization(axis = 3, name = bn_name_base + '2b')(X) and the code does not throw an error. I expected this to raise a duplicate-name error. Can anyone explain why this works? Thanks.
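To illustrate what surprised me, here is a minimal sketch of the same duplicate-name situation in isolation (assuming Keras 2.x on the TF1 backend, as in the code above; the layer name 'bn_dup' is made up for this demo). In my testing, merely calling two layers that share a name raises nothing; the clash only seems to surface once a Model is built over them:

import tensorflow as tf
from keras.layers import Input, BatchNormalization
from keras.models import Model

tf.reset_default_graph()

inp = Input(shape=(4, 4, 6))
# Two BatchNormalization layers deliberately given the same name.
a = BatchNormalization(axis=3, name='bn_dup')(inp)
b = BatchNormalization(axis=3, name='bn_dup')(a)  # no error at call time either

# Name uniqueness only appears to be checked when a Model is constructed:
try:
    m = Model(inputs=inp, outputs=b)
except Exception as e:
    print("Model construction failed:", e)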