In my project I need a custom pooling layer, namely a statistics pooling layer that takes the matrix produced by the previous layer and returns a vector of mean and standard deviation values. I add this layer to the model with a Lambda layer. When I run a forward pass, the computation stops inside my custom layer and I get the following error:
Traceback (most recent call last):
File "C:\Users\OMEN\Anaconda3\lib\runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "C:\Users\OMEN\Anaconda3\lib\runpy.py", line 85, in _run_code
exec(code, run_globals)
File "c:\Users\OMEN\.vscode\extensions\ms-python.python-2020.3.71659\pythonFiles\lib\python\debugpy\wheels\debugpy\__main__.py", line 45, in <module>
cli.main()
File "c:\Users\OMEN\.vscode\extensions\ms-python.python-2020.3.71659\pythonFiles\lib\python\debugpy\wheels\debugpy/..\debugpy\server\cli.py", line 429, in main
run()
File "c:\Users\OMEN\.vscode\extensions\ms-python.python-2020.3.71659\pythonFiles\lib\python\debugpy\wheels\debugpy/..\debugpy\server\cli.py", line 266, in run_file
runpy.run_path(options.target, run_name=compat.force_str("__main__"))
File "C:\Users\OMEN\Anaconda3\lib\runpy.py", line 263, in run_path
pkg_name=pkg_name, script_name=fname)
File "C:\Users\OMEN\Anaconda3\lib\runpy.py", line 96, in _run_module_code
mod_name, mod_spec, pkg_name, script_name)
File "C:\Users\OMEN\Anaconda3\lib\runpy.py", line 85, in _run_code
exec(code, run_globals)
File "e:\Praca Inzynierska\praca-inzynierska\src\TrainModel.py", line 47, in <module>
logits = encoder(X1, training=True)
File "C:\Users\OMEN\Anaconda3\lib\site-packages\tensorflow_core\python\keras\engine\base_layer.py", line 891, in __call__
outputs = self.call(cast_inputs, *args, **kwargs)
File "C:\Users\OMEN\Anaconda3\lib\site-packages\tensorflow_core\python\keras\engine\network.py", line 708, in call
convert_kwargs_to_constants=base_layer_utils.call_context().saving)
File "C:\Users\OMEN\Anaconda3\lib\site-packages\tensorflow_core\python\keras\engine\network.py", line 860, in _run_internal_graph
output_tensors = layer(computed_tensors, **kwargs)
File "C:\Users\OMEN\Anaconda3\lib\site-packages\tensorflow_core\python\keras\engine\base_layer.py", line 891, in __call__
outputs = self.call(cast_inputs, *args, **kwargs)
File "C:\Users\OMEN\Anaconda3\lib\site-packages\tensorflow_core\python\keras\layers\core.py", line 795, in call
return self.function(inputs, **arguments)
File "e:\Praca Inzynierska\praca-inzynierska\src\ResNet.py", line 19, in StatsPooling
stat_pooling = K.reshape(std_mean_mat, shape=(1, new_shape))
File "C:\Users\OMEN\Anaconda3\lib\site-packages\tensorflow_core\python\keras\backend.py", line 2737, in reshape
return array_ops.reshape(x, shape)
File "C:\Users\OMEN\Anaconda3\lib\site-packages\tensorflow_core\python\ops\array_ops.py", line 131, in reshape
result = gen_array_ops.reshape(tensor, shape, name)
File "C:\Users\OMEN\Anaconda3\lib\site-packages\tensorflow_core\python\ops\gen_array_ops.py", line 8106, in reshape
tensor, shape, name=name, ctx=_ctx)
File "C:\Users\OMEN\Anaconda3\lib\site-packages\tensorflow_core\python\ops\gen_array_ops.py", line 8139, in reshape_eager_fallback
_attr_T, (tensor,) = _execute.args_to_matching_eager([tensor], _ctx)
File "C:\Users\OMEN\Anaconda3\lib\site-packages\tensorflow_core\python\eager\execute.py", line 257, in args_to_matching_eager
t, dtype, preferred_dtype=default_dtype, ctx=ctx))
File "C:\Users\OMEN\Anaconda3\lib\site-packages\tensorflow_core\python\framework\ops.py", line 1296, in internal_convert_to_tensor
ret = conversion_func(value, dtype=dtype, name=name, as_ref=as_ref)
File "C:\Users\OMEN\Anaconda3\lib\site-packages\tensorflow_core\python\ops\resource_variable_ops.py", line 1789, in _dense_var_to_tensor
return var._dense_var_to_tensor(dtype=dtype, name=name, as_ref=as_ref) # pylint: disable=protected-access
File "C:\Users\OMEN\Anaconda3\lib\site-packages\tensorflow_core\python\ops\resource_variable_ops.py", line 1214, in _dense_var_to_tensor
return self.value()
File "C:\Users\OMEN\Anaconda3\lib\site-packages\tensorflow_core\python\ops\resource_variable_ops.py", line 524, in value
return self._read_variable_op()
File "C:\Users\OMEN\Anaconda3\lib\site-packages\tensorflow_core\python\ops\resource_variable_ops.py", line 608, in _read_variable_op
self._dtype)
File "C:\Users\OMEN\Anaconda3\lib\site-packages\tensorflow_core\python\ops\gen_resource_variable_ops.py", line 583, in read_variable_op
_six.raise_from(_core._status_to_exception(e.code, message), None)
File "<string>", line 3, in raise_from
tensorflow.python.framework.errors_impl.FailedPreconditionError: Error while reading resource variable _AnonymousVar258 from Container: localhost. This could mean that the variable was uninitialized. Not found: Resource localhost/_AnonymousVar258/class tensorflow::Var does not exist. [Op:ReadVariableOp]
The statistics pooling layer:
def StatsPooling(x):
    mean = K.mean(x, axis=0)
    std = K.std(x, axis=0)
    std_mean_mat = K.variable([mean, std])
    new_shape = 2 * std.shape[0]
    stat_pooling = K.reshape(std_mean_mat, shape=(1, new_shape))
    return stat_pooling
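To make the intent clearer, here is a rough equivalent of what the layer is supposed to produce, written with plain backend ops only. This is just an illustration of the target output shape, assuming the input has shape (frames, features); the name StatsPoolingSketch is a placeholder and this is not the code that raises the error:

from tensorflow.keras import backend as K

def StatsPoolingSketch(x):
    # x is assumed to have shape (frames, features)
    mean = K.mean(x, axis=0)                 # shape (features,)
    std = K.std(x, axis=0)                   # shape (features,)
    pooled = K.concatenate([mean, std])      # shape (2 * features,)
    return K.expand_dims(pooled, axis=0)     # shape (1, 2 * features)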
The error occurs at this line:
stat_pooling = K.reshape(std_mean_mat, shape=(1, new_shape))
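For context, the pooling layer sits on top of the encoder and is attached with a Lambda wrapper, roughly like this (heavily simplified: the convolutional blocks of ResNet() are omitted, and the input shape is assumed from the SiameseNetwork call below):

from tensorflow.keras.layers import Input, Lambda
from tensorflow.keras.models import Model

def ResNet():
    inp = Input(shape=(120, 24))
    # ... convolutional blocks omitted ...
    out = Lambda(StatsPooling)(inp)   # the custom statistics pooling on top
    return Model(inputs=inp, outputs=out)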
Running the model:
if usedModel == "ResNet":
    with tf.device("/GPU:0"):
        encoder = ResNet()
        siamsNet = SiameseNetwork(encoder, (120, 24))
        opt = Adam(clipnorm=1.)
        # siamsNet.compile(loss='binary_crossentropy', optimizer=opt, metrics=['accuracy'])
        loss_fn = BinaryCrossentropy(from_logits=True)
        with open(TEST_SET_PATH, 'r') as tests:
            test_set = tests.readlines()
        for idx, val in enumerate(test_set):
            test_set[idx] = val.replace(".wav", ".npy", 2)
        for i in range(0, len(test_set)):
            (y, X1, X2) = getBatchOfCoefficients(test_set, start=i, n_batches=1)
            y = y[0]
            X1 = X1[0]
            X2 = X2[0]
            # siamsNet.fit(x=[X1_t, X2_t], y=y_t, batch_size=1, epochs=1)
            with tf.GradientTape() as tape:
                logits = encoder(X1, training=True)
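For completeness, this is roughly how the training step is meant to continue (a simplified sketch of the intent, not the exact project code; because of the error above, execution never gets past the encoder call):

with tf.GradientTape() as tape:
    logits = encoder(X1, training=True)   # this is where the error is raised
    loss_value = loss_fn(y, logits)
grads = tape.gradient(loss_value, encoder.trainable_weights)
opt.apply_gradients(zip(grads, encoder.trainable_weights))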
I am using tensorflow-gpu, version 2.0.
I would be grateful for any help.