model.fit() gives me KeyError: 'input_1' in Keras
0 votes
/ 07 August 2020

I am trying to train ResNet50 on the CIFAR-10 dataset using Keras. I replaced the final fully connected layers with a Dense layer of 10 neurons and softmax activation. Calling model.fit() gives me:

KeyError: 'input_1'

Below is the code to reproduce the problem:

import tensorflow as tf
import tensorflow_datasets as tfds
from tensorflow import keras

train_ds = tfds.load('cifar10', split='train')
train_ds = train_ds.shuffle(1000).batch(100)
model = tf.keras.applications.ResNet50(include_top=False, input_shape=(32, 32, 3), pooling="avg")
x = model.output
x = keras.layers.Dense(10, activation="softmax")(x)
model = keras.Model(model.input, x)
model.compile(loss=keras.losses.SparseCategoricalCrossentropy(), metrics=[keras.metrics.SparseCategoricalAccuracy()], optimizer='Adam')
model.fit(x=train_ds, epochs=10)

And the stack trace:

---------------------------------------------------------------------------
KeyError                                  Traceback (most recent call last)
<ipython-input-1-71ed1fc54857> in <module>
     10 model = keras.Model(model.input, x)
     11 model.compile(loss=keras.losses.SparseCategoricalCrossentropy(), metrics=[keras.metrics.SparseCategoricalAccuracy()], optimizer='Adam')
---> 12 model.fit(x=train_ds, epochs=10)

/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py in _method_wrapper(self, *args, **kwargs)
     64   def _method_wrapper(self, *args, **kwargs):
     65     if not self._in_multi_worker_mode():  # pylint: disable=protected-access
---> 66       return method(self, *args, **kwargs)
     67 
     68     # Running inside `run_distribute_coordinator` already.

/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)
    846                 batch_size=batch_size):
    847               callbacks.on_train_batch_begin(step)
--> 848               tmp_logs = train_function(iterator)
    849               # Catch OutOfRangeError for Datasets of unknown size.
    850               # This blocks until the batch has finished executing.

/opt/conda/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
    578         xla_context.Exit()
    579     else:
--> 580       result = self._call(*args, **kwds)
    581 
    582     if tracing_count == self._get_tracing_count():

/opt/conda/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
    625       # This is the first call of __call__, so we have to initialize.
    626       initializers = []
--> 627       self._initialize(args, kwds, add_initializers_to=initializers)
    628     finally:
    629       # At this point we know that the initialization is complete (or less

/opt/conda/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)
    504     self._concrete_stateful_fn = (
    505         self._stateful_fn._get_concrete_function_internal_garbage_collected(  # pylint: disable=protected-access
--> 506             *args, **kwds))
    507 
    508     def invalid_creator_scope(*unused_args, **unused_kwds):

/opt/conda/lib/python3.7/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
   2444       args, kwargs = None, None
   2445     with self._lock:
-> 2446       graph_function, _, _ = self._maybe_define_function(args, kwargs)
   2447     return graph_function
   2448 

/opt/conda/lib/python3.7/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
   2775 
   2776       self._function_cache.missed.add(call_context_key)
-> 2777       graph_function = self._create_graph_function(args, kwargs)
   2778       self._function_cache.primary[cache_key] = graph_function
   2779       return graph_function, args, kwargs

/opt/conda/lib/python3.7/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
   2665             arg_names=arg_names,
   2666             override_flat_arg_shapes=override_flat_arg_shapes,
-> 2667             capture_by_value=self._capture_by_value),
   2668         self._function_attributes,
   2669         # Tell the ConcreteFunction to clean up its graph once it goes out of

/opt/conda/lib/python3.7/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
    979         _, original_func = tf_decorator.unwrap(python_func)
    980 
--> 981       func_outputs = python_func(*func_args, **func_kwargs)
    982 
    983       # invariant: `func_outputs` contains only Tensors, CompositeTensors,

/opt/conda/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
    439         # __wrapped__ allows AutoGraph to swap in a converted function. We give
    440         # the function a weak reference to itself to avoid a reference cycle.
--> 441         return weak_wrapped_fn().__wrapped__(*args, **kwds)
    442     weak_wrapped_fn = weakref.ref(wrapped_fn)
    443 

/opt/conda/lib/python3.7/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
    966           except Exception as e:  # pylint:disable=broad-except
    967             if hasattr(e, "ag_error_metadata"):
--> 968               raise e.ag_error_metadata.to_exception(e)
    969             else:
    970               raise

KeyError: in user code:

    /opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py:571 train_function  *
        outputs = self.distribute_strategy.run(
    /opt/conda/lib/python3.7/site-packages/tensorflow/python/distribute/distribute_lib.py:951 run  **
        return self._extended.call_for_each_replica(fn, args=args, kwargs=kwargs)
    /opt/conda/lib/python3.7/site-packages/tensorflow/python/distribute/distribute_lib.py:2290 call_for_each_replica
        return self._call_for_each_replica(fn, args, kwargs)
    /opt/conda/lib/python3.7/site-packages/tensorflow/python/distribute/distribute_lib.py:2649 _call_for_each_replica
        return fn(*args, **kwargs)
    /opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py:531 train_step  **
        y_pred = self(x, training=True)
    /opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/base_layer.py:927 __call__
        outputs = call_fn(cast_inputs, *args, **kwargs)
    /opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/network.py:719 call
        convert_kwargs_to_constants=base_layer_utils.call_context().saving)
    /opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/network.py:826 _run_internal_graph
        inputs = self._flatten_to_reference_inputs(inputs)
    /opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/network.py:926 _flatten_to_reference_inputs
        return [tensors[inp._keras_history.layer.name] for inp in ref_inputs]
    /opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/network.py:926 <listcomp>
        return [tensors[inp._keras_history.layer.name] for inp in ref_inputs]

    KeyError: 'input_1'

1 Answer

1 vote
/ 08 August 2020

I am not entirely sure what is wrong with your code, but this should work for you:

import tensorflow as tf
import tensorflow_datasets as tfds
from tensorflow import keras

train_ds = tfds.load('cifar10', split='train')
train_ds = train_ds.shuffle(1000).batch(100)

inputs = tf.keras.layers.Input(shape=(32, 32, 3))
x = tf.keras.applications.ResNet50(include_top=False, pooling="avg")(inputs)
out = keras.layers.Dense(10, activation="softmax")(x)
model = keras.Model(inputs=inputs, outputs=out)
model.compile(
    loss=keras.losses.SparseCategoricalCrossentropy(), 
    metrics=[keras.metrics.SparseCategoricalAccuracy()], 
    optimizer='Adam')
model.fit(x=train_ds, epochs=10)

So, you define your own Input and use the ResNet50 model as if it were a layer.
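
One more note that goes beyond the original answer: as far as I know, tfds.load() by default yields dictionaries of the form {'image': ..., 'label': ...}, and Keras matches those dictionary keys against the model's input-layer names, which is exactly where a KeyError like 'input_1' can come from. Below is a minimal sketch, using the standard as_supervised option of tfds.load, that feeds plain (image, label) tuples to fit() instead:

import tensorflow as tf
import tensorflow_datasets as tfds

# as_supervised=True makes the dataset yield (image, label) tuples
# instead of {'image': ..., 'label': ...} dictionaries
train_ds = tfds.load('cifar10', split='train', as_supervised=True)
train_ds = (train_ds
            .map(lambda image, label: (tf.cast(image, tf.float32) / 255.0, label))  # scale pixels to [0, 1]
            .shuffle(1000)
            .batch(100))

# train_ds can now be passed to model.fit(x=train_ds, epochs=10) as before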
