Error "Shape must be rank 1 but is rank 0" when exporting a custom trained model with python export_inference_graph.py
0 votes
06 March 2019

I am trying to deploy a custom trained object detection model using the TensorFlow Object Detection API.

To export the inference graph I used this:

python export_inference_graph.py \
    --input_type image_tensor \
    --pipeline_config_path training/ssd_mobilenet_v1_pets.config \
    --trained_checkpoint_prefix training/model.ckpt-3458 \
    --output_directory cylinder_graph

Here is the main error message:

ValueError: Shape must be rank 1 but is rank 0 for 'Postprocessor/BatchMultiClassNonMaxSuppression/map/while/MultiClassNonMaxSuppression/zeros' (op: 'Fill') with input shapes: [], [].
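
For context, here is a minimal sketch of what this message complains about; the values are made up and only the shape handling mirrors the failing call:

    import tensorflow as tf

    # A scalar (rank-0) tensor used as the shape argument of tf.zeros.
    n = tf.constant(5)

    # In the TF build that produced this error, tf.zeros falls back to the Fill
    # op, whose dims input must be rank 1, so a scalar shape fails:
    # bad = tf.zeros(n, tf.int32)    # ValueError: Shape must be rank 1 but is rank 0

    # Wrapping the scalar in a list gives Fill the 1-D dims it expects:
    ok = tf.zeros([n], tf.int32)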

Here is the full error message:

    home/akash/venv/lib/python3.6/site-packages/absl/flags/_validators.py:358: UserWarning: Flag --pipeline_config_path has a non-None default value; therefore, mark_flag_as_required will pass even if flag is not specified in the command line!
'command line!' % flag_name)
/home/akash/venv/lib/python3.6/site-packages/absl/flags/_validators.py:358: UserWarning: Flag --trained_checkpoint_prefix has a non-None default value; therefore, mark_flag_as_required will pass even if flag is not specified in the command line!
'command line!' % flag_name)
/home/akash/venv/lib/python3.6/site-packages/absl/flags/_validators.py:358: UserWarning: Flag --output_directory has a non-None default value; therefore, mark_flag_as_required will pass even if flag is not specified in the command line!
'command line!' % flag_name)
Traceback (most recent call last):
File "/home/akash/venv/lib/python3.6/site-packages/tensorflow/python/ops/array_ops.py", line 1505, in zeros
    raise TypeError
TypeError

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
File "/home/akash/venv/lib/python3.6/site-packages/tensorflow/python/framework/common_shapes.py", line 686, in _call_cpp_shape_fn_impl
    input_tensors_as_shapes, status)
File "/home/akash/venv/lib/python3.6/site-packages/tensorflow/python/framework/errors_impl.py", line 473, in __exit__
    c_api.TF_GetCode(self.status.status))
tensorflow.python.framework.errors_impl.InvalidArgumentError: Shape must be rank 1 but is rank 0 for 'Postprocessor/BatchMultiClassNonMaxSuppression/map/while/MultiClassNonMaxSuppression/zeros' (op: 'Fill') with input shapes: [], [].

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
File "export_inference_graph.py", line 152, in <module>
    tf.app.run()
File "/home/akash/venv/lib/python3.6/site-packages/tensorflow/python/platform/app.py", line 124, in run
    _sys.exit(main(argv))
File "export_inference_graph.py", line 148, in main
    write_inference_graph=FLAGS.write_inference_graph)
File "/home/akash/QuiVision/models/research/object_detection/exporter.py", line 455, in export_inference_graph
    write_inference_graph=write_inference_graph)
File "/home/akash/QuiVision/models/research/object_detection/exporter.py", line 359, in _export_inference_graph
    graph_hook_fn=graph_hook_fn)
File "/home/akash/QuiVision/models/research/object_detection/exporter.py", line 327, in _build_detection_graph
    output_collection_name=output_collection_name)
File "/home/akash/QuiVision/models/research/object_detection/exporter.py", line 306, in _get_outputs_from_inputs
    output_tensors, true_image_shapes)
File "/home/akash/QuiVision/models/research/object_detection/meta_architectures/ssd_meta_arch.py", line 701, in postprocess
    masks=prediction_dict.get('mask_predictions'))
File "/home/akash/QuiVision/models/research/object_detection/core/post_processing.py", line 477, in batch_multiclass_non_max_suppression
    parallel_iterations=parallel_iterations)
File "/home/akash/QuiVision/models/research/object_detection/utils/shape_utils.py", line 228, in static_or_dynamic_map_fn
    return tf.map_fn(fn, elems, dtype, parallel_iterations, back_prop)
File "/home/akash/venv/lib/python3.6/site-packages/tensorflow/python/ops/functional_ops.py", line 409, in map_fn
    swap_memory=swap_memory)
File "/home/akash/venv/lib/python3.6/site-packages/tensorflow/python/ops/control_flow_ops.py", line 2934, in while_loop
    result = loop_context.BuildLoop(cond, body, loop_vars, shape_invariants)
File "/home/akash/venv/lib/python3.6/site-packages/tensorflow/python/ops/control_flow_ops.py", line 2720, in BuildLoop
    pred, body, original_loop_vars, loop_vars, shape_invariants)
File "/home/akash/venv/lib/python3.6/site-packages/tensorflow/python/ops/control_flow_ops.py", line 2662, in _BuildLoop
    body_result = body(*packed_vars_for_body)
File "/home/akash/venv/lib/python3.6/site-packages/tensorflow/python/ops/functional_ops.py", line 399, in compute
    packed_fn_values = fn(packed_values)
File "/home/akash/QuiVision/models/research/object_detection/core/post_processing.py", line 451, in _single_image_nms_fn
    additional_fields=per_image_additional_fields)
File "/home/akash/QuiVision/models/research/object_detection/core/post_processing.py", line 173, in multiclass_non_max_suppression
    tf.zeros(max_selection_size-num_valid_nms_boxes, tf.int32)], 0)
File "/home/akash/venv/lib/python3.6/site-packages/tensorflow/python/ops/array_ops.py", line 1510, in zeros
    output = fill(shape, constant(zero, dtype=dtype), name=name)
File "/home/akash/venv/lib/python3.6/site-packages/tensorflow/python/ops/gen_array_ops.py", line 1801, in fill
    "Fill", dims=dims, value=value, name=name)
File "/home/akash/venv/lib/python3.6/site-packages/tensorflow/python/framework/op_def_library.py", line 787, in _apply_op_helper
    op_def=op_def)
File "/home/akash/venv/lib/python3.6/site-packages/tensorflow/python/framework/ops.py", line 3162, in create_op
    compute_device=compute_device)
File "/home/akash/venv/lib/python3.6/site-packages/tensorflow/python/framework/ops.py", line 3208, in _create_op_helper
    set_shapes_for_outputs(op)
File "/home/akash/venv/lib/python3.6/site-packages/tensorflow/python/framework/ops.py", line 2427, in set_shapes_for_outputs
    return _set_shapes_for_outputs(op)
File "/home/akash/venv/lib/python3.6/site-packages/tensorflow/python/framework/ops.py", line 2400, in _set_shapes_for_outputs
    shapes = shape_func(op)
File "/home/akash/venv/lib/python3.6/site-packages/tensorflow/python/framework/ops.py", line 2330, in call_with_requiring
    return call_cpp_shape_fn(op, require_shape_fn=True)
File "/home/akash/venv/lib/python3.6/site-packages/tensorflow/python/framework/common_shapes.py", line 627, in call_cpp_shape_fn
    require_shape_fn)
File "/home/akash/venv/lib/python3.6/site-packages/tensorflow/python/framework/common_shapes.py", line 691, in _call_cpp_shape_fn_impl
    raise ValueError(err.message)
ValueError: Shape must be rank 1 but is rank 0 for 'Postprocessor/BatchMultiClassNonMaxSuppression/map/while/MultiClassNonMaxSuppression/zeros' (op: 'Fill') with input shapes: [], [].

I have already tried the most common solution, i.e. export PYTHONPATH=$PYTHONPATH:`pwd`:`pwd`/slim, but it did not solve the problem.
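
A quick sanity check that the path change actually took effect (just my own snippet, assuming the standard models/research layout of the TF models repository):

    import sys

    # Confirm that models/research and models/research/slim are on the path.
    print([p for p in sys.path if p.rstrip('/').endswith(('research', 'slim'))])

    import object_detection   # lives in models/research
    import nets               # lives in models/research/slim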

According to my observation, this is the piece of code provided by the tensorflow object detection API that produces this error; here is the code:

def call_cpp_shape_fn(op, require_shape_fn=True):
  """A shape function that delegates to the registered C++ shape function.

  Args:
    op: the node in the graph for which to compute output shapes.
    require_shape_fn: If true, and the C++ shape function is not registered
      in the current binary then an exception is raised; otherwise, if the
      C++ shape function is not registered then unknown_shape is used.

  Returns:
    A dictionary with the following keys:
      shapes: A TensorShape list of the output shapes of the op, as computed
        using the C++ shape inference function registered for the op.
      handle_shapes: A TensorShape list of the shapes for handle outputs, if
        any.
      handle_dtypes: A list of DataType enums for the handle outputs, if any.

  Raises:
    ValueError: If the C++ shape function returned an error (e.g. because the
      shapes of the inputs are of the wrong rank or otherwise incompatible
      according to the shape function).
    RuntimeError: If the C++ shape function is not registered and
      <require_shape_fn> is True.
  """
  if op.type == "Const":
    # To avoid serializing large constants, we special-case constant
    # here, even though it has a C++ shape function.  When Python
    # calls the C / C-API directly, we should be able to remove this.
    return {
        "shapes": [tensor_shape.TensorShape(op.get_attr("value").tensor_shape)],
        "handle_data": [None]
    }

  input_tensors_needed = []
  input_tensors_as_shapes_needed = []

  while True:
    res = _call_cpp_shape_fn_impl(op, input_tensors_needed,
                                  input_tensors_as_shapes_needed,
                                  require_shape_fn)
    if not isinstance(res, dict):
      # Handles the case where _call_cpp_shape_fn_impl calls unknown_shape(op).
      return res

    # See if we need to evaluate some inputs.
    if not res["inputs_needed"]:
      return res
    p = cpp_shape_inference_pb2.CppShapeInferenceInputsNeeded()
    p = p.FromString(res["inputs_needed"])
    changed = False
    for idx in p.input_tensors_needed:
      if idx not in input_tensors_needed:
        input_tensors_needed.append(idx)
        changed = True
    for idx in p.input_tensors_as_shapes_needed:
      if idx not in input_tensors_as_shapes_needed:
        input_tensors_as_shapes_needed.append(idx)
        changed = True
    if not changed:
      return res


def _call_cpp_shape_fn_impl(
    op, input_tensors_needed, input_tensors_as_shapes_needed, require_shape_fn):
  """Core implementation of call_cpp_shape_fn."""
  graph_def_version = op.graph.graph_def_versions.producer
  node_def_str = op.node_def.SerializeToString()

  def tensor_to_inference_result(t):
    r = cpp_shape_inference_pb2.CppShapeInferenceResult()
    r.shape.CopyFrom(t.get_shape().as_proto())
    # pylint: disable=protected-access
    if t._handle_data is not None:
      r.handle_data.CopyFrom(t._handle_data)
    # pylint: enable=protected-access
    return r.SerializeToString()
  input_shapes = [tensor_to_inference_result(i) for i in op.inputs]

  input_tensors = [None for i in input_shapes]
  for idx in input_tensors_needed:
    v = tensor_util.constant_value(op.inputs[idx])
    if v is not None:
      input_tensors[idx] = np.asarray(v)

  serialized_unknown_shape = (
      tensor_shape.TensorShape(None).as_proto().SerializeToString())
  arr = [serialized_unknown_shape for i in input_shapes]
  for idx in input_tensors_as_shapes_needed:
    s = tensor_util.constant_value_as_shape(op.inputs[idx])
    if s is not None:
      arr[idx] = s.as_proto().SerializeToString()
  input_tensors_as_shapes = arr

  missing_shape_fn = False
  try:
    with errors.raise_exception_on_not_ok_status() as status:
      output = pywrap_tensorflow.RunCppShapeInference(
          graph_def_version, node_def_str, input_shapes, input_tensors,
          input_tensors_as_shapes, status)
  except errors.InvalidArgumentError as err:
    if err.message.startswith("No shape inference function exists for op"):
      missing_shape_fn = True
    else:
      raise ValueError(err.message)

  if missing_shape_fn:
    if require_shape_fn:
      raise RuntimeError(
          "No C++ shape function registered for standard op: %s" % op.type)
    return unknown_shape(op)

  output_shapes = output[:-1]

  # Convert TensorShapeProto values in output_shapes.
  result_protos = [
      cpp_shape_inference_pb2.CppShapeInferenceResult().FromString(s)
      for s in output_shapes
  ]
  result = [r.shape for r in result_protos]
  result_handle_data = [
      r.handle_data if r.handle_data.is_set else None for r in result_protos
  ]

  return {
      "shapes": result,
      "handle_data": result_handle_data,
      "inputs_needed": output[-1]
  }


# pylint: disable=protected-access
ops._set_call_cpp_shape_fn(call_cpp_shape_fn)
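
As I read the code above, the relevant part for this error is the except errors.InvalidArgumentError branch: the C++ shape function for the Fill op rejects the rank-0 dims input, and _call_cpp_shape_fn_impl re-raises it as the ValueError at the bottom of the traceback. Since the accepted shape forms for tf.zeros appear to depend on the TensorFlow release, here is a generic way to record the environment (my own snippet, nothing from the object_detection sources):

    import sys
    import tensorflow as tf

    # Record the interpreter and TensorFlow versions; the rank requirement on
    # the shape argument seen above can differ between TF releases.
    print("Python    :", sys.version.split()[0])
    print("TensorFlow:", tf.__version__)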