How can I get the output tensor from a specific layer?
0 votes
/ 27 May 2020

I would like to find out whether it is possible to get the output of a specific layer using TensorFlow Lite in an Android environment. At the moment I know that with 'Interpreter.run()' we get the "standard" (final) output, but that is not what I am looking for. Thanks for any advice.

1 Answer

0 votes
/ 31 May 2020

@Simo I will write a workaround for this problem here. How about saving only the part of the model you need to a .tflite file? Let me explain. Instead of doing the following and saving the whole model:

# WHOLE MODEL
import tensorflow as tf

keras_model = tf.keras.models.load_model('face_recog.weights.best.hdf5')
converter = tf.lite.TFLiteConverter.from_keras_model(keras_model)
tflite_save = converter.convert()
open("face_recog.tflite", "wb").write(tflite_save)

You can print the layers of your Keras model:

print([layer.name for layer in keras_model.layers])
Output:
['anchor', 'positive', 'negative', 'model', 'lambda']

print([layer.name for layer in keras_model.get_layer('model').layers])
Output:  
['input_1', 'Conv1_pad', 'Conv1', 'bn_Conv1', 'Conv1_relu', 'expanded_conv_depthwise', 'expanded_conv_depthwise_BN', 'expanded_conv_depthwise_relu', 'expanded_conv_project', 'expanded_conv_project_BN', 'block_1_expand', 'block_1_expand_BN', 'block_1_expand_relu', 'block_1_pad', 'block_1_depthwise', 'block_1_depthwise_BN', 'block_1_depthwise_relu', 'block_1_project', 'block_1_project_BN', 'block_2_expand', 'block_2_expand_BN', 'block_2_expand_relu', 'block_2_depthwise', 'block_2_depthwise_BN', 'block_2_depthwise_relu', 'block_2_project', 'block_2_project_BN', 'block_2_add', 'block_3_expand', 'block_3_expand_BN', 'block_3_expand_relu', 'block_3_pad', 'block_3_depthwise', 'block_3_depthwise_BN', 'block_3_depthwise_relu', 'block_3_project', 'block_3_project_BN', 'block_4_expand', 'block_4_expand_BN', 'block_4_expand_relu', 'block_4_depthwise', 'block_4_depthwise_BN', 'block_4_depthwise_relu', 'block_4_project', 'block_4_project_BN', 'block_4_add', 'block_5_expand', 'block_5_expand_BN', 'block_5_expand_relu', 'block_5_depthwise', 'block_5_depthwise_BN', 'block_5_depthwise_relu', 'block_5_project', 'block_5_project_BN', 'block_5_add', 'block_6_expand', 'block_6_expand_BN', 'block_6_expand_relu', 'block_6_pad', 'block_6_depthwise', 'block_6_depthwise_BN', 'block_6_depthwise_relu', 'block_6_project', 'block_6_project_BN', 'block_7_expand', 'block_7_expand_BN', 'block_7_expand_relu', 'block_7_depthwise', 'block_7_depthwise_BN', 'block_7_depthwise_relu', 'block_7_project', 'block_7_project_BN', 'block_7_add', 'block_8_expand', 'block_8_expand_BN', 'block_8_expand_relu', 'block_8_depthwise', 'block_8_depthwise_BN', 'block_8_depthwise_relu', 'block_8_project', 'block_8_project_BN', 'block_8_add', 'block_9_expand', 'block_9_expand_BN', 'block_9_expand_relu', 'block_9_depthwise', 'block_9_depthwise_BN', 'block_9_depthwise_relu', 'block_9_project', 'block_9_project_BN', 'block_9_add', 'block_10_expand', 'block_10_expand_BN', 'block_10_expand_relu', 'block_10_depthwise', 'block_10_depthwise_BN', 'block_10_depthwise_relu', 'block_10_project', 'block_10_project_BN', 'block_11_expand', 'block_11_expand_BN', 'block_11_expand_relu', 'block_11_depthwise', 'block_11_depthwise_BN', 'block_11_depthwise_relu', 'block_11_project', 'block_11_project_BN', 'block_11_add', 'block_12_expand', 'block_12_expand_BN', 'block_12_expand_relu', 'block_12_depthwise', 'block_12_depthwise_BN', 'block_12_depthwise_relu', 'block_12_project', 'block_12_project_BN', 'block_12_add', 'block_13_expand', 'block_13_expand_BN', 'block_13_expand_relu', 'block_13_pad', 'block_13_depthwise', 'block_13_depthwise_BN', 'block_13_depthwise_relu', 'block_13_project', 'block_13_project_BN', 'block_14_expand', 'block_14_expand_BN', 'block_14_expand_relu', 'block_14_depthwise', 'block_14_depthwise_BN', 'block_14_depthwise_relu', 'block_14_project', 'block_14_project_BN', 'block_14_add', 'block_15_expand', 'block_15_expand_BN', 'block_15_expand_relu', 'block_15_depthwise', 'block_15_depthwise_BN', 'block_15_depthwise_relu', 'block_15_project', 'block_15_project_BN', 'block_15_add', 'block_16_expand', 'block_16_expand_BN', 'block_16_expand_relu', 'block_16_depthwise', 'block_16_depthwise_BN', 'block_16_depthwise_relu', 'block_16_project', 'block_16_project_BN', 'Conv_1', 'Conv_1_bn', 'out_relu', 'global_average_pooling2d', 'predictions', 'dense', 'dense_1']

Then you can take the part of the model you want, for example the nested 'model' sub-model (which is itself a Keras model), and save it to its own .tflite file:

# PART OF MODEL
keras_model = tf.keras.models.load_model('face_recog.weights.best.hdf5')
converter = tf.lite.TFLiteConverter.from_keras_model(keras_model.get_layer('model'))
tflite_save = converter.convert()
open("face_recog_model_layer.tflite", "wb").write(tflite_save)

So, with the code above that converts the 'model' sub-model, the .tflite file will have the input tensor "input_1" and the output tensor "dense_1".

Then, inside Android, you feed the input for that specific 'model' layer and you get back an output of the specified shape, just like when printing the output details in Python:

interpreter = tf.lite.Interpreter('face_recog_model_layer.tflite')
print(interpreter.get_output_details())
print(interpreter.get_tensor_details())
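
As a quick sanity check before moving to Android, you can run the converted file on dummy data in Python and confirm that the output shape matches the chosen layer. This is only a sketch: it reuses the interpreter created above and assumes a float32 model:

import numpy as np

interpreter.allocate_tensors()
input_details = interpreter.get_input_details()
output_details = interpreter.get_output_details()
# Hypothetical dummy input with the shape reported by the interpreter
dummy_input = np.random.random_sample(input_details[0]['shape']).astype(np.float32)
interpreter.set_tensor(input_details[0]['index'], dummy_input)
interpreter.invoke()
print(interpreter.get_tensor(output_details[0]['index']).shape)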

The Android part:

// Initialize interpreter
@Throws(IOException::class)
private suspend fun initializeInterpreter(app: Application) = withContext(Dispatchers.IO) {
    // Load the TF Lite model from asset folder and initialize TF Lite Interpreter without NNAPI enabled.
    val assetManager = app.assets
    val model = loadModelFile(assetManager, "face_recog_model_layer.tflite")
    val options = Interpreter.Options()
    options.setUseNNAPI(false)
    interpreter = Interpreter(model, options)
    // Reads type and shape of input and output tensors, respectively.
    val imageTensorIndex = 0
    val imageShape: IntArray =
        interpreter.getInputTensor(imageTensorIndex).shape() 
    Log.i("INPUT_TENSOR_WHOLE", Arrays.toString(imageShape))
    val imageDataType: DataType =
        interpreter.getInputTensor(imageTensorIndex).dataType()
    Log.i("INPUT_DATA_TYPE", imageDataType.toString())
    val probabilityTensorIndex = 0
    val probabilityShape: IntArray =
        interpreter.getOutputTensor(probabilityTensorIndex).shape()
    Log.i("OUTPUT_TENSOR_SHAPE", Arrays.toString(probabilityShape))
    val probabilityDataType: DataType =
        interpreter.getOutputTensor(probabilityTensorIndex).dataType()
    Log.i("OUTPUT_DATA_TYPE", probabilityDataType.toString())
    Log.i(TAG, "Initialized TFLite interpreter.")

}

@Throws(IOException::class)
private fun loadModelFile(assetManager: AssetManager, filename: String): MappedByteBuffer {
    val fileDescriptor = assetManager.openFd(filename)
    val inputStream = FileInputStream(fileDescriptor.fileDescriptor)
    val fileChannel = inputStream.channel
    val startOffset = fileDescriptor.startOffset
    val declaredLength = fileDescriptor.declaredLength
    return fileChannel.map(FileChannel.MapMode.READ_ONLY, startOffset, declaredLength)
}

I hope this helps someone. And of course, if you need anything else, tag me :)
