Opencv Mat как AppSrc для srtclientsink - PullRequest
0 голосов
/ 17 марта 2019

Я пытаюсь использовать OpenCV Mat в качестве appsrc в моём конвейере и передать его через SRT на локальный сервер, но окно для воспроизведения видеопотока не открывается.

Моя система — Mac OS 10.14 с GStreamer 1.15.

Конвейер состоит из следующих элементов:

appsrc -> videoconvert -> videoencoder -> mpegtsmux -> srtclientsink

Я хочу получить поток SRT и показать его с помощью следующей команды:

gst-launch-1.0 srtserversrc uri=srt://:8888 ! decodebin3 ! autovideosink

В журналах отладки он говорит:

GST_BUFFER gstbuffer.c: 445: void _memory_add (GstBuffer *, gint, GstMemory*): буфер 0x7fd1aca38500, IDX -1, мем 0x7fd1aca3a2b0 0: 00: +08,150919000 974 0x7fd1ac864b20 DEBUG tsdemux tsdemux.c: 2980: gst_ts_demux_push_pending_data: Не достаточно информации, чтобы выдвинуть буферы еще, хранения буфера 0: 00: +08,150931000 974 0x7fd1ac864b20 LOG tsdemux tsdemux.c: 3098: gst_ts_demux_push_pending_data: сброс в EMPTY, возвращение в порядке 0: 00: 08.150942000 974 0x7fd1ac864b20 LOG mpegtspacketizer mpegtspacketizer.c:689: mpegts_packetizer_flush_bytes: очистка 564 байта от адаптера 0: 00: 08.151214000 974 0x7fd1ac864b20 Адаптер LOG gstadapter.c: 634: void gst_adapter_flush_unchecked (GstAdapter *, gsize) 0: 0 0 0 0 0 0 0 0 0 0 0 0 0 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 5 6 5 5 5 6 5 5 5 6 5 5 5 6 5 6 5 5 бд бд: 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8: void gst_adapter_unmap (GstAdapter *): буфер памяти unmap 0x7fd1aca383f0 0: 00: 08.151247000 974 0x7fd1ac864b20 Адаптер LOG gstadapter.c: 655: void gst_adapter_flush_unchecked (буфер GstAdapter 101 *

Я предполагаю, что есть проблема с демультиплексором, возможно, потому что я использую только видеоданные и не аудиоданные, но без mpegtsmux в моём коде я получаю ошибку, что размер полезной нагрузки превышает максимально допустимые 1316 байт в протоколе SRT.

Вот код:

main.cpp

#include <iostream>
#include <string>
#include <mutex>
#include <thread>
#include <time.h>

#include <opencv2/opencv.hpp>
#include <opencv2/imgproc/types_c.h>
#include <opencv2/highgui/highgui.hpp>

#include <gstreamer-1.0/gst/gstelement.h>
#include <gstreamer-1.0/gst/gstpipeline.h>
#include <gstreamer-1.0/gst/gstutils.h>
#include <gstreamer-1.0/gst/app/gstappsrc.h>
#include <gstreamer-1.0/gst/base/gstbasesrc.h>
#include <gstreamer-1.0/gst/video/video.h>
#include <gstreamer-1.0/gst/gst.h>
#include <gstreamer-1.0/gst/check/gstbufferstraw.h>

#include <glib.h>

#define GST_CAT_DEFAULT appsrc_pipeline_debug
GST_DEBUG_CATEGORY(appsrc_pipeline_debug);

using namespace std;

/*
 * bus: simple system for forwarding messages from streaming threads to app in own thread context
 * pad:
 * caps:
 * signal:
 * callback:
 *
 */

static std::mutex m;
GMainLoop *loop;

typedef struct _App App;
struct _App {
    GstElement *videoenc;
    GstElement *appsrc;
    GstElement *videoconvert;
    GstElement *sink;
    guint sourceid;
    GstElement *mpegts;
};
App s_app;

int counter = 0;

static gboolean cb_need_data(App *app) {
    static GstClockTime timestamp = 0;
    GstBuffer *buffer;
    guint buffersize;
    GstFlowReturn ret;
    GstMapInfo info;

    counter++;
    m.lock();

    cv::Mat image_mat = cv::imread("./../data/squat.jpg");
    cv::Mat resized_mat;

    cv::resize(image_mat, resized_mat, cv::Size(640, 480));

    buffersize = guint(resized_mat.cols * resized_mat.rows * resized_mat.channels());
    buffer = gst_buffer_new_and_alloc(buffersize);

    uchar *img_data = image_mat.data;
    m.unlock();

    if (gst_buffer_map(buffer, &info, (GstMapFlags) GST_MAP_WRITE)) {
        memcpy(info.data, img_data, buffersize);
        gst_buffer_unmap(buffer, &info);
    } else {
        g_print("error at memcpy");
    }

    g_signal_emit_by_name(app->appsrc, "push-buffer", buffer, &ret);

    if (ret != GST_FLOW_OK) {
        g_print("Ops\n");
        GST_DEBUG ("something wrong in cb_need_data");
        g_main_loop_quit(loop);
    }

    gst_buffer_unref(buffer);

    return TRUE;
}

static void start_feed(GstElement *pipeline, guint size, App *app) {
    if (app->sourceid == 0) {
        app->sourceid = g_timeout_add(67, (GSourceFunc) cb_need_data, app);
    }
}

static void stop_feed(GstElement *pipeline, App *app) {
    if (app->sourceid != 0) {
        g_source_remove(app->sourceid);
        app->sourceid = 0;
    }
}

static gboolean bus_call(GstBus *bus, GstMessage *message, gpointer data) {
    GError *err = nullptr;
    gchar *dbg_info = nullptr;
    GST_DEBUG ("got message %s", gst_message_type_get_name(GST_MESSAGE_TYPE(message)));

    switch (GST_MESSAGE_TYPE (message)) {
        case GST_MESSAGE_ERROR: {
            gst_message_parse_error(message, &err, &dbg_info);
            g_printerr("ERROR from element %s: %s\n",
                       GST_OBJECT_NAME (message->src), err->message);
            g_printerr("Debugging info: %s\n", (dbg_info) ? dbg_info : "none");
            g_error_free(err);
            g_free(dbg_info);
            g_main_loop_quit(loop);
            break;
        }
        case GST_MESSAGE_EOS:
            g_main_loop_quit(loop);
            break;
        default:
            break;
    }
    return TRUE;
}

void startStream() {

    App *app = &s_app;
    GstCaps *caps2;
    GstCaps *caps3;
    GstBus *bus;
    GstElement *pipeline;

    gst_init(nullptr, nullptr);

    loop = g_main_loop_new(nullptr, TRUE);

    /*
     * pipeline elements:
     * appsrc -> videoconvert -> videoencoder -> mpegtsmux -> srtsink
     */

    // create pipeline
    pipeline = gst_pipeline_new("gstreamer-encoder");
    if (!pipeline) {
        g_print("Error creating  pipeline");
    }

    // create appsrc element
    app->appsrc = gst_element_factory_make("appsrc", "appsrc");
    if (!app->appsrc) {
        g_print("Error creating appsrc");
    }

    // create videoconvert element
    app->videoconvert = gst_element_factory_make("videoconvert", "videoconvert");
    if (!app->videoconvert) {
        g_print("Error creating videoconvert element");
    }

    // create videoencoder element
    app->videoenc = gst_element_factory_make("x264enc", "encoder");
    if (!app->videoenc) {
        g_print("Error creating encoder");
    }

    app->mpegts = gst_element_factory_make("mpegtsmux", "mpegtsmux");
    if (!app->mpegts) {
        g_print("Error creating mpegtsmuxer");
    }

    app->sink = gst_element_factory_make("srtclientsink", "sink");
    if (!app->sink) {
        g_print("Error creating sink");
    }

    g_print("Elements are created\n");

    g_object_set(G_OBJECT(app->sink), "uri", "srt://127.0.0.1:8888", nullptr);
    g_object_set(G_OBJECT(app->sink), "msg-size", 1316, nullptr);
    g_object_set(G_OBJECT(app->sink), "latency", 120, nullptr);

    g_object_set(G_OBJECT(app->videoenc), "bitrate", 256, nullptr);

    g_print("End of settings\n");

    caps2 = gst_caps_new_simple("video/x-raw",
                                "format", G_TYPE_STRING, "RGB",
                                "width", G_TYPE_INT, 640,
                                "height", G_TYPE_INT, 480,
                                "framerate", GST_TYPE_FRACTION, 25, 1,
                                "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
                                nullptr);

    gst_app_src_set_caps(GST_APP_SRC(app->appsrc), caps2);

    g_object_set(G_OBJECT (app->appsrc), "stream-type", 0, "format", GST_FORMAT_TIME, nullptr);

    bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
    g_assert(bus);
    gst_bus_add_watch(bus, (GstBusFunc) bus_call, app);

    gst_bin_add_many(GST_BIN(pipeline), app->appsrc, app->videoconvert, app->videoenc,
                     app->mpegts, app->sink, nullptr);

    g_print("Added all the elements to the pipeline\n");

    int ok = FALSE;
    ok = gst_element_link_many(app->appsrc, app->videoconvert, app->videoenc,
                               app->sink, nullptr);

    if (ok)
        g_print("Linked all elements together\n");
    else
        g_print("Linking error\n");

    g_assert(app->appsrc);
    g_assert(GST_IS_APP_SRC(app->appsrc));

    g_signal_connect(app->appsrc, "need-data", G_CALLBACK(start_feed), app);
    g_signal_connect(app->appsrc, "enough-data", G_CALLBACK(stop_feed), app);

    g_print("Playing the video\n");
    gst_element_set_state(pipeline, GST_STATE_PLAYING);

    g_print("Running...\n");
    g_main_loop_run(loop);

    g_print("Returned, stopping playback\n");
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(bus);
    g_main_loop_unref(loop);
    g_print("Deleting pipeline\n");
}


int main(int argc, char **argv) {

    startStream();

    return 0;
} 

CMakeLists.txt

    cmake_minimum_required(VERSION 3.13)
project(opencv_gstreamer)

set(CMAKE_CXX_STANDARD 14)

find_package(PkgConfig REQUIRED)

pkg_search_module(OPENCV opencv4 REQUIRED)

pkg_search_module(GSTREAMER gstreamer-1.0 REQUIRED)
pkg_search_module(APP_GSTREAMER gstreamer-app-1.0 REQUIRED)
pkg_search_module(SRT srt REQUIRED)
pkg_search_module(GLIB glib-2.0 REQUIRED)

include_directories(
        ${OPENCV_INCLUDE_DIRS}
        ${GSTREAMER_INCLUDE_DIRS}
        ${APP_GSTREAMER_INCLUDE_DIRS}
        ${GLIB_INCLUDE_DIRS}
        ${SRT_INCLUDE_DIRS})

link_directories(
        ${OPENCV_LIBRARY_DIRS}
        ${GSTREAMER_LIBRARY_DIRS}
        ${APP_GSTREAMER_LIBRARY_DIRS}
        ${GLIB_LIBRARY_DIRS}
        ${SRT_LIBRARY_DIRS})

link_libraries(
        ${OPENCV_LDFLAGS}
        pthread
        ${GSTREAMER_LDFLAGS}
        ${APP_GSTREAMER_LDFLAGS}
        ${GLIB_LDFLAGS}
        ${SRT_LDFLAGS})

add_compile_options(
        ${OPENCV_CFLAGS}
        ${GSTREAMER_CFLAGS}
        ${APP_GSTREAMER_CFLAGS}
        ${GLIB_CFLAGS}
        ${SRT_CFLAGS})

add_executable(opencv_gstreamer src/main.cpp) 
...