用于需要采集当前设备camera数据,并做后续处理 // 采集导数据后处理 GstFlowReturn handle_sample(GstElement *sink, gpointer user_data, gboolean isH264) { ICameraSdkEvent *_event = (ICameraSdkEvent *)user_data; if (!_event) { return GST_FLOW_OK; } GstSample *sample = NULL; GstBuffer *buffer = NULL; g_signal_emit_by_name(sink, "pull-sample", &sample); if (!sample) { g_printerr("[camera_sdk_impl.cpp][handle_sample], sample is NULL!!!"); return GST_FLOW_OK; } GstBuffer *buf = gst_sample_get_buffer(sample); GstMapInfo map_info; if (!gst_buffer_map(buf, &map_info, GST_MAP_READ)) { g_printerr("[camera_sdk_impl.cpp][handle_sample], frameData map error"); return GST_FLOW_OK; } // 发送或做其他处理,数据map_info.data,数据大小map_info.size gst_buffer_unmap(buf, &map_info); gst_sample_unref(sample); return GST_FLOW_OK; } // 初始化 pipeline // |------------------pipeline----------------| // | v4l2src->capsfilter->mpph264enc->appsink | // |------------------------------------------| bool init(const char *dev, int pictureWidth, int pictureHeight, int frameRate) { m_v4l2_src = gst_element_factory_make("v4l2src", "v4l2_src"); if (!m_v4l2_src) { g_printerr("[init], v4l2src elements could be created.\n"); return false; } m_mpph264_enc = gst_element_factory_make("mpph264enc", "mpph264_enc"); if (!m_mpph264_enc) { g_printerr("[init], mpph264enc elements could be created.\n"); return false; } m_app_sink_video_1 = gst_element_factory_make("appsink", "app_sink_video_1"); if (!m_app_sink_video_1) { g_printerr("[init], app_sink_video elements could be created.\n"); return false; } m_pipeline = gst_pipeline_new("camera-pipeline"); if (!m_pipeline) { g_printerr("[init], pipeline elements could be created.\n"); return false; } g_object_set(G_OBJECT(m_v4l2_src), "device", dev, NULL); g_object_set(G_OBJECT(m_app_sink_video_1), "enable-last-sample", FALSE, NULL); g_object_set(G_OBJECT(m_app_sink_video_1), "sync", FALSE, "async", FALSE, NULL); g_object_set(G_OBJECT(m_app_sink_video_1), "max-lateness", 0, NULL); 
g_object_set(G_OBJECT(m_app_sink_video_1), "drop", TRUE, "max-buffers", 1, NULL); g_object_set(G_OBJECT(m_app_sink_video_1), "emit-signals", TRUE, NULL); g_signal_connect(m_app_sink_video_1, "new-sample", G_CALLBACK(cb_appsink_video_new_sample_1), m_cameraEvent); g_object_set(G_OBJECT(m_mpph264_enc), "gop", 100, NULL); g_object_set(G_OBJECT(m_mpph264_enc), "header-mode", 1, NULL); g_object_set(G_OBJECT(m_mpph264_enc), "qos", TRUE, NULL); m_caps_filter_1 = gst_element_factory_make("capsfilter", "caps_filter_1"); if (!m_caps_filter_1) { g_printerr("[init], caps_filter elements could be created.\n"); return false; } // 320 x 240 GstCaps *caps_1 = gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING, "NV12", "width", G_TYPE_INT, pictureWidth, "height", G_TYPE_INT, pictureHeight, "framerate", GST_TYPE_FRACTION, frameRate, 1, NULL); if (!caps_1) { g_printerr("[init], caps new fail.\n"); return false; } g_object_set(G_OBJECT(m_caps_filter_1), "caps", caps_1, NULL); gst_caps_unref(caps_1); gst_bin_add_many(GST_BIN(m_pipeline), m_v4l2_src, NULL); gst_bin_add_many(GST_BIN(m_pipeline), m_caps_filter_1, NULL); gst_bin_add_many(GST_BIN(m_pipeline), m_mpph264_enc, NULL); gst_bin_add_many(GST_BIN(m_pipeline), m_app_sink_video_1, NULL); if (gst_element_link_many(m_v4l2_src, m_caps_filter_1, m_mpph264_enc, m_app_sink_video_1, NULL) != TRUE) { g_printerr("[init], element all link fail.\n"); return false; } if (gst_element_set_state(m_pipeline, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) { g_printerr("[init], play failed...\n"); return false; } printf("[init], open camera devcie successfully..\n"); return true; } |
// Source: 开花结果 > 《Gstreamer》 (original blog attribution)