/*
 * @Description: Inference module
 * @Version: 1.0
 * @Author: lishengyin
 * @Date: 2021-10-13 09:35:37
 * @LastEditors: lishengyin
 * @LastEditTime: 2021-11-29 09:52:01
 */
#include "inference.h"

#define MAX_DISPLAY_LEN 64

#define PGIE_CLASS_ID_VEHICLE 0
#define PGIE_CLASS_ID_PERSON 2

/* By default, OSD process-mode is set to CPU_MODE. To change the mode, set to:
 * 1: GPU mode (Tesla only)
 * 2: HW mode (Jetson only)
 */
#define OSD_PROCESS_MODE 0

/* By default, OSD will not display text. To display text, change this to 1 */
#define OSD_DISPLAY_TEXT 1

/* The muxer output resolution must be set if the input streams have different
 * resolutions. The muxer will scale all input frames to this resolution. */
#define MUXER_OUTPUT_WIDTH 1920
#define MUXER_OUTPUT_HEIGHT 1080

/* Muxer batch formation timeout, in microseconds. Should ideally be set based
 * on the fastest source's framerate. */
#define MUXER_BATCH_TIMEOUT_USEC 5000

#define TILED_OUTPUT_WIDTH 1280
#define TILED_OUTPUT_HEIGHT 720

/* NVIDIA decoder source pad memory feature. This feature signifies that source
 * pads having this capability will push GstBuffers containing CUDA buffers. */
#define GST_CAPS_FEATURES_NVMM "memory:NVMM"

#define MAX_NUM_SOURCES 30
gint frame_number = 0;
gint g_num_sources = 0;
gint g_source_id_list[MAX_NUM_SOURCES];
gboolean g_eos_list[MAX_NUM_SOURCES];
gboolean g_source_enabled[MAX_NUM_SOURCES];
GMutex eos_lock;

GstElement *g_streammux = NULL;
std::shared_ptr<InferInfo> g_InferInfo = NULL;

namespace MIVA{

std::shared_ptr<Inference> infer = NULL;
ThreadPool pool(2, ThreadPool::PRIORITY_HIGHEST, false);

/**
 * @description: Create the singleton instance
 * @param {*}
 * @return {*} Shared pointer to the single Inference instance
 * @author: lishengyin
 */
std::shared_ptr<Inference> Inference::CreateNew()
{
    if(infer == NULL) infer = std::make_shared<Inference>();
    return infer;
}
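
/* Usage sketch (illustrative, not part of the original file): the expected
 * call sequence for this singleton, assuming an InferInfo filled in by the
 * caller.
 *
 *   auto infer = MIVA::Inference::CreateNew();
 *   if (infer->Init(inferInfo) != OK) return;   // build the pipeline
 *   infer->ReadyTask();                         // pipeline -> READY
 *   infer->StartTask();                         // async: PLAYING + main loop
 *   ...
 *   infer->StopTask();                          // sources -> NULL, emits NOTICE_RELEASE
 */
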
Inference::Inference()
{
}

Inference::~Inference()
{
    DebugL << "Returned, stopping playback";
    this->m_InferInfo = NULL;
    gst_element_set_state(this->pipeline, GST_STATE_NULL);
    g_main_loop_quit (this->loop);
    DebugL << "Deleting pipeline";
    gst_object_unref(GST_OBJECT(this->pipeline));
    g_source_remove(this->bus_watch_id);
    g_main_loop_unref(this->loop);
    g_mutex_clear (&eos_lock);
}

/**
 * @description: Initialize the inference module
 * @param {shared_ptr<InferInfo>} InferInfo Collection of data sources
 * @return {*} OK on success, ERR otherwise
 * @author: lishengyin
 */
int32_t Inference::Init(std::shared_ptr<InferInfo> InferInfo)
{
    // Create the main loop
    this->loop = g_main_loop_new (NULL, FALSE);
    // Create the pipeline
    this->pipeline = gst_pipeline_new("dstest3-pipeline");
    // Create the stream muxer, which batches frames from all sources
    this->streammux = gst_element_factory_make ("nvstreammux", "stream-muxer");
    g_streammux = this->streammux;
    if(this->pipeline == NULL || this->streammux == NULL){
        ErrorL << "One element could not be created. Exiting.";
        return ERR;
    }
    gst_bin_add (GST_BIN (this->pipeline), streammux);

    this->m_InferInfo = InferInfo;
    g_InferInfo = InferInfo;

    // Create a source bin for each data source
    std::vector<DataSource>::iterator iter;
    g_num_sources = 0;
    for(iter = this->m_InferInfo->DataSources.begin(); iter != this->m_InferInfo->DataSources.end(); iter++){
        GstElement *source_bin = create_uridecode_bin (g_num_sources, (gchar*)((*iter).uri).c_str());
        if (!source_bin) {
            ErrorL << "Failed to create source bin. Exiting.";
            return ERR;
        }
        gst_bin_add(GST_BIN (this->pipeline), source_bin);
        iter->source_bin = source_bin;
        iter->Play = true;
        g_num_sources++;
    }

    /* Use nvinfer to run inference on the batched frames. */
    this->pgie = gst_element_factory_make("nvinfer", "primary-nvinference-engine");
    /* Add queue elements between every two elements */
    this->queue1 = gst_element_factory_make ("queue", "queue1");
    this->queue2 = gst_element_factory_make ("queue", "queue2");
    this->queue3 = gst_element_factory_make ("queue", "queue3");
    this->queue4 = gst_element_factory_make ("queue", "queue4");
    this->queue5 = gst_element_factory_make ("queue", "queue5");
    /* Use nvmultistreamtiler to composite the batched frames into a 2D tiled
     * array based on the source of the frames. */
    this->tiler = gst_element_factory_make ("nvmultistreamtiler", "nvtiler");
    /* Use a converter to convert from NV12 to RGBA as required by nvdsosd */
    this->nvvidconv = gst_element_factory_make ("nvvideoconvert", "nvvideo-converter");
    this->nvosd = gst_element_factory_make ("nvdsosd", "nv-onscreendisplay");
#ifdef PLATFORM_TEGRA
    this->transform = gst_element_factory_make ("nvegltransform", "nvegl-transform");
#endif
    this->sink = gst_element_factory_make ("nveglglessink", "nvvideo-renderer");

    if (!this->pgie || !this->tiler || !this->nvvidconv || !this->nvosd || !this->sink) {
        ErrorL << "One element could not be created. Exiting.";
        return ERR;
    }
#ifdef PLATFORM_TEGRA
    if(!this->transform) {
        ErrorL << "One tegra element could not be created. Exiting.";
        return ERR;
    }
#endif

    g_object_set(G_OBJECT(streammux), "batch-size", g_num_sources, NULL);
    g_object_set(G_OBJECT(streammux), "live-source", 1, NULL);
    g_object_set (G_OBJECT (streammux), "width", MUXER_OUTPUT_WIDTH, "height", MUXER_OUTPUT_HEIGHT,
        "batched-push-timeout", MUXER_BATCH_TIMEOUT_USEC, NULL);

    /* Configure the nvinfer element using the nvinfer config file. */
    g_object_set (G_OBJECT (this->pgie),
        "config-file-path", "config_infer_primary_yoloV5.txt", NULL);

    /* Override the batch-size set in the config file with the number of sources. */
    g_object_get (G_OBJECT (this->pgie), "batch-size", &(this->pgie_batch_size), NULL);
    if ((int)this->pgie_batch_size != g_num_sources) {
        WarnL << "WARNING: Overriding infer-config batch-size (" << this->pgie_batch_size
              << ") with number of sources (" << g_num_sources << ")";
        g_object_set (G_OBJECT (this->pgie), "batch-size", g_num_sources, NULL);
    }

    this->tiler_rows = (guint) sqrt (g_num_sources);
    this->tiler_columns = (guint) ceil (1.0 * g_num_sources / this->tiler_rows);
    /* Set the tiler properties */
    g_object_set (G_OBJECT (this->tiler), "rows", this->tiler_rows, "columns", this->tiler_columns,
        "width", TILED_OUTPUT_WIDTH, "height", TILED_OUTPUT_HEIGHT, NULL);
    g_object_set (G_OBJECT (this->nvosd), "process-mode", OSD_PROCESS_MODE,
        "display-text", OSD_DISPLAY_TEXT, NULL);
    g_object_set (G_OBJECT (this->sink), "qos", 0, NULL);

    this->bus = gst_pipeline_get_bus (GST_PIPELINE (this->pipeline));
    this->bus_watch_id = gst_bus_add_watch (this->bus, bus_call, this->loop);
    gst_object_unref (this->bus);

    /* The nvegltransform element only exists on Tegra, so add and link it
     * conditionally. The original code added this->transform unconditionally,
     * which uses an invalid pointer on non-Tegra (dGPU) builds. */
#ifdef PLATFORM_TEGRA
    gst_bin_add_many (GST_BIN (this->pipeline), this->queue1, this->pgie, this->queue2, this->tiler, this->queue3,
        this->nvvidconv, this->queue4, this->nvosd, this->queue5, this->transform, this->sink, NULL);
    if (!gst_element_link_many (streammux, this->queue1, this->pgie, this->queue2, this->tiler, this->queue3,
        this->nvvidconv, this->queue4, this->nvosd, this->queue5, this->transform, this->sink, NULL)) {
        ErrorL << "Elements could not be linked. Exiting.";
        return ERR;
    }
#else
    gst_bin_add_many (GST_BIN (this->pipeline), this->queue1, this->pgie, this->queue2, this->tiler, this->queue3,
        this->nvvidconv, this->queue4, this->nvosd, this->queue5, this->sink, NULL);
    if (!gst_element_link_many (streammux, this->queue1, this->pgie, this->queue2, this->tiler, this->queue3,
        this->nvvidconv, this->queue4, this->nvosd, this->queue5, this->sink, NULL)) {
        ErrorL << "Elements could not be linked. Exiting.";
        return ERR;
    }
#endif

    /* Attach a probe to the nvinfer src pad to read the inference metadata.
     * (Despite the member name, this is the pgie src pad, not the tiler's.) */
    this->tiler_src_pad = gst_element_get_static_pad(this->pgie, "src");
    if (!this->tiler_src_pad) {
        InfoL << "Unable to get src pad";
    } else {
        gst_pad_add_probe (this->tiler_src_pad, GST_PAD_PROBE_TYPE_BUFFER,
            this->tiler_src_pad_buffer_probe, NULL, NULL);
        gst_object_unref (this->tiler_src_pad);
    }
    return OK;
}
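
/* Resulting topology, for reference (the nvegltransform element only exists
 * in PLATFORM_TEGRA builds):
 *
 *   uridecodebin (xN) -> nvstreammux -> queue1 -> nvinfer -> queue2 ->
 *   nvmultistreamtiler -> queue3 -> nvvideoconvert -> queue4 -> nvdsosd ->
 *   queue5 -> [nvegltransform] -> nveglglessink
 */
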
/**
 * @description: Ready the task
 * @param {*}
 * @return {*}
 * @author: lishengyin
 */
void Inference::ReadyTask()
{
    InfoL << "Now ReadyTask";
    gst_element_set_state(this->pipeline, GST_STATE_READY);
}

/**
 * @description: Start the task
 * @param {*}
 * @return {*}
 * @author: lishengyin
 */
int32_t Inference::StartTask()
{
    static int ret = 0;
    InfoL << "Now playing";
    // Start asynchronously: the first call sets the pipeline to PLAYING and
    // runs the main loop; subsequent calls restart the existing sources.
    if(ret != 0){
        pool.async([&](){
            this->RestartTask();
        });
    }else{
        ret++;
        pool.async([&](){
            gst_element_set_state(this->pipeline, GST_STATE_PLAYING);
            g_main_loop_run(this->loop);
        });
    }
    pool.start();
    return OK;
}

/**
 * @description: Pause the task
 * @param {*}
 * @return {*}
 * @author: lishengyin
 */
void Inference::PauseTask()
{
    InfoL << "Now Pause";
    std::vector<DataSource>::iterator iter;
    for(iter = this->m_InferInfo->DataSources.begin(); iter != this->m_InferInfo->DataSources.end(); iter++){
        if(iter->Play && iter->source_bin != NULL){
            gst_element_set_state(iter->source_bin, GST_STATE_PAUSED);
        }
    }
}

/**
 * @description: Stop the task
 * @param {*}
 * @return {*}
 * @author: lishengyin
 */
void Inference::StopTask()
{
    GstStateChangeReturn state_return;
    std::vector<DataSource>::iterator iter;
    for (iter = this->m_InferInfo->DataSources.begin(); iter != this->m_InferInfo->DataSources.end(); iter++){
        if(iter->Play && iter->source_bin != NULL){
            state_return = gst_element_set_state(iter->source_bin, GST_STATE_NULL);
            switch (state_return) {
                case GST_STATE_CHANGE_SUCCESS:
                    InfoL << "STATE CHANGE SUCCESS";
                    break;
                case GST_STATE_CHANGE_FAILURE:
                    ErrorL << "STATE CHANGE FAILURE";
                    break;
                case GST_STATE_CHANGE_ASYNC:
                    InfoL << "STATE CHANGE ASYNC";
                    break;
                case GST_STATE_CHANGE_NO_PREROLL:
                    InfoL << "STATE CHANGE NO PREROLL";
                    break;
                default:
                    break;
            }
        }
    }
    NoticeCenter::Instance().emitEvent(NOTICE_RELEASE);
}

/**
 * @description: Restart the task
 * @param {*}
 * @return {*}
 * @author: lishengyin
 */
int32_t Inference::RestartTask()
{
    int num = 0;
    std::vector<DataSource>::iterator iter;
    for(iter = this->m_InferInfo->DataSources.begin(); iter != this->m_InferInfo->DataSources.end(); iter++){
        if(iter->Play && iter->source_bin != NULL) num++;
    }
    if(num == 0){
        DebugL << "No data source needs to be played" << endl;
        return ERR;
    }
    DebugL << "RestartTask" << endl;
    for (iter = this->m_InferInfo->DataSources.begin(); iter != this->m_InferInfo->DataSources.end(); iter++){
        if(iter->Play && iter->source_bin != NULL){
            gst_element_set_state(iter->source_bin, GST_STATE_PLAYING);
        }
    }
    gst_element_set_state(this->pipeline, GST_STATE_PAUSED);
    gst_element_set_state(this->pipeline, GST_STATE_PLAYING);
    return OK;
}

/**
 * @description: Release the singleton instance
 * @param {*}
 * @return {*}
 * @author: lishengyin
 */
void Inference::Destory()
{
    infer = NULL;
}

/**
 * @description: Fetch the inference results
 * @param {GstPad *} pad The src pad of the inference element
 * @param {GstPadProbeInfo *} info Probe info carrying the batched buffer
 * @param {gpointer} u_data NULL
 * @return {*}
 * @author: lishengyin
 */
GstPadProbeReturn
Inference::tiler_src_pad_buffer_probe(GstPad * pad, GstPadProbeInfo * info, gpointer u_data)
{
    // Pull the batched inference results out of the buffer
    GstBuffer *buf = (GstBuffer *) info->data;
    NvDsBatchMeta *batch_meta = gst_buffer_get_nvds_batch_meta (buf);
    // Metadata handles used while walking the batch
    NvDsObjectMeta *obj_meta = NULL;
    NvDsMetaList * l_frame = NULL;
    NvDsMetaList * l_obj = NULL;
    NvDsDisplayMeta *display_meta = NULL;

    // Iterate over each frame in the batch
    for (l_frame = batch_meta->frame_meta_list; l_frame != NULL; l_frame = l_frame->next)
    {
        NvDsFrameMeta *frame_meta = (NvDsFrameMeta *) (l_frame->data);
        int num = 0;
        for (l_obj = frame_meta->obj_meta_list; l_obj != NULL; l_obj = l_obj->next)
        {
            obj_meta = (NvDsObjectMeta *) (l_obj->data);
            if (obj_meta->class_id == 0) {
                num++;
            }
        }
        // Draw the detection region configured for this source
        std::string json = g_InferInfo->DataSources[frame_meta->source_id].range;
        InferRange inferRange;
        if(inferRange.jsonToObject(json)){
            if(inferRange.m_points.size() > 0){
                display_meta = nvds_acquire_display_meta_from_pool(batch_meta);
                /* NvDsDisplayMeta only holds MAX_ELEMENTS_IN_DISPLAY_META (16)
                 * line params, so clamp the edge count to avoid overflow. */
                int n_points = (int)inferRange.m_points.size();
                if (n_points > MAX_ELEMENTS_IN_DISPLAY_META) n_points = MAX_ELEMENTS_IN_DISPLAY_META;
                for(int i = 0; i < n_points; i++)
                {
                    NvOSD_LineParams *line_params = &display_meta->line_params[i];
                    line_params->x1 = inferRange.m_points[i].x;
                    line_params->y1 = inferRange.m_points[i].y;
                    if((i + 1) < (int)(inferRange.m_points.size())){
                        line_params->x2 = inferRange.m_points[i+1].x;
                        line_params->y2 = inferRange.m_points[i+1].y;
                    }else{
                        // Close the polygon back to the first point
                        line_params->x2 = inferRange.m_points[0].x;
                        line_params->y2 = inferRange.m_points[0].y;
                    }
                    line_params->line_width = 2;
                    line_params->line_color.red = 0.0;
                    line_params->line_color.green = 1.0;
                    line_params->line_color.blue = 0.0;
                    line_params->line_color.alpha = 1.0;
                }
                display_meta->num_lines = n_points;
                nvds_add_display_meta_to_frame(frame_meta, display_meta);
            }
        }
        // If a region is configured, only count objects whose bounding-box
        // center falls inside the polygon
        if(inferRange.m_points.size() > 0){
            num = 0;
            for (l_obj = frame_meta->obj_meta_list; l_obj != NULL; l_obj = l_obj->next)
            {
                obj_meta = (NvDsObjectMeta *) (l_obj->data);
                if (obj_meta->class_id == 0) {
                    NvDsComp_BboxInfo boxInfo = obj_meta->detector_bbox_info;
                    NvBbox_Coords box_Coord = boxInfo.org_bbox_coords;
                    Point p;
                    p.x = box_Coord.left + box_Coord.width / 2;
                    p.y = box_Coord.top + box_Coord.height / 2;
                    if(Inference::PtInPolygon(p, inferRange.m_points, inferRange.m_points.size())) num++;
                }
            }
        }
        // Broadcast the per-source count
        NoticeCenter::Instance().emitEvent(NOTICE_INFER, frame_meta->source_id, num);
    }
    return GST_PAD_PROBE_OK;
}
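
/* Consumer sketch (an assumption, not code from this file): a module elsewhere
 * can subscribe to the broadcast above through ZLToolKit's NoticeCenter. The
 * lambda's argument types must match emitEvent()'s arguments exactly
 * (guint source_id, int num) or the listener will not fire.
 *
 *   static char tag;
 *   NoticeCenter::Instance().addListener(&tag, NOTICE_INFER,
 *       [](guint source_id, int num) {
 *           // e.g. forward {source_id, num} to the business layer
 *       });
 */
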
/**
 * @description: Point-in-polygon test (region filter)
 * @param {Point} p Point to test
 * @param {vector<Point>&} ptPolygon Polygon vertices
 * @param {int} nCount Number of vertices
 * @return {*} 1 if the point lies inside the polygon, 0 otherwise
 */
int Inference::PtInPolygon (Point p, vector<Point>& ptPolygon, int nCount)
{
    // Number of times a rightward ray from p crosses a polygon edge
    int nCross = 0;
    for (int i = 0; i < nCount; i++)
    {
        Point p1 = ptPolygon[i];
        Point p2 = ptPolygon[(i + 1) % nCount]; // edge from p1 to p2

        if ( p1.y == p2.y )   // horizontal edges cannot cross the ray
            continue;
        if ( p.y < min(p1.y, p2.y) )
            continue;
        if ( p.y >= max(p1.y, p2.y) )
            continue;

        // x coordinate of the intersection (from the two-point line equation)
        double x = (double)(p.y - p1.y) * (double)(p2.x - p1.x) / (double)(p2.y - p1.y) + p1.x;

        // Only count intersections on the rightward ray from p
        if ( x > p.x )
        {
            nCross++;
        }
    }
    // Odd number of crossings: inside; even: outside
    return (nCross % 2) == 1 ? 1 : 0;
}
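
/* Worked example: for the square (0,0)-(10,0)-(10,10)-(0,10) and p=(5,5), the
 * rightward ray from p crosses only the edge at x==10, so nCross == 1 (odd)
 * and the point is reported inside; for p=(15,5) no edge lies to the right,
 * so nCross == 0 (even) and the point is outside. Horizontal edges are
 * skipped, and the `p.y >= max(...)` test keeps a vertex shared by two edges
 * from being counted twice. */
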
/**
 * @description: Watch the bus
 * @param {GstBus *} bus The DeepStream pipeline's bus
 * @param {GstMessage *} msg Message delivered on the bus
 * @param {gpointer} data NULL
 * @return {*}
 * @author: lishengyin
 */
gboolean Inference::bus_call (GstBus * bus, GstMessage * msg, gpointer data)
{
    switch (GST_MESSAGE_TYPE (msg)) {
        case GST_MESSAGE_EOS:
            InfoL << "End of stream";
            break;
        case GST_MESSAGE_WARNING:
        {
            gchar *debug;
            GError *error;
            gst_message_parse_warning (msg, &error, &debug);
            WarnL << "WARNING from element " << GST_OBJECT_NAME (msg->src) << ": " << error->message;
            g_free (debug);
            g_error_free (error);
            break;
        }
        case GST_MESSAGE_ERROR:
        {
            gchar *debug;
            GError *error;
            gst_message_parse_error (msg, &error, &debug);
            ErrorL << "ERROR from element " << GST_OBJECT_NAME (msg->src) << ": " << error->message;
            if (debug)
                ErrorL << "Error details: " << debug;
            g_free (debug);
            g_error_free (error);
            break;
        }
#ifndef PLATFORM_TEGRA
        case GST_MESSAGE_ELEMENT:
        {
            if (gst_nvmessage_is_stream_eos (msg)) {
                guint stream_id;
                if (gst_nvmessage_parse_stream_eos (msg, &stream_id)) {
                    InfoL << "Got EOS from stream " << stream_id;
                }
            }
            break;
        }
#endif
        default:
            break;
    }
    return TRUE;
}

/**
 * @description: Handle child-added from the decode bin
 * @param {GstChildProxy *} child_proxy The decodebin's child proxy
 * @return {*}
 * @author: lishengyin
 */
void Inference::decodebin_child_added (GstChildProxy * child_proxy, GObject * object,
    gchar * name, gpointer user_data)
{
    WarnL << "Decodebin child added: " << name;
    if (g_strrstr (name, "decodebin") == name) {
        // Recurse into nested decodebins
        g_signal_connect (G_OBJECT (object), "child-added",
            G_CALLBACK (decodebin_child_added), user_data);
    }
    if (g_strrstr (name, "nvv4l2decoder") == name) {
#ifdef PLATFORM_TEGRA
        g_object_set (object, "enable-max-performance", TRUE, NULL);
        g_object_set (object, "bufapi-version", TRUE, NULL);
        g_object_set (object, "drop-frame-interval", 0, NULL);
        g_object_set (object, "num-extra-surfaces", 0, NULL);
#else
        g_object_set (object, "gpu-id", 0, NULL);
#endif
    }
}

/**
 * @description: Remove a data source
 * @param {gint} source_id ID of the data source
 * @return {*}
 * @author: lishengyin
 */
void Inference::stop_release_source (gint source_id)
{
    g_mutex_lock (&eos_lock);
    GstState state;
    /* Note: GST_CLOCK_TIME_NONE blocks until the state change completes. */
    gst_element_get_state(this->m_InferInfo->DataSources[source_id].source_bin, &state, NULL, GST_CLOCK_TIME_NONE);
    DebugL << "state:" << state << endl;
    DebugL << "ID:(" << source_id << "," << this->m_InferInfo->DataSources[source_id].uri << ")" << endl;
    if(state == GST_STATE_NULL){
        gst_bin_remove (GST_BIN (this->pipeline), this->m_InferInfo->DataSources[source_id].source_bin);
    }else{
        ErrorL << "This source_bin has not been set to GST_STATE_NULL" << endl;
    }
    g_mutex_unlock (&eos_lock);
}

/**
 * @description: Handle pad-added from the decode bin
 * @param {GstElement *} decodebin
 * @param {GstPad *} pad
 * @param {gpointer} data
 * @return {*}
 * @author: lishengyin
 */
void Inference::cb_newpad (GstElement * decodebin, GstPad * pad, gpointer data)
{
    GstCaps *caps = gst_pad_query_caps (pad, NULL);
    const GstStructure *str = gst_caps_get_structure (caps, 0);
    const gchar *name = gst_structure_get_name (str);
    WarnL << "decodebin new pad " << name;
    if (!strncmp (name, "video", 5)) {
        gint source_id = (*(gint *) data);
        gchar pad_name[16] = { 0 };
        GstPad *sinkpad = NULL;
        // Request the matching sink_%u pad on the muxer for this source
        g_snprintf (pad_name, 15, "sink_%u", source_id);
        sinkpad = gst_element_get_static_pad (g_streammux, pad_name);
        if(sinkpad == NULL) sinkpad = gst_element_get_request_pad (g_streammux, pad_name);
        if (gst_pad_link (pad, sinkpad) != GST_PAD_LINK_OK) {
            ErrorL << "Failed to link decodebin to pipeline: " << pad_name;
        } else {
            InfoL << "Decodebin linked to pipeline";
        }
        gst_object_unref (sinkpad);
    }
    gst_caps_unref (caps); // gst_pad_query_caps() returns a reference we own
}

/**
 * @description: Create a decode bin
 * @param {guint} index Index of the data source
 * @param {gchar *} filename URI of the data source
 * @return {GstElement *} The decode bin, or NULL on failure
 * @author: lishengyin
 */
GstElement* Inference::create_uridecode_bin (guint index, gchar * filename)
{
    GstElement *bin = NULL;
    gchar bin_name[16] = { };
    DebugL << "creating uridecodebin for [" << filename << "]";
    g_source_id_list[index] = index;
    g_snprintf (bin_name, 15, "source-bin-%02d", index);
    bin = gst_element_factory_make ("uridecodebin", bin_name);
    g_object_set (G_OBJECT (bin), "uri", filename, NULL);
    g_signal_connect (G_OBJECT (bin), "pad-added",
        G_CALLBACK (cb_newpad), &g_source_id_list[index]);
    g_signal_connect (G_OBJECT (bin), "child-added",
        G_CALLBACK (decodebin_child_added), &g_source_id_list[index]);
    g_source_enabled[index] = TRUE;
    return bin;
}

/**
 * @description: Add a data source
 * @param {int} source_Id ID of the data source
 * @param {string} uri URI of the data source
 * @return {*} TRUE on success, FALSE otherwise
 * @author: lishengyin
 */
gboolean Inference::add_sources (int source_Id, std::string uri)
{
    g_mutex_lock (&eos_lock);
    GstElement *source_bin;
    InfoL << "Calling Start " << source_Id;
    source_bin = create_uridecode_bin (source_Id, (gchar *)uri.c_str());
    if (!source_bin) {
        ErrorL << "Failed to create source bin. Exiting.";
        // Unlock before the early return so the mutex is not left held
        g_mutex_unlock (&eos_lock);
        return FALSE;
    }
    this->m_InferInfo->DataSources[source_Id].source_bin = source_bin;
    gst_bin_add (GST_BIN (this->pipeline), source_bin);

    g_mutex_unlock (&eos_lock);
    return TRUE;
}
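
/* Note (assumption, mirroring the DeepStream runtime add/delete sample): a bin
 * added while the pipeline is PLAYING must still be brought up to the
 * pipeline's state before it produces data, e.g.
 *
 *   gst_element_sync_state_with_parent (source_bin);
 *
 * Here that transition happens later, when RestartTask() sets each active
 * source_bin to GST_STATE_PLAYING. */
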
/**
 * @description: Modify a data source's URI
 * @param {GstElement} *source_bin
 * @param {string} uri
 * @return {*}
 */
void Inference::ModifyUri(GstElement *source_bin, std::string uri)
{
    g_object_set(G_OBJECT(source_bin), "uri", uri.c_str(), NULL);
}

/**
 * @description: Release a source bin's resources
 * @param {GstElement} *source_bin
 * @return {*}
 */
void Inference::FreeElement(int source_Id, GstElement *source_bin)
{
    gst_object_unref(GST_OBJECT(source_bin));
    //g_source_remove(source_Id);
}

/**
 * @description: Re-add a data source's bin to the pipeline
 * @param {*}
 * @return {*}
 */
void Inference::AddSources(int sourceId)
{
    if(this->m_InferInfo->DataSources[sourceId].source_bin != NULL)
        gst_bin_add (GST_BIN (this->pipeline), this->m_InferInfo->DataSources[sourceId].source_bin);
}

} // namespace MIVA