inference.cpp

/*
 * @Description: Inference module
 * @Version: 1.0
 * @Author: lishengyin
 * @Date: 2021-10-13 09:35:37
 * @LastEditors: lishengyin
 * @LastEditTime: 2021-11-29 09:52:01
 */
#include "inference.h"

#define MAX_DISPLAY_LEN 64
#define PGIE_CLASS_ID_VEHICLE 0
#define PGIE_CLASS_ID_PERSON 2

/* By default, OSD process-mode is set to CPU_MODE. To change mode, set as:
 * 1: GPU mode (for Tesla only)
 * 2: HW mode (for Jetson only)
 */
#define OSD_PROCESS_MODE 0

/* By default, OSD will not display text. To display text, change this to 1 */
#define OSD_DISPLAY_TEXT 1

/* The muxer output resolution must be set if the input streams will be of
 * different resolution. The muxer will scale all the input frames to this
 * resolution. */
#define MUXER_OUTPUT_WIDTH 1920
#define MUXER_OUTPUT_HEIGHT 1080

/* Muxer batch formation timeout, e.g. 40 millisec. Should ideally be set
 * based on the fastest source's framerate. */
#define MUXER_BATCH_TIMEOUT_USEC 5000

#define TILED_OUTPUT_WIDTH 1280
#define TILED_OUTPUT_HEIGHT 720

/* NVIDIA Decoder source pad memory feature. This feature signifies that source
 * pads having this capability will push GstBuffers containing cuda buffers. */
#define GST_CAPS_FEATURES_NVMM "memory:NVMM"

#define MAX_NUM_SOURCES 30

gint frame_number = 0;
gint g_num_sources = 0;
gint g_source_id_list[MAX_NUM_SOURCES];
gboolean g_eos_list[MAX_NUM_SOURCES];
gboolean g_source_enabled[MAX_NUM_SOURCES];
GMutex eos_lock;

GstElement *g_streammux = NULL;
std::shared_ptr<InferInfo> g_InferInfo = NULL;
namespace MIVA{

std::shared_ptr<Inference> infer = NULL;
ThreadPool pool(2, ThreadPool::PRIORITY_HIGHEST, false);

/**
 * @description: Create the instance
 * @param {*}
 * @return {*} shared pointer to the singleton instance
 * @author: lishengyin
 */
std::shared_ptr<Inference> Inference::CreateNew()
{
    if(infer == NULL) infer = std::make_shared<Inference>();
    return infer;
}
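/* Usage sketch (illustrative, not part of the original file): CreateNew() hands
 * out a process-wide instance and Destory() releases it. The InferInfo setup
 * below is a hypothetical placeholder; the call order itself follows the
 * methods defined in this file.
 *
 *   auto infer = MIVA::Inference::CreateNew();
 *   std::shared_ptr<InferInfo> info = std::make_shared<InferInfo>(); // hypothetical setup
 *   if (infer->Init(info) == OK) {
 *       infer->ReadyTask();
 *       infer->StartTask();
 *   }
 *   // ... later ...
 *   infer->StopTask();
 *   infer->Destory();
 */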
Inference::Inference()
{
}

Inference::~Inference()
{
    DebugL << "Returned, stopping playback";
    this->m_InferInfo = NULL;
    gst_element_set_state(this->pipeline, GST_STATE_NULL);
    g_main_loop_quit (this->loop);
    DebugL << "Deleting pipeline";
    gst_object_unref(GST_OBJECT(this->pipeline));
    g_source_remove(this->bus_watch_id);
    g_main_loop_unref(this->loop);
    g_mutex_clear (&eos_lock);
}
/**
 * @description: Initialize the inference module
 * @param {shared_ptr<InferInfo>} InferInfo collection of data sources
 * @return {*} whether initialization succeeded
 * @author: lishengyin
 */
int32_t Inference::Init(std::shared_ptr<InferInfo> InferInfo)
{
    // init
    this->loop = g_main_loop_new (NULL, FALSE);
    // Create the pipeline
    this->pipeline = gst_pipeline_new ("dstest3-pipeline");
    // Create the batcher (stream muxer)
    this->streammux = gst_element_factory_make ("nvstreammux", "stream-muxer");
    g_streammux = this->streammux;
    if(this->pipeline == NULL || this->streammux == NULL){
        ErrorL << "One element could not be created. Exiting.";
        return ERR;
    }
    gst_bin_add (GST_BIN (this->pipeline), streammux);
    this->m_InferInfo = InferInfo;
    g_InferInfo = InferInfo;
    // Create the data sources
    std::vector<DataSource>::iterator iter;
    g_num_sources = 0;
    for(iter = this->m_InferInfo->DataSources.begin(); iter != this->m_InferInfo->DataSources.end(); iter++){
        GstElement *source_bin = create_uridecode_bin (g_num_sources, (gchar*)((*iter).uri).c_str());
        if (!source_bin) {
            ErrorL << "Failed to create source bin. Exiting.";
            return ERR;
        }
        gst_bin_add (GST_BIN (this->pipeline), source_bin);
        iter->source_bin = source_bin;
        iter->Play = true;
        g_num_sources++;
    }
    /* Use nvinfer to infer on batched frame. */
    this->pgie = gst_element_factory_make ("nvinfer", "primary-nvinference-engine");
    /* Add queue elements between every two elements */
    this->queue1 = gst_element_factory_make ("queue", "queue1");
    this->queue2 = gst_element_factory_make ("queue", "queue2");
    this->queue3 = gst_element_factory_make ("queue", "queue3");
    this->queue4 = gst_element_factory_make ("queue", "queue4");
    this->queue5 = gst_element_factory_make ("queue", "queue5");
    /* Use nvtiler to composite the batched frames into a 2D tiled array based
     * on the source of the frames. */
    this->tiler = gst_element_factory_make ("nvmultistreamtiler", "nvtiler");
    /* Use convertor to convert from NV12 to RGBA as required by nvosd */
    this->nvvidconv = gst_element_factory_make ("nvvideoconvert", "nvvideo-converter");
    this->nvosd = gst_element_factory_make ("nvdsosd", "nv-onscreendisplay");
#ifdef PLATFORM_TEGRA
    this->transform = gst_element_factory_make ("nvegltransform", "nvegl-transform");
#endif
    this->sink = gst_element_factory_make ("nveglglessink", "nvvideo-renderer");
    if (!this->pgie || !this->tiler || !this->nvvidconv || !this->nvosd || !this->sink) {
        ErrorL << "One element could not be created. Exiting.";
        return -1;
    }
#ifdef PLATFORM_TEGRA
    if(!this->transform) {
        ErrorL << "One tegra element could not be created. Exiting.";
        return -1;
    }
#endif
    g_object_set (G_OBJECT (streammux), "batch-size", g_num_sources, NULL);
    g_object_set (G_OBJECT (streammux), "live-source", 1, NULL);
    g_object_set (G_OBJECT (streammux), "width", MUXER_OUTPUT_WIDTH, "height", MUXER_OUTPUT_HEIGHT,
        "batched-push-timeout", MUXER_BATCH_TIMEOUT_USEC, NULL);
    /* Configure the nvinfer element using the nvinfer config file. */
    g_object_set (G_OBJECT (this->pgie),
        "config-file-path", "config_infer_primary_yoloV5.txt", NULL);
    /* Override the batch-size set in the config file with the number of sources. */
    g_object_get (G_OBJECT (this->pgie), "batch-size", &(this->pgie_batch_size), NULL);
    if ((int)this->pgie_batch_size != g_num_sources) {
        WarnL << "WARNING: Overriding infer-config batch-size:" << this->pgie_batch_size << " with number of sources (" << g_num_sources << ")";
        g_object_set (G_OBJECT (this->pgie), "batch-size", g_num_sources, NULL);
    }
    this->tiler_rows = (guint) sqrt (g_num_sources);
    this->tiler_columns = (guint) ceil (1.0 * g_num_sources / this->tiler_rows);
    /* we set the tiler properties here */
    g_object_set (G_OBJECT (this->tiler), "rows", this->tiler_rows, "columns", this->tiler_columns,
        "width", TILED_OUTPUT_WIDTH, "height", TILED_OUTPUT_HEIGHT, NULL);
    g_object_set (G_OBJECT (this->nvosd), "process-mode", OSD_PROCESS_MODE,
        "display-text", OSD_DISPLAY_TEXT, NULL);
    g_object_set (G_OBJECT (this->sink), "qos", 0, NULL);
    this->bus = gst_pipeline_get_bus (GST_PIPELINE (this->pipeline));
    this->bus_watch_id = gst_bus_add_watch (this->bus, bus_call, this->loop);
    gst_object_unref (this->bus);
    /* The nvegltransform element is created only on Tegra, so it must also be
     * added and linked only there: gst_bin_add_many()/gst_element_link_many()
     * are NULL-terminated, and a NULL transform would end the list early and
     * silently drop the sink. */
#ifdef PLATFORM_TEGRA
    gst_bin_add_many (GST_BIN (this->pipeline), this->queue1, this->pgie, this->queue2, this->tiler, this->queue3,
        this->nvvidconv, this->queue4, this->nvosd, this->queue5, this->transform, this->sink, NULL);
    if (!gst_element_link_many (streammux, this->queue1, this->pgie, this->queue2, this->tiler, this->queue3,
        this->nvvidconv, this->queue4, this->nvosd, this->queue5, this->transform, this->sink, NULL)) {
        ErrorL << "Elements could not be linked. Exiting.";
        return -1;
    }
#else
    gst_bin_add_many (GST_BIN (this->pipeline), this->queue1, this->pgie, this->queue2, this->tiler, this->queue3,
        this->nvvidconv, this->queue4, this->nvosd, this->queue5, this->sink, NULL);
    if (!gst_element_link_many (streammux, this->queue1, this->pgie, this->queue2, this->tiler, this->queue3,
        this->nvvidconv, this->queue4, this->nvosd, this->queue5, this->sink, NULL)) {
        ErrorL << "Elements could not be linked. Exiting.";
        return -1;
    }
#endif
    this->tiler_src_pad = gst_element_get_static_pad (this->pgie, "src");
    if (!this->tiler_src_pad)
        InfoL << "Unable to get src pad";
    else
        gst_pad_add_probe (this->tiler_src_pad, GST_PAD_PROBE_TYPE_BUFFER,
            this->tiler_src_pad_buffer_probe, NULL, NULL);
    gst_object_unref (this->tiler_src_pad);
    return OK;
}
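/* Resulting topology, as built by the link_many() call above (the
 * nvegltransform stage exists only on Tegra builds):
 *
 *   uridecodebin[0..N] -> nvstreammux -> queue1 -> nvinfer -> queue2
 *     -> nvmultistreamtiler -> queue3 -> nvvideoconvert -> queue4
 *     -> nvdsosd -> queue5 (-> nvegltransform) -> nveglglessink
 */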
/**
 * @description: Ready the task
 * @param {*}
 * @return {*}
 * @author: lishengyin
 */
void Inference::ReadyTask()
{
    InfoL << "Now ReadyTask";
    gst_element_set_state(this->pipeline, GST_STATE_READY);
}
/**
 * @description: Start the task
 * @param {*}
 * @return {*}
 * @author: lishengyin
 */
int32_t Inference::StartTask()
{
    static int ret = 0;
    InfoL << "Now playing";
    // Start asynchronously; after the first start, restart instead
    if(ret != 0){
        pool.async([&](){
            this->RestartTask();
        });
    }else{
        ret++;
        pool.async([&](){
            gst_element_set_state(this->pipeline, GST_STATE_PLAYING);
            g_main_loop_run(this->loop);
        });
    }
    pool.start();
    return OK;
}
/**
 * @description: Pause the task
 * @param {*}
 * @return {*}
 * @author: lishengyin
 */
void Inference::PauseTask()
{
    InfoL << "Now Pause";
    std::vector<DataSource>::iterator iter;
    for(iter = this->m_InferInfo->DataSources.begin(); iter != this->m_InferInfo->DataSources.end(); iter++){
        if(iter->Play && iter->source_bin != NULL){
            gst_element_set_state(iter->source_bin, GST_STATE_PAUSED);
        }
    }
}
/**
 * @description: Stop the task
 * @param {*}
 * @return {*}
 * @author: lishengyin
 */
void Inference::StopTask()
{
    int sourceId = 0;
    // gst_element_set_state(this->pipeline, GST_STATE_PAUSED);
    GstStateChangeReturn state_return;
    std::vector<DataSource>::iterator iter;
    for (iter = this->m_InferInfo->DataSources.begin(); iter != this->m_InferInfo->DataSources.end(); iter++){
        if(iter->Play && iter->source_bin != NULL){
            state_return = gst_element_set_state(iter->source_bin, GST_STATE_NULL);
            switch (state_return) {
                case GST_STATE_CHANGE_SUCCESS:
                    InfoL << "STATE CHANGE SUCCESS";
                    break;
                case GST_STATE_CHANGE_FAILURE:
                    ErrorL << "STATE CHANGE FAILURE";
                    break;
                case GST_STATE_CHANGE_ASYNC:
                    InfoL << "STATE CHANGE ASYNC";
                    break;
                case GST_STATE_CHANGE_NO_PREROLL:
                    InfoL << "STATE CHANGE NO PREROLL";
                    break;
                default:
                    break;
            }
        }
        sourceId++;
    }
    NoticeCenter::Instance().emitEvent(NOTICE_RELEASE);
}
/**
 * @description: Restart the task
 * @param {*}
 * @return {*}
 * @author: lishengyin
 */
int32_t Inference::RestartTask()
{
    int num = 0;
    std::vector<DataSource>::iterator iter;
    for(iter = this->m_InferInfo->DataSources.begin(); iter != this->m_InferInfo->DataSources.end(); iter++){
        if(iter->Play && iter->source_bin != NULL) num++;
    }
    if(num == 0){
        DebugL << "No data source needs to be played" << endl;
        return ERR;
    }
    DebugL << "RestartTask" << endl;
    for (iter = this->m_InferInfo->DataSources.begin(); iter != this->m_InferInfo->DataSources.end(); iter++){
        if(iter->Play && iter->source_bin != NULL){
            gst_element_set_state(iter->source_bin, GST_STATE_PLAYING);
        }
    }
    gst_element_set_state(this->pipeline, GST_STATE_PAUSED);
    gst_element_set_state(this->pipeline, GST_STATE_PLAYING);
    return OK;
}
/**
 * @description: Release the instance
 * @param {*}
 * @return {*}
 * @author: lishengyin
 */
void Inference::Destory()
{
    infer = NULL;
}
/**
 * @description: Fetch the inference results
 * @param {GstPad *} pad src pad of the inference element
 * @param {GstPadProbeInfo *} info probe info carrying the batched results
 * @param {gpointer} u_data NULL
 * @return {*}
 * @author: lishengyin
 */
GstPadProbeReturn
Inference::tiler_src_pad_buffer_probe(GstPad * pad, GstPadProbeInfo * info, gpointer u_data)
{
    // Get the inference results from the pipeline
    GstBuffer *buf = (GstBuffer *) info->data;
    NvDsBatchMeta *batch_meta = gst_buffer_get_nvds_batch_meta (buf);
    // Initialize the metadata structures we will use
    NvDsObjectMeta *obj_meta = NULL;   // object-detection metadata
    NvDsMetaList *l_frame = NULL;
    NvDsMetaList *l_obj = NULL;
    NvDsDisplayMeta *display_meta = NULL;
    // Iterate over the frames in the batch
    for (l_frame = batch_meta->frame_meta_list; l_frame != NULL; l_frame = l_frame->next)
    {
        NvDsFrameMeta *frame_meta = (NvDsFrameMeta *) (l_frame->data);
        int num = 0;
        for (l_obj = frame_meta->obj_meta_list; l_obj != NULL; l_obj = l_obj->next)
        {
            obj_meta = (NvDsObjectMeta *) (l_obj->data);
            if (obj_meta->class_id == 0) {
                num++;
            }
        }
        // Draw the detection region
        std::string json = g_InferInfo->DataSources[frame_meta->source_id].range;
        InferRange inferRange;
        if(inferRange.jsonToObject(json)){
            if(inferRange.m_points.size() > 0){
                display_meta = nvds_acquire_display_meta_from_pool(batch_meta);
                for(int i = 0; i < (int)(inferRange.m_points.size()); i++)
                {
                    NvOSD_LineParams *line_params = &display_meta->line_params[i];
                    line_params->x1 = inferRange.m_points[i].x;
                    line_params->y1 = inferRange.m_points[i].y;
                    if((i + 1) < (int)(inferRange.m_points.size())){
                        line_params->x2 = inferRange.m_points[i+1].x;
                        line_params->y2 = inferRange.m_points[i+1].y;
                    }else{
                        // Close the polygon back to the first point
                        line_params->x2 = inferRange.m_points[0].x;
                        line_params->y2 = inferRange.m_points[0].y;
                    }
                    line_params->line_width = 2;
                    line_params->line_color.red = 0.0;
                    line_params->line_color.green = 1.0;
                    line_params->line_color.blue = 0.0;
                    line_params->line_color.alpha = 1.0;
                }
                display_meta->num_lines = inferRange.m_points.size();
                nvds_add_display_meta_to_frame(frame_meta, display_meta);
            }
        }
        // Check whether each object's center point lies inside the region
        if(inferRange.m_points.size() > 0){
            num = 0;
            // Recount, keeping only objects inside the polygon
            for (l_obj = frame_meta->obj_meta_list; l_obj != NULL; l_obj = l_obj->next)
            {
                obj_meta = (NvDsObjectMeta *) (l_obj->data);
                if (obj_meta->class_id == 0) {
                    NvDsComp_BboxInfo boxInfo;
                    boxInfo = obj_meta->detector_bbox_info;
                    NvBbox_Coords box_Coord;
                    box_Coord = boxInfo.org_bbox_coords;
                    Point p;
                    p.x = box_Coord.left + box_Coord.width / 2;
                    p.y = box_Coord.top + box_Coord.height / 2;
                    if(Inference::PtInPolygon(p, inferRange.m_points, inferRange.m_points.size())) num++;
                }
            }
        }
        // Broadcast the inference result
        NoticeCenter::Instance().emitEvent(NOTICE_INFER, frame_meta->source_id, num);
    }
    return GST_PAD_PROBE_OK;
}
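/* Consumer sketch (illustrative, assuming ZLToolKit's NoticeCenter, which this
 * file already uses for emitEvent): a NOTICE_INFER listener's argument types
 * must match the emitEvent() call above exactly (guint source_id, int num),
 * since NoticeCenter dispatches on the callback signature.
 *
 *   static char tag;
 *   NoticeCenter::Instance().addListener(&tag, NOTICE_INFER,
 *       [](guint source_id, int num){
 *           DebugL << "source " << source_id << ": " << num << " objects in region";
 *       });
 */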
/**
 * @description: Region filter (point-in-polygon test by ray casting)
 * @param {Point} p point to test
 * @param {vector<Point>&} ptPolygon polygon vertices
 * @param {int} nCount number of vertices
 * @return {*} 1 if the point is inside the polygon, 0 otherwise
 */
int Inference::PtInPolygon (Point p, vector<Point>& ptPolygon, int nCount)
{
    // Number of crossings
    int nCross = 0;
    for (int i = 0; i < nCount; i++)
    {
        Point p1 = ptPolygon[i];
        Point p2 = ptPolygon[(i + 1) % nCount]; // edge from P1 to P2
        if ( p1.y == p2.y )            // horizontal edge cannot cross a horizontal ray
            continue;
        if ( p.y < min(p1.y, p2.y) )   // p is below the edge
            continue;
        if ( p.y >= max(p1.y, p2.y) )  // p is above the edge
            continue;
        // x coordinate of the intersection (from the two-point line equation)
        double x = (double)(p.y - p1.y) * (double)(p2.x - p1.x) / (double)(p2.y - p1.y) + p1.x;
        // Only count intersections with the ray extending to the right of p
        if ( x > p.x )
        {
            nCross++;
        }
    }
    // Even number of crossings: the point is outside the polygon
    // Odd number of crossings: the point is inside the polygon
    if ((nCross % 2) == 1)
    {
        return 1;
    }
    else
    {
        return 0;
    }
}
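/* Usage sketch (illustrative only): test a point against a 10x10 square.
 * (5,5) lies inside, so PtInPolygon returns 1.
 *
 *   vector<Point> square(4);
 *   square[0].x = 0;  square[0].y = 0;
 *   square[1].x = 10; square[1].y = 0;
 *   square[2].x = 10; square[2].y = 10;
 *   square[3].x = 0;  square[3].y = 10;
 *   Point p; p.x = 5; p.y = 5;
 *   int inside = Inference::PtInPolygon(p, square, square.size());  // inside == 1
 */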
/**
 * @description: Watch the bus
 * @param {GstBus *} bus the DeepStream pipeline's bus
 * @param {GstMessage *} msg message delivered on the bus
 * @param {gpointer} data NULL
 * @return {*}
 * @author: lishengyin
 */
gboolean Inference::bus_call (GstBus * bus, GstMessage * msg, gpointer data)
{
    switch (GST_MESSAGE_TYPE (msg)) {
        case GST_MESSAGE_EOS:
            InfoL << "End of stream";
            break;
        case GST_MESSAGE_WARNING:
        {
            gchar *debug;
            GError *error;
            gst_message_parse_warning (msg, &error, &debug);
            WarnL << "WARNING from element " << GST_OBJECT_NAME (msg->src) << ": " << error->message;
            g_free (debug);
            g_error_free (error);
            break;
        }
        case GST_MESSAGE_ERROR:
        {
            gchar *debug;
            GError *error;
            gst_message_parse_error (msg, &error, &debug);
            ErrorL << "ERROR from element " << GST_OBJECT_NAME (msg->src) << ": " << error->message;
            if (debug)
                ErrorL << "Error details: " << debug;
            g_free (debug);
            g_error_free (error);
            break;
        }
#ifndef PLATFORM_TEGRA
        case GST_MESSAGE_ELEMENT:
        {
            if (gst_nvmessage_is_stream_eos (msg)) {
                guint stream_id;
                if (gst_nvmessage_parse_stream_eos (msg, &stream_id)) {
                    InfoL << "Got EOS from stream " << stream_id;
                }
            }
            break;
        }
#endif
        default:
            break;
    }
    return TRUE;
}
/**
 * @description: Listen for child-added on the decode bin
 * @param {GstChildProxy *} child_proxy proxy emitting the signal
 * @return {*}
 * @author: lishengyin
 */
void Inference::decodebin_child_added (GstChildProxy * child_proxy, GObject * object,
    gchar * name, gpointer user_data)
{
    WarnL << "Decodebin child added: " << name;
    if (g_strrstr (name, "decodebin") == name) {
        g_signal_connect (G_OBJECT (object), "child-added",
            G_CALLBACK (decodebin_child_added), user_data);
    }
    if (g_strrstr (name, "nvv4l2decoder") == name) {
#ifdef PLATFORM_TEGRA
        g_object_set (object, "enable-max-performance", TRUE, NULL);
        g_object_set (object, "bufapi-version", TRUE, NULL);
        g_object_set (object, "drop-frame-interval", 0, NULL);
        g_object_set (object, "num-extra-surfaces", 0, NULL);
#else
        g_object_set (object, "gpu-id", 0, NULL);
#endif
    }
}
/**
 * @description: Remove a data source
 * @param {gint} source_id ID of the data source
 * @return {*}
 * @author: lishengyin
 */
void Inference::stop_release_source (gint source_id)
{
    g_mutex_lock (&eos_lock);
    GstState state;
    gst_element_get_state(this->m_InferInfo->DataSources[source_id].source_bin, &state, NULL, GST_CLOCK_TIME_NONE);
    DebugL << "state:" << state << endl;
    DebugL << "ID:(" << source_id << "," << this->m_InferInfo->DataSources[source_id].uri << ")" << endl;
    if(state == GST_STATE_NULL){
        gst_bin_remove (GST_BIN (this->pipeline), this->m_InferInfo->DataSources[source_id].source_bin);
    }else{
        ErrorL << "This source_bin has not been set to GST_STATE_NULL" << endl;
    }
    g_mutex_unlock (&eos_lock);
}
/**
 * @description: Listen for pad-added on the decode bin
 * @param {GstElement *} decodebin
 * @param {GstPad *} pad
 * @param {gpointer} data
 * @return {*}
 * @author: lishengyin
 */
void Inference::cb_newpad (GstElement * decodebin, GstPad * pad, gpointer data)
{
    GstCaps *caps = gst_pad_query_caps (pad, NULL);
    const GstStructure *str = gst_caps_get_structure (caps, 0);
    const gchar *name = gst_structure_get_name (str);
    WarnL << "decodebin new pad " << name;
    if (!strncmp (name, "video", 5)) {
        gint source_id = (*(gint *) data);
        gchar pad_name[16] = { 0 };
        GstPad *sinkpad = NULL;
        g_snprintf (pad_name, 15, "sink_%u", source_id);
        // Reuse the muxer sink pad if it already exists, otherwise request a new one
        sinkpad = gst_element_get_static_pad (g_streammux, pad_name);
        if(sinkpad == NULL) sinkpad = gst_element_get_request_pad (g_streammux, pad_name);
        if (gst_pad_link (pad, sinkpad) != GST_PAD_LINK_OK) {
            ErrorL << "Failed to link decodebin to pipeline: " << pad_name;
        } else {
            InfoL << "Decodebin linked to pipeline";
        }
        gst_object_unref (sinkpad);
    }
    gst_caps_unref (caps);  // gst_pad_query_caps() returns a reference we own
}
/**
 * @description: Create a decode bin
 * @param {guint} index index of the data source
 * @param {gchar *} filename URI of the data source
 * @return {GstElement *} the decode bin
 * @author: lishengyin
 */
GstElement* Inference::create_uridecode_bin (guint index, gchar * filename)
{
    GstElement *bin = NULL;
    gchar bin_name[16] = { };
    DebugL << "creating uridecodebin for [" << filename << "]";
    g_source_id_list[index] = index;
    g_snprintf (bin_name, 15, "source-bin-%02d", index);
    bin = gst_element_factory_make ("uridecodebin", bin_name);
    g_object_set (G_OBJECT (bin), "uri", filename, NULL);
    g_signal_connect (G_OBJECT (bin), "pad-added",
        G_CALLBACK (cb_newpad), &g_source_id_list[index]);
    g_signal_connect (G_OBJECT (bin), "child-added",
        G_CALLBACK (decodebin_child_added), &g_source_id_list[index]);
    g_source_enabled[index] = TRUE;
    return bin;
}
/**
 * @description: Add a data source
 * @param {int} source_Id ID of the data source
 * @param {string} uri URI of the data source
 * @return {*} whether the source was added successfully
 * @author: lishengyin
 */
gboolean Inference::add_sources (int source_Id, std::string uri)
{
    g_mutex_lock (&eos_lock);
    GstElement *source_bin;
    InfoL << "Calling Start " << source_Id;
    source_bin = create_uridecode_bin (source_Id, (gchar *)uri.c_str());
    if (!source_bin) {
        ErrorL << "Failed to create source bin. Exiting.";
        g_mutex_unlock (&eos_lock);  // do not leave the lock held on the failure path
        return FALSE;
    }
    this->m_InferInfo->DataSources[source_Id].source_bin = source_bin;
    gst_bin_add (GST_BIN (this->pipeline), source_bin);
    g_mutex_unlock (&eos_lock);
    return TRUE;
}
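/* Dynamic-attach sketch (illustrative only): after add_sources() the new bin
 * still has to be brought up to the pipeline's state before data flows;
 * cb_newpad() then links it to the muxer once the decoder exposes a video pad.
 * The slot index and URI below are hypothetical.
 *
 *   if (infer->add_sources(3, "rtsp://camera-host/stream")) {
 *       // assuming the caller can reach the stored bin, e.g. via InferInfo
 *       gst_element_sync_state_with_parent(
 *           info->DataSources[3].source_bin);
 *   }
 */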
/**
 * @description: Change a data source's URI
 * @param {GstElement} *source_bin
 * @param {string} uri
 * @return {*}
 */
void Inference::ModifyUri(GstElement *source_bin, std::string uri)
{
    g_object_set(G_OBJECT(source_bin), "uri", uri.c_str(), NULL);
}
/**
 * @description: Release resources
 * @param {GstElement} *source_bin
 * @return {*}
 */
void Inference::FreeElement(int source_Id, GstElement *source_bin)
{
    gst_object_unref(GST_OBJECT(source_bin));
    //g_source_remove(source_Id);
}
/**
 * @description: Re-add a data source's bin to the pipeline
 * @param {*}
 * @return {*}
 */
void Inference::AddSources(int sourceId)
{
    if(this->m_InferInfo->DataSources[sourceId].source_bin != NULL)
        gst_bin_add (GST_BIN (this->pipeline), this->m_InferInfo->DataSources[sourceId].source_bin);
}

} // namespace MIVA