// test_infer_handler.cpp
  1. /*************************************************************************
  2. * Copyright (C) [2021] by Cambricon, Inc. All rights reserved
  3. *
  4. * Licensed under the Apache License, Version 2.0 (the "License");
  5. * you may not use this file except in compliance with the License.
  6. * You may obtain a copy of the License at
  7. *
  8. * http://www.apache.org/licenses/LICENSE-2.0
  9. *
  10. * The above copyright notice and this permission notice shall be included in
  11. * all copies or substantial portions of the Software.
  12. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
  13. * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  14. * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
  15. * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
  16. * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
  17. * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
  18. * THE SOFTWARE.
  19. *************************************************************************/
  20. #include <gtest/gtest.h>
  21. #include <memory>
  22. #include <string>
  23. #include "easyinfer/mlu_memory_op.h"
  24. #include "cnstream_logging.hpp"
  25. #include "inferencer2.hpp"
  26. #include "video_postproc.hpp"
  27. #include "video_preproc.hpp"
  28. #include "test_base.hpp"
  29. #include "infer_handler.hpp"
  30. #include "infer_params.hpp"
  31. namespace cnstream {
  32. static std::string GetModelPath() {
  33. edk::MluContext ctx;
  34. edk::CoreVersion core_ver = ctx.GetCoreVersion();
  35. std::string model_path = "";
  36. switch (core_ver) {
  37. case edk::CoreVersion::MLU220:
  38. model_path = "../../data/models/yolov3_b4c4_argb_mlu220.cambricon";
  39. break;
  40. case edk::CoreVersion::MLU270:
  41. default:
  42. model_path = "../../data/models/yolov3_b4c4_argb_mlu270.cambricon";
  43. break;
  44. }
  45. return model_path;
  46. }
  47. static std::string GetModelPathMM() { return "../../data/models/resnet50_nhwc.model"; }
  48. // the data is related to model
  49. static cnstream::CNFrameInfoPtr CreatData(std::string device_id, bool is_eos = false, bool mlu_data = true) {
  50. auto data = cnstream::CNFrameInfo::Create(device_id, is_eos);
  51. cv::Mat image = cv::imread(GetExePath() + "../../data/images/0.jpg");
  52. int width = image.cols;
  53. int height = image.rows;
  54. size_t nbytes = width * height * sizeof(uint8_t) * 3;
  55. data->stream_id = "1";
  56. std::shared_ptr<CNDataFrame> frame(new (std::nothrow) CNDataFrame());
  57. if (mlu_data) {
  58. void *frame_data = image.data;
  59. void *planes[CN_MAX_PLANES] = {nullptr, nullptr};
  60. edk::MluMemoryOp mem_op;
  61. frame_data = mem_op.AllocMlu(nbytes);
  62. planes[0] = frame_data; // y plane
  63. planes[1] = reinterpret_cast<void *>(reinterpret_cast<int64_t>(frame_data) + width * height); // uv plane
  64. void *ptr_mlu[2] = {planes[0], planes[1]};
  65. frame->ctx.dev_type = DevContext::DevType::MLU;
  66. frame->ctx.ddr_channel = std::stoi(device_id);
  67. frame->ctx.dev_id = std::stoi(device_id);
  68. frame->fmt = CNDataFormat::CN_PIXEL_FORMAT_YUV420_NV12;
  69. frame->dst_device_id = std::stoi(device_id);
  70. frame->frame_id = 1;
  71. data->timestamp = 1000;
  72. frame->width = width;
  73. frame->height = height;
  74. frame->stride[0] = frame->stride[1] = width;
  75. frame->CopyToSyncMem(ptr_mlu, true);
  76. std::shared_ptr<CNInferObjs> objs(new (std::nothrow) CNInferObjs());
  77. data->collection.Add(kCNDataFrameTag, frame);
  78. data->collection.Add(kCNInferObjsTag, objs);
  79. return data;
  80. } else {
  81. frame->frame_id = 1;
  82. data->timestamp = 1000;
  83. frame->width = width;
  84. frame->height = height;
  85. void *ptr_cpu[2] = {image.data, image.data + nbytes * 2 / 3};
  86. frame->stride[0] = frame->stride[1] = width;
  87. frame->fmt = CNDataFormat::CN_PIXEL_FORMAT_YUV420_NV12;
  88. frame->ctx.dev_type = DevContext::DevType::CPU;
  89. frame->dst_device_id = std::stoi(device_id);
  90. frame->ctx.dev_id = std::stoi(device_id);
  91. frame->CopyToSyncMem(ptr_cpu, true);
  92. std::shared_ptr<CNInferObjs> objs(new (std::nothrow) CNInferObjs());
  93. data->collection.Add(kCNDataFrameTag, frame);
  94. data->collection.Add(kCNInferObjsTag, objs);
  95. return data;
  96. }
  97. return nullptr;
  98. }
  99. TEST(Inferencer2, InferHandlerOpen) {
  100. std::string exe_path = GetExePath();
  101. std::string infer_name = "detector";
  102. std::unique_ptr<Inferencer2> infer(new Inferencer2(infer_name));
  103. Inferencer2 *Infer2;
  104. Infer2 = infer.get();
  105. std::string preproc_name = "VideoPreprocCpu";
  106. std::string postproc_name = "VideoPostprocSsd";
  107. std::shared_ptr<VideoPreproc> pre_processor(VideoPreproc::Create(preproc_name));
  108. std::shared_ptr<VideoPostproc> post_processor(VideoPostproc::Create(postproc_name));
  109. bool use_magicmind = infer_server::Predictor::Backend() == "magicmind";
  110. Infer2Param param;
  111. if (use_magicmind) {
  112. param.model_path = exe_path + GetModelPathMM();
  113. param.model_input_pixel_format = InferVideoPixelFmt::RGB24;
  114. } else {
  115. param.model_path = exe_path + GetModelPath();
  116. param.func_name = "subnet0";
  117. param.model_input_pixel_format = InferVideoPixelFmt::ARGB;
  118. }
  119. param.device_id = 0;
  120. param.batch_strategy = InferBatchStrategy::STATIC;
  121. param.batching_timeout = 300;
  122. param.priority = 0;
  123. param.show_stats = false;
  124. param.engine_num = 2;
  125. param.object_infer = false;
  126. { // open sucess, preproc = VideoPreprocCpu
  127. param.preproc_name = "VideoPreprocCpu";
  128. std::shared_ptr<InferHandler> infer_handler =
  129. std::make_shared<InferHandlerImpl>(Infer2, param, post_processor, pre_processor, nullptr);
  130. EXPECT_TRUE(infer_handler->Open());
  131. }
  132. { // preproc_name = RCOP
  133. if (!use_magicmind) {
  134. param.preproc_name = "RCOP";
  135. std::shared_ptr<InferHandler> infer_handler =
  136. std::make_shared<InferHandlerImpl>(Infer2, param, post_processor, pre_processor, nullptr);
  137. EXPECT_TRUE(infer_handler->Open());
  138. } else {
  139. param.preproc_name = "CNCV";
  140. std::shared_ptr<InferHandler> infer_handler =
  141. std::make_shared<InferHandlerImpl>(Infer2, param, post_processor, pre_processor, nullptr);
  142. EXPECT_TRUE(infer_handler->Open());
  143. }
  144. }
  145. { // preproc_name = SCALER
  146. if (!use_magicmind) {
  147. edk::MluContext ctx;
  148. edk::CoreVersion core_ver = ctx.GetCoreVersion();
  149. if (core_ver == edk::CoreVersion::MLU220) {
  150. param.preproc_name = "SCALER";
  151. std::shared_ptr<InferHandler> infer_handler =
  152. std::make_shared<InferHandlerImpl>(Infer2, param, post_processor, pre_processor, nullptr);
  153. EXPECT_TRUE(infer_handler->Open());
  154. }
  155. }
  156. }
  157. }
  158. TEST(Inferencer2, InferHandlerProcess) {
  159. std::string exe_path = GetExePath();
  160. std::string infer_name = "detector";
  161. std::unique_ptr<Inferencer2> infer(new Inferencer2(infer_name));
  162. Inferencer2 *Infer2;
  163. Infer2 = infer.get();
  164. std::string preproc_name = "VideoPreprocCpu";
  165. std::string postproc_name = "VideoPostprocSsd";
  166. std::string obj_filter_name = "VehicleFilter";
  167. std::shared_ptr<VideoPreproc> pre_processor(VideoPreproc::Create(preproc_name));
  168. std::shared_ptr<VideoPostproc> post_processor(VideoPostproc::Create(postproc_name));
  169. std::shared_ptr<ObjFilter> obj_filter(ObjFilter::Create(obj_filter_name));
  170. bool use_magicmind = infer_server::Predictor::Backend() == "magicmind";
  171. Infer2Param param;
  172. if (use_magicmind) {
  173. param.model_path = exe_path + GetModelPathMM();
  174. param.model_input_pixel_format = InferVideoPixelFmt::RGB24;
  175. } else {
  176. param.model_path = exe_path + GetModelPath();
  177. param.func_name = "subnet0";
  178. param.model_input_pixel_format = InferVideoPixelFmt::ARGB;
  179. }
  180. param.device_id = 0;
  181. param.batch_strategy = InferBatchStrategy::STATIC;
  182. param.batching_timeout = 300;
  183. param.priority = 0;
  184. param.show_stats = false;
  185. param.engine_num = 2;
  186. param.object_infer = false;
  187. { // data is eos
  188. param.preproc_name = "VideoPreprocCpu";
  189. std::shared_ptr<InferHandler> infer_handler =
  190. std::make_shared<InferHandlerImpl>(Infer2, param, post_processor, pre_processor, nullptr);
  191. ASSERT_TRUE(infer_handler->Open());
  192. bool is_eos = true;
  193. auto data = CreatData(std::to_string(param.device_id), is_eos);
  194. EXPECT_EQ(infer_handler->Process(data, param.object_infer), -1);
  195. }
  196. { // preproc name = rcop
  197. if (!use_magicmind) {
  198. param.preproc_name = "RCOP";
  199. std::shared_ptr<InferHandler> infer_handler =
  200. std::make_shared<InferHandlerImpl>(Infer2, param, post_processor, pre_processor, nullptr);
  201. ASSERT_TRUE(infer_handler->Open());
  202. bool is_eos = false;
  203. auto data = CreatData(std::to_string(param.device_id), is_eos);
  204. EXPECT_EQ(infer_handler->Process(data, param.object_infer), 0);
  205. infer_handler->WaitTaskDone(data->stream_id);
  206. } else {
  207. param.preproc_name = "CNCV";
  208. std::shared_ptr<InferHandler> infer_handler =
  209. std::make_shared<InferHandlerImpl>(Infer2, param, post_processor, pre_processor, nullptr);
  210. ASSERT_TRUE(infer_handler->Open());
  211. bool is_eos = false;
  212. auto data = CreatData(std::to_string(param.device_id), is_eos);
  213. EXPECT_EQ(infer_handler->Process(data, param.object_infer), 0);
  214. infer_handler->WaitTaskDone(data->stream_id);
  215. }
  216. }
  217. { // preproc name = SCALER
  218. if (!use_magicmind) {
  219. edk::MluContext ctx;
  220. edk::CoreVersion core_ver = ctx.GetCoreVersion();
  221. if (core_ver == edk::CoreVersion::MLU220) {
  222. param.preproc_name = "SCALER";
  223. std::shared_ptr<InferHandler> infer_handler =
  224. std::make_shared<InferHandlerImpl>(Infer2, param, post_processor, pre_processor, nullptr);
  225. ASSERT_TRUE(infer_handler->Open());
  226. bool is_eos = false;
  227. auto data = CreatData(std::to_string(param.device_id), is_eos);
  228. EXPECT_EQ(infer_handler->Process(data, param.object_infer), 0);
  229. infer_handler->WaitTaskDone(data->stream_id);
  230. }
  231. }
  232. }
  233. { // preproc name = usertype
  234. param.preproc_name = "VideoPreprocCpu";
  235. std::shared_ptr<InferHandler> infer_handler =
  236. std::make_shared<InferHandlerImpl>(Infer2, param, post_processor, pre_processor, nullptr);
  237. ASSERT_TRUE(infer_handler->Open());
  238. bool is_eos = false;
  239. auto data = CreatData(std::to_string(param.device_id), is_eos);
  240. EXPECT_EQ(infer_handler->Process(data, param.object_infer), 0);
  241. infer_handler->WaitTaskDone(data->stream_id);
  242. }
  243. { // object_infer = true, for secondary
  244. if (use_magicmind) {
  245. param.preproc_name = "CNCV";
  246. } else {
  247. param.preproc_name = "RCOP";
  248. }
  249. param.object_infer = true;
  250. bool is_eos = false;
  251. auto data = CreatData(std::to_string(param.device_id), is_eos);
  252. // make objs
  253. cnstream::CNObjsVec objs;
  254. std::shared_ptr<cnstream::CNInferObject> object = std::make_shared<cnstream::CNInferObject>();
  255. object->id = std::to_string(2);
  256. object->bbox.x = 0.2;
  257. object->bbox.y = 0.2;
  258. object->bbox.w = 0.3;
  259. object->bbox.h = 0.3;
  260. object->score = 0.8;
  261. objs.push_back(object);
  262. CNInferObjsPtr objs_holder = data->collection.Get<CNInferObjsPtr>(kCNInferObjsTag);
  263. objs_holder->objs_.insert(objs_holder->objs_.end(), objs.begin(), objs.end());
  264. std::shared_ptr<InferHandler> infer_handler =
  265. std::make_shared<InferHandlerImpl>(Infer2, param, post_processor, pre_processor, obj_filter);
  266. ASSERT_TRUE(infer_handler->Open());
  267. EXPECT_EQ(infer_handler->Process(data, param.object_infer), 0);
  268. infer_handler->WaitTaskDone(data->stream_id);
  269. }
  270. }
  271. } // namespace cnstream