// test_inference2.cpp
/*************************************************************************
 * Copyright (C) [2021] by Cambricon, Inc. All rights reserved
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 *************************************************************************/
#include <gtest/gtest.h>

#include <cstdint>
#include <memory>
#include <string>

#include "opencv2/highgui/highgui.hpp"
#include "opencv2/imgproc/imgproc.hpp"
#if (CV_MAJOR_VERSION >= 3)
#include "opencv2/imgcodecs/imgcodecs.hpp"
#endif

#include "easyinfer/mlu_memory_op.h"

#include "cnstream_logging.hpp"
#include "inferencer2.hpp"
#include "postproc.hpp"
#include "test_base.hpp"
#include "video_preproc.hpp"
  34. namespace cnstream {
  35. class FakeVideoPostproc : public cnstream::VideoPostproc {
  36. public:
  37. bool Execute(infer_server::InferData* result, const infer_server::ModelIO& output,
  38. const infer_server::ModelInfo& model) override {
  39. return true;
  40. }
  41. private:
  42. DECLARE_REFLEX_OBJECT_EX(FakeVideoPostproc, cnstream::VideoPostproc);
  43. }; // class FakeVideoPostproc
  44. IMPLEMENT_REFLEX_OBJECT_EX(FakeVideoPostproc, cnstream::VideoPostproc);
  45. class FakeVideoPreproc : public cnstream::VideoPreproc {
  46. public:
  47. bool Execute(infer_server::ModelIO* model_input, const infer_server::InferData& input_data,
  48. const infer_server::ModelInfo& model_info) {
  49. return true;
  50. }
  51. private:
  52. DECLARE_REFLEX_OBJECT_EX(FakeVideoPreproc, cnstream::VideoPreproc);
  53. }; // class FakeVideoPreproc
  54. IMPLEMENT_REFLEX_OBJECT_EX(FakeVideoPreproc, cnstream::VideoPreproc);
  55. static std::string GetModelPath() {
  56. edk::MluContext ctx;
  57. edk::CoreVersion core_ver = ctx.GetCoreVersion();
  58. std::string model_path = "";
  59. switch (core_ver) {
  60. case edk::CoreVersion::MLU220:
  61. model_path = "../../data/models/yolov3_b4c4_argb_mlu220.cambricon";
  62. break;
  63. case edk::CoreVersion::MLU270:
  64. default:
  65. model_path = "../../data/models/yolov3_b4c4_argb_mlu270.cambricon";
  66. break;
  67. }
  68. return model_path;
  69. }
  70. static std::string GetModelPathMM() { return "../../data/models/yolov3_nhwc.model"; }
  71. // the data is related to model
  72. static cnstream::CNFrameInfoPtr CreatData(std::string device_id, bool is_eos = false, bool mlu_data = true) {
  73. auto data = cnstream::CNFrameInfo::Create(device_id, is_eos);
  74. cv::Mat image = cv::imread(GetExePath() + "../../data/images/0.jpg");
  75. int width = image.cols;
  76. int height = image.rows;
  77. size_t nbytes = width * height * sizeof(uint8_t) * 3;
  78. data->stream_id = "1";
  79. std::shared_ptr<CNDataFrame> frame(new (std::nothrow) CNDataFrame());
  80. if (mlu_data) {
  81. void* frame_data = image.data;
  82. void* planes[CN_MAX_PLANES] = {nullptr, nullptr};
  83. edk::MluMemoryOp mem_op;
  84. frame_data = mem_op.AllocMlu(nbytes);
  85. planes[0] = frame_data; // y plane
  86. planes[1] = reinterpret_cast<void*>(reinterpret_cast<int64_t>(frame_data) + width * height); // uv plane
  87. void* ptr_mlu[2] = {planes[0], planes[1]};
  88. frame->ctx.dev_type = DevContext::DevType::MLU;
  89. frame->ctx.ddr_channel = std::stoi(device_id);
  90. frame->ctx.dev_id = std::stoi(device_id);
  91. frame->fmt = CNDataFormat::CN_PIXEL_FORMAT_YUV420_NV12;
  92. frame->dst_device_id = std::stoi(device_id);
  93. frame->frame_id = 1;
  94. data->timestamp = 1000;
  95. frame->width = width;
  96. frame->height = height;
  97. frame->stride[0] = frame->stride[1] = width;
  98. frame->CopyToSyncMem(ptr_mlu, true);
  99. std::shared_ptr<CNInferObjs> objs(new (std::nothrow) CNInferObjs());
  100. data->collection.Add(kCNDataFrameTag, frame);
  101. data->collection.Add(kCNInferObjsTag, objs);
  102. return data;
  103. } else {
  104. frame->frame_id = 1;
  105. data->timestamp = 1000;
  106. frame->width = width;
  107. frame->height = height;
  108. void* ptr_cpu[2] = {image.data, image.data + nbytes * 2 / 3};
  109. frame->stride[0] = frame->stride[1] = width;
  110. frame->fmt = CNDataFormat::CN_PIXEL_FORMAT_YUV420_NV12;
  111. frame->ctx.dev_type = DevContext::DevType::CPU;
  112. frame->dst_device_id = std::stoi(device_id);
  113. frame->ctx.dev_id = std::stoi(device_id);
  114. frame->CopyToSyncMem(ptr_cpu, true);
  115. std::shared_ptr<CNInferObjs> objs(new (std::nothrow) CNInferObjs());
  116. data->collection.Add(kCNDataFrameTag, frame);
  117. data->collection.Add(kCNInferObjsTag, objs);
  118. return data;
  119. }
  120. return nullptr;
  121. }
  122. TEST(Inferencer2, Open) {
  123. bool use_magicmind = infer_server::Predictor::Backend() == "magicmind";
  124. std::string exe_path = GetExePath();
  125. std::string infer_name = "detector";
  126. { // open success but param is lack
  127. std::unique_ptr<Inferencer2> infer(new Inferencer2(infer_name));
  128. ModuleParamSet param;
  129. if (use_magicmind) {
  130. param["model_path"] = exe_path + GetModelPathMM();
  131. param["model_input_pixel_format"] = "RGB24";
  132. } else {
  133. param["model_path"] = exe_path + GetModelPath();
  134. param["model_input_pixel_format"] = "ARGB32";
  135. }
  136. param["preproc_name"] = "VideoPreprocCpu";
  137. param["postproc_name"] = "VideoPostprocSsd";
  138. EXPECT_TRUE(infer->Open(param));
  139. }
  140. { // param is empty
  141. std::unique_ptr<Inferencer2> infer(new Inferencer2(infer_name));
  142. ModuleParamSet param;
  143. EXPECT_FALSE(infer->Open(param));
  144. }
  145. { // param is no registered.
  146. std::unique_ptr<Inferencer2> infer(new Inferencer2(infer_name));
  147. ModuleParamSet param;
  148. if (use_magicmind) {
  149. param["model_path"] = exe_path + GetModelPathMM();
  150. } else {
  151. param["model_path"] = exe_path + GetModelPath();
  152. }
  153. param["preproc_name"] = "VideoPreprocCpu";
  154. param["postproc_name"] = "VideoPostprocSsd";
  155. param["no_such_key"] = "key";
  156. EXPECT_FALSE(infer->Open(param));
  157. }
  158. { // preproc_name is error
  159. std::unique_ptr<Inferencer2> infer(new Inferencer2(infer_name));
  160. ModuleParamSet param;
  161. if (use_magicmind) {
  162. param["model_path"] = exe_path + GetModelPathMM();
  163. } else {
  164. param["model_path"] = exe_path + GetModelPath();
  165. }
  166. param["preproc_name"] = "no_such_preproc_class";
  167. param["postproc_name"] = "VideoPostprocSsd";
  168. EXPECT_FALSE(infer->Open(param));
  169. }
  170. { // postproc_name is error
  171. std::unique_ptr<Inferencer2> infer(new Inferencer2(infer_name));
  172. ModuleParamSet param;
  173. if (use_magicmind) {
  174. param["model_path"] = exe_path + GetModelPathMM();
  175. param["preproc_name"] = "CNCV";
  176. } else {
  177. param["model_path"] = exe_path + GetModelPath();
  178. param["preproc_name"] = "RCOP";
  179. }
  180. param["postproc_name"] = "no_such_postproc_name";
  181. EXPECT_FALSE(infer->Open(param));
  182. }
  183. { // postproc_name is empty
  184. std::unique_ptr<Inferencer2> infer(new Inferencer2(infer_name));
  185. ModuleParamSet param;
  186. if (use_magicmind) {
  187. param["model_path"] = exe_path + GetModelPathMM();
  188. param["preproc_name"] = "CNCV";
  189. } else {
  190. param["model_path"] = exe_path + GetModelPath();
  191. param["preproc_name"] = "RCOP";
  192. }
  193. param["postproc_name"] = "";
  194. EXPECT_FALSE(infer->Open(param));
  195. }
  196. { // model_path is error
  197. std::unique_ptr<Inferencer2> infer(new Inferencer2(infer_name));
  198. ModuleParamSet param;
  199. if (use_magicmind) {
  200. param["model_path"] = "/home/no.model";
  201. param["preproc_name"] = "CNCV";
  202. } else {
  203. param["model_path"] = "/home/error_path";
  204. param["preproc_name"] = "RCOP";
  205. }
  206. param["postproc_name"] = "VideoPostprocSsd";
  207. EXPECT_FALSE(infer->Open(param)); // check model path in inference, infer_handler no check
  208. }
  209. }
  210. TEST(Inferencer2, Process) {
  211. bool use_magicmind = infer_server::Predictor::Backend() == "magicmind";
  212. std::string exe_path = GetExePath();
  213. std::string infer_name = "detector";
  214. std::unique_ptr<Inferencer2> infer(new Inferencer2(infer_name));
  215. ModuleParamSet param;
  216. if (use_magicmind) {
  217. param["model_path"] = exe_path + GetModelPathMM();
  218. param["model_input_pixel_format"] = "RGB24";
  219. } else {
  220. param["model_path"] = exe_path + GetModelPath();
  221. param["model_input_pixel_format"] = "ARGB32";
  222. }
  223. param["preproc_name"] = "FakeVideoPreproc";
  224. param["postproc_name"] = "FakeVideoPostproc";
  225. param["device_id"] = "0";
  226. { // CNFrameInfo empty
  227. ASSERT_TRUE(infer->Open(param));
  228. std::shared_ptr<CNFrameInfo> data = nullptr;
  229. EXPECT_EQ(infer->Process(data), -1);
  230. }
  231. { // CNFrameInfo is eos
  232. ASSERT_TRUE(infer->Open(param));
  233. std::string device_id = param["device_id"];
  234. bool is_eos = true;
  235. auto data = CreatData(device_id, is_eos);
  236. EXPECT_EQ(infer->Process(data), 0);
  237. }
  238. { // CNFrameInfo no eos and data in mlu
  239. ASSERT_TRUE(infer->Open(param));
  240. std::string device_id = param["device_id"];
  241. bool is_eos = false;
  242. bool mlu_data = true;
  243. auto data = CreatData(device_id, is_eos, mlu_data);
  244. EXPECT_EQ(infer->Process(data), 0);
  245. }
  246. { // CNFrameInfo no eos and data in cpu
  247. ASSERT_TRUE(infer->Open(param));
  248. std::string device_id = param["device_id"];
  249. bool is_eos = false;
  250. bool mlu_data = false;
  251. auto data = CreatData(device_id, is_eos, mlu_data);
  252. EXPECT_EQ(infer->Process(data), 0);
  253. }
  254. EXPECT_NO_THROW(infer->Close());
  255. }
  256. } // namespace cnstream