// predictor.h
// Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma once

#include <ctime>
#include <memory>
#include <stdexcept>
#include <string>
#include <utility>
#include <vector>

#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>

#include "paddle_inference_api.h"  // NOLINT

#include "include/config_parser.h"
#include "include/jde_predictor.h"
#include "include/preprocess_op.h"
#include "include/sde_predictor.h"

using namespace paddle_infer;  // NOLINT
  29. namespace PaddleDetection {
  30. class Predictor {
  31. public:
  32. explicit Predictor(const std::string& device = "CPU",
  33. const std::string& track_model_dir = "",
  34. const std::string& det_model_dir = "",
  35. const std::string& reid_model_dir = "",
  36. const double threshold = -1.,
  37. const std::string& run_mode = "paddle",
  38. const int gpu_id = 0,
  39. const bool use_mkldnn = false,
  40. const int cpu_threads = 1,
  41. bool trt_calib_mode = false,
  42. const int min_box_area = 200) {
  43. if (track_model_dir.empty() && det_model_dir.empty()) {
  44. throw "Predictor must receive track_model or det_model!";
  45. }
  46. if (!track_model_dir.empty() && !det_model_dir.empty()) {
  47. throw "Predictor only receive one of track_model or det_model!";
  48. }
  49. if (!track_model_dir.empty()) {
  50. jde_sct_ =
  51. std::make_shared<PaddleDetection::JDEPredictor>(device,
  52. track_model_dir,
  53. threshold,
  54. run_mode,
  55. gpu_id,
  56. use_mkldnn,
  57. cpu_threads,
  58. trt_calib_mode,
  59. min_box_area);
  60. use_jde_ = true;
  61. }
  62. if (!det_model_dir.empty()) {
  63. sde_sct_ = std::make_shared<PaddleDetection::SDEPredictor>(device,
  64. det_model_dir,
  65. reid_model_dir,
  66. threshold,
  67. run_mode,
  68. gpu_id,
  69. use_mkldnn,
  70. cpu_threads,
  71. trt_calib_mode,
  72. min_box_area);
  73. use_jde_ = false;
  74. }
  75. }
  76. // Run predictor
  77. void Predict(const std::vector<cv::Mat> imgs,
  78. const double threshold = 0.5,
  79. MOTResult* result = nullptr,
  80. std::vector<double>* times = nullptr);
  81. private:
  82. std::shared_ptr<PaddleDetection::JDEPredictor> jde_sct_;
  83. std::shared_ptr<PaddleDetection::SDEPredictor> sde_sct_;
  84. bool use_jde_ = true;
  85. };
  86. } // namespace PaddleDetection