config_parser.h

// Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once

#include <iostream>
#include <vector>
#include <string>
#include <map>

#include "yaml-cpp/yaml.h"

#ifdef _WIN32
#define OS_PATH_SEP "\\"
#else
#define OS_PATH_SEP "/"
#endif

namespace PaddleDetection {

// Inference model configuration parser
class ConfigPaser {
 public:
  ConfigPaser() {}

  ~ConfigPaser() {}

  bool load_config(const std::string& model_dir,
                   const std::string& cfg = "infer_cfg.yml") {
    // Load as a YAML::Node
    YAML::Node config;
    config = YAML::LoadFile(model_dir + OS_PATH_SEP + cfg);

    // Get runtime mode : fluid, trt_fp16, trt_fp32
    if (config["mode"].IsDefined()) {
      mode_ = config["mode"].as<std::string>();
    } else {
      std::cerr << "Please set mode, "
                << "support value : fluid/trt_fp16/trt_fp32."
                << std::endl;
      return false;
    }

    // Get model arch : YOLO, SSD, RetinaNet, RCNN, Face
    if (config["arch"].IsDefined()) {
      arch_ = config["arch"].as<std::string>();
    } else {
      std::cerr << "Please set model arch, "
                << "support value : YOLO, SSD, RetinaNet, RCNN, Face."
                << std::endl;
      return false;
    }

    // Get min_subgraph_size for TensorRT
    if (config["min_subgraph_size"].IsDefined()) {
      min_subgraph_size_ = config["min_subgraph_size"].as<int>();
    } else {
      std::cerr << "Please set min_subgraph_size." << std::endl;
      return false;
    }

    // Get draw_threshold for visualization
    if (config["draw_threshold"].IsDefined()) {
      draw_threshold_ = config["draw_threshold"].as<float>();
    } else {
      std::cerr << "Please set draw_threshold." << std::endl;
      return false;
    }

    // Get with_background
    if (config["with_background"].IsDefined()) {
      with_background_ = config["with_background"].as<bool>();
    } else {
      std::cerr << "Please set with_background." << std::endl;
      return false;
    }

    // Get Preprocess for preprocessing
    if (config["Preprocess"].IsDefined()) {
      preprocess_info_ = config["Preprocess"];
    } else {
      std::cerr << "Please set Preprocess." << std::endl;
      return false;
    }

    // Get label_list for visualization
    if (config["label_list"].IsDefined()) {
      label_list_ = config["label_list"].as<std::vector<std::string>>();
    } else {
      std::cerr << "Please set label_list." << std::endl;
      return false;
    }

    return true;
  }

  std::string mode_;
  float draw_threshold_;
  std::string arch_;
  int min_subgraph_size_;
  bool with_background_;
  YAML::Node preprocess_info_;
  std::vector<std::string> label_list_;
};

}  // namespace PaddleDetection
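
// ---------------------------------------------------------------------------
// Usage sketch (illustrative only, not part of this header or the upstream
// repository): the snippet below shows how ConfigPaser might be driven,
// assuming `model_dir` points at an exported inference model directory whose
// infer_cfg.yml defines the keys read above (mode, arch, min_subgraph_size,
// draw_threshold, with_background, Preprocess, label_list). The directory
// path is a placeholder.
//
//   #include "config_parser.h"
//
//   PaddleDetection::ConfigPaser parser;
//   // "/path/to/exported_model" is a placeholder; the directory must contain
//   // the infer_cfg.yml produced when the model was exported.
//   if (!parser.load_config("/path/to/exported_model")) {
//     std::cerr << "Failed to parse infer_cfg.yml" << std::endl;
//     return -1;
//   }
//   // mode_ selects the runtime: "fluid" (plain), "trt_fp32" or "trt_fp16".
//   bool use_trt = parser.mode_ != "fluid";
//   float threshold = parser.draw_threshold_;
//   const YAML::Node& preprocess_ops = parser.preprocess_info_;
// ---------------------------------------------------------------------------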