// inferencer2.hpp
/*************************************************************************
 * Copyright (C) [2021] by Cambricon, Inc. All rights reserved
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 *************************************************************************/
  20. #ifndef MODULES_INFER_HPP_
  21. #define MODULES_INFER_HPP_
  22. /**
  23. * This file contains a declaration of class Inferencer2
  24. */
  25. #include <memory>
  26. #include <string>
  27. #include <vector>
  28. #include "cnstream_frame.hpp"
  29. #include "cnstream_module.hpp"
  30. #include "device/mlu_context.h"
  31. #include "infer_base.hpp"
  32. namespace cnstream {
  33. class Infer2ParamManager;
  34. /**
  35. * @brief for inference based on infer_server.
  36. */
  37. class Inferencer2 : public Module, public ModuleCreator<Inferencer2> {
  38. public:
  39. /**
  40. * @brief Generate Inferencer2
  41. *
  42. * @param Name : Module name
  43. *
  44. * @return None
  45. */
  46. explicit Inferencer2(const std::string& name);
  47. /**
  48. * @brief Called by pipeline when pipeline start.
  49. *
  50. * @param paramSet: parameters for this module.
  51. *
  52. * @return whether module open succeed.
  53. */
  54. bool Open(ModuleParamSet paramSet) override;
  55. /**
  56. * @brief Called by pipeline when pipeline end.
  57. *
  58. * @return void.
  59. */
  60. void Close() override;
  61. /**
  62. * @brief Process each data frame.
  63. *
  64. * @param data : Pointer to the frame info.
  65. *
  66. * @return whether post data to communicate processor succeed.
  67. *
  68. */
  69. int Process(std::shared_ptr<CNFrameInfo> data) override;
  70. /**
  71. * @brief Check ParamSet for this module.
  72. *
  73. * @param paramSet Parameters for this module.
  74. *
  75. * @return Return true if this API run successfully. Otherwise, return false.
  76. */
  77. bool CheckParamSet(const ModuleParamSet& paramSet) const override;
  78. virtual ~Inferencer2();
  79. private:
  80. std::shared_ptr<InferHandler> infer_handler_ = nullptr; ///< inference2 handler
  81. Infer2Param infer_params_;
  82. std::shared_ptr<Infer2ParamManager> param_manager_ = nullptr;
  83. }; // class Inferencer2
  84. } // namespace cnstream
  85. #endif