test_model.cpp

/*************************************************************************
 * Copyright (C) [2020] by Cambricon, Inc. All rights reserved
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 *************************************************************************/
#include <gtest/gtest.h>
#include <stdlib.h>

#include <chrono>
#include <fstream>
#include <iostream>
#include <memory>
#include <sstream>
#include <string>
#include <thread>

#include "cnis/infer_server.h"
#include "cnrt.h"
#include "fixture.h"
#include "model/model.h"

namespace infer_server {
namespace {
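
// Test models are fetched over HTTP: MagicMind models (MLU370) when
// CNIS_USE_MAGICMIND is defined, Cambricon offline models (MLU270) otherwise.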
#ifdef CNIS_USE_MAGICMIND
constexpr const char* g_model_path1 =
    "http://video.cambricon.com/models/MLU370/resnet50_nhwc_tfu_0.5_int8_fp16.model";
constexpr const char* g_model_path2 =
    "http://video.cambricon.com/models/MLU370/yolov3_nhwc_tfu_0.5_int8_fp16.model";
#else
constexpr const char* g_model_path1 =
    "http://video.cambricon.com/models/MLU270/Primary_Detector/ssd/resnet34_ssd.cambricon";
constexpr const char* g_model_path2 =
    "http://video.cambricon.com/models/MLU270/Classification/resnet50/resnet50_offline.cambricon";
#endif
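
// ModelManager caching behavior under test: each distinct model occupies one
// cache entry, repeated loads of the same model reuse that entry, failed loads
// leave the cache untouched, and unloading removes the entry (a second unload
// of the same model is expected to fail).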
TEST_F(InferServerTestAPI, ModelManager) {
#ifdef CNIS_USE_MAGICMIND
  char env[] = "CNIS_MODEL_CACHE_LIMIT=2";
  putenv(env);
  InferServer::ClearModelCache();
  auto m = server_->LoadModel(g_model_path1);
  ASSERT_TRUE(m);
  EXPECT_EQ(ModelManager::Instance()->CacheSize(), 1);
  auto n = server_->LoadModel(g_model_path1);
  ASSERT_TRUE(n);
  EXPECT_EQ(ModelManager::Instance()->CacheSize(), 1);
  auto l = server_->LoadModel(g_model_path2);
  ASSERT_TRUE(l);
  EXPECT_EQ(ModelManager::Instance()->CacheSize(), 2);
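  // With CNIS_MODEL_CACHE_LIMIT=2 set above, a further load must not grow the
  // cache past two entries.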
  server_->LoadModel("./resnet50_nhwc_tfu_0.5_int8_fp16.graph", "./resnet50_nhwc_tfu_0.5_int8_fp16.data");
  EXPECT_EQ(ModelManager::Instance()->CacheSize(), 2);
  /************************************************************************************/
  EXPECT_EQ(ModelManager::Instance()->CacheSize(), 2);
  ASSERT_TRUE(server_->UnloadModel(m));
  EXPECT_EQ(ModelManager::Instance()->CacheSize(), 1);
  ASSERT_FALSE(server_->UnloadModel(n));
  EXPECT_EQ(ModelManager::Instance()->CacheSize(), 1);
  ASSERT_TRUE(server_->UnloadModel(l));
  EXPECT_EQ(ModelManager::Instance()->CacheSize(), 0);
#else
  char env[] = "CNIS_MODEL_CACHE_LIMIT=3";
  putenv(env);
  InferServer::ClearModelCache();
  std::string model_file = g_model_path1;
  auto m = server_->LoadModel(model_file);
  ASSERT_TRUE(m);
  EXPECT_EQ(ModelManager::Instance()->CacheSize(), 1);
  auto n = server_->LoadModel(model_file);
  ASSERT_TRUE(n);
  EXPECT_EQ(ModelManager::Instance()->CacheSize(), 1);
  auto l = server_->LoadModel(g_model_path2);
  ASSERT_TRUE(l);
  EXPECT_EQ(ModelManager::Instance()->CacheSize(), 2);
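  // Loading the model again from a local file path should leave the cache
  // size unchanged.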
  server_->LoadModel("./resnet34_ssd.cambricon");
  EXPECT_EQ(ModelManager::Instance()->CacheSize(), 2);
  /************************************************************************************/
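  // Read the model file into a raw memory buffer and load it from memory;
  // this is expected to add a third cache entry.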
  std::ifstream infile("./resnet34_ssd.cambricon", std::ios::binary);
  ASSERT_TRUE(infile.is_open()) << "file open failed";
  std::filebuf* pbuf = infile.rdbuf();
  uint32_t filesize = static_cast<uint32_t>(pbuf->pubseekoff(0, std::ios::end, std::ios::in));
  pbuf->pubseekpos(0, std::ios::in);
  char* modelptr = new char[filesize];
  pbuf->sgetn(modelptr, filesize);
  infile.close();
  server_->LoadModel(modelptr);
  EXPECT_EQ(ModelManager::Instance()->CacheSize(), 3);
  ASSERT_TRUE(server_->UnloadModel(m));
  EXPECT_EQ(ModelManager::Instance()->CacheSize(), 2);
  ASSERT_FALSE(server_->UnloadModel(n));
  EXPECT_EQ(ModelManager::Instance()->CacheSize(), 2);
  ASSERT_TRUE(server_->UnloadModel(l));
  EXPECT_EQ(ModelManager::Instance()->CacheSize(), 1);
  delete[] modelptr;
#endif
}

}  // namespace
}  // namespace infer_server