autotuned_index.h

/***********************************************************************
 * Software License Agreement (BSD License)
 *
 * Copyright 2008-2009  Marius Muja (mariusm@cs.ubc.ca). All rights reserved.
 * Copyright 2008-2009  David G. Lowe (lowe@cs.ubc.ca). All rights reserved.
 *
 * THE BSD LICENSE
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *************************************************************************/
#ifndef OPENCV_FLANN_AUTOTUNED_INDEX_H_
#define OPENCV_FLANN_AUTOTUNED_INDEX_H_

#include <sstream>

#include "general.h"
#include "nn_index.h"
#include "ground_truth.h"
#include "index_testing.h"
#include "sampling.h"
#include "kdtree_index.h"
#include "kdtree_single_index.h"
#include "kmeans_index.h"
#include "composite_index.h"
#include "linear_index.h"
#include "logger.h"

namespace cvflann
{
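
// Forward declaration (defined elsewhere in the library): instantiates the
// concrete index type named by params["algorithm"].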
template<typename Distance>
NNIndex<Distance>* create_index_by_type(const Matrix<typename Distance::ElementType>& dataset, const IndexParams& params, const Distance& distance);

struct AutotunedIndexParams : public IndexParams
{
    AutotunedIndexParams(float target_precision = 0.8, float build_weight = 0.01, float memory_weight = 0, float sample_fraction = 0.1)
    {
        (*this)["algorithm"] = FLANN_INDEX_AUTOTUNED;
        // precision desired (used for autotuning, -1 otherwise)
        (*this)["target_precision"] = target_precision;
        // build tree time weighting factor
        (*this)["build_weight"] = build_weight;
        // index memory weighting factor
        (*this)["memory_weight"] = memory_weight;
        // what fraction of the dataset to use for autotuning
        (*this)["sample_fraction"] = sample_fraction;
    }
};
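
// Hypothetical usage sketch, assuming a Matrix<float> named `data` and the
// standard cvflann L2 (squared Euclidean) distance functor:
//
//     AutotunedIndex<L2<float> > index(data, AutotunedIndexParams(0.9f));
//     index.buildIndex();  // runs the autotuning, then builds the chosen index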

template <typename Distance>
class AutotunedIndex : public NNIndex<Distance>
{
public:
    typedef typename Distance::ElementType ElementType;
    typedef typename Distance::ResultType DistanceType;

    AutotunedIndex(const Matrix<ElementType>& inputData, const IndexParams& params = AutotunedIndexParams(), Distance d = Distance()) :
        dataset_(inputData), distance_(d)
    {
        target_precision_ = get_param(params, "target_precision", 0.8f);
        build_weight_ = get_param(params, "build_weight", 0.01f);
        memory_weight_ = get_param(params, "memory_weight", 0.0f);
        sample_fraction_ = get_param(params, "sample_fraction", 0.1f);
        bestIndex_ = NULL;
        speedup_ = 0;
    }
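
    // Declared but not defined: copying an AutotunedIndex is not supported.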
    AutotunedIndex(const AutotunedIndex&);
    AutotunedIndex& operator=(const AutotunedIndex&);

    virtual ~AutotunedIndex()
    {
        if (bestIndex_ != NULL) {
            delete bestIndex_;
            bestIndex_ = NULL;
        }
    }

    /**
     * Builds the index.
     */
    virtual void buildIndex() CV_OVERRIDE
    {
        std::ostringstream stream;
        bestParams_ = estimateBuildParams();
        print_params(bestParams_, stream);
        Logger::info("----------------------------------------------------\n");
        Logger::info("Autotuned parameters:\n");
        Logger::info("%s", stream.str().c_str());
        Logger::info("----------------------------------------------------\n");

        bestIndex_ = create_index_by_type(dataset_, bestParams_, distance_);
        bestIndex_->buildIndex();
        speedup_ = estimateSearchParams(bestSearchParams_);
        stream.str(std::string());
        print_params(bestSearchParams_, stream);
        Logger::info("----------------------------------------------------\n");
        Logger::info("Search parameters:\n");
        Logger::info("%s", stream.str().c_str());
        Logger::info("----------------------------------------------------\n");
    }

    /**
     * Saves the index to a stream.
     */
    virtual void saveIndex(FILE* stream) CV_OVERRIDE
    {
        save_value(stream, (int)bestIndex_->getType());
        bestIndex_->saveIndex(stream);
        save_value(stream, get_param<int>(bestSearchParams_, "checks"));
    }

    /**
     * Loads the index from a stream.
     */
    virtual void loadIndex(FILE* stream) CV_OVERRIDE
    {
        int index_type;
        load_value(stream, index_type);
        IndexParams params;
        params["algorithm"] = (flann_algorithm_t)index_type;
        bestIndex_ = create_index_by_type<Distance>(dataset_, params, distance_);
        bestIndex_->loadIndex(stream);
        int checks;
        load_value(stream, checks);
        bestSearchParams_["checks"] = checks;
    }

    /**
     * Searches for the nearest neighbors of a vector, delegating to the
     * tuned index and to the tuned search parameters unless the caller
     * supplies an explicit number of checks.
     */
    virtual void findNeighbors(ResultSet<DistanceType>& result, const ElementType* vec, const SearchParams& searchParams) CV_OVERRIDE
    {
        int checks = get_param<int>(searchParams, "checks", FLANN_CHECKS_AUTOTUNED);
        if (checks == FLANN_CHECKS_AUTOTUNED) {
            bestIndex_->findNeighbors(result, vec, bestSearchParams_);
        }
        else {
            bestIndex_->findNeighbors(result, vec, searchParams);
        }
    }
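
    /**
     * Returns the parameters of the chosen (tuned) index.
     */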
    IndexParams getParameters() const CV_OVERRIDE
    {
        return bestIndex_->getParameters();
    }
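
    /**
     * Returns the search parameters found during tuning.
     */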
    SearchParams getSearchParameters() const
    {
        return bestSearchParams_;
    }
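
    /**
     * Returns the estimated speedup over linear search at the target precision.
     */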
    float getSpeedup() const
    {
        return speedup_;
    }

    /**
     * Number of features in this index.
     */
    virtual size_t size() const CV_OVERRIDE
    {
        return bestIndex_->size();
    }

    /**
     * The length of each vector in this index.
     */
    virtual size_t veclen() const CV_OVERRIDE
    {
        return bestIndex_->veclen();
    }

    /**
     * The amount of memory (in bytes) this index uses.
     */
    virtual int usedMemory() const CV_OVERRIDE
    {
        return bestIndex_->usedMemory();
    }

    /**
     * Algorithm name.
     */
    virtual flann_algorithm_t getType() const CV_OVERRIDE
    {
        return FLANN_INDEX_AUTOTUNED;
    }

private:

    struct CostData
    {
        float searchTimeCost;
        float buildTimeCost;
        float memoryCost;
        float totalCost;
        IndexParams params;
    };
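
    /**
     * Builds a KMeans index on the sampled dataset using cost.params and fills
     * in the resulting build-time, search-time and relative memory costs.
     */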
    void evaluate_kmeans(CostData& cost)
    {
        StartStopTimer t;
        int checks;
        const int nn = 1;

        Logger::info("KMeansTree using params: max_iterations=%d, branching=%d\n",
                     get_param<int>(cost.params, "iterations"),
                     get_param<int>(cost.params, "branching"));
        KMeansIndex<Distance> kmeans(sampledDataset_, cost.params, distance_);
        // measure index build time
        t.start();
        kmeans.buildIndex();
        t.stop();
        float buildTime = (float)t.value;

        // measure search time
        float searchTime = test_index_precision(kmeans, sampledDataset_, testDataset_, gt_matches_, target_precision_, checks, distance_, nn);

        float datasetMemory = float(sampledDataset_.rows * sampledDataset_.cols * sizeof(float));
        cost.memoryCost = (kmeans.usedMemory() + datasetMemory) / datasetMemory;
        cost.searchTimeCost = searchTime;
        cost.buildTimeCost = buildTime;
        Logger::info("KMeansTree buildTime=%g, searchTime=%g, build_weight=%g\n", buildTime, searchTime, build_weight_);
    }
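
    /**
     * Builds a KDTree index on the sampled dataset using cost.params and fills
     * in the resulting build-time, search-time and relative memory costs.
     */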
    void evaluate_kdtree(CostData& cost)
    {
        StartStopTimer t;
        int checks;
        const int nn = 1;

        Logger::info("KDTree using params: trees=%d\n", get_param<int>(cost.params, "trees"));
        KDTreeIndex<Distance> kdtree(sampledDataset_, cost.params, distance_);

        t.start();
        kdtree.buildIndex();
        t.stop();
        float buildTime = (float)t.value;

        // measure search time
        float searchTime = test_index_precision(kdtree, sampledDataset_, testDataset_, gt_matches_, target_precision_, checks, distance_, nn);

        float datasetMemory = float(sampledDataset_.rows * sampledDataset_.cols * sizeof(float));
        cost.memoryCost = (kdtree.usedMemory() + datasetMemory) / datasetMemory;
        cost.searchTimeCost = searchTime;
        cost.buildTimeCost = buildTime;
        Logger::info("KDTree buildTime=%g, searchTime=%g\n", buildTime, searchTime);
    }

    //    struct KMeansSimpleDownhillFunctor {
    //
    //        Autotune& autotuner;
    //        KMeansSimpleDownhillFunctor(Autotune& autotuner_) : autotuner(autotuner_) {}
    //
    //        float operator()(int* params) {
    //
    //            float maxFloat = numeric_limits<float>::max();
    //
    //            if (params[0]<2) return maxFloat;
    //            if (params[1]<0) return maxFloat;
    //
    //            CostData c;
    //            c.params["algorithm"] = KMEANS;
    //            c.params["centers-init"] = CENTERS_RANDOM;
    //            c.params["branching"] = params[0];
    //            c.params["max-iterations"] = params[1];
    //
    //            autotuner.evaluate_kmeans(c);
    //
    //            return c.timeCost;
    //
    //        }
    //    };
    //
    //    struct KDTreeSimpleDownhillFunctor {
    //
    //        Autotune& autotuner;
    //        KDTreeSimpleDownhillFunctor(Autotune& autotuner_) : autotuner(autotuner_) {}
    //
    //        float operator()(int* params) {
    //            float maxFloat = numeric_limits<float>::max();
    //
    //            if (params[0]<1) return maxFloat;
    //
    //            CostData c;
    //            c.params["algorithm"] = KDTREE;
    //            c.params["trees"] = params[0];
    //
    //            autotuner.evaluate_kdtree(c);
    //
    //            return c.timeCost;
    //
    //        }
    //    };
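
    /**
     * Grid-searches the KMeans parameter space (max iterations x branching
     * factor) and appends a CostData entry for every combination tried.
     */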
    void optimizeKMeans(std::vector<CostData>& costs)
    {
        Logger::info("KMEANS, Step 1: Exploring parameter space\n");

        // explore kmeans parameters space using combinations of the parameters below
        int maxIterations[] = { 1, 5, 10, 15 };
        int branchingFactors[] = { 16, 32, 64, 128, 256 };

        int kmeansParamSpaceSize = FLANN_ARRAY_LEN(maxIterations) * FLANN_ARRAY_LEN(branchingFactors);
        costs.reserve(costs.size() + kmeansParamSpaceSize);

        // evaluate kmeans for all parameter combinations
        for (size_t i = 0; i < FLANN_ARRAY_LEN(maxIterations); ++i) {
            for (size_t j = 0; j < FLANN_ARRAY_LEN(branchingFactors); ++j) {
                CostData cost;
                cost.params["algorithm"] = FLANN_INDEX_KMEANS;
                cost.params["centers_init"] = FLANN_CENTERS_RANDOM;
                cost.params["iterations"] = maxIterations[i];
                cost.params["branching"] = branchingFactors[j];

                evaluate_kmeans(cost);
                costs.push_back(cost);
            }
        }

        //         Logger::info("KMEANS, Step 2: simplex-downhill optimization\n");
        //
        //         const int n = 2;
        //         // choose initial simplex points as the best parameters so far
        //         int kmeansNMPoints[n*(n+1)];
        //         float kmeansVals[n+1];
        //         for (int i=0;i<n+1;++i) {
        //             kmeansNMPoints[i*n] = (int)kmeansCosts[i].params["branching"];
        //             kmeansNMPoints[i*n+1] = (int)kmeansCosts[i].params["max-iterations"];
        //             kmeansVals[i] = kmeansCosts[i].timeCost;
        //         }
        //         KMeansSimpleDownhillFunctor kmeans_cost_func(*this);
        //         // run optimization
        //         optimizeSimplexDownhill(kmeansNMPoints,n,kmeans_cost_func,kmeansVals);
        //         // store results
        //         for (int i=0;i<n+1;++i) {
        //             kmeansCosts[i].params["branching"] = kmeansNMPoints[i*2];
        //             kmeansCosts[i].params["max-iterations"] = kmeansNMPoints[i*2+1];
        //             kmeansCosts[i].timeCost = kmeansVals[i];
        //         }
    }
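
    /**
     * Grid-searches the KDTree parameter space (number of trees) and appends a
     * CostData entry for every value tried.
     */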
    void optimizeKDTree(std::vector<CostData>& costs)
    {
        Logger::info("KD-TREE, Step 1: Exploring parameter space\n");

        // explore kd-tree parameters space using the parameters below
        int testTrees[] = { 1, 4, 8, 16, 32 };

        // evaluate kdtree for all parameter combinations
        for (size_t i = 0; i < FLANN_ARRAY_LEN(testTrees); ++i) {
            CostData cost;
            cost.params["algorithm"] = FLANN_INDEX_KDTREE;
            cost.params["trees"] = testTrees[i];

            evaluate_kdtree(cost);
            costs.push_back(cost);
        }

        //         Logger::info("KD-TREE, Step 2: simplex-downhill optimization\n");
        //
        //         const int n = 1;
        //         // choose initial simplex points as the best parameters so far
        //         int kdtreeNMPoints[n*(n+1)];
        //         float kdtreeVals[n+1];
        //         for (int i=0;i<n+1;++i) {
        //             kdtreeNMPoints[i] = (int)kdtreeCosts[i].params["trees"];
        //             kdtreeVals[i] = kdtreeCosts[i].timeCost;
        //         }
        //         KDTreeSimpleDownhillFunctor kdtree_cost_func(*this);
        //         // run optimization
        //         optimizeSimplexDownhill(kdtreeNMPoints,n,kdtree_cost_func,kdtreeVals);
        //         // store results
        //         for (int i=0;i<n+1;++i) {
        //             kdtreeCosts[i].params["trees"] = kdtreeNMPoints[i];
        //             kdtreeCosts[i].timeCost = kdtreeVals[i];
        //         }
    }

    /**
     * Chooses the best nearest-neighbor algorithm and estimates the optimal
     * parameters to use when building the index (for a given precision).
     * Returns a dictionary with the optimal parameters.
     */
    IndexParams estimateBuildParams()
    {
        std::vector<CostData> costs;

        int sampleSize = int(sample_fraction_ * dataset_.rows);
        int testSampleSize = std::min(sampleSize / 10, 1000);

        Logger::info("Entering autotuning, dataset size: %d, sampleSize: %d, testSampleSize: %d, target precision: %g\n", dataset_.rows, sampleSize, testSampleSize, target_precision_);

        // For a very small dataset it makes no sense to build any fancy index;
        // just use linear search
        if (testSampleSize < 10) {
            Logger::info("Choosing linear, dataset too small\n");
            return LinearIndexParams();
        }

        // We use a fraction of the original dataset to speed up the autotune algorithm
        sampledDataset_ = random_sample(dataset_, sampleSize);
        // We use a cross-validation approach: first we sample a test set from the dataset
        testDataset_ = random_sample(sampledDataset_, testSampleSize, true);

        // We compute the ground truth using linear search
        Logger::info("Computing ground truth... \n");
        gt_matches_ = Matrix<int>(new int[testDataset_.rows], testDataset_.rows, 1);
        StartStopTimer t;
        t.start();
        compute_ground_truth<Distance>(sampledDataset_, testDataset_, gt_matches_, 0, distance_);
        t.stop();

        CostData linear_cost;
        linear_cost.searchTimeCost = (float)t.value;
        linear_cost.buildTimeCost = 0;
        linear_cost.memoryCost = 0;
        linear_cost.params["algorithm"] = FLANN_INDEX_LINEAR;

        costs.push_back(linear_cost);

        // Start parameter autotune process
        Logger::info("Autotuning parameters...\n");

        optimizeKMeans(costs);
        optimizeKDTree(costs);
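
        // Find the lowest weighted time cost (build time scaled by build_weight_
        // plus search time); it is used below to normalize the other candidates.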
        float bestTimeCost = costs[0].searchTimeCost;
        for (size_t i = 0; i < costs.size(); ++i) {
            float timeCost = costs[i].buildTimeCost * build_weight_ + costs[i].searchTimeCost;
            if (timeCost < bestTimeCost) {
                bestTimeCost = timeCost;
            }
        }
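
        // Pick the candidate with the lowest total cost: normalized time cost
        // plus a memory penalty scaled by memory_weight_.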
        float bestCost = costs[0].searchTimeCost / bestTimeCost;
        IndexParams bestParams = costs[0].params;
        if (bestTimeCost > 0) {
            for (size_t i = 0; i < costs.size(); ++i) {
                float crtCost = (costs[i].buildTimeCost * build_weight_ + costs[i].searchTimeCost) / bestTimeCost +
                                memory_weight_ * costs[i].memoryCost;
                if (crtCost < bestCost) {
                    bestCost = crtCost;
                    bestParams = costs[i].params;
                }
            }
        }

        delete[] gt_matches_.data;
        delete[] testDataset_.data;
        delete[] sampledDataset_.data;

        return bestParams;
    }

    /**
     * Estimates the search-time parameters needed to get the desired precision.
     * Precondition: the index is built.
     * Postcondition: searchParams holds the optimal parameters; the return
     * value is the speedup obtained over linear search.
     */
    float estimateSearchParams(SearchParams& searchParams)
    {
        const int nn = 1;
        const size_t SAMPLE_COUNT = 1000;

        assert(bestIndex_ != NULL); // must have a valid index

        float speedup = 0;

        int samples = (int)std::min(dataset_.rows / 10, SAMPLE_COUNT);
        if (samples > 0) {
            Matrix<ElementType> testDataset = random_sample(dataset_, samples);

            Logger::info("Computing ground truth\n");

            // we need to compute the ground truth first
            Matrix<int> gt_matches(new int[testDataset.rows], testDataset.rows, 1);
            StartStopTimer t;
            t.start();
            compute_ground_truth<Distance>(dataset_, testDataset, gt_matches, 1, distance_);
            t.stop();
            float linear = (float)t.value;

            int checks;
            Logger::info("Estimating number of checks\n");

            float searchTime;
            float cb_index;
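            // For a KMeans index, also sweep the cluster-boundary factor
            // (cb_index = 0, 0.2, ..., 1.0) and keep the value that reaches
            // the target precision with the lowest search time.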
            if (bestIndex_->getType() == FLANN_INDEX_KMEANS) {
                Logger::info("KMeans algorithm, estimating cluster border factor\n");
                KMeansIndex<Distance>* kmeans = (KMeansIndex<Distance>*)bestIndex_;
                float bestSearchTime = -1;
                float best_cb_index = -1;
                int best_checks = -1;
                for (cb_index = 0; cb_index < 1.1f; cb_index += 0.2f) {
                    kmeans->set_cb_index(cb_index);
                    searchTime = test_index_precision(*kmeans, dataset_, testDataset, gt_matches, target_precision_, checks, distance_, nn, 1);
                    if ((searchTime < bestSearchTime) || (bestSearchTime == -1)) {
                        bestSearchTime = searchTime;
                        best_cb_index = cb_index;
                        best_checks = checks;
                    }
                }
                searchTime = bestSearchTime;
                cb_index = best_cb_index;
                checks = best_checks;

                kmeans->set_cb_index(best_cb_index);
                Logger::info("Optimum cb_index: %g\n", cb_index);
                bestParams_["cb_index"] = cb_index;
            }
            else {
                searchTime = test_index_precision(*bestIndex_, dataset_, testDataset, gt_matches, target_precision_, checks, distance_, nn, 1);
            }

            Logger::info("Required number of checks: %d \n", checks);
            searchParams["checks"] = checks;

            speedup = linear / searchTime;

            delete[] gt_matches.data;
            delete[] testDataset.data;
        }

        return speedup;
    }

private:
    NNIndex<Distance>* bestIndex_;

    IndexParams bestParams_;
    SearchParams bestSearchParams_;

    Matrix<ElementType> sampledDataset_;
    Matrix<ElementType> testDataset_;
    Matrix<int> gt_matches_;

    float speedup_;

    /**
     * The dataset used by this index
     */
    const Matrix<ElementType> dataset_;

    /**
     * Index parameters
     */
    float target_precision_;
    float build_weight_;
    float memory_weight_;
    float sample_fraction_;

    Distance distance_;
};

}

#endif /* OPENCV_FLANN_AUTOTUNED_INDEX_H_ */