// convert_test.cc
  1. /*
  2. * Copyright 2011 The LibYuv Project Authors. All rights reserved.
  3. *
  4. * Use of this source code is governed by a BSD-style license
  5. * that can be found in the LICENSE file in the root of the source
  6. * tree. An additional intellectual property rights grant can be found
  7. * in the file PATENTS. All contributing project authors may
  8. * be found in the AUTHORS file in the root of the source tree.
  9. */
  10. #include <assert.h>
  11. #include <stdlib.h>
  12. #include <time.h>
  13. #include "libyuv/basic_types.h"
  14. #include "libyuv/compare.h"
  15. #include "libyuv/convert.h"
  16. #include "libyuv/convert_argb.h"
  17. #include "libyuv/convert_from.h"
  18. #include "libyuv/convert_from_argb.h"
  19. #include "libyuv/cpu_id.h"
  20. #ifdef HAVE_JPEG
  21. #include "libyuv/mjpeg_decoder.h"
  22. #endif
  23. #include "../unit_test/unit_test.h"
  24. #include "libyuv/planar_functions.h"
  25. #include "libyuv/rotate.h"
  26. #include "libyuv/video_common.h"
  27. #ifdef ENABLE_ROW_TESTS
  28. #include "libyuv/row.h" /* For ARGBToAR30Row_AVX2 */
  29. #endif
// Some functions fail on big endian. Enable these tests on all cpus except
// PowerPC, but they are not optimized so disabled by default.
#if !defined(__powerpc__) && defined(ENABLE_SLOW_TESTS)
#define LITTLE_ENDIAN_ONLY_TEST 1
#endif
namespace libyuv {
// Alias to copy pixels as is: same-format "conversions" reuse ARGBCopy so the
// test macros below can name them with the usual Src##To##Dst pattern.
#define AR30ToAR30 ARGBCopy
#define ABGRToABGR ARGBCopy
// Number of samples in a plane of v pixels with subsample factor a,
// i.e. ceil(v / a).
#define SUBSAMPLE(v, a) ((((v) + (a)-1)) / (a))
// Planar test.
// Generates one TEST_F that converts a tri-planar source to a tri-planar
// destination and verifies the optimized (SIMD) path matches the C path
// byte-for-byte.
//   SRC_FMT_PLANAR / FMT_PLANAR: format names; SRC_FMT_PLANAR##To##FMT_PLANAR
//     must be a libyuv conversion entry point.
//   SRC_T / DST_T, SRC_BPC / DST_BPC: plane element type and bytes per sample
//     (1 or 2, enforced by static_assert below).
//   *_SUBSAMP_X / *_SUBSAMP_Y: chroma subsampling factors (1 or 2).
//   W1280: width expression (clamped to at least 1); N: test-name suffix;
//   NEG: `+` or `-`, a `-` passes negative height to exercise inversion;
//   OFF: byte offset added to source pointers to exercise unaligned input.
#define TESTPLANARTOPI(SRC_FMT_PLANAR, SRC_T, SRC_BPC, SRC_SUBSAMP_X,          \
                       SRC_SUBSAMP_Y, FMT_PLANAR, DST_T, DST_BPC,              \
                       DST_SUBSAMP_X, DST_SUBSAMP_Y, W1280, N, NEG, OFF)       \
  TEST_F(LibYUVConvertTest, SRC_FMT_PLANAR##To##FMT_PLANAR##N) {               \
    static_assert(SRC_BPC == 1 || SRC_BPC == 2, "SRC BPC unsupported");        \
    static_assert(DST_BPC == 1 || DST_BPC == 2, "DST BPC unsupported");        \
    static_assert(SRC_SUBSAMP_X == 1 || SRC_SUBSAMP_X == 2,                    \
                  "DST SRC_SUBSAMP_X unsupported");                            \
    static_assert(SRC_SUBSAMP_Y == 1 || SRC_SUBSAMP_Y == 2,                    \
                  "DST SRC_SUBSAMP_Y unsupported");                            \
    static_assert(DST_SUBSAMP_X == 1 || DST_SUBSAMP_X == 2,                    \
                  "DST DST_SUBSAMP_X unsupported");                            \
    static_assert(DST_SUBSAMP_Y == 1 || DST_SUBSAMP_Y == 2,                    \
                  "DST DST_SUBSAMP_Y unsupported");                            \
    const int kWidth = ((W1280) > 0) ? (W1280) : 1;                            \
    const int kHeight = benchmark_height_;                                     \
    const int kSrcHalfWidth = SUBSAMPLE(kWidth, SRC_SUBSAMP_X);                \
    const int kSrcHalfHeight = SUBSAMPLE(kHeight, SRC_SUBSAMP_Y);              \
    const int kDstHalfWidth = SUBSAMPLE(kWidth, DST_SUBSAMP_X);                \
    const int kDstHalfHeight = SUBSAMPLE(kHeight, DST_SUBSAMP_Y);              \
    align_buffer_page_end(src_y, kWidth* kHeight* SRC_BPC + OFF);              \
    align_buffer_page_end(src_u,                                               \
                          kSrcHalfWidth* kSrcHalfHeight* SRC_BPC + OFF);       \
    align_buffer_page_end(src_v,                                               \
                          kSrcHalfWidth* kSrcHalfHeight* SRC_BPC + OFF);       \
    align_buffer_page_end(dst_y_c, kWidth* kHeight* DST_BPC);                  \
    align_buffer_page_end(dst_u_c, kDstHalfWidth* kDstHalfHeight* DST_BPC);    \
    align_buffer_page_end(dst_v_c, kDstHalfWidth* kDstHalfHeight* DST_BPC);    \
    align_buffer_page_end(dst_y_opt, kWidth* kHeight* DST_BPC);                \
    align_buffer_page_end(dst_u_opt, kDstHalfWidth* kDstHalfHeight* DST_BPC);  \
    align_buffer_page_end(dst_v_opt, kDstHalfWidth* kDstHalfHeight* DST_BPC);  \
    MemRandomize(src_y + OFF, kWidth * kHeight * SRC_BPC);                     \
    MemRandomize(src_u + OFF, kSrcHalfWidth * kSrcHalfHeight * SRC_BPC);       \
    MemRandomize(src_v + OFF, kSrcHalfWidth * kSrcHalfHeight * SRC_BPC);       \
    memset(dst_y_c, 1, kWidth* kHeight* DST_BPC);                              \
    memset(dst_u_c, 2, kDstHalfWidth* kDstHalfHeight* DST_BPC);                \
    memset(dst_v_c, 3, kDstHalfWidth* kDstHalfHeight* DST_BPC);                \
    memset(dst_y_opt, 101, kWidth* kHeight* DST_BPC);                          \
    memset(dst_u_opt, 102, kDstHalfWidth* kDstHalfHeight* DST_BPC);            \
    memset(dst_v_opt, 103, kDstHalfWidth* kDstHalfHeight* DST_BPC);            \
    MaskCpuFlags(disable_cpu_flags_);                                          \
    SRC_FMT_PLANAR##To##FMT_PLANAR(                                            \
        reinterpret_cast<SRC_T*>(src_y + OFF), kWidth,                         \
        reinterpret_cast<SRC_T*>(src_u + OFF), kSrcHalfWidth,                  \
        reinterpret_cast<SRC_T*>(src_v + OFF), kSrcHalfWidth,                  \
        reinterpret_cast<DST_T*>(dst_y_c), kWidth,                             \
        reinterpret_cast<DST_T*>(dst_u_c), kDstHalfWidth,                      \
        reinterpret_cast<DST_T*>(dst_v_c), kDstHalfWidth, kWidth,              \
        NEG kHeight);                                                          \
    MaskCpuFlags(benchmark_cpu_info_);                                         \
    for (int i = 0; i < benchmark_iterations_; ++i) {                          \
      SRC_FMT_PLANAR##To##FMT_PLANAR(                                          \
          reinterpret_cast<SRC_T*>(src_y + OFF), kWidth,                       \
          reinterpret_cast<SRC_T*>(src_u + OFF), kSrcHalfWidth,                \
          reinterpret_cast<SRC_T*>(src_v + OFF), kSrcHalfWidth,                \
          reinterpret_cast<DST_T*>(dst_y_opt), kWidth,                         \
          reinterpret_cast<DST_T*>(dst_u_opt), kDstHalfWidth,                  \
          reinterpret_cast<DST_T*>(dst_v_opt), kDstHalfWidth, kWidth,          \
          NEG kHeight);                                                        \
    }                                                                          \
    for (int i = 0; i < kHeight * kWidth * DST_BPC; ++i) {                     \
      EXPECT_EQ(dst_y_c[i], dst_y_opt[i]);                                     \
    }                                                                          \
    for (int i = 0; i < kDstHalfWidth * kDstHalfHeight * DST_BPC; ++i) {       \
      EXPECT_EQ(dst_u_c[i], dst_u_opt[i]);                                     \
      EXPECT_EQ(dst_v_c[i], dst_v_opt[i]);                                     \
    }                                                                          \
    free_aligned_buffer_page_end(dst_y_c);                                     \
    free_aligned_buffer_page_end(dst_u_c);                                     \
    free_aligned_buffer_page_end(dst_v_c);                                     \
    free_aligned_buffer_page_end(dst_y_opt);                                   \
    free_aligned_buffer_page_end(dst_u_opt);                                   \
    free_aligned_buffer_page_end(dst_v_opt);                                   \
    free_aligned_buffer_page_end(src_y);                                       \
    free_aligned_buffer_page_end(src_u);                                       \
    free_aligned_buffer_page_end(src_v);                                       \
  }
// Expands TESTPLANARTOPI into four variants:
//   _Any       - odd width (benchmark_width_ - 4) to exercise the tail path.
//   _Unaligned - source pointers offset by one byte.
//   _Invert    - negative height (bottom-up image).
//   _Opt       - aligned, full optimized path.
#define TESTPLANARTOP(SRC_FMT_PLANAR, SRC_T, SRC_BPC, SRC_SUBSAMP_X,           \
                      SRC_SUBSAMP_Y, FMT_PLANAR, DST_T, DST_BPC,               \
                      DST_SUBSAMP_X, DST_SUBSAMP_Y)                            \
  TESTPLANARTOPI(SRC_FMT_PLANAR, SRC_T, SRC_BPC, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, \
                 FMT_PLANAR, DST_T, DST_BPC, DST_SUBSAMP_X, DST_SUBSAMP_Y,     \
                 benchmark_width_ - 4, _Any, +, 0)                             \
  TESTPLANARTOPI(SRC_FMT_PLANAR, SRC_T, SRC_BPC, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, \
                 FMT_PLANAR, DST_T, DST_BPC, DST_SUBSAMP_X, DST_SUBSAMP_Y,     \
                 benchmark_width_, _Unaligned, +, 1)                           \
  TESTPLANARTOPI(SRC_FMT_PLANAR, SRC_T, SRC_BPC, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, \
                 FMT_PLANAR, DST_T, DST_BPC, DST_SUBSAMP_X, DST_SUBSAMP_Y,     \
                 benchmark_width_, _Invert, -, 0)                              \
  TESTPLANARTOPI(SRC_FMT_PLANAR, SRC_T, SRC_BPC, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, \
                 FMT_PLANAR, DST_T, DST_BPC, DST_SUBSAMP_X, DST_SUBSAMP_Y,     \
                 benchmark_width_, _Opt, +, 0)
// 8-bit and 10-bit tri-planar conversions, including same-format copies and
// mirrored copies.
TESTPLANARTOP(I420, uint8_t, 1, 2, 2, I420, uint8_t, 1, 2, 2)
TESTPLANARTOP(I422, uint8_t, 1, 2, 1, I420, uint8_t, 1, 2, 2)
TESTPLANARTOP(I444, uint8_t, 1, 1, 1, I420, uint8_t, 1, 2, 2)
TESTPLANARTOP(I420, uint8_t, 1, 2, 2, I422, uint8_t, 1, 2, 1)
TESTPLANARTOP(I420, uint8_t, 1, 2, 2, I444, uint8_t, 1, 1, 1)
TESTPLANARTOP(I420, uint8_t, 1, 2, 2, I420Mirror, uint8_t, 1, 2, 2)
TESTPLANARTOP(I422, uint8_t, 1, 2, 1, I422, uint8_t, 1, 2, 1)
TESTPLANARTOP(I444, uint8_t, 1, 1, 1, I444, uint8_t, 1, 1, 1)
TESTPLANARTOP(I010, uint16_t, 2, 2, 2, I010, uint16_t, 2, 2, 2)
TESTPLANARTOP(I010, uint16_t, 2, 2, 2, I420, uint8_t, 1, 2, 2)
TESTPLANARTOP(I420, uint8_t, 1, 2, 2, I010, uint16_t, 2, 2, 2)
TESTPLANARTOP(H010, uint16_t, 2, 2, 2, H010, uint16_t, 2, 2, 2)
TESTPLANARTOP(H010, uint16_t, 2, 2, 2, H420, uint8_t, 1, 2, 2)
TESTPLANARTOP(H420, uint8_t, 1, 2, 2, H010, uint16_t, 2, 2, 2)
// Test Android 420 to I420.
// Android YUV_420_888 exposes U and V through a shared buffer with a pixel
// stride: PIXEL_STRIDE 1 is fully planar (I420-style), 2 is interleaved
// (NV12/NV21-style, distinguished by OFF_U/OFF_V giving the U/V byte offsets
// within src_uv). PN names the layout in the test name. Other parameters as
// in TESTPLANARTOPI. Compares the C reference result against the optimized
// path.
#define TESTAPLANARTOPI(SRC_FMT_PLANAR, PIXEL_STRIDE, SRC_SUBSAMP_X,           \
                        SRC_SUBSAMP_Y, FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y,       \
                        W1280, N, NEG, OFF, PN, OFF_U, OFF_V)                  \
  TEST_F(LibYUVConvertTest, SRC_FMT_PLANAR##To##FMT_PLANAR##_##PN##N) {        \
    const int kWidth = ((W1280) > 0) ? (W1280) : 1;                            \
    const int kHeight = benchmark_height_;                                     \
    const int kSizeUV =                                                        \
        SUBSAMPLE(kWidth, SRC_SUBSAMP_X) * SUBSAMPLE(kHeight, SRC_SUBSAMP_Y);  \
    align_buffer_page_end(src_y, kWidth* kHeight + OFF);                       \
    align_buffer_page_end(src_uv,                                              \
                          kSizeUV*((PIXEL_STRIDE == 3) ? 3 : 2) + OFF);        \
    align_buffer_page_end(dst_y_c, kWidth* kHeight);                           \
    align_buffer_page_end(dst_u_c, SUBSAMPLE(kWidth, SUBSAMP_X) *              \
                                       SUBSAMPLE(kHeight, SUBSAMP_Y));         \
    align_buffer_page_end(dst_v_c, SUBSAMPLE(kWidth, SUBSAMP_X) *              \
                                       SUBSAMPLE(kHeight, SUBSAMP_Y));         \
    align_buffer_page_end(dst_y_opt, kWidth* kHeight);                         \
    align_buffer_page_end(dst_u_opt, SUBSAMPLE(kWidth, SUBSAMP_X) *            \
                                         SUBSAMPLE(kHeight, SUBSAMP_Y));       \
    align_buffer_page_end(dst_v_opt, SUBSAMPLE(kWidth, SUBSAMP_X) *            \
                                         SUBSAMPLE(kHeight, SUBSAMP_Y));       \
    uint8_t* src_u = src_uv + OFF_U;                                           \
    uint8_t* src_v = src_uv + (PIXEL_STRIDE == 1 ? kSizeUV : OFF_V);           \
    int src_stride_uv = SUBSAMPLE(kWidth, SUBSAMP_X) * PIXEL_STRIDE;           \
    for (int i = 0; i < kHeight; ++i)                                          \
      for (int j = 0; j < kWidth; ++j)                                         \
        src_y[i * kWidth + j + OFF] = (fastrand() & 0xff);                     \
    for (int i = 0; i < SUBSAMPLE(kHeight, SRC_SUBSAMP_Y); ++i) {              \
      for (int j = 0; j < SUBSAMPLE(kWidth, SRC_SUBSAMP_X); ++j) {             \
        src_u[(i * src_stride_uv) + j * PIXEL_STRIDE + OFF] =                  \
            (fastrand() & 0xff);                                               \
        src_v[(i * src_stride_uv) + j * PIXEL_STRIDE + OFF] =                  \
            (fastrand() & 0xff);                                               \
      }                                                                        \
    }                                                                          \
    memset(dst_y_c, 1, kWidth* kHeight);                                       \
    memset(dst_u_c, 2,                                                         \
           SUBSAMPLE(kWidth, SUBSAMP_X) * SUBSAMPLE(kHeight, SUBSAMP_Y));      \
    memset(dst_v_c, 3,                                                         \
           SUBSAMPLE(kWidth, SUBSAMP_X) * SUBSAMPLE(kHeight, SUBSAMP_Y));      \
    memset(dst_y_opt, 101, kWidth* kHeight);                                   \
    memset(dst_u_opt, 102,                                                     \
           SUBSAMPLE(kWidth, SUBSAMP_X) * SUBSAMPLE(kHeight, SUBSAMP_Y));      \
    memset(dst_v_opt, 103,                                                     \
           SUBSAMPLE(kWidth, SUBSAMP_X) * SUBSAMPLE(kHeight, SUBSAMP_Y));      \
    MaskCpuFlags(disable_cpu_flags_);                                          \
    SRC_FMT_PLANAR##To##FMT_PLANAR(                                            \
        src_y + OFF, kWidth, src_u + OFF, SUBSAMPLE(kWidth, SRC_SUBSAMP_X),    \
        src_v + OFF, SUBSAMPLE(kWidth, SRC_SUBSAMP_X), PIXEL_STRIDE, dst_y_c,  \
        kWidth, dst_u_c, SUBSAMPLE(kWidth, SUBSAMP_X), dst_v_c,                \
        SUBSAMPLE(kWidth, SUBSAMP_X), kWidth, NEG kHeight);                    \
    MaskCpuFlags(benchmark_cpu_info_);                                         \
    for (int i = 0; i < benchmark_iterations_; ++i) {                          \
      SRC_FMT_PLANAR##To##FMT_PLANAR(                                          \
          src_y + OFF, kWidth, src_u + OFF, SUBSAMPLE(kWidth, SRC_SUBSAMP_X),  \
          src_v + OFF, SUBSAMPLE(kWidth, SRC_SUBSAMP_X), PIXEL_STRIDE,         \
          dst_y_opt, kWidth, dst_u_opt, SUBSAMPLE(kWidth, SUBSAMP_X),          \
          dst_v_opt, SUBSAMPLE(kWidth, SUBSAMP_X), kWidth, NEG kHeight);       \
    }                                                                          \
    for (int i = 0; i < kHeight; ++i) {                                        \
      for (int j = 0; j < kWidth; ++j) {                                       \
        EXPECT_EQ(dst_y_c[i * kWidth + j], dst_y_opt[i * kWidth + j]);         \
      }                                                                        \
    }                                                                          \
    for (int i = 0; i < SUBSAMPLE(kHeight, SUBSAMP_Y); ++i) {                  \
      for (int j = 0; j < SUBSAMPLE(kWidth, SUBSAMP_X); ++j) {                 \
        EXPECT_EQ(dst_u_c[i * SUBSAMPLE(kWidth, SUBSAMP_X) + j],               \
                  dst_u_opt[i * SUBSAMPLE(kWidth, SUBSAMP_X) + j]);            \
      }                                                                        \
    }                                                                          \
    for (int i = 0; i < SUBSAMPLE(kHeight, SUBSAMP_Y); ++i) {                  \
      for (int j = 0; j < SUBSAMPLE(kWidth, SUBSAMP_X); ++j) {                 \
        EXPECT_EQ(dst_v_c[i * SUBSAMPLE(kWidth, SUBSAMP_X) + j],               \
                  dst_v_opt[i * SUBSAMPLE(kWidth, SUBSAMP_X) + j]);            \
      }                                                                        \
    }                                                                          \
    free_aligned_buffer_page_end(dst_y_c);                                     \
    free_aligned_buffer_page_end(dst_u_c);                                     \
    free_aligned_buffer_page_end(dst_v_c);                                     \
    free_aligned_buffer_page_end(dst_y_opt);                                   \
    free_aligned_buffer_page_end(dst_u_opt);                                   \
    free_aligned_buffer_page_end(dst_v_opt);                                   \
    free_aligned_buffer_page_end(src_y);                                       \
    free_aligned_buffer_page_end(src_uv);                                      \
  }
// Expands TESTAPLANARTOPI into the four standard variants
// (_Any / _Unaligned / _Invert / _Opt), carrying the Android pixel-stride
// layout parameters (PN, PIXEL_STRIDE, OFF_U, OFF_V) through unchanged.
#define TESTAPLANARTOP(SRC_FMT_PLANAR, PN, PIXEL_STRIDE, OFF_U, OFF_V,         \
                       SRC_SUBSAMP_X, SRC_SUBSAMP_Y, FMT_PLANAR, SUBSAMP_X,    \
                       SUBSAMP_Y)                                              \
  TESTAPLANARTOPI(SRC_FMT_PLANAR, PIXEL_STRIDE, SRC_SUBSAMP_X, SRC_SUBSAMP_Y,  \
                  FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, benchmark_width_ - 4,      \
                  _Any, +, 0, PN, OFF_U, OFF_V)                                \
  TESTAPLANARTOPI(SRC_FMT_PLANAR, PIXEL_STRIDE, SRC_SUBSAMP_X, SRC_SUBSAMP_Y,  \
                  FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, benchmark_width_,          \
                  _Unaligned, +, 1, PN, OFF_U, OFF_V)                          \
  TESTAPLANARTOPI(SRC_FMT_PLANAR, PIXEL_STRIDE, SRC_SUBSAMP_X, SRC_SUBSAMP_Y,  \
                  FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, benchmark_width_, _Invert, \
                  -, 0, PN, OFF_U, OFF_V)                                      \
  TESTAPLANARTOPI(SRC_FMT_PLANAR, PIXEL_STRIDE, SRC_SUBSAMP_X, SRC_SUBSAMP_Y,  \
                  FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, benchmark_width_, _Opt, +, \
                  0, PN, OFF_U, OFF_V)
// Android420 in its three layouts: planar (I420), and interleaved with U
// first (NV12) or V first (NV21).
TESTAPLANARTOP(Android420, I420, 1, 0, 0, 2, 2, I420, 2, 2)
TESTAPLANARTOP(Android420, NV12, 2, 0, 1, 2, 2, I420, 2, 2)
TESTAPLANARTOP(Android420, NV21, 2, 1, 0, 2, 2, I420, 2, 2)
  251. // wrapper to keep API the same
  252. int I400ToNV21(const uint8_t* src_y,
  253. int src_stride_y,
  254. const uint8_t* /* src_u */,
  255. int /* src_stride_u */,
  256. const uint8_t* /* src_v */,
  257. int /* src_stride_v */,
  258. uint8_t* dst_y,
  259. int dst_stride_y,
  260. uint8_t* dst_vu,
  261. int dst_stride_vu,
  262. int width,
  263. int height) {
  264. return I400ToNV21(src_y, src_stride_y, dst_y, dst_stride_y, dst_vu,
  265. dst_stride_vu, width, height);
  266. }
// Planar-to-biplanar test generator: converts a tri-planar source
// (Y + U + V) to a biplanar destination (Y + interleaved UV, stride
// 2 * subsampled width) and verifies the optimized path matches the C
// reference exactly. Parameters as in TESTPLANARTOPI; all planes are 8-bit.
#define TESTPLANARTOBPI(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y,          \
                        FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, W1280, N, NEG, OFF)  \
  TEST_F(LibYUVConvertTest, SRC_FMT_PLANAR##To##FMT_PLANAR##N) {               \
    const int kWidth = ((W1280) > 0) ? (W1280) : 1;                            \
    const int kHeight = benchmark_height_;                                     \
    align_buffer_page_end(src_y, kWidth* kHeight + OFF);                       \
    align_buffer_page_end(src_u, SUBSAMPLE(kWidth, SRC_SUBSAMP_X) *            \
                                         SUBSAMPLE(kHeight, SRC_SUBSAMP_Y) +   \
                                     OFF);                                     \
    align_buffer_page_end(src_v, SUBSAMPLE(kWidth, SRC_SUBSAMP_X) *            \
                                         SUBSAMPLE(kHeight, SRC_SUBSAMP_Y) +   \
                                     OFF);                                     \
    align_buffer_page_end(dst_y_c, kWidth* kHeight);                           \
    align_buffer_page_end(dst_uv_c, SUBSAMPLE(kWidth, SUBSAMP_X) * 2 *         \
                                        SUBSAMPLE(kHeight, SUBSAMP_Y));        \
    align_buffer_page_end(dst_y_opt, kWidth* kHeight);                         \
    align_buffer_page_end(dst_uv_opt, SUBSAMPLE(kWidth, SUBSAMP_X) * 2 *       \
                                          SUBSAMPLE(kHeight, SUBSAMP_Y));      \
    for (int i = 0; i < kHeight; ++i)                                          \
      for (int j = 0; j < kWidth; ++j)                                         \
        src_y[i * kWidth + j + OFF] = (fastrand() & 0xff);                     \
    for (int i = 0; i < SUBSAMPLE(kHeight, SRC_SUBSAMP_Y); ++i) {              \
      for (int j = 0; j < SUBSAMPLE(kWidth, SRC_SUBSAMP_X); ++j) {             \
        src_u[(i * SUBSAMPLE(kWidth, SRC_SUBSAMP_X)) + j + OFF] =              \
            (fastrand() & 0xff);                                               \
        src_v[(i * SUBSAMPLE(kWidth, SRC_SUBSAMP_X)) + j + OFF] =              \
            (fastrand() & 0xff);                                               \
      }                                                                        \
    }                                                                          \
    memset(dst_y_c, 1, kWidth* kHeight);                                       \
    memset(dst_uv_c, 2,                                                        \
           SUBSAMPLE(kWidth, SUBSAMP_X) * 2 * SUBSAMPLE(kHeight, SUBSAMP_Y));  \
    memset(dst_y_opt, 101, kWidth* kHeight);                                   \
    memset(dst_uv_opt, 102,                                                    \
           SUBSAMPLE(kWidth, SUBSAMP_X) * 2 * SUBSAMPLE(kHeight, SUBSAMP_Y));  \
    MaskCpuFlags(disable_cpu_flags_);                                          \
    SRC_FMT_PLANAR##To##FMT_PLANAR(                                            \
        src_y + OFF, kWidth, src_u + OFF, SUBSAMPLE(kWidth, SRC_SUBSAMP_X),    \
        src_v + OFF, SUBSAMPLE(kWidth, SRC_SUBSAMP_X), dst_y_c, kWidth,        \
        dst_uv_c, SUBSAMPLE(kWidth, SUBSAMP_X) * 2, kWidth, NEG kHeight);      \
    MaskCpuFlags(benchmark_cpu_info_);                                         \
    for (int i = 0; i < benchmark_iterations_; ++i) {                          \
      SRC_FMT_PLANAR##To##FMT_PLANAR(                                          \
          src_y + OFF, kWidth, src_u + OFF, SUBSAMPLE(kWidth, SRC_SUBSAMP_X),  \
          src_v + OFF, SUBSAMPLE(kWidth, SRC_SUBSAMP_X), dst_y_opt, kWidth,    \
          dst_uv_opt, SUBSAMPLE(kWidth, SUBSAMP_X) * 2, kWidth, NEG kHeight);  \
    }                                                                          \
    for (int i = 0; i < kHeight; ++i) {                                        \
      for (int j = 0; j < kWidth; ++j) {                                       \
        EXPECT_EQ(dst_y_c[i * kWidth + j], dst_y_opt[i * kWidth + j]);         \
      }                                                                        \
    }                                                                          \
    for (int i = 0; i < SUBSAMPLE(kHeight, SUBSAMP_Y); ++i) {                  \
      for (int j = 0; j < SUBSAMPLE(kWidth, SUBSAMP_X) * 2; ++j) {             \
        EXPECT_EQ(dst_uv_c[i * SUBSAMPLE(kWidth, SUBSAMP_X) * 2 + j],          \
                  dst_uv_opt[i * SUBSAMPLE(kWidth, SUBSAMP_X) * 2 + j]);       \
      }                                                                        \
    }                                                                          \
    free_aligned_buffer_page_end(dst_y_c);                                     \
    free_aligned_buffer_page_end(dst_uv_c);                                    \
    free_aligned_buffer_page_end(dst_y_opt);                                   \
    free_aligned_buffer_page_end(dst_uv_opt);                                  \
    free_aligned_buffer_page_end(src_y);                                       \
    free_aligned_buffer_page_end(src_u);                                       \
    free_aligned_buffer_page_end(src_v);                                       \
  }
// Expands TESTPLANARTOBPI into the four standard variants
// (_Any / _Unaligned / _Invert / _Opt).
#define TESTPLANARTOBP(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y,           \
                       FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y)                       \
  TESTPLANARTOBPI(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, FMT_PLANAR,    \
                  SUBSAMP_X, SUBSAMP_Y, benchmark_width_ - 4, _Any, +, 0)      \
  TESTPLANARTOBPI(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, FMT_PLANAR,    \
                  SUBSAMP_X, SUBSAMP_Y, benchmark_width_, _Unaligned, +, 1)    \
  TESTPLANARTOBPI(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, FMT_PLANAR,    \
                  SUBSAMP_X, SUBSAMP_Y, benchmark_width_, _Invert, -, 0)       \
  TESTPLANARTOBPI(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, FMT_PLANAR,    \
                  SUBSAMP_X, SUBSAMP_Y, benchmark_width_, _Opt, +, 0)
// Planar-to-biplanar conversions. I400ToNV21 resolves to the wrapper overload
// defined above.
TESTPLANARTOBP(I420, 2, 2, NV12, 2, 2)
TESTPLANARTOBP(I420, 2, 2, NV21, 2, 2)
TESTPLANARTOBP(I422, 2, 1, NV21, 2, 2)
TESTPLANARTOBP(I444, 1, 1, NV12, 2, 2)
TESTPLANARTOBP(I444, 1, 1, NV21, 2, 2)
TESTPLANARTOBP(I400, 2, 2, NV21, 2, 2)
// Biplanar-to-biplanar test generator: converts a Y + interleaved-UV source
// to a Y + interleaved-UV destination and verifies the optimized path against
// the C reference. DOY selects whether the Y plane is written (0 passes NULL
// for dst_y to exercise the chroma-only path; the Y comparison is then
// skipped). Other parameters as in TESTPLANARTOPI.
#define TESTBIPLANARTOBPI(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y,        \
                          FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, W1280, N, NEG,     \
                          OFF, DOY)                                            \
  TEST_F(LibYUVConvertTest, SRC_FMT_PLANAR##To##FMT_PLANAR##N) {               \
    const int kWidth = ((W1280) > 0) ? (W1280) : 1;                            \
    const int kHeight = benchmark_height_;                                     \
    align_buffer_page_end(src_y, kWidth* kHeight + OFF);                       \
    align_buffer_page_end(src_uv, 2 * SUBSAMPLE(kWidth, SRC_SUBSAMP_X) *       \
                                          SUBSAMPLE(kHeight, SRC_SUBSAMP_Y) +  \
                                      OFF);                                    \
    align_buffer_page_end(dst_y_c, kWidth* kHeight);                           \
    align_buffer_page_end(dst_uv_c, 2 * SUBSAMPLE(kWidth, SUBSAMP_X) *         \
                                        SUBSAMPLE(kHeight, SUBSAMP_Y));        \
    align_buffer_page_end(dst_y_opt, kWidth* kHeight);                         \
    align_buffer_page_end(dst_uv_opt, 2 * SUBSAMPLE(kWidth, SUBSAMP_X) *       \
                                          SUBSAMPLE(kHeight, SUBSAMP_Y));      \
    for (int i = 0; i < kHeight; ++i)                                          \
      for (int j = 0; j < kWidth; ++j)                                         \
        src_y[i * kWidth + j + OFF] = (fastrand() & 0xff);                     \
    for (int i = 0; i < SUBSAMPLE(kHeight, SRC_SUBSAMP_Y); ++i) {              \
      for (int j = 0; j < 2 * SUBSAMPLE(kWidth, SRC_SUBSAMP_X); ++j) {         \
        src_uv[(i * 2 * SUBSAMPLE(kWidth, SRC_SUBSAMP_X)) + j + OFF] =         \
            (fastrand() & 0xff);                                               \
      }                                                                        \
    }                                                                          \
    memset(dst_y_c, 1, kWidth* kHeight);                                       \
    memset(dst_uv_c, 2,                                                        \
           2 * SUBSAMPLE(kWidth, SUBSAMP_X) * SUBSAMPLE(kHeight, SUBSAMP_Y));  \
    memset(dst_y_opt, 101, kWidth* kHeight);                                   \
    memset(dst_uv_opt, 102,                                                    \
           2 * SUBSAMPLE(kWidth, SUBSAMP_X) * SUBSAMPLE(kHeight, SUBSAMP_Y));  \
    MaskCpuFlags(disable_cpu_flags_);                                          \
    SRC_FMT_PLANAR##To##FMT_PLANAR(                                            \
        src_y + OFF, kWidth, src_uv + OFF,                                     \
        2 * SUBSAMPLE(kWidth, SRC_SUBSAMP_X), DOY ? dst_y_c : NULL, kWidth,    \
        dst_uv_c, 2 * SUBSAMPLE(kWidth, SUBSAMP_X), kWidth, NEG kHeight);      \
    MaskCpuFlags(benchmark_cpu_info_);                                         \
    for (int i = 0; i < benchmark_iterations_; ++i) {                          \
      SRC_FMT_PLANAR##To##FMT_PLANAR(                                          \
          src_y + OFF, kWidth, src_uv + OFF,                                   \
          2 * SUBSAMPLE(kWidth, SRC_SUBSAMP_X), DOY ? dst_y_opt : NULL,        \
          kWidth, dst_uv_opt, 2 * SUBSAMPLE(kWidth, SUBSAMP_X), kWidth,        \
          NEG kHeight);                                                        \
    }                                                                          \
    if (DOY) {                                                                 \
      for (int i = 0; i < kHeight; ++i) {                                      \
        for (int j = 0; j < kWidth; ++j) {                                     \
          EXPECT_EQ(dst_y_c[i * kWidth + j], dst_y_opt[i * kWidth + j]);       \
        }                                                                      \
      }                                                                        \
    }                                                                          \
    for (int i = 0; i < SUBSAMPLE(kHeight, SUBSAMP_Y); ++i) {                  \
      for (int j = 0; j < 2 * SUBSAMPLE(kWidth, SUBSAMP_X); ++j) {             \
        EXPECT_EQ(dst_uv_c[i * 2 * SUBSAMPLE(kWidth, SUBSAMP_X) + j],          \
                  dst_uv_opt[i * 2 * SUBSAMPLE(kWidth, SUBSAMP_X) + j]);       \
      }                                                                        \
    }                                                                          \
    free_aligned_buffer_page_end(dst_y_c);                                     \
    free_aligned_buffer_page_end(dst_uv_c);                                    \
    free_aligned_buffer_page_end(dst_y_opt);                                   \
    free_aligned_buffer_page_end(dst_uv_opt);                                  \
    free_aligned_buffer_page_end(src_y);                                       \
    free_aligned_buffer_page_end(src_uv);                                      \
  }
// Instantiates the five standard variants of a biplanar-to-biplanar test:
// _Any (width - 4, exercises odd-width paths), _Unaligned (source offset 1),
// _Invert (negative height), _Opt (aligned fast path), and _NullY
// (dst_y == NULL, last argument DOY = 0).
#define TESTBIPLANARTOBP(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y,         \
                         FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y)                     \
  TESTBIPLANARTOBPI(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, FMT_PLANAR,  \
                    SUBSAMP_X, SUBSAMP_Y, benchmark_width_ - 4, _Any, +, 0, 1) \
  TESTBIPLANARTOBPI(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, FMT_PLANAR,  \
                    SUBSAMP_X, SUBSAMP_Y, benchmark_width_, _Unaligned, +, 1,  \
                    1)                                                         \
  TESTBIPLANARTOBPI(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, FMT_PLANAR,  \
                    SUBSAMP_X, SUBSAMP_Y, benchmark_width_, _Invert, -, 0, 1)  \
  TESTBIPLANARTOBPI(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, FMT_PLANAR,  \
                    SUBSAMP_X, SUBSAMP_Y, benchmark_width_, _Opt, +, 0, 1)     \
  TESTBIPLANARTOBPI(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, FMT_PLANAR,  \
                    SUBSAMP_X, SUBSAMP_Y, benchmark_width_, _NullY, +, 0, 0)
// Biplanar-to-biplanar conversions under test (2x2 subsampled both sides).
TESTBIPLANARTOBP(NV21, 2, 2, NV12, 2, 2)
TESTBIPLANARTOBP(NV12, 2, 2, NV12Mirror, 2, 2)
// Generates a test that converts a biplanar (Y + interleaved UV) source into
// a triplanar (Y + U + V) destination, running the C reference path once
// (CPU features masked via disable_cpu_flags_) and the optimized path
// benchmark_iterations_ times, then compares the Y (only when DOY is nonzero),
// U, and V planes byte-for-byte.  W1280 sizes the width (clamped to >= 1),
// N names the test, NEG negates the height for inversion tests, OFF offsets
// the source pointers for unaligned tests.
#define TESTBIPLANARTOPI(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y,         \
                         FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, W1280, N, NEG, OFF, \
                         DOY)                                                  \
  TEST_F(LibYUVConvertTest, SRC_FMT_PLANAR##To##FMT_PLANAR##N) {               \
    const int kWidth = ((W1280) > 0) ? (W1280) : 1;                            \
    const int kHeight = benchmark_height_;                                     \
    align_buffer_page_end(src_y, kWidth* kHeight + OFF);                       \
    align_buffer_page_end(src_uv, 2 * SUBSAMPLE(kWidth, SRC_SUBSAMP_X) *       \
                                          SUBSAMPLE(kHeight, SRC_SUBSAMP_Y) +  \
                                      OFF);                                    \
    align_buffer_page_end(dst_y_c, kWidth* kHeight);                           \
    align_buffer_page_end(dst_u_c, SUBSAMPLE(kWidth, SUBSAMP_X) *              \
                                       SUBSAMPLE(kHeight, SUBSAMP_Y));         \
    align_buffer_page_end(dst_v_c, SUBSAMPLE(kWidth, SUBSAMP_X) *              \
                                       SUBSAMPLE(kHeight, SUBSAMP_Y));         \
    align_buffer_page_end(dst_y_opt, kWidth* kHeight);                         \
    align_buffer_page_end(dst_u_opt, SUBSAMPLE(kWidth, SUBSAMP_X) *            \
                                         SUBSAMPLE(kHeight, SUBSAMP_Y));       \
    align_buffer_page_end(dst_v_opt, SUBSAMPLE(kWidth, SUBSAMP_X) *            \
                                         SUBSAMPLE(kHeight, SUBSAMP_Y));       \
    for (int i = 0; i < kHeight; ++i)                                          \
      for (int j = 0; j < kWidth; ++j)                                         \
        src_y[i * kWidth + j + OFF] = (fastrand() & 0xff);                     \
    for (int i = 0; i < SUBSAMPLE(kHeight, SRC_SUBSAMP_Y); ++i) {              \
      for (int j = 0; j < 2 * SUBSAMPLE(kWidth, SRC_SUBSAMP_X); ++j) {         \
        src_uv[(i * 2 * SUBSAMPLE(kWidth, SRC_SUBSAMP_X)) + j + OFF] =         \
            (fastrand() & 0xff);                                               \
      }                                                                        \
    }                                                                          \
    memset(dst_y_c, 1, kWidth* kHeight);                                       \
    memset(dst_u_c, 2,                                                         \
           SUBSAMPLE(kWidth, SUBSAMP_X) * SUBSAMPLE(kHeight, SUBSAMP_Y));      \
    memset(dst_v_c, 3,                                                         \
           SUBSAMPLE(kWidth, SUBSAMP_X) * SUBSAMPLE(kHeight, SUBSAMP_Y));      \
    memset(dst_y_opt, 101, kWidth* kHeight);                                   \
    memset(dst_u_opt, 102,                                                     \
           SUBSAMPLE(kWidth, SUBSAMP_X) * SUBSAMPLE(kHeight, SUBSAMP_Y));      \
    memset(dst_v_opt, 103,                                                     \
           SUBSAMPLE(kWidth, SUBSAMP_X) * SUBSAMPLE(kHeight, SUBSAMP_Y));      \
    MaskCpuFlags(disable_cpu_flags_);                                          \
    SRC_FMT_PLANAR##To##FMT_PLANAR(                                            \
        src_y + OFF, kWidth, src_uv + OFF,                                     \
        2 * SUBSAMPLE(kWidth, SRC_SUBSAMP_X), DOY ? dst_y_c : NULL, kWidth,    \
        dst_u_c, SUBSAMPLE(kWidth, SUBSAMP_X), dst_v_c,                        \
        SUBSAMPLE(kWidth, SUBSAMP_X), kWidth, NEG kHeight);                    \
    MaskCpuFlags(benchmark_cpu_info_);                                         \
    for (int i = 0; i < benchmark_iterations_; ++i) {                          \
      SRC_FMT_PLANAR##To##FMT_PLANAR(                                          \
          src_y + OFF, kWidth, src_uv + OFF,                                   \
          2 * SUBSAMPLE(kWidth, SRC_SUBSAMP_X), DOY ? dst_y_opt : NULL,        \
          kWidth, dst_u_opt, SUBSAMPLE(kWidth, SUBSAMP_X), dst_v_opt,          \
          SUBSAMPLE(kWidth, SUBSAMP_X), kWidth, NEG kHeight);                  \
    }                                                                          \
    if (DOY) {                                                                 \
      for (int i = 0; i < kHeight; ++i) {                                      \
        for (int j = 0; j < kWidth; ++j) {                                     \
          EXPECT_EQ(dst_y_c[i * kWidth + j], dst_y_opt[i * kWidth + j]);       \
        }                                                                      \
      }                                                                        \
    }                                                                          \
    for (int i = 0; i < SUBSAMPLE(kHeight, SUBSAMP_Y); ++i) {                  \
      for (int j = 0; j < SUBSAMPLE(kWidth, SUBSAMP_X); ++j) {                 \
        EXPECT_EQ(dst_u_c[i * SUBSAMPLE(kWidth, SUBSAMP_X) + j],               \
                  dst_u_opt[i * SUBSAMPLE(kWidth, SUBSAMP_X) + j]);            \
      }                                                                        \
    }                                                                          \
    for (int i = 0; i < SUBSAMPLE(kHeight, SUBSAMP_Y); ++i) {                  \
      for (int j = 0; j < SUBSAMPLE(kWidth, SUBSAMP_X); ++j) {                 \
        EXPECT_EQ(dst_v_c[i * SUBSAMPLE(kWidth, SUBSAMP_X) + j],               \
                  dst_v_opt[i * SUBSAMPLE(kWidth, SUBSAMP_X) + j]);            \
      }                                                                        \
    }                                                                          \
    free_aligned_buffer_page_end(dst_y_c);                                     \
    free_aligned_buffer_page_end(dst_u_c);                                     \
    free_aligned_buffer_page_end(dst_v_c);                                     \
    free_aligned_buffer_page_end(dst_y_opt);                                   \
    free_aligned_buffer_page_end(dst_u_opt);                                   \
    free_aligned_buffer_page_end(dst_v_opt);                                   \
    free_aligned_buffer_page_end(src_y);                                       \
    free_aligned_buffer_page_end(src_uv);                                      \
  }
// Instantiates the five standard variants of a biplanar-to-triplanar test:
// _Any, _Unaligned, _Invert, _Opt, and _NullY (DOY = 0 skips the Y plane).
#define TESTBIPLANARTOP(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y,          \
                        FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y)                      \
  TESTBIPLANARTOPI(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, FMT_PLANAR,   \
                   SUBSAMP_X, SUBSAMP_Y, benchmark_width_ - 4, _Any, +, 0, 1)  \
  TESTBIPLANARTOPI(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, FMT_PLANAR,   \
                   SUBSAMP_X, SUBSAMP_Y, benchmark_width_, _Unaligned, +, 1,   \
                   1)                                                          \
  TESTBIPLANARTOPI(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, FMT_PLANAR,   \
                   SUBSAMP_X, SUBSAMP_Y, benchmark_width_, _Invert, -, 0, 1)   \
  TESTBIPLANARTOPI(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, FMT_PLANAR,   \
                   SUBSAMP_X, SUBSAMP_Y, benchmark_width_, _Opt, +, 0, 1)      \
  TESTBIPLANARTOPI(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, FMT_PLANAR,   \
                   SUBSAMP_X, SUBSAMP_Y, benchmark_width_, _NullY, +, 0, 0)
// NV12/NV21 to I420 conversions.
TESTBIPLANARTOP(NV12, 2, 2, I420, 2, 2)
TESTBIPLANARTOP(NV21, 2, 2, I420, 2, 2)
  524. #define ALIGNINT(V, ALIGN) (((V) + (ALIGN)-1) / (ALIGN) * (ALIGN))
// Generates a test converting a triplanar YUV source (Y + U + V) into a
// packed format FMT_B with BPP_B bytes per pixel.  The destination stride is
// rounded up to ALIGN and the height to YALIGN.  Runs the C reference path
// once and the optimized path benchmark_iterations_ times, prints the timing
// of each (microseconds), then compares the first kWidth * BPP_B * kHeight
// destination bytes.  NEG negates the height; OFF offsets all pointers for
// the unaligned variant.
#define TESTPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
                       YALIGN, W1280, N, NEG, OFF)                            \
  TEST_F(LibYUVConvertTest, FMT_PLANAR##To##FMT_B##N) {                       \
    const int kWidth = ((W1280) > 0) ? (W1280) : 1;                           \
    const int kHeight = ALIGNINT(benchmark_height_, YALIGN);                  \
    const int kStrideB = ALIGNINT(kWidth * BPP_B, ALIGN);                     \
    const int kStrideUV = SUBSAMPLE(kWidth, SUBSAMP_X);                       \
    const int kSizeUV = kStrideUV * SUBSAMPLE(kHeight, SUBSAMP_Y);            \
    align_buffer_page_end(src_y, kWidth* kHeight + OFF);                      \
    align_buffer_page_end(src_u, kSizeUV + OFF);                              \
    align_buffer_page_end(src_v, kSizeUV + OFF);                              \
    align_buffer_page_end(dst_argb_c, kStrideB* kHeight + OFF);               \
    align_buffer_page_end(dst_argb_opt, kStrideB* kHeight + OFF);             \
    for (int i = 0; i < kWidth * kHeight; ++i) {                              \
      src_y[i + OFF] = (fastrand() & 0xff);                                   \
    }                                                                         \
    for (int i = 0; i < kSizeUV; ++i) {                                       \
      src_u[i + OFF] = (fastrand() & 0xff);                                   \
      src_v[i + OFF] = (fastrand() & 0xff);                                   \
    }                                                                         \
    memset(dst_argb_c + OFF, 1, kStrideB * kHeight);                          \
    memset(dst_argb_opt + OFF, 101, kStrideB * kHeight);                      \
    MaskCpuFlags(disable_cpu_flags_);                                         \
    double time0 = get_time();                                                \
    FMT_PLANAR##To##FMT_B(src_y + OFF, kWidth, src_u + OFF, kStrideUV,        \
                          src_v + OFF, kStrideUV, dst_argb_c + OFF, kStrideB, \
                          kWidth, NEG kHeight);                               \
    double time1 = get_time();                                                \
    MaskCpuFlags(benchmark_cpu_info_);                                        \
    for (int i = 0; i < benchmark_iterations_; ++i) {                         \
      FMT_PLANAR##To##FMT_B(src_y + OFF, kWidth, src_u + OFF, kStrideUV,      \
                            src_v + OFF, kStrideUV, dst_argb_opt + OFF,       \
                            kStrideB, kWidth, NEG kHeight);                   \
    }                                                                         \
    double time2 = get_time();                                                \
    printf(" %8d us C - %8d us OPT\n",                                        \
           static_cast<int>((time1 - time0) * 1e6),                           \
           static_cast<int>((time2 - time1) * 1e6 / benchmark_iterations_));  \
    for (int i = 0; i < kWidth * BPP_B * kHeight; ++i) {                      \
      EXPECT_EQ(dst_argb_c[i + OFF], dst_argb_opt[i + OFF]);                  \
    }                                                                         \
    free_aligned_buffer_page_end(src_y);                                      \
    free_aligned_buffer_page_end(src_u);                                      \
    free_aligned_buffer_page_end(src_v);                                      \
    free_aligned_buffer_page_end(dst_argb_c);                                 \
    free_aligned_buffer_page_end(dst_argb_opt);                               \
  }
// Instantiates the four standard variants of a triplanar-to-packed test:
// _Any, _Unaligned, _Invert, and _Opt.
#define TESTPLANARTOB(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
                      YALIGN)                                                \
  TESTPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN,      \
                 YALIGN, benchmark_width_ - 4, _Any, +, 0)                   \
  TESTPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN,      \
                 YALIGN, benchmark_width_, _Unaligned, +, 1)                 \
  TESTPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN,      \
                 YALIGN, benchmark_width_, _Invert, -, 0)                    \
  TESTPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN,      \
                 YALIGN, benchmark_width_, _Opt, +, 0)
// Triplanar-to-packed conversions under test.  The I/J/H/U prefixes select
// different YUV matrices; 420/422/444 select the chroma subsampling.
// 32-bit and 24-bit RGB destinations:
TESTPLANARTOB(I420, 2, 2, ARGB, 4, 4, 1)
TESTPLANARTOB(I420, 2, 2, ABGR, 4, 4, 1)
TESTPLANARTOB(J420, 2, 2, ARGB, 4, 4, 1)
TESTPLANARTOB(J420, 2, 2, ABGR, 4, 4, 1)
TESTPLANARTOB(H420, 2, 2, ARGB, 4, 4, 1)
TESTPLANARTOB(H420, 2, 2, ABGR, 4, 4, 1)
TESTPLANARTOB(U420, 2, 2, ARGB, 4, 4, 1)
TESTPLANARTOB(U420, 2, 2, ABGR, 4, 4, 1)
TESTPLANARTOB(I420, 2, 2, BGRA, 4, 4, 1)
TESTPLANARTOB(I420, 2, 2, RGBA, 4, 4, 1)
TESTPLANARTOB(I420, 2, 2, RAW, 3, 3, 1)
TESTPLANARTOB(I420, 2, 2, RGB24, 3, 3, 1)
TESTPLANARTOB(J420, 2, 2, RAW, 3, 3, 1)
TESTPLANARTOB(J420, 2, 2, RGB24, 3, 3, 1)
TESTPLANARTOB(H420, 2, 2, RAW, 3, 3, 1)
TESTPLANARTOB(H420, 2, 2, RGB24, 3, 3, 1)
// 16-bit destinations are byte-order sensitive; little-endian hosts only.
#ifdef LITTLE_ENDIAN_ONLY_TEST
TESTPLANARTOB(I420, 2, 2, RGB565, 2, 2, 1)
TESTPLANARTOB(J420, 2, 2, RGB565, 2, 2, 1)
TESTPLANARTOB(H420, 2, 2, RGB565, 2, 2, 1)
TESTPLANARTOB(I420, 2, 2, ARGB1555, 2, 2, 1)
TESTPLANARTOB(I420, 2, 2, ARGB4444, 2, 2, 1)
TESTPLANARTOB(I422, 2, 1, RGB565, 2, 2, 1)
#endif
TESTPLANARTOB(I422, 2, 1, ARGB, 4, 4, 1)
TESTPLANARTOB(I422, 2, 1, ABGR, 4, 4, 1)
TESTPLANARTOB(J422, 2, 1, ARGB, 4, 4, 1)
TESTPLANARTOB(J422, 2, 1, ABGR, 4, 4, 1)
TESTPLANARTOB(H422, 2, 1, ARGB, 4, 4, 1)
TESTPLANARTOB(H422, 2, 1, ABGR, 4, 4, 1)
TESTPLANARTOB(U422, 2, 1, ARGB, 4, 4, 1)
TESTPLANARTOB(U422, 2, 1, ABGR, 4, 4, 1)
TESTPLANARTOB(I422, 2, 1, BGRA, 4, 4, 1)
TESTPLANARTOB(I422, 2, 1, RGBA, 4, 4, 1)
TESTPLANARTOB(I444, 1, 1, ARGB, 4, 4, 1)
TESTPLANARTOB(I444, 1, 1, ABGR, 4, 4, 1)
TESTPLANARTOB(J444, 1, 1, ARGB, 4, 4, 1)
TESTPLANARTOB(J444, 1, 1, ABGR, 4, 4, 1)
TESTPLANARTOB(H444, 1, 1, ARGB, 4, 4, 1)
TESTPLANARTOB(H444, 1, 1, ABGR, 4, 4, 1)
TESTPLANARTOB(U444, 1, 1, ARGB, 4, 4, 1)
TESTPLANARTOB(U444, 1, 1, ABGR, 4, 4, 1)
// Packed-YUV and single-plane destinations:
TESTPLANARTOB(I420, 2, 2, YUY2, 2, 4, 1)
TESTPLANARTOB(I420, 2, 2, UYVY, 2, 4, 1)
TESTPLANARTOB(I422, 2, 1, YUY2, 2, 4, 1)
TESTPLANARTOB(I422, 2, 1, UYVY, 2, 4, 1)
TESTPLANARTOB(I420, 2, 2, I400, 1, 1, 1)
TESTPLANARTOB(J420, 2, 2, J400, 1, 1, 1)
#ifdef LITTLE_ENDIAN_ONLY_TEST
TESTPLANARTOB(I420, 2, 2, AR30, 4, 4, 1)
TESTPLANARTOB(H420, 2, 2, AR30, 4, 4, 1)
#endif
// Generates a test converting a quad-planar source (Y + U + V + Alpha) into
// a packed format FMT_B.  ATTEN is passed as the final attenuate flag of the
// conversion function.  C reference and optimized outputs are compared over
// the first kWidth * BPP_B * kHeight destination bytes.
#define TESTQPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
                        YALIGN, W1280, N, NEG, OFF, ATTEN)                     \
  TEST_F(LibYUVConvertTest, FMT_PLANAR##To##FMT_B##N) {                        \
    const int kWidth = ((W1280) > 0) ? (W1280) : 1;                            \
    const int kHeight = ALIGNINT(benchmark_height_, YALIGN);                   \
    const int kStrideB = ALIGNINT(kWidth * BPP_B, ALIGN);                      \
    const int kStrideUV = SUBSAMPLE(kWidth, SUBSAMP_X);                        \
    const int kSizeUV = kStrideUV * SUBSAMPLE(kHeight, SUBSAMP_Y);             \
    align_buffer_page_end(src_y, kWidth* kHeight + OFF);                       \
    align_buffer_page_end(src_u, kSizeUV + OFF);                               \
    align_buffer_page_end(src_v, kSizeUV + OFF);                               \
    align_buffer_page_end(src_a, kWidth* kHeight + OFF);                       \
    align_buffer_page_end(dst_argb_c, kStrideB* kHeight + OFF);                \
    align_buffer_page_end(dst_argb_opt, kStrideB* kHeight + OFF);              \
    for (int i = 0; i < kWidth * kHeight; ++i) {                               \
      src_y[i + OFF] = (fastrand() & 0xff);                                    \
      src_a[i + OFF] = (fastrand() & 0xff);                                    \
    }                                                                          \
    for (int i = 0; i < kSizeUV; ++i) {                                        \
      src_u[i + OFF] = (fastrand() & 0xff);                                    \
      src_v[i + OFF] = (fastrand() & 0xff);                                    \
    }                                                                          \
    memset(dst_argb_c + OFF, 1, kStrideB * kHeight);                           \
    memset(dst_argb_opt + OFF, 101, kStrideB * kHeight);                       \
    MaskCpuFlags(disable_cpu_flags_);                                          \
    FMT_PLANAR##To##FMT_B(src_y + OFF, kWidth, src_u + OFF, kStrideUV,         \
                          src_v + OFF, kStrideUV, src_a + OFF, kWidth,         \
                          dst_argb_c + OFF, kStrideB, kWidth, NEG kHeight,     \
                          ATTEN);                                              \
    MaskCpuFlags(benchmark_cpu_info_);                                         \
    for (int i = 0; i < benchmark_iterations_; ++i) {                          \
      FMT_PLANAR##To##FMT_B(src_y + OFF, kWidth, src_u + OFF, kStrideUV,       \
                            src_v + OFF, kStrideUV, src_a + OFF, kWidth,       \
                            dst_argb_opt + OFF, kStrideB, kWidth, NEG kHeight, \
                            ATTEN);                                            \
    }                                                                          \
    for (int i = 0; i < kWidth * BPP_B * kHeight; ++i) {                       \
      EXPECT_EQ(dst_argb_c[i + OFF], dst_argb_opt[i + OFF]);                   \
    }                                                                          \
    free_aligned_buffer_page_end(src_y);                                       \
    free_aligned_buffer_page_end(src_u);                                       \
    free_aligned_buffer_page_end(src_v);                                       \
    free_aligned_buffer_page_end(src_a);                                       \
    free_aligned_buffer_page_end(dst_argb_c);                                  \
    free_aligned_buffer_page_end(dst_argb_opt);                                \
  }
// Instantiates the five standard variants of a quad-planar (with alpha) test:
// _Any, _Unaligned, _Invert, _Opt, and _Premult (ATTEN = 1 enables alpha
// attenuation).
#define TESTQPLANARTOB(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
                       YALIGN)                                                \
  TESTQPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN,      \
                  YALIGN, benchmark_width_ - 4, _Any, +, 0, 0)                \
  TESTQPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN,      \
                  YALIGN, benchmark_width_, _Unaligned, +, 1, 0)              \
  TESTQPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN,      \
                  YALIGN, benchmark_width_, _Invert, -, 0, 0)                 \
  TESTQPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN,      \
                  YALIGN, benchmark_width_, _Opt, +, 0, 0)                    \
  TESTQPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN,      \
                  YALIGN, benchmark_width_, _Premult, +, 0, 1)
  692. #define J420AlphaToARGB(a, b, c, d, e, f, g, h, i, j, k, l, m) \
  693. I420AlphaToARGBMatrix(a, b, c, d, e, f, g, h, i, j, &kYuvJPEGConstants, k, \
  694. l, m)
  695. #define J420AlphaToABGR(a, b, c, d, e, f, g, h, i, j, k, l, m) \
  696. I420AlphaToABGRMatrix(a, b, c, d, e, f, g, h, i, j, &kYuvJPEGConstants, k, \
  697. l, m)
  698. #define H420AlphaToARGB(a, b, c, d, e, f, g, h, i, j, k, l, m) \
  699. I420AlphaToARGBMatrix(a, b, c, d, e, f, g, h, i, j, &kYuvH709Constants, k, \
  700. l, m)
  701. #define H420AlphaToABGR(a, b, c, d, e, f, g, h, i, j, k, l, m) \
  702. I420AlphaToABGRMatrix(a, b, c, d, e, f, g, h, i, j, &kYuvH709Constants, k, \
  703. l, m)
  704. #define U420AlphaToARGB(a, b, c, d, e, f, g, h, i, j, k, l, m) \
  705. I420AlphaToARGBMatrix(a, b, c, d, e, f, g, h, i, j, &kYuv2020Constants, k, \
  706. l, m)
  707. #define U420AlphaToABGR(a, b, c, d, e, f, g, h, i, j, k, l, m) \
  708. I420AlphaToABGRMatrix(a, b, c, d, e, f, g, h, i, j, &kYuv2020Constants, k, \
  709. l, m)
// Planar-with-alpha conversions under test, one pair per YUV matrix.
TESTQPLANARTOB(I420Alpha, 2, 2, ARGB, 4, 4, 1)
TESTQPLANARTOB(I420Alpha, 2, 2, ABGR, 4, 4, 1)
TESTQPLANARTOB(J420Alpha, 2, 2, ARGB, 4, 4, 1)
TESTQPLANARTOB(J420Alpha, 2, 2, ABGR, 4, 4, 1)
TESTQPLANARTOB(H420Alpha, 2, 2, ARGB, 4, 4, 1)
TESTQPLANARTOB(H420Alpha, 2, 2, ABGR, 4, 4, 1)
TESTQPLANARTOB(U420Alpha, 2, 2, ARGB, 4, 4, 1)
TESTQPLANARTOB(U420Alpha, 2, 2, ABGR, 4, 4, 1)
// Generates a test converting a biplanar (Y + interleaved UV) source into a
// packed format FMT_B.  Because some destinations (e.g. RGB565) pack bits,
// both outputs are re-expanded to 32-bit ARGB via FMT_C##ToARGB before the
// byte-for-byte comparison.
#define TESTBIPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, FMT_C,       \
                         BPP_B, W1280, N, NEG, OFF)                            \
  TEST_F(LibYUVConvertTest, FMT_PLANAR##To##FMT_B##N) {                        \
    const int kWidth = ((W1280) > 0) ? (W1280) : 1;                            \
    const int kHeight = benchmark_height_;                                     \
    const int kStrideB = kWidth * BPP_B;                                       \
    const int kStrideUV = SUBSAMPLE(kWidth, SUBSAMP_X);                        \
    align_buffer_page_end(src_y, kWidth* kHeight + OFF);                       \
    align_buffer_page_end(src_uv,                                              \
                          kStrideUV* SUBSAMPLE(kHeight, SUBSAMP_Y) * 2 + OFF); \
    align_buffer_page_end(dst_argb_c, kStrideB* kHeight);                      \
    align_buffer_page_end(dst_argb_opt, kStrideB* kHeight);                    \
    for (int i = 0; i < kHeight; ++i)                                          \
      for (int j = 0; j < kWidth; ++j)                                         \
        src_y[i * kWidth + j + OFF] = (fastrand() & 0xff);                     \
    for (int i = 0; i < SUBSAMPLE(kHeight, SUBSAMP_Y); ++i) {                  \
      for (int j = 0; j < kStrideUV * 2; ++j) {                                \
        src_uv[i * kStrideUV * 2 + j + OFF] = (fastrand() & 0xff);             \
      }                                                                        \
    }                                                                          \
    memset(dst_argb_c, 1, kStrideB* kHeight);                                  \
    memset(dst_argb_opt, 101, kStrideB* kHeight);                              \
    MaskCpuFlags(disable_cpu_flags_);                                          \
    FMT_PLANAR##To##FMT_B(src_y + OFF, kWidth, src_uv + OFF, kStrideUV * 2,    \
                          dst_argb_c, kWidth * BPP_B, kWidth, NEG kHeight);    \
    MaskCpuFlags(benchmark_cpu_info_);                                         \
    for (int i = 0; i < benchmark_iterations_; ++i) {                          \
      FMT_PLANAR##To##FMT_B(src_y + OFF, kWidth, src_uv + OFF, kStrideUV * 2,  \
                            dst_argb_opt, kWidth * BPP_B, kWidth,              \
                            NEG kHeight);                                      \
    }                                                                          \
    /* Convert to ARGB so 565 is expanded to bytes that can be compared. */    \
    align_buffer_page_end(dst_argb32_c, kWidth * 4 * kHeight);                 \
    align_buffer_page_end(dst_argb32_opt, kWidth * 4 * kHeight);               \
    memset(dst_argb32_c, 2, kWidth * 4 * kHeight);                             \
    memset(dst_argb32_opt, 102, kWidth * 4 * kHeight);                         \
    FMT_C##ToARGB(dst_argb_c, kStrideB, dst_argb32_c, kWidth * 4, kWidth,      \
                  kHeight);                                                    \
    FMT_C##ToARGB(dst_argb_opt, kStrideB, dst_argb32_opt, kWidth * 4, kWidth,  \
                  kHeight);                                                    \
    for (int i = 0; i < kHeight; ++i) {                                        \
      for (int j = 0; j < kWidth * 4; ++j) {                                   \
        EXPECT_EQ(dst_argb32_c[i * kWidth * 4 + j],                            \
                  dst_argb32_opt[i * kWidth * 4 + j]);                         \
      }                                                                        \
    }                                                                          \
    free_aligned_buffer_page_end(src_y);                                       \
    free_aligned_buffer_page_end(src_uv);                                      \
    free_aligned_buffer_page_end(dst_argb_c);                                  \
    free_aligned_buffer_page_end(dst_argb_opt);                                \
    free_aligned_buffer_page_end(dst_argb32_c);                                \
    free_aligned_buffer_page_end(dst_argb32_opt);                              \
  }
// Instantiates the four standard variants of a biplanar-to-packed test:
// _Any, _Unaligned, _Invert, and _Opt.
#define TESTBIPLANARTOB(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, FMT_C, BPP_B) \
  TESTBIPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, FMT_C, BPP_B,      \
                   benchmark_width_ - 4, _Any, +, 0)                           \
  TESTBIPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, FMT_C, BPP_B,      \
                   benchmark_width_, _Unaligned, +, 1)                         \
  TESTBIPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, FMT_C, BPP_B,      \
                   benchmark_width_, _Invert, -, 0)                            \
  TESTBIPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, FMT_C, BPP_B,      \
                   benchmark_width_, _Opt, +, 0)
  780. #define JNV12ToARGB(a, b, c, d, e, f, g, h) \
  781. NV12ToARGBMatrix(a, b, c, d, e, f, &kYuvJPEGConstants, g, h)
  782. #define JNV21ToARGB(a, b, c, d, e, f, g, h) \
  783. NV21ToARGBMatrix(a, b, c, d, e, f, &kYuvJPEGConstants, g, h)
  784. #define JNV12ToABGR(a, b, c, d, e, f, g, h) \
  785. NV21ToARGBMatrix(a, b, c, d, e, f, &kYvuJPEGConstants, g, h)
  786. #define JNV21ToABGR(a, b, c, d, e, f, g, h) \
  787. NV12ToARGBMatrix(a, b, c, d, e, f, &kYvuJPEGConstants, g, h)
  788. #define JNV12ToRGB24(a, b, c, d, e, f, g, h) \
  789. NV12ToRGB24Matrix(a, b, c, d, e, f, &kYuvJPEGConstants, g, h)
  790. #define JNV21ToRGB24(a, b, c, d, e, f, g, h) \
  791. NV21ToRGB24Matrix(a, b, c, d, e, f, &kYuvJPEGConstants, g, h)
  792. #define JNV12ToRAW(a, b, c, d, e, f, g, h) \
  793. NV21ToRGB24Matrix(a, b, c, d, e, f, &kYvuJPEGConstants, g, h)
  794. #define JNV21ToRAW(a, b, c, d, e, f, g, h) \
  795. NV12ToRGB24Matrix(a, b, c, d, e, f, &kYvuJPEGConstants, g, h)
  796. #define JNV12ToRGB565(a, b, c, d, e, f, g, h) \
  797. NV12ToRGB565Matrix(a, b, c, d, e, f, &kYuvJPEGConstants, g, h)
// JPEG full-range NV12/NV21 conversions.
TESTBIPLANARTOB(JNV12, 2, 2, ARGB, ARGB, 4)
TESTBIPLANARTOB(JNV21, 2, 2, ARGB, ARGB, 4)
TESTBIPLANARTOB(JNV12, 2, 2, ABGR, ABGR, 4)
TESTBIPLANARTOB(JNV21, 2, 2, ABGR, ABGR, 4)
TESTBIPLANARTOB(JNV12, 2, 2, RGB24, RGB24, 3)
TESTBIPLANARTOB(JNV21, 2, 2, RGB24, RGB24, 3)
TESTBIPLANARTOB(JNV12, 2, 2, RAW, RAW, 3)
TESTBIPLANARTOB(JNV21, 2, 2, RAW, RAW, 3)
#ifdef LITTLE_ENDIAN_ONLY_TEST
TESTBIPLANARTOB(JNV12, 2, 2, RGB565, RGB565, 2)
#endif
// Limited-range NV12/NV21 conversions.
TESTBIPLANARTOB(NV12, 2, 2, ARGB, ARGB, 4)
TESTBIPLANARTOB(NV21, 2, 2, ARGB, ARGB, 4)
TESTBIPLANARTOB(NV12, 2, 2, ABGR, ABGR, 4)
TESTBIPLANARTOB(NV21, 2, 2, ABGR, ABGR, 4)
TESTBIPLANARTOB(NV12, 2, 2, RGB24, RGB24, 3)
TESTBIPLANARTOB(NV21, 2, 2, RGB24, RGB24, 3)
TESTBIPLANARTOB(NV12, 2, 2, RAW, RAW, 3)
TESTBIPLANARTOB(NV21, 2, 2, RAW, RAW, 3)
TESTBIPLANARTOB(NV21, 2, 2, YUV24, RAW, 3)
#ifdef LITTLE_ENDIAN_ONLY_TEST
TESTBIPLANARTOB(NV12, 2, 2, RGB565, RGB565, 2)
#endif
// Generates a test converting a packed source FMT_A into a triplanar
// destination.  The U and V planes are carved out of a single dst_uv buffer:
// U rows start at dst_uv, V rows at dst_uv + kStrideUV, both with stride
// kStrideUV * 2, so the comparison loop walks 2 * subsampled-height rows of
// one interleaved-by-row buffer.
#define TESTATOPLANARI(FMT_A, BPP_A, YALIGN, FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, \
                       W1280, N, NEG, OFF)                                     \
  TEST_F(LibYUVConvertTest, FMT_A##To##FMT_PLANAR##N) {                        \
    const int kWidth = ((W1280) > 0) ? (W1280) : 1;                            \
    const int kHeight = ALIGNINT(benchmark_height_, YALIGN);                   \
    const int kStrideUV = SUBSAMPLE(kWidth, SUBSAMP_X);                        \
    const int kStride = (kStrideUV * SUBSAMP_X * 8 * BPP_A + 7) / 8;           \
    align_buffer_page_end(src_argb, kStride* kHeight + OFF);                   \
    align_buffer_page_end(dst_y_c, kWidth* kHeight);                           \
    align_buffer_page_end(dst_uv_c,                                            \
                          kStrideUV * 2 * SUBSAMPLE(kHeight, SUBSAMP_Y));      \
    align_buffer_page_end(dst_y_opt, kWidth* kHeight);                         \
    align_buffer_page_end(dst_uv_opt,                                          \
                          kStrideUV * 2 * SUBSAMPLE(kHeight, SUBSAMP_Y));      \
    memset(dst_y_c, 1, kWidth* kHeight);                                       \
    memset(dst_uv_c, 2, kStrideUV * 2 * SUBSAMPLE(kHeight, SUBSAMP_Y));        \
    memset(dst_y_opt, 101, kWidth* kHeight);                                   \
    memset(dst_uv_opt, 102, kStrideUV * 2 * SUBSAMPLE(kHeight, SUBSAMP_Y));    \
    for (int i = 0; i < kHeight; ++i)                                          \
      for (int j = 0; j < kStride; ++j)                                        \
        src_argb[(i * kStride) + j + OFF] = (fastrand() & 0xff);               \
    MaskCpuFlags(disable_cpu_flags_);                                          \
    FMT_A##To##FMT_PLANAR(src_argb + OFF, kStride, dst_y_c, kWidth, dst_uv_c,  \
                          kStrideUV * 2, dst_uv_c + kStrideUV, kStrideUV * 2,  \
                          kWidth, NEG kHeight);                                \
    MaskCpuFlags(benchmark_cpu_info_);                                         \
    for (int i = 0; i < benchmark_iterations_; ++i) {                          \
      FMT_A##To##FMT_PLANAR(src_argb + OFF, kStride, dst_y_opt, kWidth,        \
                            dst_uv_opt, kStrideUV * 2, dst_uv_opt + kStrideUV, \
                            kStrideUV * 2, kWidth, NEG kHeight);               \
    }                                                                          \
    for (int i = 0; i < kHeight; ++i) {                                        \
      for (int j = 0; j < kWidth; ++j) {                                       \
        EXPECT_EQ(dst_y_c[i * kWidth + j], dst_y_opt[i * kWidth + j]);         \
      }                                                                        \
    }                                                                          \
    for (int i = 0; i < SUBSAMPLE(kHeight, SUBSAMP_Y) * 2; ++i) {              \
      for (int j = 0; j < kStrideUV; ++j) {                                    \
        EXPECT_EQ(dst_uv_c[i * kStrideUV + j], dst_uv_opt[i * kStrideUV + j]); \
      }                                                                        \
    }                                                                          \
    free_aligned_buffer_page_end(dst_y_c);                                     \
    free_aligned_buffer_page_end(dst_uv_c);                                    \
    free_aligned_buffer_page_end(dst_y_opt);                                   \
    free_aligned_buffer_page_end(dst_uv_opt);                                  \
    free_aligned_buffer_page_end(src_argb);                                    \
  }
// Instantiates the four standard variants of a packed-to-triplanar test:
// _Any, _Unaligned, _Invert, and _Opt.
#define TESTATOPLANAR(FMT_A, BPP_A, YALIGN, FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y) \
  TESTATOPLANARI(FMT_A, BPP_A, YALIGN, FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y,      \
                 benchmark_width_ - 4, _Any, +, 0)                            \
  TESTATOPLANARI(FMT_A, BPP_A, YALIGN, FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y,      \
                 benchmark_width_, _Unaligned, +, 1)                          \
  TESTATOPLANARI(FMT_A, BPP_A, YALIGN, FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y,      \
                 benchmark_width_, _Invert, -, 0)                             \
  TESTATOPLANARI(FMT_A, BPP_A, YALIGN, FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y,      \
                 benchmark_width_, _Opt, +, 0)
// Packed-to-triplanar conversions under test.
TESTATOPLANAR(ABGR, 4, 1, I420, 2, 2)
TESTATOPLANAR(ARGB, 4, 1, I420, 2, 2)
TESTATOPLANAR(ARGB, 4, 1, I422, 2, 1)
TESTATOPLANAR(ARGB, 4, 1, I444, 1, 1)
TESTATOPLANAR(ARGB, 4, 1, J420, 2, 2)
TESTATOPLANAR(ARGB, 4, 1, J422, 2, 1)
#ifdef LITTLE_ENDIAN_ONLY_TEST
TESTATOPLANAR(ARGB4444, 2, 1, I420, 2, 2)
TESTATOPLANAR(RGB565, 2, 1, I420, 2, 2)
TESTATOPLANAR(ARGB1555, 2, 1, I420, 2, 2)
#endif
TESTATOPLANAR(BGRA, 4, 1, I420, 2, 2)
TESTATOPLANAR(I400, 1, 1, I420, 2, 2)
TESTATOPLANAR(J400, 1, 1, J420, 2, 2)
TESTATOPLANAR(RAW, 3, 1, I420, 2, 2)
TESTATOPLANAR(RGB24, 3, 1, I420, 2, 2)
TESTATOPLANAR(RGB24, 3, 1, J420, 2, 2)
TESTATOPLANAR(RGBA, 4, 1, I420, 2, 2)
TESTATOPLANAR(UYVY, 2, 1, I420, 2, 2)
TESTATOPLANAR(UYVY, 2, 1, I422, 2, 1)
TESTATOPLANAR(YUY2, 2, 1, I420, 2, 2)
TESTATOPLANAR(YUY2, 2, 1, I422, 2, 1)
// Generates a test converting a packed source FMT_A (horizontally subsampled
// by SUB_A, e.g. YUY2 where one 4-byte group covers 2 pixels) into a biplanar
// destination (Y plane + interleaved UV plane), comparing the C reference
// against the optimized path.
#define TESTATOBIPLANARI(FMT_A, SUB_A, BPP_A, FMT_PLANAR, SUBSAMP_X,          \
                         SUBSAMP_Y, W1280, N, NEG, OFF)                       \
  TEST_F(LibYUVConvertTest, FMT_A##To##FMT_PLANAR##N) {                       \
    const int kWidth = ((W1280) > 0) ? (W1280) : 1;                           \
    const int kHeight = benchmark_height_;                                    \
    const int kStride = SUBSAMPLE(kWidth, SUB_A) * BPP_A;                     \
    const int kStrideUV = SUBSAMPLE(kWidth, SUBSAMP_X);                       \
    align_buffer_page_end(src_argb, kStride* kHeight + OFF);                  \
    align_buffer_page_end(dst_y_c, kWidth* kHeight);                          \
    align_buffer_page_end(dst_uv_c,                                           \
                          kStrideUV * 2 * SUBSAMPLE(kHeight, SUBSAMP_Y));     \
    align_buffer_page_end(dst_y_opt, kWidth* kHeight);                        \
    align_buffer_page_end(dst_uv_opt,                                         \
                          kStrideUV * 2 * SUBSAMPLE(kHeight, SUBSAMP_Y));     \
    for (int i = 0; i < kHeight; ++i)                                         \
      for (int j = 0; j < kStride; ++j)                                       \
        src_argb[(i * kStride) + j + OFF] = (fastrand() & 0xff);              \
    memset(dst_y_c, 1, kWidth* kHeight);                                      \
    memset(dst_uv_c, 2, kStrideUV * 2 * SUBSAMPLE(kHeight, SUBSAMP_Y));       \
    memset(dst_y_opt, 101, kWidth* kHeight);                                  \
    memset(dst_uv_opt, 102, kStrideUV * 2 * SUBSAMPLE(kHeight, SUBSAMP_Y));   \
    MaskCpuFlags(disable_cpu_flags_);                                         \
    FMT_A##To##FMT_PLANAR(src_argb + OFF, kStride, dst_y_c, kWidth, dst_uv_c, \
                          kStrideUV * 2, kWidth, NEG kHeight);                \
    MaskCpuFlags(benchmark_cpu_info_);                                        \
    for (int i = 0; i < benchmark_iterations_; ++i) {                         \
      FMT_A##To##FMT_PLANAR(src_argb + OFF, kStride, dst_y_opt, kWidth,       \
                            dst_uv_opt, kStrideUV * 2, kWidth, NEG kHeight);  \
    }                                                                         \
    for (int i = 0; i < kHeight; ++i) {                                       \
      for (int j = 0; j < kWidth; ++j) {                                      \
        EXPECT_EQ(dst_y_c[i * kWidth + j], dst_y_opt[i * kWidth + j]);        \
      }                                                                       \
    }                                                                         \
    for (int i = 0; i < SUBSAMPLE(kHeight, SUBSAMP_Y); ++i) {                 \
      for (int j = 0; j < kStrideUV * 2; ++j) {                               \
        EXPECT_EQ(dst_uv_c[i * kStrideUV * 2 + j],                            \
                  dst_uv_opt[i * kStrideUV * 2 + j]);                         \
      }                                                                       \
    }                                                                         \
    free_aligned_buffer_page_end(dst_y_c);                                    \
    free_aligned_buffer_page_end(dst_uv_c);                                   \
    free_aligned_buffer_page_end(dst_y_opt);                                  \
    free_aligned_buffer_page_end(dst_uv_opt);                                 \
    free_aligned_buffer_page_end(src_argb);                                   \
  }
// Instantiates the four standard variants of a packed-to-biplanar test:
// _Any, _Unaligned, _Invert, and _Opt.
#define TESTATOBIPLANAR(FMT_A, SUB_A, BPP_A, FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y) \
  TESTATOBIPLANARI(FMT_A, SUB_A, BPP_A, FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y,      \
                   benchmark_width_ - 4, _Any, +, 0)                           \
  TESTATOBIPLANARI(FMT_A, SUB_A, BPP_A, FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y,      \
                   benchmark_width_, _Unaligned, +, 1)                         \
  TESTATOBIPLANARI(FMT_A, SUB_A, BPP_A, FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y,      \
                   benchmark_width_, _Invert, -, 0)                            \
  TESTATOBIPLANARI(FMT_A, SUB_A, BPP_A, FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y,      \
                   benchmark_width_, _Opt, +, 0)
// Packed-to-biplanar conversions under test.
TESTATOBIPLANAR(ARGB, 1, 4, NV12, 2, 2)
TESTATOBIPLANAR(ARGB, 1, 4, NV21, 2, 2)
TESTATOBIPLANAR(ABGR, 1, 4, NV12, 2, 2)
TESTATOBIPLANAR(ABGR, 1, 4, NV21, 2, 2)
TESTATOBIPLANAR(YUY2, 2, 4, NV12, 2, 2)
TESTATOBIPLANAR(UYVY, 2, 4, NV12, 2, 2)
TESTATOBIPLANAR(AYUV, 1, 4, NV12, 2, 2)
TESTATOBIPLANAR(AYUV, 1, 4, NV21, 2, 2)
// Generates a test converting one packed format to another.  Strides are
// rounded up to STRIDE_A / STRIDE_B and heights to HEIGHT_A / HEIGHT_B.
// The entire destination buffer (including any stride padding, which both
// paths leave at their memset values only if neither writes it) is compared
// byte-for-byte between the C reference and optimized paths.
#define TESTATOBI(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B,  \
                  HEIGHT_B, W1280, N, NEG, OFF)                              \
  TEST_F(LibYUVConvertTest, FMT_A##To##FMT_B##N) {                           \
    const int kWidth = ((W1280) > 0) ? (W1280) : 1;                          \
    const int kHeight = benchmark_height_;                                   \
    const int kHeightA = (kHeight + HEIGHT_A - 1) / HEIGHT_A * HEIGHT_A;     \
    const int kHeightB = (kHeight + HEIGHT_B - 1) / HEIGHT_B * HEIGHT_B;     \
    const int kStrideA =                                                     \
        (kWidth * BPP_A + STRIDE_A - 1) / STRIDE_A * STRIDE_A;               \
    const int kStrideB =                                                     \
        (kWidth * BPP_B + STRIDE_B - 1) / STRIDE_B * STRIDE_B;               \
    align_buffer_page_end(src_argb, kStrideA* kHeightA + OFF);               \
    align_buffer_page_end(dst_argb_c, kStrideB* kHeightB);                   \
    align_buffer_page_end(dst_argb_opt, kStrideB* kHeightB);                 \
    for (int i = 0; i < kStrideA * kHeightA; ++i) {                          \
      src_argb[i + OFF] = (fastrand() & 0xff);                               \
    }                                                                        \
    memset(dst_argb_c, 1, kStrideB* kHeightB);                               \
    memset(dst_argb_opt, 101, kStrideB* kHeightB);                           \
    MaskCpuFlags(disable_cpu_flags_);                                        \
    FMT_A##To##FMT_B(src_argb + OFF, kStrideA, dst_argb_c, kStrideB, kWidth, \
                     NEG kHeight);                                           \
    MaskCpuFlags(benchmark_cpu_info_);                                       \
    for (int i = 0; i < benchmark_iterations_; ++i) {                        \
      FMT_A##To##FMT_B(src_argb + OFF, kStrideA, dst_argb_opt, kStrideB,     \
                       kWidth, NEG kHeight);                                 \
    }                                                                        \
    for (int i = 0; i < kStrideB * kHeightB; ++i) {                          \
      EXPECT_EQ(dst_argb_c[i], dst_argb_opt[i]);                             \
    }                                                                        \
    free_aligned_buffer_page_end(src_argb);                                  \
    free_aligned_buffer_page_end(dst_argb_c);                                \
    free_aligned_buffer_page_end(dst_argb_opt);                              \
  }
// Generates a fuzz-style test: each iteration picks a random small size
// (width 1..64, height 1..32), fills a random source, and checks that the
// C reference and optimized paths produce identical output.  Both
// destinations are memset to the same value (123) so untouched padding
// bytes compare equal.
#define TESTATOBRANDOM(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B,       \
                       STRIDE_B, HEIGHT_B)                                   \
  TEST_F(LibYUVConvertTest, FMT_A##To##FMT_B##_Random) {                     \
    for (int times = 0; times < benchmark_iterations_; ++times) {            \
      const int kWidth = (fastrand() & 63) + 1;                              \
      const int kHeight = (fastrand() & 31) + 1;                             \
      const int kHeightA = (kHeight + HEIGHT_A - 1) / HEIGHT_A * HEIGHT_A;   \
      const int kHeightB = (kHeight + HEIGHT_B - 1) / HEIGHT_B * HEIGHT_B;   \
      const int kStrideA =                                                   \
          (kWidth * BPP_A + STRIDE_A - 1) / STRIDE_A * STRIDE_A;             \
      const int kStrideB =                                                   \
          (kWidth * BPP_B + STRIDE_B - 1) / STRIDE_B * STRIDE_B;             \
      align_buffer_page_end(src_argb, kStrideA* kHeightA);                   \
      align_buffer_page_end(dst_argb_c, kStrideB* kHeightB);                 \
      align_buffer_page_end(dst_argb_opt, kStrideB* kHeightB);               \
      for (int i = 0; i < kStrideA * kHeightA; ++i) {                        \
        src_argb[i] = (fastrand() & 0xff);                                   \
      }                                                                      \
      memset(dst_argb_c, 123, kStrideB* kHeightB);                           \
      memset(dst_argb_opt, 123, kStrideB* kHeightB);                         \
      MaskCpuFlags(disable_cpu_flags_);                                      \
      FMT_A##To##FMT_B(src_argb, kStrideA, dst_argb_c, kStrideB, kWidth,     \
                       kHeight);                                             \
      MaskCpuFlags(benchmark_cpu_info_);                                     \
      FMT_A##To##FMT_B(src_argb, kStrideA, dst_argb_opt, kStrideB, kWidth,   \
                       kHeight);                                             \
      for (int i = 0; i < kStrideB * kHeightB; ++i) {                        \
        EXPECT_EQ(dst_argb_c[i], dst_argb_opt[i]);                           \
      }                                                                      \
      free_aligned_buffer_page_end(src_argb);                                \
      free_aligned_buffer_page_end(dst_argb_c);                              \
      free_aligned_buffer_page_end(dst_argb_opt);                            \
    }                                                                        \
  }
// Instantiates the full test battery for one conversion: _Any (odd width),
// _Unaligned (source offset by 1 byte), _Invert (negative height), _Opt
// (aligned/optimized), plus the _Random fuzz variant.
#define TESTATOB(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B, \
                 HEIGHT_B)                                                 \
  TESTATOBI(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B,      \
            HEIGHT_B, benchmark_width_ - 4, _Any, +, 0)                    \
  TESTATOBI(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B,      \
            HEIGHT_B, benchmark_width_, _Unaligned, +, 1)                  \
  TESTATOBI(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B,      \
            HEIGHT_B, benchmark_width_, _Invert, -, 0)                     \
  TESTATOBI(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B,      \
            HEIGHT_B, benchmark_width_, _Opt, +, 0)                        \
  TESTATOBRANDOM(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B, \
                 HEIGHT_B)
// Arguments are (SrcFmt, SrcBPP, SrcStrideAlign, SrcHeightAlign,
//                DstFmt, DstBPP, DstStrideAlign, DstHeightAlign).
// 30-bit (AR30/AB30) conversions; some are little-endian only.
TESTATOB(AB30, 4, 4, 1, ABGR, 4, 4, 1)
TESTATOB(AB30, 4, 4, 1, ARGB, 4, 4, 1)
#ifdef LITTLE_ENDIAN_ONLY_TEST
TESTATOB(ABGR, 4, 4, 1, AR30, 4, 4, 1)
#endif
TESTATOB(ABGR, 4, 4, 1, ARGB, 4, 4, 1)
#ifdef LITTLE_ENDIAN_ONLY_TEST
TESTATOB(AR30, 4, 4, 1, AB30, 4, 4, 1)
#endif
TESTATOB(AR30, 4, 4, 1, ABGR, 4, 4, 1)
#ifdef LITTLE_ENDIAN_ONLY_TEST
TESTATOB(AR30, 4, 4, 1, AR30, 4, 4, 1)
TESTATOB(AR30, 4, 4, 1, ARGB, 4, 4, 1)
#endif
// 32-bit ARGB to other packed formats.
TESTATOB(ARGB, 4, 4, 1, ABGR, 4, 4, 1)
#ifdef LITTLE_ENDIAN_ONLY_TEST
TESTATOB(ARGB, 4, 4, 1, AR30, 4, 4, 1)
#endif
TESTATOB(ARGB, 4, 4, 1, ARGB, 4, 4, 1)
TESTATOB(ARGB, 4, 4, 1, ARGB1555, 2, 2, 1)
TESTATOB(ARGB, 4, 4, 1, ARGB4444, 2, 2, 1)
TESTATOB(ARGB, 4, 4, 1, ARGBMirror, 4, 4, 1)
TESTATOB(ARGB, 4, 4, 1, BGRA, 4, 4, 1)
TESTATOB(ARGB, 4, 4, 1, I400, 1, 1, 1)
TESTATOB(ARGB, 4, 4, 1, J400, 1, 1, 1)
TESTATOB(RGBA, 4, 4, 1, J400, 1, 1, 1)
TESTATOB(ARGB, 4, 4, 1, RAW, 3, 3, 1)
TESTATOB(ARGB, 4, 4, 1, RGB24, 3, 3, 1)
#ifdef LITTLE_ENDIAN_ONLY_TEST
TESTATOB(ARGB, 4, 4, 1, RGB565, 2, 2, 1)
#endif
TESTATOB(ARGB, 4, 4, 1, RGBA, 4, 4, 1)
TESTATOB(ARGB, 4, 4, 1, UYVY, 2, 4, 1)
TESTATOB(ARGB, 4, 4, 1, YUY2, 2, 4, 1)  // 4
// Packed/grey formats back to 32-bit ARGB and related.
TESTATOB(ARGB1555, 2, 2, 1, ARGB, 4, 4, 1)
TESTATOB(ARGB4444, 2, 2, 1, ARGB, 4, 4, 1)
TESTATOB(BGRA, 4, 4, 1, ARGB, 4, 4, 1)
TESTATOB(I400, 1, 1, 1, ARGB, 4, 4, 1)
TESTATOB(I400, 1, 1, 1, I400, 1, 1, 1)
TESTATOB(I400, 1, 1, 1, I400Mirror, 1, 1, 1)
TESTATOB(J400, 1, 1, 1, ARGB, 4, 4, 1)
TESTATOB(J400, 1, 1, 1, J400, 1, 1, 1)
TESTATOB(RAW, 3, 3, 1, ARGB, 4, 4, 1)
TESTATOB(RAW, 3, 3, 1, RGBA, 4, 4, 1)
TESTATOB(RAW, 3, 3, 1, RGB24, 3, 3, 1)
TESTATOB(RGB24, 3, 3, 1, ARGB, 4, 4, 1)
TESTATOB(RGB24, 3, 3, 1, J400, 1, 1, 1)
TESTATOB(RGB24, 3, 3, 1, RGB24Mirror, 3, 3, 1)
TESTATOB(RAW, 3, 3, 1, J400, 1, 1, 1)
#ifdef LITTLE_ENDIAN_ONLY_TEST
TESTATOB(RGB565, 2, 2, 1, ARGB, 4, 4, 1)
#endif
TESTATOB(RGBA, 4, 4, 1, ARGB, 4, 4, 1)
TESTATOB(UYVY, 2, 4, 1, ARGB, 4, 4, 1)
TESTATOB(YUY2, 2, 4, 1, ARGB, 4, 4, 1)
TESTATOB(YUY2, 2, 4, 1, Y, 1, 1, 1)
// Defines test FMT_A##To##FMT_B##Dither##N: same C-vs-SIMD comparison as
// TESTATOBI but for the dithered conversion entry point, which takes an
// extra dither-table pointer (passed as NULL here, i.e. default dithering).
// W1280 selects the test width, NEG optionally negates the height (inverted
// image), OFF offsets the source pointer to exercise unaligned reads.
#define TESTATOBDI(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B, \
                   HEIGHT_B, W1280, N, NEG, OFF)                             \
  TEST_F(LibYUVConvertTest, FMT_A##To##FMT_B##Dither##N) {                   \
    const int kWidth = ((W1280) > 0) ? (W1280) : 1;                          \
    const int kHeight = benchmark_height_;                                   \
    const int kHeightA = (kHeight + HEIGHT_A - 1) / HEIGHT_A * HEIGHT_A;     \
    const int kHeightB = (kHeight + HEIGHT_B - 1) / HEIGHT_B * HEIGHT_B;     \
    const int kStrideA =                                                     \
        (kWidth * BPP_A + STRIDE_A - 1) / STRIDE_A * STRIDE_A;               \
    const int kStrideB =                                                     \
        (kWidth * BPP_B + STRIDE_B - 1) / STRIDE_B * STRIDE_B;               \
    align_buffer_page_end(src_argb, kStrideA* kHeightA + OFF);               \
    align_buffer_page_end(dst_argb_c, kStrideB* kHeightB);                   \
    align_buffer_page_end(dst_argb_opt, kStrideB* kHeightB);                 \
    for (int i = 0; i < kStrideA * kHeightA; ++i) {                          \
      src_argb[i + OFF] = (fastrand() & 0xff);                               \
    }                                                                        \
    memset(dst_argb_c, 1, kStrideB* kHeightB);                               \
    memset(dst_argb_opt, 101, kStrideB* kHeightB);                           \
    MaskCpuFlags(disable_cpu_flags_);                                        \
    FMT_A##To##FMT_B##Dither(src_argb + OFF, kStrideA, dst_argb_c, kStrideB, \
                             NULL, kWidth, NEG kHeight);                     \
    MaskCpuFlags(benchmark_cpu_info_);                                       \
    for (int i = 0; i < benchmark_iterations_; ++i) {                        \
      FMT_A##To##FMT_B##Dither(src_argb + OFF, kStrideA, dst_argb_opt,       \
                               kStrideB, NULL, kWidth, NEG kHeight);         \
    }                                                                        \
    for (int i = 0; i < kStrideB * kHeightB; ++i) {                          \
      EXPECT_EQ(dst_argb_c[i], dst_argb_opt[i]);                             \
    }                                                                        \
    free_aligned_buffer_page_end(src_argb);                                  \
    free_aligned_buffer_page_end(dst_argb_c);                                \
    free_aligned_buffer_page_end(dst_argb_opt);                              \
  }
// Random-size/random-content variant of the dithered-conversion comparison:
// the C reference path (SIMD masked off) and the optimized path must produce
// identical output for every random image.
#define TESTATOBDRANDOM(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B,      \
                        STRIDE_B, HEIGHT_B)                                  \
  TEST_F(LibYUVConvertTest, FMT_A##To##FMT_B##Dither_Random) {               \
    for (int times = 0; times < benchmark_iterations_; ++times) {            \
      const int kWidth = (fastrand() & 63) + 1;                              \
      const int kHeight = (fastrand() & 31) + 1;                             \
      const int kHeightA = (kHeight + HEIGHT_A - 1) / HEIGHT_A * HEIGHT_A;   \
      const int kHeightB = (kHeight + HEIGHT_B - 1) / HEIGHT_B * HEIGHT_B;   \
      const int kStrideA =                                                   \
          (kWidth * BPP_A + STRIDE_A - 1) / STRIDE_A * STRIDE_A;             \
      const int kStrideB =                                                   \
          (kWidth * BPP_B + STRIDE_B - 1) / STRIDE_B * STRIDE_B;             \
      align_buffer_page_end(src_argb, kStrideA* kHeightA);                   \
      align_buffer_page_end(dst_argb_c, kStrideB* kHeightB);                 \
      align_buffer_page_end(dst_argb_opt, kStrideB* kHeightB);               \
      for (int i = 0; i < kStrideA * kHeightA; ++i) {                        \
        src_argb[i] = (fastrand() & 0xff);                                   \
      }                                                                      \
      memset(dst_argb_c, 123, kStrideB* kHeightB);                           \
      memset(dst_argb_opt, 123, kStrideB* kHeightB);                         \
      MaskCpuFlags(disable_cpu_flags_);                                      \
      FMT_A##To##FMT_B##Dither(src_argb, kStrideA, dst_argb_c, kStrideB,     \
                               NULL, kWidth, kHeight);                       \
      MaskCpuFlags(benchmark_cpu_info_);                                     \
      FMT_A##To##FMT_B##Dither(src_argb, kStrideA, dst_argb_opt, kStrideB,   \
                               NULL, kWidth, kHeight);                       \
      for (int i = 0; i < kStrideB * kHeightB; ++i) {                        \
        EXPECT_EQ(dst_argb_c[i], dst_argb_opt[i]);                           \
      }                                                                      \
      free_aligned_buffer_page_end(src_argb);                                \
      free_aligned_buffer_page_end(dst_argb_c);                              \
      free_aligned_buffer_page_end(dst_argb_opt);                            \
    }                                                                        \
  }
// Instantiates the dithered-conversion battery: _Any, _Unaligned, _Invert,
// _Opt, plus the _Random fuzz variant.
#define TESTATOBD(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B, \
                  HEIGHT_B)                                                 \
  TESTATOBDI(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B,      \
             HEIGHT_B, benchmark_width_ - 4, _Any, +, 0)                    \
  TESTATOBDI(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B,      \
             HEIGHT_B, benchmark_width_, _Unaligned, +, 1)                  \
  TESTATOBDI(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B,      \
             HEIGHT_B, benchmark_width_, _Invert, -, 0)                     \
  TESTATOBDI(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B,      \
             HEIGHT_B, benchmark_width_, _Opt, +, 0)                        \
  TESTATOBDRANDOM(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B, \
                  HEIGHT_B)
// ARGBToRGB565Dither is the only dithered conversion under test.
#ifdef LITTLE_ENDIAN_ONLY_TEST
TESTATOBD(ARGB, 4, 4, 1, RGB565, 2, 2, 1)
#endif
// Defines FMT_ATOB##_Symetric##N ("Symetric" is a long-standing typo kept so
// existing test names stay stable). Verifies the conversion is an involution:
// applying it twice in place reproduces the original pixels, and the C and
// optimized paths agree after the round trip.
#define TESTSYMI(FMT_ATOB, BPP_A, STRIDE_A, HEIGHT_A, W1280, N, NEG, OFF)     \
  TEST_F(LibYUVConvertTest, FMT_ATOB##_Symetric##N) {                         \
    const int kWidth = ((W1280) > 0) ? (W1280) : 1;                           \
    const int kHeight = benchmark_height_;                                    \
    const int kHeightA = (kHeight + HEIGHT_A - 1) / HEIGHT_A * HEIGHT_A;      \
    const int kStrideA =                                                      \
        (kWidth * BPP_A + STRIDE_A - 1) / STRIDE_A * STRIDE_A;                \
    align_buffer_page_end(src_argb, kStrideA* kHeightA + OFF);                \
    align_buffer_page_end(dst_argb_c, kStrideA* kHeightA);                    \
    align_buffer_page_end(dst_argb_opt, kStrideA* kHeightA);                  \
    for (int i = 0; i < kStrideA * kHeightA; ++i) {                           \
      src_argb[i + OFF] = (fastrand() & 0xff);                                \
    }                                                                         \
    memset(dst_argb_c, 1, kStrideA* kHeightA);                                \
    memset(dst_argb_opt, 101, kStrideA* kHeightA);                            \
    MaskCpuFlags(disable_cpu_flags_);                                         \
    FMT_ATOB(src_argb + OFF, kStrideA, dst_argb_c, kStrideA, kWidth,          \
             NEG kHeight);                                                    \
    MaskCpuFlags(benchmark_cpu_info_);                                        \
    for (int i = 0; i < benchmark_iterations_; ++i) {                         \
      FMT_ATOB(src_argb + OFF, kStrideA, dst_argb_opt, kStrideA, kWidth,      \
               NEG kHeight);                                                  \
    }                                                                         \
    MaskCpuFlags(disable_cpu_flags_);                                         \
    FMT_ATOB(dst_argb_c, kStrideA, dst_argb_c, kStrideA, kWidth, NEG kHeight); \
    MaskCpuFlags(benchmark_cpu_info_);                                        \
    FMT_ATOB(dst_argb_opt, kStrideA, dst_argb_opt, kStrideA, kWidth,          \
             NEG kHeight);                                                    \
    for (int i = 0; i < kStrideA * kHeightA; ++i) {                           \
      EXPECT_EQ(src_argb[i + OFF], dst_argb_opt[i]);                          \
      EXPECT_EQ(dst_argb_c[i], dst_argb_opt[i]);                              \
    }                                                                         \
    free_aligned_buffer_page_end(src_argb);                                   \
    free_aligned_buffer_page_end(dst_argb_c);                                 \
    free_aligned_buffer_page_end(dst_argb_opt);                               \
  }
// Three variants of the symmetry test: odd width (_Any), source offset by one
// byte (_Unaligned), and fully aligned (_Opt).
#define TESTSYM(FMT_ATOB, BPP_A, STRIDE_A, HEIGHT_A)                          \
  TESTSYMI(FMT_ATOB, BPP_A, STRIDE_A, HEIGHT_A, benchmark_width_ - 4, _Any, +, \
           0)                                                                 \
  TESTSYMI(FMT_ATOB, BPP_A, STRIDE_A, HEIGHT_A, benchmark_width_, _Unaligned, \
           +, 1)                                                              \
  TESTSYMI(FMT_ATOB, BPP_A, STRIDE_A, HEIGHT_A, benchmark_width_, _Opt, +, 0)
// Channel-swapping conversions that are their own inverse.
TESTSYM(ARGBToARGB, 4, 4, 1)
TESTSYM(ARGBToBGRA, 4, 4, 1)
TESTSYM(ARGBToABGR, 4, 4, 1)
TESTSYM(BGRAToARGB, 4, 4, 1)
TESTSYM(ABGRToARGB, 4, 4, 1)
  1228. TEST_F(LibYUVConvertTest, Test565) {
  1229. SIMD_ALIGNED(uint8_t orig_pixels[256][4]);
  1230. SIMD_ALIGNED(uint8_t pixels565[256][2]);
  1231. for (int i = 0; i < 256; ++i) {
  1232. for (int j = 0; j < 4; ++j) {
  1233. orig_pixels[i][j] = i;
  1234. }
  1235. }
  1236. ARGBToRGB565(&orig_pixels[0][0], 0, &pixels565[0][0], 0, 256, 1);
  1237. uint32_t checksum = HashDjb2(&pixels565[0][0], sizeof(pixels565), 5381);
  1238. EXPECT_EQ(610919429u, checksum);
  1239. }
  1240. #ifdef HAVE_JPEG
// ValidateJpeg should reject buffers without proper markers and accept a
// buffer carrying both SOI (0xffd8) and EOI (0xffd9).
TEST_F(LibYUVConvertTest, ValidateJpeg) {
  const int kOff = 10;
  const int kMinJpeg = 64;
  // Payload is the benchmark frame size, but at least kMinJpeg bytes.
  const int kImageSize = benchmark_width_ * benchmark_height_ >= kMinJpeg
                             ? benchmark_width_ * benchmark_height_
                             : kMinJpeg;
  const int kSize = kImageSize + kOff;
  align_buffer_page_end(orig_pixels, kSize);
  // No SOI or EOI. Expect fail.
  memset(orig_pixels, 0, kSize);
  EXPECT_FALSE(ValidateJpeg(orig_pixels, kSize));
  // Test special value that matches marker start: a buffer of all 0xff must
  // still be rejected.
  memset(orig_pixels, 0xff, kSize);
  EXPECT_FALSE(ValidateJpeg(orig_pixels, kSize));
  // SOI at the start and EOI near the end. Expect pass.
  orig_pixels[0] = 0xff;
  orig_pixels[1] = 0xd8;  // SOI.
  orig_pixels[2] = 0xff;
  orig_pixels[kSize - kOff + 0] = 0xff;
  orig_pixels[kSize - kOff + 1] = 0xd9;  // EOI.
  for (int times = 0; times < benchmark_iterations_; ++times) {
    EXPECT_TRUE(ValidateJpeg(orig_pixels, kSize));
  }
  free_aligned_buffer_page_end(orig_pixels);
}
  1266. TEST_F(LibYUVConvertTest, ValidateJpegLarge) {
  1267. const int kOff = 10;
  1268. const int kMinJpeg = 64;
  1269. const int kImageSize = benchmark_width_ * benchmark_height_ >= kMinJpeg
  1270. ? benchmark_width_ * benchmark_height_
  1271. : kMinJpeg;
  1272. const int kSize = kImageSize + kOff;
  1273. const int kMultiple = 10;
  1274. const int kBufSize = kImageSize * kMultiple + kOff;
  1275. align_buffer_page_end(orig_pixels, kBufSize);
  1276. // No SOI or EOI. Expect fail.
  1277. memset(orig_pixels, 0, kBufSize);
  1278. EXPECT_FALSE(ValidateJpeg(orig_pixels, kBufSize));
  1279. // EOI, SOI. Expect pass.
  1280. orig_pixels[0] = 0xff;
  1281. orig_pixels[1] = 0xd8; // SOI.
  1282. orig_pixels[2] = 0xff;
  1283. orig_pixels[kSize - kOff + 0] = 0xff;
  1284. orig_pixels[kSize - kOff + 1] = 0xd9; // EOI.
  1285. for (int times = 0; times < benchmark_iterations_; ++times) {
  1286. EXPECT_TRUE(ValidateJpeg(orig_pixels, kBufSize));
  1287. }
  1288. free_aligned_buffer_page_end(orig_pixels);
  1289. }
// Negative cases for ValidateJpeg: bad pointers, bad sizes, and buffers with
// only one of the two required markers must all be rejected.
TEST_F(LibYUVConvertTest, InvalidateJpeg) {
  const int kOff = 10;
  const int kMinJpeg = 64;
  // Payload is the benchmark frame size, but at least kMinJpeg bytes.
  const int kImageSize = benchmark_width_ * benchmark_height_ >= kMinJpeg
                             ? benchmark_width_ * benchmark_height_
                             : kMinJpeg;
  const int kSize = kImageSize + kOff;
  align_buffer_page_end(orig_pixels, kSize);
  // NULL pointer. Expect fail.
  EXPECT_FALSE(ValidateJpeg(NULL, kSize));
  // Negative size. Expect fail.
  EXPECT_FALSE(ValidateJpeg(orig_pixels, -1));
  // Too large size (beyond any plausible JPEG). Expect fail.
  EXPECT_FALSE(ValidateJpeg(orig_pixels, 0xfb000000ull));
  // No SOI or EOI. Expect fail.
  memset(orig_pixels, 0, kSize);
  EXPECT_FALSE(ValidateJpeg(orig_pixels, kSize));
  // SOI but no EOI. Expect fail.
  orig_pixels[0] = 0xff;
  orig_pixels[1] = 0xd8;  // SOI.
  orig_pixels[2] = 0xff;
  for (int times = 0; times < benchmark_iterations_; ++times) {
    EXPECT_FALSE(ValidateJpeg(orig_pixels, kSize));
  }
  // EOI but no SOI. Expect fail.
  orig_pixels[0] = 0;
  orig_pixels[1] = 0;
  orig_pixels[kSize - kOff + 0] = 0xff;
  orig_pixels[kSize - kOff + 1] = 0xd9;  // EOI.
  EXPECT_FALSE(ValidateJpeg(orig_pixels, kSize));
  free_aligned_buffer_page_end(orig_pixels);
}
  1322. TEST_F(LibYUVConvertTest, FuzzJpeg) {
  1323. // SOI but no EOI. Expect fail.
  1324. for (int times = 0; times < benchmark_iterations_; ++times) {
  1325. const int kSize = fastrand() % 5000 + 3;
  1326. align_buffer_page_end(orig_pixels, kSize);
  1327. MemRandomize(orig_pixels, kSize);
  1328. // Add SOI so frame will be scanned.
  1329. orig_pixels[0] = 0xff;
  1330. orig_pixels[1] = 0xd8; // SOI.
  1331. orig_pixels[2] = 0xff;
  1332. orig_pixels[kSize - 1] = 0xff;
  1333. ValidateJpeg(orig_pixels,
  1334. kSize); // Failure normally expected.
  1335. free_aligned_buffer_page_end(orig_pixels);
  1336. }
  1337. }
// Test data created in GIMP. When exporting as JPEG, disable thumbnails and
// other optional metadata, choose a chroma subsampling mode, and use a low
// quality setting (50) to keep the files small. Generated with xxd -i test.jpg
  1341. // test 0 is J400
  1342. static const uint8_t kTest0Jpg[] = {
  1343. 0xff, 0xd8, 0xff, 0xe0, 0x00, 0x10, 0x4a, 0x46, 0x49, 0x46, 0x00, 0x01,
  1344. 0x01, 0x01, 0x00, 0x48, 0x00, 0x48, 0x00, 0x00, 0xff, 0xdb, 0x00, 0x43,
  1345. 0x00, 0x10, 0x0b, 0x0c, 0x0e, 0x0c, 0x0a, 0x10, 0x0e, 0x0d, 0x0e, 0x12,
  1346. 0x11, 0x10, 0x13, 0x18, 0x28, 0x1a, 0x18, 0x16, 0x16, 0x18, 0x31, 0x23,
  1347. 0x25, 0x1d, 0x28, 0x3a, 0x33, 0x3d, 0x3c, 0x39, 0x33, 0x38, 0x37, 0x40,
  1348. 0x48, 0x5c, 0x4e, 0x40, 0x44, 0x57, 0x45, 0x37, 0x38, 0x50, 0x6d, 0x51,
  1349. 0x57, 0x5f, 0x62, 0x67, 0x68, 0x67, 0x3e, 0x4d, 0x71, 0x79, 0x70, 0x64,
  1350. 0x78, 0x5c, 0x65, 0x67, 0x63, 0xff, 0xc2, 0x00, 0x0b, 0x08, 0x00, 0x10,
  1351. 0x00, 0x20, 0x01, 0x01, 0x11, 0x00, 0xff, 0xc4, 0x00, 0x17, 0x00, 0x01,
  1352. 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  1353. 0x00, 0x00, 0x00, 0x03, 0x04, 0x01, 0x02, 0xff, 0xda, 0x00, 0x08, 0x01,
  1354. 0x01, 0x00, 0x00, 0x00, 0x01, 0x43, 0x7e, 0xa7, 0x97, 0x57, 0xff, 0xc4,
  1355. 0x00, 0x1b, 0x10, 0x00, 0x03, 0x00, 0x02, 0x03, 0x00, 0x00, 0x00, 0x00,
  1356. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x02, 0x11, 0x00, 0x03,
  1357. 0x10, 0x12, 0x13, 0xff, 0xda, 0x00, 0x08, 0x01, 0x01, 0x00, 0x01, 0x05,
  1358. 0x02, 0x3b, 0xc0, 0x6f, 0x66, 0x76, 0x56, 0x23, 0x87, 0x99, 0x0d, 0x26,
  1359. 0x62, 0xf6, 0xbf, 0xff, 0xc4, 0x00, 0x1e, 0x10, 0x00, 0x02, 0x01, 0x03,
  1360. 0x05, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  1361. 0x00, 0x11, 0x21, 0x02, 0x12, 0x32, 0x10, 0x31, 0x71, 0x81, 0xa1, 0xff,
  1362. 0xda, 0x00, 0x08, 0x01, 0x01, 0x00, 0x06, 0x3f, 0x02, 0x4b, 0xb3, 0x28,
  1363. 0x32, 0xd2, 0xed, 0xf9, 0x1d, 0x3e, 0x13, 0x51, 0x73, 0x83, 0xff, 0xc4,
  1364. 0x00, 0x1c, 0x10, 0x01, 0x01, 0x01, 0x00, 0x02, 0x03, 0x01, 0x00, 0x00,
  1365. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x11, 0x00, 0x21, 0x51,
  1366. 0x31, 0x61, 0x81, 0xf0, 0xff, 0xda, 0x00, 0x08, 0x01, 0x01, 0x00, 0x01,
  1367. 0x3f, 0x21, 0x65, 0x6e, 0x31, 0x86, 0x28, 0xf9, 0x30, 0xdc, 0x27, 0xdb,
  1368. 0xa9, 0x01, 0xf3, 0xde, 0x02, 0xa0, 0xed, 0x1e, 0x34, 0x68, 0x23, 0xf9,
  1369. 0xc6, 0x48, 0x5d, 0x7a, 0x35, 0x02, 0xf5, 0x6f, 0xff, 0xda, 0x00, 0x08,
  1370. 0x01, 0x01, 0x00, 0x00, 0x00, 0x10, 0x35, 0xff, 0xc4, 0x00, 0x1f, 0x10,
  1371. 0x01, 0x00, 0x02, 0x01, 0x04, 0x03, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
  1372. 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x11, 0x31, 0x41, 0x61, 0x71, 0x91,
  1373. 0x21, 0x81, 0xd1, 0xb1, 0xff, 0xda, 0x00, 0x08, 0x01, 0x01, 0x00, 0x01,
  1374. 0x3f, 0x10, 0x0b, 0x30, 0xe9, 0x58, 0xbe, 0x1a, 0xfd, 0x88, 0xab, 0x8b,
  1375. 0x34, 0x74, 0x80, 0x4b, 0xb5, 0xd5, 0xab, 0xcd, 0x46, 0x96, 0x2e, 0xec,
  1376. 0xbd, 0xaa, 0x78, 0x47, 0x5c, 0x47, 0xa7, 0x30, 0x49, 0xad, 0x88, 0x7c,
  1377. 0x40, 0x74, 0x30, 0xff, 0x00, 0x23, 0x1d, 0x03, 0x0b, 0xb7, 0xd4, 0xff,
  1378. 0xd9};
  1379. static const size_t kTest0JpgLen = 421;
  1380. // test 1 is J444
  1381. static const uint8_t kTest1Jpg[] = {
  1382. 0xff, 0xd8, 0xff, 0xe0, 0x00, 0x10, 0x4a, 0x46, 0x49, 0x46, 0x00, 0x01,
  1383. 0x01, 0x01, 0x00, 0x48, 0x00, 0x48, 0x00, 0x00, 0xff, 0xdb, 0x00, 0x43,
  1384. 0x00, 0x10, 0x0b, 0x0c, 0x0e, 0x0c, 0x0a, 0x10, 0x0e, 0x0d, 0x0e, 0x12,
  1385. 0x11, 0x10, 0x13, 0x18, 0x28, 0x1a, 0x18, 0x16, 0x16, 0x18, 0x31, 0x23,
  1386. 0x25, 0x1d, 0x28, 0x3a, 0x33, 0x3d, 0x3c, 0x39, 0x33, 0x38, 0x37, 0x40,
  1387. 0x48, 0x5c, 0x4e, 0x40, 0x44, 0x57, 0x45, 0x37, 0x38, 0x50, 0x6d, 0x51,
  1388. 0x57, 0x5f, 0x62, 0x67, 0x68, 0x67, 0x3e, 0x4d, 0x71, 0x79, 0x70, 0x64,
  1389. 0x78, 0x5c, 0x65, 0x67, 0x63, 0xff, 0xdb, 0x00, 0x43, 0x01, 0x11, 0x12,
  1390. 0x12, 0x18, 0x15, 0x18, 0x2f, 0x1a, 0x1a, 0x2f, 0x63, 0x42, 0x38, 0x42,
  1391. 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63,
  1392. 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63,
  1393. 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63,
  1394. 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63,
  1395. 0x63, 0x63, 0xff, 0xc2, 0x00, 0x11, 0x08, 0x00, 0x10, 0x00, 0x20, 0x03,
  1396. 0x01, 0x11, 0x00, 0x02, 0x11, 0x01, 0x03, 0x11, 0x01, 0xff, 0xc4, 0x00,
  1397. 0x17, 0x00, 0x01, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  1398. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x04, 0x01, 0x02, 0xff, 0xc4,
  1399. 0x00, 0x16, 0x01, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  1400. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x01, 0x03, 0xff, 0xda,
  1401. 0x00, 0x0c, 0x03, 0x01, 0x00, 0x02, 0x10, 0x03, 0x10, 0x00, 0x00, 0x01,
  1402. 0x40, 0x8f, 0x26, 0xe8, 0xf4, 0xcc, 0xf9, 0x69, 0x2b, 0x1b, 0x2a, 0xcb,
  1403. 0xff, 0xc4, 0x00, 0x1b, 0x10, 0x00, 0x03, 0x00, 0x02, 0x03, 0x00, 0x00,
  1404. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x02, 0x11,
  1405. 0x00, 0x03, 0x10, 0x12, 0x13, 0xff, 0xda, 0x00, 0x08, 0x01, 0x01, 0x00,
  1406. 0x01, 0x05, 0x02, 0x3b, 0x80, 0x6f, 0x56, 0x76, 0x56, 0x23, 0x87, 0x99,
  1407. 0x0d, 0x26, 0x62, 0xf6, 0xbf, 0xff, 0xc4, 0x00, 0x19, 0x11, 0x01, 0x00,
  1408. 0x03, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  1409. 0x00, 0x00, 0x01, 0x00, 0x10, 0x11, 0x02, 0x12, 0xff, 0xda, 0x00, 0x08,
  1410. 0x01, 0x03, 0x01, 0x01, 0x3f, 0x01, 0xf1, 0x00, 0x27, 0x45, 0xbb, 0x31,
  1411. 0xaf, 0xff, 0xc4, 0x00, 0x1a, 0x11, 0x00, 0x02, 0x03, 0x01, 0x01, 0x00,
  1412. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
  1413. 0x02, 0x10, 0x11, 0x41, 0x12, 0xff, 0xda, 0x00, 0x08, 0x01, 0x02, 0x01,
  1414. 0x01, 0x3f, 0x01, 0xf6, 0x4b, 0x5f, 0x48, 0xb3, 0x69, 0x63, 0x35, 0x72,
  1415. 0xbf, 0xff, 0xc4, 0x00, 0x1e, 0x10, 0x00, 0x02, 0x01, 0x03, 0x05, 0x00,
  1416. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x11,
  1417. 0x21, 0x02, 0x12, 0x32, 0x10, 0x31, 0x71, 0x81, 0xa1, 0xff, 0xda, 0x00,
  1418. 0x08, 0x01, 0x01, 0x00, 0x06, 0x3f, 0x02, 0x4b, 0xb3, 0x28, 0x32, 0xd2,
  1419. 0xed, 0xf9, 0x1d, 0x3e, 0x13, 0x51, 0x73, 0x83, 0xff, 0xc4, 0x00, 0x1c,
  1420. 0x10, 0x01, 0x01, 0x01, 0x00, 0x02, 0x03, 0x01, 0x00, 0x00, 0x00, 0x00,
  1421. 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x11, 0x00, 0x21, 0x51, 0x31, 0x61,
  1422. 0x81, 0xf0, 0xff, 0xda, 0x00, 0x08, 0x01, 0x01, 0x00, 0x01, 0x3f, 0x21,
  1423. 0x75, 0x6e, 0x31, 0x94, 0x28, 0xf9, 0x30, 0xdc, 0x27, 0xdb, 0xa9, 0x01,
  1424. 0xf3, 0xde, 0x02, 0xa0, 0xed, 0x1e, 0x34, 0x68, 0x23, 0xf9, 0xc6, 0x48,
  1425. 0x5d, 0x7a, 0x35, 0x02, 0xf5, 0x6f, 0xff, 0xda, 0x00, 0x0c, 0x03, 0x01,
  1426. 0x00, 0x02, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x26, 0x61, 0xd4, 0xff,
  1427. 0xc4, 0x00, 0x1a, 0x11, 0x00, 0x03, 0x01, 0x00, 0x03, 0x00, 0x00, 0x00,
  1428. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x11, 0x21,
  1429. 0x31, 0x41, 0x51, 0xff, 0xda, 0x00, 0x08, 0x01, 0x03, 0x01, 0x01, 0x3f,
  1430. 0x10, 0x54, 0xa8, 0xbf, 0x50, 0x87, 0xb0, 0x9d, 0x8b, 0xc4, 0x6a, 0x26,
  1431. 0x6b, 0x2a, 0x9c, 0x1f, 0xff, 0xc4, 0x00, 0x18, 0x11, 0x01, 0x01, 0x01,
  1432. 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  1433. 0x00, 0x01, 0x00, 0x11, 0x21, 0x51, 0xff, 0xda, 0x00, 0x08, 0x01, 0x02,
  1434. 0x01, 0x01, 0x3f, 0x10, 0x70, 0xe1, 0x3e, 0xd1, 0x8e, 0x0d, 0xe1, 0xb5,
  1435. 0xd5, 0x91, 0x76, 0x43, 0x82, 0x45, 0x4c, 0x7b, 0x7f, 0xff, 0xc4, 0x00,
  1436. 0x1f, 0x10, 0x01, 0x00, 0x02, 0x01, 0x04, 0x03, 0x01, 0x00, 0x00, 0x00,
  1437. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x11, 0x31, 0x41, 0x61,
  1438. 0x71, 0x91, 0x21, 0x81, 0xd1, 0xb1, 0xff, 0xda, 0x00, 0x08, 0x01, 0x01,
  1439. 0x00, 0x01, 0x3f, 0x10, 0x1b, 0x30, 0xe9, 0x58, 0xbe, 0x1a, 0xfd, 0x8a,
  1440. 0xeb, 0x8b, 0x34, 0x74, 0x80, 0x4b, 0xb5, 0xd5, 0xab, 0xcd, 0x46, 0x96,
  1441. 0x2e, 0xec, 0xbd, 0xaa, 0x78, 0x47, 0x5c, 0x47, 0xa7, 0x30, 0x49, 0xad,
  1442. 0x88, 0x7c, 0x40, 0x74, 0x30, 0xff, 0x00, 0x23, 0x1d, 0x03, 0x0b, 0xb7,
  1443. 0xd4, 0xff, 0xd9};
  1444. static const size_t kTest1JpgLen = 735;
  1445. // test 2 is J420
  1446. static const uint8_t kTest2Jpg[] = {
  1447. 0xff, 0xd8, 0xff, 0xe0, 0x00, 0x10, 0x4a, 0x46, 0x49, 0x46, 0x00, 0x01,
  1448. 0x01, 0x01, 0x00, 0x48, 0x00, 0x48, 0x00, 0x00, 0xff, 0xdb, 0x00, 0x43,
  1449. 0x00, 0x10, 0x0b, 0x0c, 0x0e, 0x0c, 0x0a, 0x10, 0x0e, 0x0d, 0x0e, 0x12,
  1450. 0x11, 0x10, 0x13, 0x18, 0x28, 0x1a, 0x18, 0x16, 0x16, 0x18, 0x31, 0x23,
  1451. 0x25, 0x1d, 0x28, 0x3a, 0x33, 0x3d, 0x3c, 0x39, 0x33, 0x38, 0x37, 0x40,
  1452. 0x48, 0x5c, 0x4e, 0x40, 0x44, 0x57, 0x45, 0x37, 0x38, 0x50, 0x6d, 0x51,
  1453. 0x57, 0x5f, 0x62, 0x67, 0x68, 0x67, 0x3e, 0x4d, 0x71, 0x79, 0x70, 0x64,
  1454. 0x78, 0x5c, 0x65, 0x67, 0x63, 0xff, 0xdb, 0x00, 0x43, 0x01, 0x11, 0x12,
  1455. 0x12, 0x18, 0x15, 0x18, 0x2f, 0x1a, 0x1a, 0x2f, 0x63, 0x42, 0x38, 0x42,
  1456. 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63,
  1457. 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63,
  1458. 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63,
  1459. 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63,
  1460. 0x63, 0x63, 0xff, 0xc2, 0x00, 0x11, 0x08, 0x00, 0x10, 0x00, 0x20, 0x03,
  1461. 0x01, 0x22, 0x00, 0x02, 0x11, 0x01, 0x03, 0x11, 0x01, 0xff, 0xc4, 0x00,
  1462. 0x18, 0x00, 0x00, 0x02, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  1463. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x05, 0x01, 0x02, 0x04, 0xff,
  1464. 0xc4, 0x00, 0x16, 0x01, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
  1465. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x01, 0x02, 0xff,
  1466. 0xda, 0x00, 0x0c, 0x03, 0x01, 0x00, 0x02, 0x10, 0x03, 0x10, 0x00, 0x00,
  1467. 0x01, 0x20, 0xe7, 0x28, 0xa3, 0x0b, 0x2e, 0x2d, 0xcf, 0xff, 0xc4, 0x00,
  1468. 0x1b, 0x10, 0x00, 0x03, 0x00, 0x02, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00,
  1469. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x02, 0x11, 0x00, 0x03, 0x10,
  1470. 0x12, 0x13, 0xff, 0xda, 0x00, 0x08, 0x01, 0x01, 0x00, 0x01, 0x05, 0x02,
  1471. 0x3b, 0x80, 0x6f, 0x56, 0x76, 0x56, 0x23, 0x87, 0x99, 0x0d, 0x26, 0x62,
  1472. 0xf6, 0xbf, 0xff, 0xc4, 0x00, 0x17, 0x11, 0x01, 0x00, 0x03, 0x00, 0x00,
  1473. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  1474. 0x01, 0x11, 0x21, 0xff, 0xda, 0x00, 0x08, 0x01, 0x03, 0x01, 0x01, 0x3f,
  1475. 0x01, 0xc8, 0x53, 0xff, 0xc4, 0x00, 0x16, 0x11, 0x01, 0x01, 0x01, 0x00,
  1476. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  1477. 0x00, 0x11, 0x32, 0xff, 0xda, 0x00, 0x08, 0x01, 0x02, 0x01, 0x01, 0x3f,
  1478. 0x01, 0xd2, 0xc7, 0xff, 0xc4, 0x00, 0x1e, 0x10, 0x00, 0x02, 0x01, 0x03,
  1479. 0x05, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  1480. 0x00, 0x11, 0x21, 0x02, 0x12, 0x32, 0x10, 0x31, 0x71, 0x81, 0xa1, 0xff,
  1481. 0xda, 0x00, 0x08, 0x01, 0x01, 0x00, 0x06, 0x3f, 0x02, 0x4b, 0xb3, 0x28,
  1482. 0x32, 0xd2, 0xed, 0xf9, 0x1d, 0x3e, 0x13, 0x51, 0x73, 0x83, 0xff, 0xc4,
  1483. 0x00, 0x1c, 0x10, 0x01, 0x01, 0x01, 0x00, 0x02, 0x03, 0x01, 0x00, 0x00,
  1484. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x11, 0x00, 0x21, 0x51,
  1485. 0x31, 0x61, 0x81, 0xf0, 0xff, 0xda, 0x00, 0x08, 0x01, 0x01, 0x00, 0x01,
  1486. 0x3f, 0x21, 0x75, 0x6e, 0x31, 0x94, 0x28, 0xf9, 0x30, 0xdc, 0x27, 0xdb,
  1487. 0xa9, 0x01, 0xf3, 0xde, 0x02, 0xa0, 0xed, 0x1e, 0x34, 0x68, 0x23, 0xf9,
  1488. 0xc6, 0x48, 0x5d, 0x7a, 0x35, 0x02, 0xf5, 0x6f, 0xff, 0xda, 0x00, 0x0c,
  1489. 0x03, 0x01, 0x00, 0x02, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x13, 0x5f,
  1490. 0xff, 0xc4, 0x00, 0x17, 0x11, 0x01, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00,
  1491. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x11,
  1492. 0x21, 0xff, 0xda, 0x00, 0x08, 0x01, 0x03, 0x01, 0x01, 0x3f, 0x10, 0x0e,
  1493. 0xa1, 0x3a, 0x76, 0xff, 0xc4, 0x00, 0x17, 0x11, 0x01, 0x01, 0x01, 0x01,
  1494. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  1495. 0x01, 0x00, 0x21, 0x11, 0xff, 0xda, 0x00, 0x08, 0x01, 0x02, 0x01, 0x01,
  1496. 0x3f, 0x10, 0x57, 0x0b, 0x08, 0x70, 0xdb, 0xff, 0xc4, 0x00, 0x1f, 0x10,
  1497. 0x01, 0x00, 0x02, 0x01, 0x04, 0x03, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
  1498. 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x11, 0x31, 0x41, 0x61, 0x71, 0x91,
  1499. 0x21, 0x81, 0xd1, 0xb1, 0xff, 0xda, 0x00, 0x08, 0x01, 0x01, 0x00, 0x01,
  1500. 0x3f, 0x10, 0x1b, 0x30, 0xe9, 0x58, 0xbe, 0x1a, 0xfd, 0x8a, 0xeb, 0x8b,
  1501. 0x34, 0x74, 0x80, 0x4b, 0xb5, 0xd5, 0xab, 0xcd, 0x46, 0x96, 0x2e, 0xec,
  1502. 0xbd, 0xaa, 0x78, 0x47, 0x5c, 0x47, 0xa7, 0x30, 0x49, 0xad, 0x88, 0x7c,
  1503. 0x40, 0x74, 0x30, 0xff, 0x00, 0x23, 0x1d, 0x03, 0x0b, 0xb7, 0xd4, 0xff,
  1504. 0xd9};
  1505. static const size_t kTest2JpgLen = 685;
  1506. // test 3 is J422
  1507. static const uint8_t kTest3Jpg[] = {
  1508. 0xff, 0xd8, 0xff, 0xe0, 0x00, 0x10, 0x4a, 0x46, 0x49, 0x46, 0x00, 0x01,
  1509. 0x01, 0x01, 0x00, 0x48, 0x00, 0x48, 0x00, 0x00, 0xff, 0xdb, 0x00, 0x43,
  1510. 0x00, 0x10, 0x0b, 0x0c, 0x0e, 0x0c, 0x0a, 0x10, 0x0e, 0x0d, 0x0e, 0x12,
  1511. 0x11, 0x10, 0x13, 0x18, 0x28, 0x1a, 0x18, 0x16, 0x16, 0x18, 0x31, 0x23,
  1512. 0x25, 0x1d, 0x28, 0x3a, 0x33, 0x3d, 0x3c, 0x39, 0x33, 0x38, 0x37, 0x40,
  1513. 0x48, 0x5c, 0x4e, 0x40, 0x44, 0x57, 0x45, 0x37, 0x38, 0x50, 0x6d, 0x51,
  1514. 0x57, 0x5f, 0x62, 0x67, 0x68, 0x67, 0x3e, 0x4d, 0x71, 0x79, 0x70, 0x64,
  1515. 0x78, 0x5c, 0x65, 0x67, 0x63, 0xff, 0xdb, 0x00, 0x43, 0x01, 0x11, 0x12,
  1516. 0x12, 0x18, 0x15, 0x18, 0x2f, 0x1a, 0x1a, 0x2f, 0x63, 0x42, 0x38, 0x42,
  1517. 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63,
  1518. 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63,
  1519. 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63,
  1520. 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63,
  1521. 0x63, 0x63, 0xff, 0xc2, 0x00, 0x11, 0x08, 0x00, 0x10, 0x00, 0x20, 0x03,
  1522. 0x01, 0x21, 0x00, 0x02, 0x11, 0x01, 0x03, 0x11, 0x01, 0xff, 0xc4, 0x00,
  1523. 0x17, 0x00, 0x01, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  1524. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x04, 0x01, 0x02, 0xff, 0xc4,
  1525. 0x00, 0x17, 0x01, 0x00, 0x03, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  1526. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x02, 0x03, 0x00, 0xff,
  1527. 0xda, 0x00, 0x0c, 0x03, 0x01, 0x00, 0x02, 0x10, 0x03, 0x10, 0x00, 0x00,
  1528. 0x01, 0x43, 0x8d, 0x1f, 0xa2, 0xb3, 0xca, 0x1b, 0x57, 0x0f, 0xff, 0xc4,
  1529. 0x00, 0x1b, 0x10, 0x00, 0x03, 0x00, 0x02, 0x03, 0x00, 0x00, 0x00, 0x00,
  1530. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x02, 0x11, 0x00, 0x03,
  1531. 0x10, 0x12, 0x13, 0xff, 0xda, 0x00, 0x08, 0x01, 0x01, 0x00, 0x01, 0x05,
  1532. 0x02, 0x3b, 0x80, 0x6f, 0x56, 0x76, 0x56, 0x23, 0x87, 0x99, 0x0d, 0x26,
  1533. 0x62, 0xf6, 0xbf, 0xff, 0xc4, 0x00, 0x19, 0x11, 0x00, 0x02, 0x03, 0x01,
  1534. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  1535. 0x00, 0x01, 0x02, 0x10, 0x11, 0x21, 0xff, 0xda, 0x00, 0x08, 0x01, 0x03,
  1536. 0x01, 0x01, 0x3f, 0x01, 0x51, 0xce, 0x8c, 0x75, 0xff, 0xc4, 0x00, 0x18,
  1537. 0x11, 0x00, 0x03, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  1538. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x02, 0x61, 0x21, 0xff, 0xda,
  1539. 0x00, 0x08, 0x01, 0x02, 0x01, 0x01, 0x3f, 0x01, 0xa6, 0xd9, 0x2f, 0x84,
  1540. 0xe8, 0xf0, 0xff, 0xc4, 0x00, 0x1e, 0x10, 0x00, 0x02, 0x01, 0x03, 0x05,
  1541. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  1542. 0x11, 0x21, 0x02, 0x12, 0x32, 0x10, 0x31, 0x71, 0x81, 0xa1, 0xff, 0xda,
  1543. 0x00, 0x08, 0x01, 0x01, 0x00, 0x06, 0x3f, 0x02, 0x4b, 0xb3, 0x28, 0x32,
  1544. 0xd2, 0xed, 0xf9, 0x1d, 0x3e, 0x13, 0x51, 0x73, 0x83, 0xff, 0xc4, 0x00,
  1545. 0x1c, 0x10, 0x01, 0x01, 0x01, 0x00, 0x02, 0x03, 0x01, 0x00, 0x00, 0x00,
  1546. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x11, 0x00, 0x21, 0x51, 0x31,
  1547. 0x61, 0x81, 0xf0, 0xff, 0xda, 0x00, 0x08, 0x01, 0x01, 0x00, 0x01, 0x3f,
  1548. 0x21, 0x75, 0x6e, 0x31, 0x94, 0x28, 0xf9, 0x30, 0xdc, 0x27, 0xdb, 0xa9,
  1549. 0x01, 0xf3, 0xde, 0x02, 0xa0, 0xed, 0x1e, 0x34, 0x68, 0x23, 0xf9, 0xc6,
  1550. 0x48, 0x5d, 0x7a, 0x35, 0x02, 0xf5, 0x6f, 0xff, 0xda, 0x00, 0x0c, 0x03,
  1551. 0x01, 0x00, 0x02, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x2e, 0x45, 0xff,
  1552. 0xc4, 0x00, 0x18, 0x11, 0x00, 0x03, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00,
  1553. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x11, 0x21,
  1554. 0x31, 0xff, 0xda, 0x00, 0x08, 0x01, 0x03, 0x01, 0x01, 0x3f, 0x10, 0x53,
  1555. 0x50, 0xba, 0x54, 0xc1, 0x67, 0x4f, 0xff, 0xc4, 0x00, 0x18, 0x11, 0x00,
  1556. 0x03, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  1557. 0x00, 0x00, 0x00, 0x01, 0x11, 0x21, 0x00, 0x10, 0xff, 0xda, 0x00, 0x08,
  1558. 0x01, 0x02, 0x01, 0x01, 0x3f, 0x10, 0x18, 0x81, 0x5c, 0x04, 0x1a, 0xca,
  1559. 0x91, 0xbf, 0xff, 0xc4, 0x00, 0x1f, 0x10, 0x01, 0x00, 0x02, 0x01, 0x04,
  1560. 0x03, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
  1561. 0x00, 0x11, 0x31, 0x41, 0x61, 0x71, 0x91, 0x21, 0x81, 0xd1, 0xb1, 0xff,
  1562. 0xda, 0x00, 0x08, 0x01, 0x01, 0x00, 0x01, 0x3f, 0x10, 0x1b, 0x30, 0xe9,
  1563. 0x58, 0xbe, 0x1a, 0xfd, 0x8a, 0xeb, 0x8b, 0x34, 0x74, 0x80, 0x4b, 0xb5,
  1564. 0xd5, 0xab, 0xcd, 0x46, 0x96, 0x2e, 0xec, 0xbd, 0xaa, 0x78, 0x47, 0x5c,
  1565. 0x47, 0xa7, 0x30, 0x49, 0xad, 0x88, 0x7c, 0x40, 0x74, 0x30, 0xff, 0x00,
  1566. 0x23, 0x1d, 0x03, 0x0b, 0xb7, 0xd4, 0xff, 0xd9};
// Byte length of kTest3Jpg above.
static const size_t kTest3JpgLen = 704;
  1568. // test 4 is J422 vertical - not supported
// Tiny progressive JPEG (SOF2 marker 0xffc2 below reports 32x16) whose luma
// component uses 1x2 sampling (vertical J422) -- a layout the converters do
// not support, used to exercise the "valid but unsupported" path.
static const uint8_t kTest4Jpg[] = {
    0xff, 0xd8, 0xff, 0xe0, 0x00, 0x10, 0x4a, 0x46, 0x49, 0x46, 0x00, 0x01,
    0x01, 0x01, 0x00, 0x48, 0x00, 0x48, 0x00, 0x00, 0xff, 0xdb, 0x00, 0x43,
    0x00, 0x10, 0x0b, 0x0c, 0x0e, 0x0c, 0x0a, 0x10, 0x0e, 0x0d, 0x0e, 0x12,
    0x11, 0x10, 0x13, 0x18, 0x28, 0x1a, 0x18, 0x16, 0x16, 0x18, 0x31, 0x23,
    0x25, 0x1d, 0x28, 0x3a, 0x33, 0x3d, 0x3c, 0x39, 0x33, 0x38, 0x37, 0x40,
    0x48, 0x5c, 0x4e, 0x40, 0x44, 0x57, 0x45, 0x37, 0x38, 0x50, 0x6d, 0x51,
    0x57, 0x5f, 0x62, 0x67, 0x68, 0x67, 0x3e, 0x4d, 0x71, 0x79, 0x70, 0x64,
    0x78, 0x5c, 0x65, 0x67, 0x63, 0xff, 0xdb, 0x00, 0x43, 0x01, 0x11, 0x12,
    0x12, 0x18, 0x15, 0x18, 0x2f, 0x1a, 0x1a, 0x2f, 0x63, 0x42, 0x38, 0x42,
    0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63,
    0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63,
    0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63,
    0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63,
    0x63, 0x63, 0xff, 0xc2, 0x00, 0x11, 0x08, 0x00, 0x10, 0x00, 0x20, 0x03,
    0x01, 0x12, 0x00, 0x02, 0x11, 0x01, 0x03, 0x11, 0x01, 0xff, 0xc4, 0x00,
    0x18, 0x00, 0x00, 0x02, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x05, 0x01, 0x02, 0x03, 0xff,
    0xc4, 0x00, 0x16, 0x01, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x03, 0xff,
    0xda, 0x00, 0x0c, 0x03, 0x01, 0x00, 0x02, 0x10, 0x03, 0x10, 0x00, 0x00,
    0x01, 0xd2, 0x98, 0xe9, 0x03, 0x0c, 0x00, 0x46, 0x21, 0xd9, 0xff, 0xc4,
    0x00, 0x1b, 0x10, 0x00, 0x03, 0x00, 0x02, 0x03, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x02, 0x11, 0x00, 0x03,
    0x10, 0x12, 0x13, 0xff, 0xda, 0x00, 0x08, 0x01, 0x01, 0x00, 0x01, 0x05,
    0x02, 0x3b, 0x80, 0x6f, 0x56, 0x76, 0x56, 0x23, 0x87, 0x99, 0x0d, 0x26,
    0x62, 0xf6, 0xbf, 0xff, 0xc4, 0x00, 0x17, 0x11, 0x01, 0x01, 0x01, 0x01,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x11, 0x01, 0x21, 0xff, 0xda, 0x00, 0x08, 0x01, 0x03, 0x01, 0x01,
    0x3f, 0x01, 0x98, 0xb1, 0xbd, 0x47, 0xff, 0xc4, 0x00, 0x18, 0x11, 0x00,
    0x03, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x01, 0x12, 0x11, 0x21, 0xff, 0xda, 0x00, 0x08,
    0x01, 0x02, 0x01, 0x01, 0x3f, 0x01, 0xb6, 0x35, 0xa2, 0xe1, 0x47, 0xff,
    0xc4, 0x00, 0x1e, 0x10, 0x00, 0x02, 0x01, 0x03, 0x05, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x11, 0x21, 0x02,
    0x12, 0x32, 0x10, 0x31, 0x71, 0x81, 0xa1, 0xff, 0xda, 0x00, 0x08, 0x01,
    0x01, 0x00, 0x06, 0x3f, 0x02, 0x4b, 0xb3, 0x28, 0x32, 0xd2, 0xed, 0xf9,
    0x1d, 0x3e, 0x13, 0x51, 0x73, 0x83, 0xff, 0xc4, 0x00, 0x1c, 0x10, 0x01,
    0x01, 0x01, 0x00, 0x02, 0x03, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x01, 0x11, 0x00, 0x21, 0x51, 0x31, 0x61, 0x81, 0xf0,
    0xff, 0xda, 0x00, 0x08, 0x01, 0x01, 0x00, 0x01, 0x3f, 0x21, 0x75, 0x6e,
    0x31, 0x94, 0x28, 0xf9, 0x30, 0xdc, 0x27, 0xdb, 0xa9, 0x01, 0xf3, 0xde,
    0x02, 0xa0, 0xed, 0x1e, 0x34, 0x68, 0x23, 0xf9, 0xc6, 0x48, 0x5d, 0x7a,
    0x35, 0x02, 0xf5, 0x6f, 0xff, 0xda, 0x00, 0x0c, 0x03, 0x01, 0x00, 0x02,
    0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x24, 0xaf, 0xff, 0xc4, 0x00, 0x19,
    0x11, 0x00, 0x03, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x11, 0x51, 0x21, 0x31, 0xff,
    0xda, 0x00, 0x08, 0x01, 0x03, 0x01, 0x01, 0x3f, 0x10, 0x59, 0x11, 0xca,
    0x42, 0x60, 0x9f, 0x69, 0xff, 0xc4, 0x00, 0x19, 0x11, 0x00, 0x02, 0x03,
    0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x01, 0x11, 0x21, 0x31, 0x61, 0xff, 0xda, 0x00, 0x08, 0x01,
    0x02, 0x01, 0x01, 0x3f, 0x10, 0xb0, 0xd7, 0x27, 0x51, 0xb6, 0x41, 0xff,
    0xc4, 0x00, 0x1f, 0x10, 0x01, 0x00, 0x02, 0x01, 0x04, 0x03, 0x01, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x11, 0x31,
    0x41, 0x61, 0x71, 0x91, 0x21, 0x81, 0xd1, 0xb1, 0xff, 0xda, 0x00, 0x08,
    0x01, 0x01, 0x00, 0x01, 0x3f, 0x10, 0x1b, 0x30, 0xe9, 0x58, 0xbe, 0x1a,
    0xfd, 0x8a, 0xeb, 0x8b, 0x34, 0x74, 0x80, 0x4b, 0xb5, 0xd5, 0xab, 0xcd,
    0x46, 0x96, 0x2e, 0xec, 0xbd, 0xaa, 0x78, 0x47, 0x5c, 0x47, 0xa7, 0x30,
    0x49, 0xad, 0x88, 0x7c, 0x40, 0x74, 0x30, 0xff, 0x00, 0x23, 0x1d, 0x03,
    0x0b, 0xb7, 0xd4, 0xff, 0xd9};
// Byte length of kTest4Jpg above.
static const size_t kTest4JpgLen = 701;
  1630. TEST_F(LibYUVConvertTest, TestMJPGSize) {
  1631. int width = 0;
  1632. int height = 0;
  1633. int ret = MJPGSize(kTest2Jpg, kTest2JpgLen, &width, &height);
  1634. EXPECT_EQ(0, ret);
  1635. printf("test jpeg size %d x %d\n", width, height);
  1636. }
// Decode the J420 test jpeg to I420 and verify all three planes against
// known-good hashes.
TEST_F(LibYUVConvertTest, TestMJPGToI420) {
  int width = 0;
  int height = 0;
  int ret = MJPGSize(kTest2Jpg, kTest2JpgLen, &width, &height);
  EXPECT_EQ(0, ret);
  // Chroma plane dimensions round up for odd luma sizes.
  int half_width = (width + 1) / 2;
  int half_height = (height + 1) / 2;
  // Scale the iteration count so benchmark work is comparable regardless of
  // the (small) jpeg dimensions.
  int benchmark_iterations = benchmark_iterations_ * benchmark_width_ *
                             benchmark_height_ / (width * height);
  align_buffer_page_end(dst_y, width * height);
  align_buffer_page_end(dst_u, half_width * half_height);
  align_buffer_page_end(dst_v, half_width * half_height);
  for (int times = 0; times < benchmark_iterations; ++times) {
    ret = MJPGToI420(kTest2Jpg, kTest2JpgLen, dst_y, width, dst_u, half_width,
                     dst_v, half_width, width, height, width, height);
  }
  // Expect success.
  EXPECT_EQ(0, ret);
  // Test result matches known hash value.
  uint32_t dst_y_hash = HashDjb2(dst_y, width * height, 5381);
  uint32_t dst_u_hash = HashDjb2(dst_u, half_width * half_height, 5381);
  uint32_t dst_v_hash = HashDjb2(dst_v, half_width * half_height, 5381);
  EXPECT_EQ(dst_y_hash, 2682851208u);
  EXPECT_EQ(dst_u_hash, 2501859930u);
  EXPECT_EQ(dst_v_hash, 2126459123u);
  free_aligned_buffer_page_end(dst_y);
  free_aligned_buffer_page_end(dst_u);
  free_aligned_buffer_page_end(dst_v);
}
  1666. TEST_F(LibYUVConvertTest, TestMJPGToI420_NV21) {
  1667. int width = 0;
  1668. int height = 0;
  1669. int ret = MJPGSize(kTest2Jpg, kTest2JpgLen, &width, &height);
  1670. EXPECT_EQ(0, ret);
  1671. int half_width = (width + 1) / 2;
  1672. int half_height = (height + 1) / 2;
  1673. int benchmark_iterations = benchmark_iterations_ * benchmark_width_ *
  1674. benchmark_height_ / (width * height);
  1675. // Convert to NV21
  1676. align_buffer_page_end(dst_y, width * height);
  1677. align_buffer_page_end(dst_vu, half_width * half_height * 2);
  1678. for (int times = 0; times < benchmark_iterations; ++times) {
  1679. ret = MJPGToNV21(kTest2Jpg, kTest2JpgLen, dst_y, width, dst_vu,
  1680. half_width * 2, width, height, width, height);
  1681. }
  1682. // Expect sucesss
  1683. EXPECT_EQ(0, ret);
  1684. // Convert to I420
  1685. align_buffer_page_end(dst2_y, width * height);
  1686. align_buffer_page_end(dst2_u, half_width * half_height);
  1687. align_buffer_page_end(dst2_v, half_width * half_height);
  1688. for (int times = 0; times < benchmark_iterations; ++times) {
  1689. ret = MJPGToI420(kTest2Jpg, kTest2JpgLen, dst2_y, width, dst2_u, half_width,
  1690. dst2_v, half_width, width, height, width, height);
  1691. }
  1692. // Expect sucesss
  1693. EXPECT_EQ(0, ret);
  1694. // Convert I420 to NV21
  1695. align_buffer_page_end(dst3_y, width * height);
  1696. align_buffer_page_end(dst3_vu, half_width * half_height * 2);
  1697. I420ToNV21(dst2_y, width, dst2_u, half_width, dst2_v, half_width, dst3_y,
  1698. width, dst3_vu, half_width * 2, width, height);
  1699. for (int i = 0; i < width * height; ++i) {
  1700. EXPECT_EQ(dst_y[i], dst3_y[i]);
  1701. }
  1702. for (int i = 0; i < half_width * half_height * 2; ++i) {
  1703. EXPECT_EQ(dst_vu[i], dst3_vu[i]);
  1704. EXPECT_EQ(dst_vu[i], dst3_vu[i]);
  1705. }
  1706. free_aligned_buffer_page_end(dst3_y);
  1707. free_aligned_buffer_page_end(dst3_vu);
  1708. free_aligned_buffer_page_end(dst2_y);
  1709. free_aligned_buffer_page_end(dst2_u);
  1710. free_aligned_buffer_page_end(dst2_v);
  1711. free_aligned_buffer_page_end(dst_y);
  1712. free_aligned_buffer_page_end(dst_vu);
  1713. }
  1714. TEST_F(LibYUVConvertTest, TestMJPGToNV21_420) {
  1715. int width = 0;
  1716. int height = 0;
  1717. int ret = MJPGSize(kTest2Jpg, kTest2JpgLen, &width, &height);
  1718. EXPECT_EQ(0, ret);
  1719. int half_width = (width + 1) / 2;
  1720. int half_height = (height + 1) / 2;
  1721. int benchmark_iterations = benchmark_iterations_ * benchmark_width_ *
  1722. benchmark_height_ / (width * height);
  1723. align_buffer_page_end(dst_y, width * height);
  1724. align_buffer_page_end(dst_uv, half_width * half_height * 2);
  1725. for (int times = 0; times < benchmark_iterations; ++times) {
  1726. ret = MJPGToNV21(kTest2Jpg, kTest2JpgLen, dst_y, width, dst_uv,
  1727. half_width * 2, width, height, width, height);
  1728. }
  1729. // Expect sucesss
  1730. EXPECT_EQ(0, ret);
  1731. // Test result matches known hash value.
  1732. uint32_t dst_y_hash = HashDjb2(dst_y, width * height, 5381);
  1733. uint32_t dst_uv_hash = HashDjb2(dst_uv, half_width * half_height * 2, 5381);
  1734. EXPECT_EQ(dst_y_hash, 2682851208u);
  1735. EXPECT_EQ(dst_uv_hash, 1069662856u);
  1736. free_aligned_buffer_page_end(dst_y);
  1737. free_aligned_buffer_page_end(dst_uv);
  1738. }
  1739. TEST_F(LibYUVConvertTest, TestMJPGToNV21_422) {
  1740. int width = 0;
  1741. int height = 0;
  1742. int ret = MJPGSize(kTest3Jpg, kTest3JpgLen, &width, &height);
  1743. EXPECT_EQ(0, ret);
  1744. int half_width = (width + 1) / 2;
  1745. int half_height = (height + 1) / 2;
  1746. int benchmark_iterations = benchmark_iterations_ * benchmark_width_ *
  1747. benchmark_height_ / (width * height);
  1748. align_buffer_page_end(dst_y, width * height);
  1749. align_buffer_page_end(dst_uv, half_width * half_height * 2);
  1750. for (int times = 0; times < benchmark_iterations; ++times) {
  1751. ret = MJPGToNV21(kTest3Jpg, kTest3JpgLen, dst_y, width, dst_uv,
  1752. half_width * 2, width, height, width, height);
  1753. }
  1754. // Expect sucesss
  1755. EXPECT_EQ(0, ret);
  1756. // Test result matches known hash value.
  1757. uint32_t dst_y_hash = HashDjb2(dst_y, width * height, 5381);
  1758. uint32_t dst_uv_hash = HashDjb2(dst_uv, half_width * half_height * 2, 5381);
  1759. EXPECT_EQ(dst_y_hash, 2682851208u);
  1760. EXPECT_EQ(dst_uv_hash, 3543430771u);
  1761. free_aligned_buffer_page_end(dst_y);
  1762. free_aligned_buffer_page_end(dst_uv);
  1763. }
  1764. TEST_F(LibYUVConvertTest, TestMJPGToNV21_400) {
  1765. int width = 0;
  1766. int height = 0;
  1767. int ret = MJPGSize(kTest0Jpg, kTest0JpgLen, &width, &height);
  1768. EXPECT_EQ(0, ret);
  1769. int half_width = (width + 1) / 2;
  1770. int half_height = (height + 1) / 2;
  1771. int benchmark_iterations = benchmark_iterations_ * benchmark_width_ *
  1772. benchmark_height_ / (width * height);
  1773. align_buffer_page_end(dst_y, width * height);
  1774. align_buffer_page_end(dst_uv, half_width * half_height * 2);
  1775. for (int times = 0; times < benchmark_iterations; ++times) {
  1776. ret = MJPGToNV21(kTest0Jpg, kTest0JpgLen, dst_y, width, dst_uv,
  1777. half_width * 2, width, height, width, height);
  1778. }
  1779. // Expect sucesss
  1780. EXPECT_EQ(0, ret);
  1781. // Test result matches known hash value.
  1782. uint32_t dst_y_hash = HashDjb2(dst_y, width * height, 5381);
  1783. uint32_t dst_uv_hash = HashDjb2(dst_uv, half_width * half_height * 2, 5381);
  1784. EXPECT_EQ(dst_y_hash, 330644005u);
  1785. EXPECT_EQ(dst_uv_hash, 135214341u);
  1786. free_aligned_buffer_page_end(dst_y);
  1787. free_aligned_buffer_page_end(dst_uv);
  1788. }
  1789. TEST_F(LibYUVConvertTest, TestMJPGToNV21_444) {
  1790. int width = 0;
  1791. int height = 0;
  1792. int ret = MJPGSize(kTest1Jpg, kTest1JpgLen, &width, &height);
  1793. EXPECT_EQ(0, ret);
  1794. int half_width = (width + 1) / 2;
  1795. int half_height = (height + 1) / 2;
  1796. int benchmark_iterations = benchmark_iterations_ * benchmark_width_ *
  1797. benchmark_height_ / (width * height);
  1798. align_buffer_page_end(dst_y, width * height);
  1799. align_buffer_page_end(dst_uv, half_width * half_height * 2);
  1800. for (int times = 0; times < benchmark_iterations; ++times) {
  1801. ret = MJPGToNV21(kTest1Jpg, kTest1JpgLen, dst_y, width, dst_uv,
  1802. half_width * 2, width, height, width, height);
  1803. }
  1804. // Expect sucesss
  1805. EXPECT_EQ(0, ret);
  1806. // Test result matches known hash value.
  1807. uint32_t dst_y_hash = HashDjb2(dst_y, width * height, 5381);
  1808. uint32_t dst_uv_hash = HashDjb2(dst_uv, half_width * half_height * 2, 5381);
  1809. EXPECT_EQ(dst_y_hash, 2682851208u);
  1810. EXPECT_EQ(dst_uv_hash, 506143297u);
  1811. free_aligned_buffer_page_end(dst_y);
  1812. free_aligned_buffer_page_end(dst_uv);
  1813. }
// Decode the J422 test jpeg to ARGB and hash-check the output.
TEST_F(LibYUVConvertTest, TestMJPGToARGB) {
  int width = 0;
  int height = 0;
  int ret = MJPGSize(kTest3Jpg, kTest3JpgLen, &width, &height);
  EXPECT_EQ(0, ret);
  // Scale the iteration count so benchmark work is comparable regardless of
  // the (small) jpeg dimensions.
  int benchmark_iterations = benchmark_iterations_ * benchmark_width_ *
                             benchmark_height_ / (width * height);
  align_buffer_page_end(dst_argb, width * height * 4);
  for (int times = 0; times < benchmark_iterations; ++times) {
    ret = MJPGToARGB(kTest3Jpg, kTest3JpgLen, dst_argb, width * 4, width,
                     height, width, height);
  }
  // Expect success.
  EXPECT_EQ(0, ret);
  // Test result matches known hash value.
  // NOTE(review): only width * height bytes are hashed -- one quarter of the
  // 4-bytes-per-pixel ARGB buffer. The expected constant was computed over
  // that span, so widening the hash would require regenerating the constant.
  uint32_t dst_argb_hash = HashDjb2(dst_argb, width * height, 5381);
  EXPECT_EQ(dst_argb_hash, 2355976473u);
  free_aligned_buffer_page_end(dst_argb);
}
  1833. static int ShowJPegInfo(const uint8_t* sample, size_t sample_size) {
  1834. MJpegDecoder mjpeg_decoder;
  1835. LIBYUV_BOOL ret = mjpeg_decoder.LoadFrame(sample, sample_size);
  1836. int width = mjpeg_decoder.GetWidth();
  1837. int height = mjpeg_decoder.GetHeight();
  1838. // YUV420
  1839. if (mjpeg_decoder.GetColorSpace() == MJpegDecoder::kColorSpaceYCbCr &&
  1840. mjpeg_decoder.GetNumComponents() == 3 &&
  1841. mjpeg_decoder.GetVertSampFactor(0) == 2 &&
  1842. mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
  1843. mjpeg_decoder.GetVertSampFactor(1) == 1 &&
  1844. mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
  1845. mjpeg_decoder.GetVertSampFactor(2) == 1 &&
  1846. mjpeg_decoder.GetHorizSampFactor(2) == 1) {
  1847. printf("JPeg is J420, %dx%d %d bytes\n", width, height,
  1848. static_cast<int>(sample_size));
  1849. // YUV422
  1850. } else if (mjpeg_decoder.GetColorSpace() == MJpegDecoder::kColorSpaceYCbCr &&
  1851. mjpeg_decoder.GetNumComponents() == 3 &&
  1852. mjpeg_decoder.GetVertSampFactor(0) == 1 &&
  1853. mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
  1854. mjpeg_decoder.GetVertSampFactor(1) == 1 &&
  1855. mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
  1856. mjpeg_decoder.GetVertSampFactor(2) == 1 &&
  1857. mjpeg_decoder.GetHorizSampFactor(2) == 1) {
  1858. printf("JPeg is J422, %dx%d %d bytes\n", width, height,
  1859. static_cast<int>(sample_size));
  1860. // YUV444
  1861. } else if (mjpeg_decoder.GetColorSpace() == MJpegDecoder::kColorSpaceYCbCr &&
  1862. mjpeg_decoder.GetNumComponents() == 3 &&
  1863. mjpeg_decoder.GetVertSampFactor(0) == 1 &&
  1864. mjpeg_decoder.GetHorizSampFactor(0) == 1 &&
  1865. mjpeg_decoder.GetVertSampFactor(1) == 1 &&
  1866. mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
  1867. mjpeg_decoder.GetVertSampFactor(2) == 1 &&
  1868. mjpeg_decoder.GetHorizSampFactor(2) == 1) {
  1869. printf("JPeg is J444, %dx%d %d bytes\n", width, height,
  1870. static_cast<int>(sample_size));
  1871. // YUV400
  1872. } else if (mjpeg_decoder.GetColorSpace() ==
  1873. MJpegDecoder::kColorSpaceGrayscale &&
  1874. mjpeg_decoder.GetNumComponents() == 1 &&
  1875. mjpeg_decoder.GetVertSampFactor(0) == 1 &&
  1876. mjpeg_decoder.GetHorizSampFactor(0) == 1) {
  1877. printf("JPeg is J400, %dx%d %d bytes\n", width, height,
  1878. static_cast<int>(sample_size));
  1879. } else {
  1880. // Unknown colorspace.
  1881. printf("JPeg is Unknown colorspace.\n");
  1882. }
  1883. mjpeg_decoder.UnloadFrame();
  1884. return ret;
  1885. }
// All embedded jpegs must parse; ShowJPegInfo forwards LoadFrame's result,
// so 1 means the frame loaded (even for the unsupported layout).
TEST_F(LibYUVConvertTest, TestMJPGInfo) {
  EXPECT_EQ(1, ShowJPegInfo(kTest0Jpg, kTest0JpgLen));
  EXPECT_EQ(1, ShowJPegInfo(kTest1Jpg, kTest1JpgLen));
  EXPECT_EQ(1, ShowJPegInfo(kTest2Jpg, kTest2JpgLen));
  EXPECT_EQ(1, ShowJPegInfo(kTest3Jpg, kTest3JpgLen));
  EXPECT_EQ(1, ShowJPegInfo(kTest4Jpg,
                            kTest4JpgLen));  // Valid but unsupported.
}
  1894. #endif // HAVE_JPEG
  1895. TEST_F(LibYUVConvertTest, NV12Crop) {
  1896. const int SUBSAMP_X = 2;
  1897. const int SUBSAMP_Y = 2;
  1898. const int kWidth = benchmark_width_;
  1899. const int kHeight = benchmark_height_;
  1900. const int crop_y =
  1901. ((benchmark_height_ - (benchmark_height_ * 360 / 480)) / 2 + 1) & ~1;
  1902. const int kDestWidth = benchmark_width_;
  1903. const int kDestHeight = benchmark_height_ - crop_y * 2;
  1904. const int kStrideUV = SUBSAMPLE(kWidth, SUBSAMP_X);
  1905. const int sample_size =
  1906. kWidth * kHeight + kStrideUV * SUBSAMPLE(kHeight, SUBSAMP_Y) * 2;
  1907. align_buffer_page_end(src_y, sample_size);
  1908. uint8_t* src_uv = src_y + kWidth * kHeight;
  1909. align_buffer_page_end(dst_y, kDestWidth * kDestHeight);
  1910. align_buffer_page_end(dst_u, SUBSAMPLE(kDestWidth, SUBSAMP_X) *
  1911. SUBSAMPLE(kDestHeight, SUBSAMP_Y));
  1912. align_buffer_page_end(dst_v, SUBSAMPLE(kDestWidth, SUBSAMP_X) *
  1913. SUBSAMPLE(kDestHeight, SUBSAMP_Y));
  1914. align_buffer_page_end(dst_y_2, kDestWidth * kDestHeight);
  1915. align_buffer_page_end(dst_u_2, SUBSAMPLE(kDestWidth, SUBSAMP_X) *
  1916. SUBSAMPLE(kDestHeight, SUBSAMP_Y));
  1917. align_buffer_page_end(dst_v_2, SUBSAMPLE(kDestWidth, SUBSAMP_X) *
  1918. SUBSAMPLE(kDestHeight, SUBSAMP_Y));
  1919. for (int i = 0; i < kHeight * kWidth; ++i) {
  1920. src_y[i] = (fastrand() & 0xff);
  1921. }
  1922. for (int i = 0; i < (SUBSAMPLE(kHeight, SUBSAMP_Y) * kStrideUV) * 2; ++i) {
  1923. src_uv[i] = (fastrand() & 0xff);
  1924. }
  1925. memset(dst_y, 1, kDestWidth * kDestHeight);
  1926. memset(dst_u, 2,
  1927. SUBSAMPLE(kDestWidth, SUBSAMP_X) * SUBSAMPLE(kDestHeight, SUBSAMP_Y));
  1928. memset(dst_v, 3,
  1929. SUBSAMPLE(kDestWidth, SUBSAMP_X) * SUBSAMPLE(kDestHeight, SUBSAMP_Y));
  1930. memset(dst_y_2, 1, kDestWidth * kDestHeight);
  1931. memset(dst_u_2, 2,
  1932. SUBSAMPLE(kDestWidth, SUBSAMP_X) * SUBSAMPLE(kDestHeight, SUBSAMP_Y));
  1933. memset(dst_v_2, 3,
  1934. SUBSAMPLE(kDestWidth, SUBSAMP_X) * SUBSAMPLE(kDestHeight, SUBSAMP_Y));
  1935. ConvertToI420(src_y, sample_size, dst_y_2, kDestWidth, dst_u_2,
  1936. SUBSAMPLE(kDestWidth, SUBSAMP_X), dst_v_2,
  1937. SUBSAMPLE(kDestWidth, SUBSAMP_X), 0, crop_y, kWidth, kHeight,
  1938. kDestWidth, kDestHeight, libyuv::kRotate0, libyuv::FOURCC_NV12);
  1939. NV12ToI420(src_y + crop_y * kWidth, kWidth,
  1940. src_uv + (crop_y / 2) * kStrideUV * 2, kStrideUV * 2, dst_y,
  1941. kDestWidth, dst_u, SUBSAMPLE(kDestWidth, SUBSAMP_X), dst_v,
  1942. SUBSAMPLE(kDestWidth, SUBSAMP_X), kDestWidth, kDestHeight);
  1943. for (int i = 0; i < kDestHeight; ++i) {
  1944. for (int j = 0; j < kDestWidth; ++j) {
  1945. EXPECT_EQ(dst_y[i * kWidth + j], dst_y_2[i * kWidth + j]);
  1946. }
  1947. }
  1948. for (int i = 0; i < SUBSAMPLE(kDestHeight, SUBSAMP_Y); ++i) {
  1949. for (int j = 0; j < SUBSAMPLE(kDestWidth, SUBSAMP_X); ++j) {
  1950. EXPECT_EQ(dst_u[i * SUBSAMPLE(kDestWidth, SUBSAMP_X) + j],
  1951. dst_u_2[i * SUBSAMPLE(kDestWidth, SUBSAMP_X) + j]);
  1952. }
  1953. }
  1954. for (int i = 0; i < SUBSAMPLE(kDestHeight, SUBSAMP_Y); ++i) {
  1955. for (int j = 0; j < SUBSAMPLE(kDestWidth, SUBSAMP_X); ++j) {
  1956. EXPECT_EQ(dst_v[i * SUBSAMPLE(kDestWidth, SUBSAMP_X) + j],
  1957. dst_v_2[i * SUBSAMPLE(kDestWidth, SUBSAMP_X) + j]);
  1958. }
  1959. }
  1960. free_aligned_buffer_page_end(dst_y);
  1961. free_aligned_buffer_page_end(dst_u);
  1962. free_aligned_buffer_page_end(dst_v);
  1963. free_aligned_buffer_page_end(dst_y_2);
  1964. free_aligned_buffer_page_end(dst_u_2);
  1965. free_aligned_buffer_page_end(dst_v_2);
  1966. free_aligned_buffer_page_end(src_y);
  1967. }
// Crop an I420 frame with an odd top offset (crop_y = 1). The Y plane shifts
// by one full row while the chroma offset rounds down (crop_y / 2 == 0).
TEST_F(LibYUVConvertTest, I420CropOddY) {
  const int SUBSAMP_X = 2;
  const int SUBSAMP_Y = 2;
  const int kWidth = benchmark_width_;
  const int kHeight = benchmark_height_;
  const int crop_y = 1;
  const int kDestWidth = benchmark_width_;
  const int kDestHeight = benchmark_height_ - crop_y * 2;
  const int kStrideU = SUBSAMPLE(kWidth, SUBSAMP_X);
  const int kStrideV = SUBSAMPLE(kWidth, SUBSAMP_X);
  // Contiguous I420 sample: Y plane, then U, then V.
  const int sample_size = kWidth * kHeight +
                          kStrideU * SUBSAMPLE(kHeight, SUBSAMP_Y) +
                          kStrideV * SUBSAMPLE(kHeight, SUBSAMP_Y);
  align_buffer_page_end(src_y, sample_size);
  uint8_t* src_u = src_y + kWidth * kHeight;
  uint8_t* src_v = src_u + kStrideU * SUBSAMPLE(kHeight, SUBSAMP_Y);
  align_buffer_page_end(dst_y, kDestWidth * kDestHeight);
  align_buffer_page_end(dst_u, SUBSAMPLE(kDestWidth, SUBSAMP_X) *
                                   SUBSAMPLE(kDestHeight, SUBSAMP_Y));
  align_buffer_page_end(dst_v, SUBSAMPLE(kDestWidth, SUBSAMP_X) *
                                   SUBSAMPLE(kDestHeight, SUBSAMP_Y));
  for (int i = 0; i < kHeight * kWidth; ++i) {
    src_y[i] = (fastrand() & 0xff);
  }
  for (int i = 0; i < SUBSAMPLE(kHeight, SUBSAMP_Y) * kStrideU; ++i) {
    src_u[i] = (fastrand() & 0xff);
  }
  for (int i = 0; i < SUBSAMPLE(kHeight, SUBSAMP_Y) * kStrideV; ++i) {
    src_v[i] = (fastrand() & 0xff);
  }
  // Distinct fill patterns so untouched destination bytes are detectable.
  memset(dst_y, 1, kDestWidth * kDestHeight);
  memset(dst_u, 2,
         SUBSAMPLE(kDestWidth, SUBSAMP_X) * SUBSAMPLE(kDestHeight, SUBSAMP_Y));
  memset(dst_v, 3,
         SUBSAMPLE(kDestWidth, SUBSAMP_X) * SUBSAMPLE(kDestHeight, SUBSAMP_Y));
  MaskCpuFlags(benchmark_cpu_info_);
  for (int i = 0; i < benchmark_iterations_; ++i) {
    ConvertToI420(src_y, sample_size, dst_y, kDestWidth, dst_u,
                  SUBSAMPLE(kDestWidth, SUBSAMP_X), dst_v,
                  SUBSAMPLE(kDestWidth, SUBSAMP_X), 0, crop_y, kWidth, kHeight,
                  kDestWidth, kDestHeight, libyuv::kRotate0,
                  libyuv::FOURCC_I420);
  }
  // Y must be a straight copy starting crop_y rows into the source.
  for (int i = 0; i < kDestHeight; ++i) {
    for (int j = 0; j < kDestWidth; ++j) {
      EXPECT_EQ(src_y[crop_y * kWidth + i * kWidth + j],
                dst_y[i * kDestWidth + j]);
    }
  }
  // Chroma offset rounds down: crop_y / 2 == 0, so U/V copy from row 0.
  for (int i = 0; i < SUBSAMPLE(kDestHeight, SUBSAMP_Y); ++i) {
    for (int j = 0; j < SUBSAMPLE(kDestWidth, SUBSAMP_X); ++j) {
      EXPECT_EQ(src_u[(crop_y / 2 + i) * kStrideU + j],
                dst_u[i * SUBSAMPLE(kDestWidth, SUBSAMP_X) + j]);
    }
  }
  for (int i = 0; i < SUBSAMPLE(kDestHeight, SUBSAMP_Y); ++i) {
    for (int j = 0; j < SUBSAMPLE(kDestWidth, SUBSAMP_X); ++j) {
      EXPECT_EQ(src_v[(crop_y / 2 + i) * kStrideV + j],
                dst_v[i * SUBSAMPLE(kDestWidth, SUBSAMP_X) + j]);
    }
  }
  free_aligned_buffer_page_end(dst_y);
  free_aligned_buffer_page_end(dst_u);
  free_aligned_buffer_page_end(dst_v);
  free_aligned_buffer_page_end(src_y);
}
  2034. TEST_F(LibYUVConvertTest, TestYToARGB) {
  2035. uint8_t y[32];
  2036. uint8_t expectedg[32];
  2037. for (int i = 0; i < 32; ++i) {
  2038. y[i] = i * 5 + 17;
  2039. expectedg[i] = static_cast<int>((y[i] - 16) * 1.164f + 0.5f);
  2040. }
  2041. uint8_t argb[32 * 4];
  2042. YToARGB(y, 0, argb, 0, 32, 1);
  2043. for (int i = 0; i < 32; ++i) {
  2044. printf("%2d %d: %d <-> %d,%d,%d,%d\n", i, y[i], expectedg[i],
  2045. argb[i * 4 + 0], argb[i * 4 + 1], argb[i * 4 + 2], argb[i * 4 + 3]);
  2046. }
  2047. for (int i = 0; i < 32; ++i) {
  2048. EXPECT_EQ(expectedg[i], argb[i * 4 + 0]);
  2049. }
  2050. }
// All-zero dither table: adds nothing to any pixel, so dithered output must
// equal undithered output exactly.
static const uint8_t kNoDither4x4[16] = {
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
};
// ARGBToRGB565Dither with the all-zero table must match plain ARGBToRGB565
// byte for byte.
TEST_F(LibYUVConvertTest, TestNoDither) {
  align_buffer_page_end(src_argb, benchmark_width_ * benchmark_height_ * 4);
  align_buffer_page_end(dst_rgb565, benchmark_width_ * benchmark_height_ * 2);
  align_buffer_page_end(dst_rgb565dither,
                        benchmark_width_ * benchmark_height_ * 2);
  MemRandomize(src_argb, benchmark_width_ * benchmark_height_ * 4);
  // Destinations are randomized too so stale bytes cannot match by accident.
  MemRandomize(dst_rgb565, benchmark_width_ * benchmark_height_ * 2);
  MemRandomize(dst_rgb565dither, benchmark_width_ * benchmark_height_ * 2);
  ARGBToRGB565(src_argb, benchmark_width_ * 4, dst_rgb565, benchmark_width_ * 2,
               benchmark_width_, benchmark_height_);
  ARGBToRGB565Dither(src_argb, benchmark_width_ * 4, dst_rgb565dither,
                     benchmark_width_ * 2, kNoDither4x4, benchmark_width_,
                     benchmark_height_);
  for (int i = 0; i < benchmark_width_ * benchmark_height_ * 2; ++i) {
    EXPECT_EQ(dst_rgb565[i], dst_rgb565dither[i]);
  }
  free_aligned_buffer_page_end(src_argb);
  free_aligned_buffer_page_end(dst_rgb565);
  free_aligned_buffer_page_end(dst_rgb565dither);
}
// Ordered 4x4 dither for 888 to 565. Values from 0 to 7.
// NOTE(review): presumably tiled over the image in 4x4 blocks by
// ARGBToRGB565Dither -- confirm against that function's row indexing.
static const uint8_t kDither565_4x4[16] = {
    0, 4, 1, 5, 6, 2, 7, 3, 1, 5, 0, 4, 7, 3, 6, 2,
};
// Dithered and undithered 565 conversions of the same source must stay close
// once expanded back to ARGB.
TEST_F(LibYUVConvertTest, TestDither) {
  align_buffer_page_end(src_argb, benchmark_width_ * benchmark_height_ * 4);
  align_buffer_page_end(dst_rgb565, benchmark_width_ * benchmark_height_ * 2);
  align_buffer_page_end(dst_rgb565dither,
                        benchmark_width_ * benchmark_height_ * 2);
  align_buffer_page_end(dst_argb, benchmark_width_ * benchmark_height_ * 4);
  align_buffer_page_end(dst_argbdither,
                        benchmark_width_ * benchmark_height_ * 4);
  MemRandomize(src_argb, benchmark_width_ * benchmark_height_ * 4);
  MemRandomize(dst_rgb565, benchmark_width_ * benchmark_height_ * 2);
  MemRandomize(dst_rgb565dither, benchmark_width_ * benchmark_height_ * 2);
  MemRandomize(dst_argb, benchmark_width_ * benchmark_height_ * 4);
  MemRandomize(dst_argbdither, benchmark_width_ * benchmark_height_ * 4);
  ARGBToRGB565(src_argb, benchmark_width_ * 4, dst_rgb565, benchmark_width_ * 2,
               benchmark_width_, benchmark_height_);
  ARGBToRGB565Dither(src_argb, benchmark_width_ * 4, dst_rgb565dither,
                     benchmark_width_ * 2, kDither565_4x4, benchmark_width_,
                     benchmark_height_);
  // Expand both 565 buffers back to ARGB so they can be compared per byte.
  RGB565ToARGB(dst_rgb565, benchmark_width_ * 2, dst_argb, benchmark_width_ * 4,
               benchmark_width_, benchmark_height_);
  RGB565ToARGB(dst_rgb565dither, benchmark_width_ * 2, dst_argbdither,
               benchmark_width_ * 4, benchmark_width_, benchmark_height_);
  // Tolerance 9: the dither table adds up to 7 before 5/6-bit quantization
  // (step 8) -- presumably chosen to cover one quantization step plus
  // rounding; confirm if the table or expansion changes.
  for (int i = 0; i < benchmark_width_ * benchmark_height_ * 4; ++i) {
    EXPECT_NEAR(dst_argb[i], dst_argbdither[i], 9);
  }
  free_aligned_buffer_page_end(src_argb);
  free_aligned_buffer_page_end(dst_rgb565);
  free_aligned_buffer_page_end(dst_rgb565dither);
  free_aligned_buffer_page_end(dst_argb);
  free_aligned_buffer_page_end(dst_argbdither);
}
// Generates a test that converts planar YUV to FMT_B through the Dither
// entry point with a NULL dither table, running both the C path (CPU flags
// masked off) and the optimized path, then expands both results to FMT_C
// (ARGB) so the packed output can be compared byte by byte.
#define TESTPLANARTOBID(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
                        YALIGN, W1280, N, NEG, OFF, FMT_C, BPP_C)              \
  TEST_F(LibYUVConvertTest, FMT_PLANAR##To##FMT_B##Dither##N) {                \
    const int kWidth = ((W1280) > 0) ? (W1280) : 1;                            \
    const int kHeight = ALIGNINT(benchmark_height_, YALIGN);                   \
    const int kStrideB = ALIGNINT(kWidth * BPP_B, ALIGN);                      \
    const int kStrideUV = SUBSAMPLE(kWidth, SUBSAMP_X);                        \
    const int kSizeUV = kStrideUV * SUBSAMPLE(kHeight, SUBSAMP_Y);             \
    align_buffer_page_end(src_y, kWidth* kHeight + OFF);                       \
    align_buffer_page_end(src_u, kSizeUV + OFF);                               \
    align_buffer_page_end(src_v, kSizeUV + OFF);                               \
    align_buffer_page_end(dst_argb_c, kStrideB* kHeight + OFF);                \
    align_buffer_page_end(dst_argb_opt, kStrideB* kHeight + OFF);              \
    for (int i = 0; i < kWidth * kHeight; ++i) {                               \
      src_y[i + OFF] = (fastrand() & 0xff);                                    \
    }                                                                          \
    for (int i = 0; i < kSizeUV; ++i) {                                        \
      src_u[i + OFF] = (fastrand() & 0xff);                                    \
      src_v[i + OFF] = (fastrand() & 0xff);                                    \
    }                                                                          \
    memset(dst_argb_c + OFF, 1, kStrideB * kHeight);                           \
    memset(dst_argb_opt + OFF, 101, kStrideB * kHeight);                       \
    MaskCpuFlags(disable_cpu_flags_);                                          \
    FMT_PLANAR##To##FMT_B##Dither(src_y + OFF, kWidth, src_u + OFF, kStrideUV, \
                                  src_v + OFF, kStrideUV, dst_argb_c + OFF,    \
                                  kStrideB, NULL, kWidth, NEG kHeight);        \
    MaskCpuFlags(benchmark_cpu_info_);                                         \
    for (int i = 0; i < benchmark_iterations_; ++i) {                          \
      FMT_PLANAR##To##FMT_B##Dither(                                           \
          src_y + OFF, kWidth, src_u + OFF, kStrideUV, src_v + OFF, kStrideUV, \
          dst_argb_opt + OFF, kStrideB, NULL, kWidth, NEG kHeight);            \
    }                                                                          \
    /* Convert to ARGB so 565 is expanded to bytes that can be compared. */    \
    align_buffer_page_end(dst_argb32_c, kWidth* BPP_C* kHeight);               \
    align_buffer_page_end(dst_argb32_opt, kWidth* BPP_C* kHeight);             \
    memset(dst_argb32_c, 2, kWidth* BPP_C* kHeight);                           \
    memset(dst_argb32_opt, 102, kWidth* BPP_C* kHeight);                       \
    FMT_B##To##FMT_C(dst_argb_c + OFF, kStrideB, dst_argb32_c, kWidth * BPP_C, \
                     kWidth, kHeight);                                         \
    FMT_B##To##FMT_C(dst_argb_opt + OFF, kStrideB, dst_argb32_opt,             \
                     kWidth * BPP_C, kWidth, kHeight);                         \
    for (int i = 0; i < kWidth * BPP_C * kHeight; ++i) {                       \
      EXPECT_EQ(dst_argb32_c[i], dst_argb32_opt[i]);                           \
    }                                                                          \
    free_aligned_buffer_page_end(src_y);                                       \
    free_aligned_buffer_page_end(src_u);                                       \
    free_aligned_buffer_page_end(src_v);                                       \
    free_aligned_buffer_page_end(dst_argb_c);                                  \
    free_aligned_buffer_page_end(dst_argb_opt);                                \
    free_aligned_buffer_page_end(dst_argb32_c);                                \
    free_aligned_buffer_page_end(dst_argb32_opt);                              \
  }
// Expands to four variants of the dither test: _Any (width reduced by 4),
// _Unaligned (buffers offset by 1), _Invert (negative height), and _Opt.
#define TESTPLANARTOBD(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
                       YALIGN, FMT_C, BPP_C)                                  \
  TESTPLANARTOBID(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN,      \
                  YALIGN, benchmark_width_ - 4, _Any, +, 0, FMT_C, BPP_C)     \
  TESTPLANARTOBID(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN,      \
                  YALIGN, benchmark_width_, _Unaligned, +, 1, FMT_C, BPP_C)   \
  TESTPLANARTOBID(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN,      \
                  YALIGN, benchmark_width_, _Invert, -, 0, FMT_C, BPP_C)      \
  TESTPLANARTOBID(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN,      \
                  YALIGN, benchmark_width_, _Opt, +, 0, FMT_C, BPP_C)
#ifdef LITTLE_ENDIAN_ONLY_TEST
// RGB565 packing is byte-order sensitive, so only tested on little endian.
TESTPLANARTOBD(I420, 2, 2, RGB565, 2, 2, 1, ARGB, 4)
#endif
// TESTPTOB(NAME, UYVYTOI420, UYVYTONV12) defines a test that converts a
// packed 4:2:2 source (YUY2 or UYVY) directly to NV12 and verifies the
// result against a two-step reference: packed -> I420 -> NV12.  The direct
// conversion's Y plane is additionally checked against the packed source's
// Y samples.
#define TESTPTOB(NAME, UYVYTOI420, UYVYTONV12)                              \
  TEST_F(LibYUVConvertTest, NAME) {                                         \
    const int kWidth = benchmark_width_;                                    \
    const int kHeight = benchmark_height_;                                  \
                                                                            \
    align_buffer_page_end(orig_uyvy, 4 * SUBSAMPLE(kWidth, 2) * kHeight);   \
    align_buffer_page_end(orig_y, kWidth* kHeight);                         \
    align_buffer_page_end(orig_u,                                           \
                          SUBSAMPLE(kWidth, 2) * SUBSAMPLE(kHeight, 2));    \
    align_buffer_page_end(orig_v,                                           \
                          SUBSAMPLE(kWidth, 2) * SUBSAMPLE(kHeight, 2));    \
                                                                            \
    align_buffer_page_end(dst_y_orig, kWidth* kHeight);                     \
    align_buffer_page_end(dst_uv_orig,                                      \
                          2 * SUBSAMPLE(kWidth, 2) * SUBSAMPLE(kHeight, 2)); \
                                                                            \
    align_buffer_page_end(dst_y, kWidth* kHeight);                          \
    align_buffer_page_end(dst_uv,                                           \
                          2 * SUBSAMPLE(kWidth, 2) * SUBSAMPLE(kHeight, 2)); \
                                                                            \
    MemRandomize(orig_uyvy, 4 * SUBSAMPLE(kWidth, 2) * kHeight);            \
                                                                            \
    /* Convert UYVY to NV12 in 2 steps for reference */                     \
    libyuv::UYVYTOI420(orig_uyvy, 4 * SUBSAMPLE(kWidth, 2), orig_y, kWidth, \
                       orig_u, SUBSAMPLE(kWidth, 2), orig_v,                \
                       SUBSAMPLE(kWidth, 2), kWidth, kHeight);              \
    libyuv::I420ToNV12(orig_y, kWidth, orig_u, SUBSAMPLE(kWidth, 2), orig_v, \
                       SUBSAMPLE(kWidth, 2), dst_y_orig, kWidth, dst_uv_orig, \
                       2 * SUBSAMPLE(kWidth, 2), kWidth, kHeight);          \
                                                                            \
    /* Convert to NV12 */                                                   \
    for (int i = 0; i < benchmark_iterations_; ++i) {                       \
      libyuv::UYVYTONV12(orig_uyvy, 4 * SUBSAMPLE(kWidth, 2), dst_y, kWidth, \
                         dst_uv, 2 * SUBSAMPLE(kWidth, 2), kWidth, kHeight); \
    }                                                                       \
                                                                            \
    for (int i = 0; i < kWidth * kHeight; ++i) {                            \
      EXPECT_EQ(orig_y[i], dst_y[i]);                                       \
    }                                                                       \
    for (int i = 0; i < kWidth * kHeight; ++i) {                            \
      EXPECT_EQ(dst_y_orig[i], dst_y[i]);                                   \
    }                                                                       \
    for (int i = 0; i < 2 * SUBSAMPLE(kWidth, 2) * SUBSAMPLE(kHeight, 2);   \
         ++i) {                                                             \
      EXPECT_EQ(dst_uv_orig[i], dst_uv[i]);                                 \
    }                                                                       \
                                                                            \
    free_aligned_buffer_page_end(orig_uyvy);                                \
    free_aligned_buffer_page_end(orig_y);                                   \
    free_aligned_buffer_page_end(orig_u);                                   \
    free_aligned_buffer_page_end(orig_v);                                   \
    free_aligned_buffer_page_end(dst_y_orig);                               \
    free_aligned_buffer_page_end(dst_uv_orig);                              \
    free_aligned_buffer_page_end(dst_y);                                    \
    free_aligned_buffer_page_end(dst_uv);                                   \
  }

TESTPTOB(TestYUY2ToNV12, YUY2ToI420, YUY2ToNV12)
TESTPTOB(TestUYVYToNV12, UYVYToI420, UYVYToNV12)
// Transitive tests. A to B to C is same as A to C.
// TESTPLANARTOEI: convert planar A -> B, then B -> C, and compare against
// the direct A -> C conversion.  W1280 selects the tested width, N names
// the test variant, NEG inverts the height, OFF offsets buffers to test
// unaligned pointers.
#define TESTPLANARTOEI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, SUB_B, BPP_B, \
                       W1280, N, NEG, OFF, FMT_C, BPP_C)                      \
  TEST_F(LibYUVConvertTest, FMT_PLANAR##To##FMT_B##_##FMT_C##N) {             \
    const int kWidth = ((W1280) > 0) ? (W1280) : 1;                           \
    const int kHeight = benchmark_height_;                                    \
    const int kStrideB = SUBSAMPLE(kWidth, SUB_B) * BPP_B;                    \
    const int kStrideUV = SUBSAMPLE(kWidth, SUBSAMP_X);                       \
    const int kSizeUV = kStrideUV * SUBSAMPLE(kHeight, SUBSAMP_Y);            \
    align_buffer_page_end(src_y, kWidth* kHeight + OFF);                      \
    align_buffer_page_end(src_u, kSizeUV + OFF);                              \
    align_buffer_page_end(src_v, kSizeUV + OFF);                              \
    align_buffer_page_end(dst_argb_b, kStrideB* kHeight + OFF);               \
    for (int i = 0; i < kWidth * kHeight; ++i) {                              \
      src_y[i + OFF] = (fastrand() & 0xff);                                   \
    }                                                                         \
    for (int i = 0; i < kSizeUV; ++i) {                                       \
      src_u[i + OFF] = (fastrand() & 0xff);                                   \
      src_v[i + OFF] = (fastrand() & 0xff);                                   \
    }                                                                         \
    memset(dst_argb_b + OFF, 1, kStrideB * kHeight);                          \
    for (int i = 0; i < benchmark_iterations_; ++i) {                         \
      FMT_PLANAR##To##FMT_B(src_y + OFF, kWidth, src_u + OFF, kStrideUV,      \
                            src_v + OFF, kStrideUV, dst_argb_b + OFF,         \
                            kStrideB, kWidth, NEG kHeight);                   \
    }                                                                         \
    /* Convert to a 3rd format in 1 step and 2 steps and compare */           \
    const int kStrideC = kWidth * BPP_C;                                      \
    align_buffer_page_end(dst_argb_c, kStrideC* kHeight + OFF);               \
    align_buffer_page_end(dst_argb_bc, kStrideC* kHeight + OFF);              \
    memset(dst_argb_c + OFF, 2, kStrideC * kHeight);                          \
    memset(dst_argb_bc + OFF, 3, kStrideC * kHeight);                         \
    FMT_PLANAR##To##FMT_C(src_y + OFF, kWidth, src_u + OFF, kStrideUV,        \
                          src_v + OFF, kStrideUV, dst_argb_c + OFF, kStrideC, \
                          kWidth, NEG kHeight);                               \
    /* Convert B to C */                                                      \
    FMT_B##To##FMT_C(dst_argb_b + OFF, kStrideB, dst_argb_bc + OFF, kStrideC, \
                     kWidth, kHeight);                                        \
    for (int i = 0; i < kStrideC * kHeight; ++i) {                            \
      EXPECT_EQ(dst_argb_c[i + OFF], dst_argb_bc[i + OFF]);                   \
    }                                                                         \
    free_aligned_buffer_page_end(src_y);                                      \
    free_aligned_buffer_page_end(src_u);                                      \
    free_aligned_buffer_page_end(src_v);                                      \
    free_aligned_buffer_page_end(dst_argb_b);                                 \
    free_aligned_buffer_page_end(dst_argb_c);                                 \
    free_aligned_buffer_page_end(dst_argb_bc);                                \
  }
// Instantiate TESTPLANARTOEI in the standard four variants:
// _Any (odd width), _Unaligned (offset 1), _Invert (negative height),
// and _Opt (aligned benchmark size).
#define TESTPLANARTOE(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, SUB_B, BPP_B, \
                      FMT_C, BPP_C)                                          \
  TESTPLANARTOEI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, SUB_B, BPP_B,      \
                 benchmark_width_ - 4, _Any, +, 0, FMT_C, BPP_C)             \
  TESTPLANARTOEI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, SUB_B, BPP_B,      \
                 benchmark_width_, _Unaligned, +, 1, FMT_C, BPP_C)           \
  TESTPLANARTOEI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, SUB_B, BPP_B,      \
                 benchmark_width_, _Invert, -, 0, FMT_C, BPP_C)              \
  TESTPLANARTOEI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, SUB_B, BPP_B,      \
                 benchmark_width_, _Opt, +, 0, FMT_C, BPP_C)
// 4:2:0 planar sources to RGB formats, checked transitively.
TESTPLANARTOE(I420, 2, 2, ARGB, 1, 4, ABGR, 4)
TESTPLANARTOE(I420, 2, 2, ABGR, 1, 4, ARGB, 4)
TESTPLANARTOE(J420, 2, 2, ARGB, 1, 4, ARGB, 4)
TESTPLANARTOE(J420, 2, 2, ABGR, 1, 4, ARGB, 4)
TESTPLANARTOE(H420, 2, 2, ARGB, 1, 4, ARGB, 4)
TESTPLANARTOE(H420, 2, 2, ABGR, 1, 4, ARGB, 4)
TESTPLANARTOE(U420, 2, 2, ARGB, 1, 4, ARGB, 4)
TESTPLANARTOE(U420, 2, 2, ABGR, 1, 4, ARGB, 4)
TESTPLANARTOE(I420, 2, 2, BGRA, 1, 4, ARGB, 4)
TESTPLANARTOE(I420, 2, 2, RGBA, 1, 4, ARGB, 4)
TESTPLANARTOE(I420, 2, 2, RGB24, 1, 3, ARGB, 4)
TESTPLANARTOE(I420, 2, 2, RAW, 1, 3, RGB24, 3)
TESTPLANARTOE(I420, 2, 2, RGB24, 1, 3, RAW, 3)
TESTPLANARTOE(I420, 2, 2, ARGB, 1, 4, RAW, 3)
TESTPLANARTOE(I420, 2, 2, RAW, 1, 3, ARGB, 4)
TESTPLANARTOE(H420, 2, 2, RGB24, 1, 3, ARGB, 4)
TESTPLANARTOE(H420, 2, 2, RAW, 1, 3, RGB24, 3)
TESTPLANARTOE(H420, 2, 2, RGB24, 1, 3, RAW, 3)
TESTPLANARTOE(H420, 2, 2, ARGB, 1, 4, RAW, 3)
TESTPLANARTOE(H420, 2, 2, RAW, 1, 3, ARGB, 4)
// 16-bit packed RGB destinations are endian dependent.
#ifdef LITTLE_ENDIAN_ONLY_TEST
TESTPLANARTOE(I420, 2, 2, ARGB, 1, 4, RGB565, 2)
TESTPLANARTOE(I420, 2, 2, ARGB, 1, 4, ARGB1555, 2)
TESTPLANARTOE(I420, 2, 2, ARGB, 1, 4, ARGB4444, 2)
TESTPLANARTOE(I422, 2, 1, ARGB, 1, 4, RGB565, 2)
#endif
// 4:2:2 and 4:4:4 planar sources, plus packed YUY2/UYVY intermediates.
TESTPLANARTOE(I422, 2, 1, ARGB, 1, 4, ABGR, 4)
TESTPLANARTOE(I422, 2, 1, ABGR, 1, 4, ARGB, 4)
TESTPLANARTOE(J422, 2, 1, ARGB, 1, 4, ARGB, 4)
TESTPLANARTOE(J422, 2, 1, ABGR, 1, 4, ARGB, 4)
TESTPLANARTOE(H422, 2, 1, ARGB, 1, 4, ARGB, 4)
TESTPLANARTOE(H422, 2, 1, ABGR, 1, 4, ARGB, 4)
TESTPLANARTOE(U422, 2, 1, ARGB, 1, 4, ARGB, 4)
TESTPLANARTOE(U422, 2, 1, ABGR, 1, 4, ARGB, 4)
TESTPLANARTOE(I422, 2, 1, BGRA, 1, 4, ARGB, 4)
TESTPLANARTOE(I422, 2, 1, RGBA, 1, 4, ARGB, 4)
TESTPLANARTOE(I444, 1, 1, ARGB, 1, 4, ABGR, 4)
TESTPLANARTOE(I444, 1, 1, ABGR, 1, 4, ARGB, 4)
TESTPLANARTOE(J444, 1, 1, ARGB, 1, 4, ARGB, 4)
TESTPLANARTOE(J444, 1, 1, ABGR, 1, 4, ARGB, 4)
TESTPLANARTOE(H444, 1, 1, ARGB, 1, 4, ARGB, 4)
TESTPLANARTOE(H444, 1, 1, ABGR, 1, 4, ARGB, 4)
TESTPLANARTOE(U444, 1, 1, ARGB, 1, 4, ARGB, 4)
TESTPLANARTOE(U444, 1, 1, ABGR, 1, 4, ARGB, 4)
TESTPLANARTOE(I420, 2, 2, YUY2, 2, 4, ARGB, 4)
TESTPLANARTOE(I420, 2, 2, UYVY, 2, 4, ARGB, 4)
TESTPLANARTOE(I422, 2, 1, YUY2, 2, 4, ARGB, 4)
TESTPLANARTOE(I422, 2, 1, UYVY, 2, 4, ARGB, 4)
// TESTQPLANARTOEI: transitive test for 4-plane (YUV + alpha) sources.
// Like TESTPLANARTOEI but the converter takes an extra alpha plane and a
// trailing ATTEN flag (1 = premultiply alpha during conversion).
#define TESTQPLANARTOEI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, SUB_B, BPP_B, \
                        W1280, N, NEG, OFF, FMT_C, BPP_C, ATTEN)              \
  TEST_F(LibYUVConvertTest, FMT_PLANAR##To##FMT_B##_##FMT_C##N) {             \
    const int kWidth = ((W1280) > 0) ? (W1280) : 1;                           \
    const int kHeight = benchmark_height_;                                    \
    const int kStrideB = SUBSAMPLE(kWidth, SUB_B) * BPP_B;                    \
    const int kSizeUV =                                                       \
        SUBSAMPLE(kWidth, SUBSAMP_X) * SUBSAMPLE(kHeight, SUBSAMP_Y);         \
    align_buffer_page_end(src_y, kWidth* kHeight + OFF);                      \
    align_buffer_page_end(src_u, kSizeUV + OFF);                              \
    align_buffer_page_end(src_v, kSizeUV + OFF);                              \
    align_buffer_page_end(src_a, kWidth* kHeight + OFF);                      \
    align_buffer_page_end(dst_argb_b, kStrideB* kHeight + OFF);               \
    for (int i = 0; i < kWidth * kHeight; ++i) {                              \
      src_y[i + OFF] = (fastrand() & 0xff);                                   \
      src_a[i + OFF] = (fastrand() & 0xff);                                   \
    }                                                                         \
    for (int i = 0; i < kSizeUV; ++i) {                                       \
      src_u[i + OFF] = (fastrand() & 0xff);                                   \
      src_v[i + OFF] = (fastrand() & 0xff);                                   \
    }                                                                         \
    memset(dst_argb_b + OFF, 1, kStrideB * kHeight);                          \
    for (int i = 0; i < benchmark_iterations_; ++i) {                         \
      FMT_PLANAR##To##FMT_B(                                                  \
          src_y + OFF, kWidth, src_u + OFF, SUBSAMPLE(kWidth, SUBSAMP_X),     \
          src_v + OFF, SUBSAMPLE(kWidth, SUBSAMP_X), src_a + OFF, kWidth,     \
          dst_argb_b + OFF, kStrideB, kWidth, NEG kHeight, ATTEN);            \
    }                                                                         \
    /* Convert to a 3rd format in 1 step and 2 steps and compare */           \
    const int kStrideC = kWidth * BPP_C;                                      \
    align_buffer_page_end(dst_argb_c, kStrideC* kHeight + OFF);               \
    align_buffer_page_end(dst_argb_bc, kStrideC* kHeight + OFF);              \
    memset(dst_argb_c + OFF, 2, kStrideC * kHeight);                          \
    memset(dst_argb_bc + OFF, 3, kStrideC * kHeight);                         \
    FMT_PLANAR##To##FMT_C(                                                    \
        src_y + OFF, kWidth, src_u + OFF, SUBSAMPLE(kWidth, SUBSAMP_X),       \
        src_v + OFF, SUBSAMPLE(kWidth, SUBSAMP_X), src_a + OFF, kWidth,       \
        dst_argb_c + OFF, kStrideC, kWidth, NEG kHeight, ATTEN);              \
    /* Convert B to C */                                                      \
    FMT_B##To##FMT_C(dst_argb_b + OFF, kStrideB, dst_argb_bc + OFF, kStrideC, \
                     kWidth, kHeight);                                        \
    for (int i = 0; i < kStrideC * kHeight; ++i) {                            \
      EXPECT_EQ(dst_argb_c[i + OFF], dst_argb_bc[i + OFF]);                   \
    }                                                                         \
    free_aligned_buffer_page_end(src_y);                                      \
    free_aligned_buffer_page_end(src_u);                                      \
    free_aligned_buffer_page_end(src_v);                                      \
    free_aligned_buffer_page_end(src_a);                                      \
    free_aligned_buffer_page_end(dst_argb_b);                                 \
    free_aligned_buffer_page_end(dst_argb_c);                                 \
    free_aligned_buffer_page_end(dst_argb_bc);                                \
  }
// Instantiate TESTQPLANARTOEI in five variants: the standard four plus
// _Premult, which enables alpha premultiplication (ATTEN = 1).
#define TESTQPLANARTOE(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, SUB_B, BPP_B, \
                       FMT_C, BPP_C)                                          \
  TESTQPLANARTOEI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, SUB_B, BPP_B,      \
                  benchmark_width_ - 4, _Any, +, 0, FMT_C, BPP_C, 0)          \
  TESTQPLANARTOEI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, SUB_B, BPP_B,      \
                  benchmark_width_, _Unaligned, +, 1, FMT_C, BPP_C, 0)        \
  TESTQPLANARTOEI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, SUB_B, BPP_B,      \
                  benchmark_width_, _Invert, -, 0, FMT_C, BPP_C, 0)           \
  TESTQPLANARTOEI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, SUB_B, BPP_B,      \
                  benchmark_width_, _Opt, +, 0, FMT_C, BPP_C, 0)              \
  TESTQPLANARTOEI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, SUB_B, BPP_B,      \
                  benchmark_width_, _Premult, +, 0, FMT_C, BPP_C, 1)

// Alpha-plane (I420Alpha family) sources to ARGB/ABGR.
TESTQPLANARTOE(I420Alpha, 2, 2, ARGB, 1, 4, ABGR, 4)
TESTQPLANARTOE(I420Alpha, 2, 2, ABGR, 1, 4, ARGB, 4)
TESTQPLANARTOE(J420Alpha, 2, 2, ARGB, 1, 4, ABGR, 4)
TESTQPLANARTOE(J420Alpha, 2, 2, ABGR, 1, 4, ARGB, 4)
TESTQPLANARTOE(H420Alpha, 2, 2, ARGB, 1, 4, ABGR, 4)
TESTQPLANARTOE(H420Alpha, 2, 2, ABGR, 1, 4, ARGB, 4)
TESTQPLANARTOE(U420Alpha, 2, 2, ARGB, 1, 4, ABGR, 4)
TESTQPLANARTOE(U420Alpha, 2, 2, ABGR, 1, 4, ARGB, 4)
// TESTPLANETOEI: transitive test for packed RGB-to-RGB conversions.
// Converts A -> B then B -> C and compares against direct A -> C.
// BGR channels must match exactly; the alpha byte is compared with a
// tolerance of 64 because 2-bit alpha formats (AR30/AB30) cannot round
// trip 8-bit alpha exactly.
#define TESTPLANETOEI(FMT_A, SUB_A, BPP_A, FMT_B, SUB_B, BPP_B, W1280, N, NEG, \
                      OFF, FMT_C, BPP_C)                                      \
  TEST_F(LibYUVConvertTest, FMT_A##To##FMT_B##_##FMT_C##N) {                  \
    const int kWidth = ((W1280) > 0) ? (W1280) : 1;                           \
    const int kHeight = benchmark_height_;                                    \
    const int kStrideA = SUBSAMPLE(kWidth, SUB_A) * BPP_A;                    \
    const int kStrideB = SUBSAMPLE(kWidth, SUB_B) * BPP_B;                    \
    align_buffer_page_end(src_argb_a, kStrideA* kHeight + OFF);               \
    align_buffer_page_end(dst_argb_b, kStrideB* kHeight + OFF);               \
    MemRandomize(src_argb_a + OFF, kStrideA * kHeight);                       \
    memset(dst_argb_b + OFF, 1, kStrideB * kHeight);                          \
    for (int i = 0; i < benchmark_iterations_; ++i) {                         \
      FMT_A##To##FMT_B(src_argb_a + OFF, kStrideA, dst_argb_b + OFF, kStrideB, \
                       kWidth, NEG kHeight);                                  \
    }                                                                         \
    /* Convert to a 3rd format in 1 step and 2 steps and compare */           \
    const int kStrideC = kWidth * BPP_C;                                      \
    align_buffer_page_end(dst_argb_c, kStrideC* kHeight + OFF);               \
    align_buffer_page_end(dst_argb_bc, kStrideC* kHeight + OFF);              \
    memset(dst_argb_c + OFF, 2, kStrideC * kHeight);                          \
    memset(dst_argb_bc + OFF, 3, kStrideC * kHeight);                         \
    FMT_A##To##FMT_C(src_argb_a + OFF, kStrideA, dst_argb_c + OFF, kStrideC,  \
                     kWidth, NEG kHeight);                                    \
    /* Convert B to C */                                                      \
    FMT_B##To##FMT_C(dst_argb_b + OFF, kStrideB, dst_argb_bc + OFF, kStrideC, \
                     kWidth, kHeight);                                        \
    for (int i = 0; i < kStrideC * kHeight; i += 4) {                         \
      EXPECT_EQ(dst_argb_c[i + OFF + 0], dst_argb_bc[i + OFF + 0]);           \
      EXPECT_EQ(dst_argb_c[i + OFF + 1], dst_argb_bc[i + OFF + 1]);           \
      EXPECT_EQ(dst_argb_c[i + OFF + 2], dst_argb_bc[i + OFF + 2]);           \
      EXPECT_NEAR(dst_argb_c[i + OFF + 3], dst_argb_bc[i + OFF + 3], 64);     \
    }                                                                         \
    free_aligned_buffer_page_end(src_argb_a);                                 \
    free_aligned_buffer_page_end(dst_argb_b);                                 \
    free_aligned_buffer_page_end(dst_argb_c);                                 \
    free_aligned_buffer_page_end(dst_argb_bc);                                \
  }
// Instantiate TESTPLANETOEI in the standard four variants.
#define TESTPLANETOE(FMT_A, SUB_A, BPP_A, FMT_B, SUB_B, BPP_B, FMT_C, BPP_C) \
  TESTPLANETOEI(FMT_A, SUB_A, BPP_A, FMT_B, SUB_B, BPP_B,                    \
                benchmark_width_ - 4, _Any, +, 0, FMT_C, BPP_C)              \
  TESTPLANETOEI(FMT_A, SUB_A, BPP_A, FMT_B, SUB_B, BPP_B, benchmark_width_,  \
                _Unaligned, +, 1, FMT_C, BPP_C)                              \
  TESTPLANETOEI(FMT_A, SUB_A, BPP_A, FMT_B, SUB_B, BPP_B, benchmark_width_,  \
                _Invert, -, 0, FMT_C, BPP_C)                                 \
  TESTPLANETOEI(FMT_A, SUB_A, BPP_A, FMT_B, SUB_B, BPP_B, benchmark_width_,  \
                _Opt, +, 0, FMT_C, BPP_C)

// Caveat: Destination needs to be 4 bytes
// AR30/AB30 bit packing is endian dependent; little-endian only.
#ifdef LITTLE_ENDIAN_ONLY_TEST
TESTPLANETOE(ARGB, 1, 4, AR30, 1, 4, ARGB, 4)
TESTPLANETOE(ABGR, 1, 4, AR30, 1, 4, ABGR, 4)
TESTPLANETOE(AR30, 1, 4, ARGB, 1, 4, ABGR, 4)
TESTPLANETOE(AR30, 1, 4, ABGR, 1, 4, ARGB, 4)
TESTPLANETOE(ARGB, 1, 4, AB30, 1, 4, ARGB, 4)
TESTPLANETOE(ABGR, 1, 4, AB30, 1, 4, ABGR, 4)
TESTPLANETOE(AB30, 1, 4, ARGB, 1, 4, ABGR, 4)
TESTPLANETOE(AB30, 1, 4, ABGR, 1, 4, ARGB, 4)
#endif
  2467. TEST_F(LibYUVConvertTest, RotateWithARGBSource) {
  2468. // 2x2 frames
  2469. uint32_t src[4];
  2470. uint32_t dst[4];
  2471. // some random input
  2472. src[0] = 0x11000000;
  2473. src[1] = 0x00450000;
  2474. src[2] = 0x00009f00;
  2475. src[3] = 0x000000ff;
  2476. // zeros on destination
  2477. dst[0] = 0x00000000;
  2478. dst[1] = 0x00000000;
  2479. dst[2] = 0x00000000;
  2480. dst[3] = 0x00000000;
  2481. int r = ConvertToARGB(reinterpret_cast<uint8_t*>(src),
  2482. 16, // input size
  2483. reinterpret_cast<uint8_t*>(dst),
  2484. 8, // destination stride
  2485. 0, // crop_x
  2486. 0, // crop_y
  2487. 2, // width
  2488. 2, // height
  2489. 2, // crop width
  2490. 2, // crop height
  2491. kRotate90, FOURCC_ARGB);
  2492. EXPECT_EQ(r, 0);
  2493. // 90 degrees rotation, no conversion
  2494. EXPECT_EQ(dst[0], src[2]);
  2495. EXPECT_EQ(dst[1], src[0]);
  2496. EXPECT_EQ(dst[2], src[3]);
  2497. EXPECT_EQ(dst[3], src[1]);
  2498. }
#ifdef HAS_ARGBTOAR30ROW_AVX2
// Run the fastest available ARGBToAR30 row function (AVX2, else SSSE3,
// else C) and require its output to match the C reference exactly.
TEST_F(LibYUVConvertTest, ARGBToAR30Row_Opt) {
  // ARGBToAR30Row_AVX2 expects a multiple of 8 pixels.
  const int kPixels = (benchmark_width_ * benchmark_height_ + 7) & ~7;
  align_buffer_page_end(src, kPixels * 4);
  align_buffer_page_end(dst_opt, kPixels * 4);
  align_buffer_page_end(dst_c, kPixels * 4);
  MemRandomize(src, kPixels * 4);
  // Different fill values so an untouched destination is detected.
  memset(dst_opt, 0, kPixels * 4);
  memset(dst_c, 1, kPixels * 4);
  // C reference result.
  ARGBToAR30Row_C(src, dst_c, kPixels);
  int has_avx2 = TestCpuFlag(kCpuHasAVX2);
  int has_ssse3 = TestCpuFlag(kCpuHasSSSE3);
  for (int i = 0; i < benchmark_iterations_; ++i) {
    if (has_avx2) {
      ARGBToAR30Row_AVX2(src, dst_opt, kPixels);
    } else if (has_ssse3) {
      ARGBToAR30Row_SSSE3(src, dst_opt, kPixels);
    } else {
      ARGBToAR30Row_C(src, dst_opt, kPixels);
    }
  }
  // Optimized path must be bit exact with the C path.
  for (int i = 0; i < kPixels * 4; ++i) {
    EXPECT_EQ(dst_opt[i], dst_c[i]);
  }
  free_aligned_buffer_page_end(src);
  free_aligned_buffer_page_end(dst_opt);
  free_aligned_buffer_page_end(dst_c);
}
#endif  // HAS_ARGBTOAR30ROW_AVX2
#ifdef HAS_ABGRTOAR30ROW_AVX2
// Same structure as ARGBToAR30Row_Opt, for the ABGR source ordering:
// fastest available row function must match the C reference exactly.
TEST_F(LibYUVConvertTest, ABGRToAR30Row_Opt) {
  // ABGRToAR30Row_AVX2 expects a multiple of 8 pixels.
  const int kPixels = (benchmark_width_ * benchmark_height_ + 7) & ~7;
  align_buffer_page_end(src, kPixels * 4);
  align_buffer_page_end(dst_opt, kPixels * 4);
  align_buffer_page_end(dst_c, kPixels * 4);
  MemRandomize(src, kPixels * 4);
  // Different fill values so an untouched destination is detected.
  memset(dst_opt, 0, kPixels * 4);
  memset(dst_c, 1, kPixels * 4);
  // C reference result.
  ABGRToAR30Row_C(src, dst_c, kPixels);
  int has_avx2 = TestCpuFlag(kCpuHasAVX2);
  int has_ssse3 = TestCpuFlag(kCpuHasSSSE3);
  for (int i = 0; i < benchmark_iterations_; ++i) {
    if (has_avx2) {
      ABGRToAR30Row_AVX2(src, dst_opt, kPixels);
    } else if (has_ssse3) {
      ABGRToAR30Row_SSSE3(src, dst_opt, kPixels);
    } else {
      ABGRToAR30Row_C(src, dst_opt, kPixels);
    }
  }
  // Optimized path must be bit exact with the C path.
  for (int i = 0; i < kPixels * 4; ++i) {
    EXPECT_EQ(dst_opt[i], dst_c[i]);
  }
  free_aligned_buffer_page_end(src);
  free_aligned_buffer_page_end(dst_opt);
  free_aligned_buffer_page_end(dst_c);
}
#endif  // HAS_ABGRTOAR30ROW_AVX2
// TODO(fbarchard): Fix clamping issue affected by U channel.
// TESTPLANAR16TOBI: 10-bit planar YUV (stored in 16-bit samples, values
// masked to 0x3ff) to RGB.  Runs the conversion once with all SIMD
// disabled (C reference) and benchmark_iterations_ times with SIMD
// enabled, then requires byte-exact agreement.  SOFF/DOFF offset the
// source and destination buffers to exercise unaligned pointers.
#define TESTPLANAR16TOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B,     \
                         ALIGN, YALIGN, W1280, N, NEG, SOFF, DOFF)           \
  TEST_F(LibYUVConvertTest, FMT_PLANAR##To##FMT_B##N) {                      \
    const int kWidth = ((W1280) > 0) ? (W1280) : 1;                          \
    const int kHeight = ALIGNINT(benchmark_height_, YALIGN);                 \
    const int kStrideB = ALIGNINT(kWidth * BPP_B, ALIGN);                    \
    const int kStrideUV = SUBSAMPLE(kWidth, SUBSAMP_X);                      \
    const int kSizeUV = kStrideUV * SUBSAMPLE(kHeight, SUBSAMP_Y);           \
    const int kBpc = 2; /* bytes per 10-bit component */                     \
    align_buffer_page_end(src_y, kWidth* kHeight* kBpc + SOFF);              \
    align_buffer_page_end(src_u, kSizeUV* kBpc + SOFF);                      \
    align_buffer_page_end(src_v, kSizeUV* kBpc + SOFF);                      \
    align_buffer_page_end(dst_argb_c, kStrideB* kHeight + DOFF);             \
    align_buffer_page_end(dst_argb_opt, kStrideB* kHeight + DOFF);           \
    for (int i = 0; i < kWidth * kHeight; ++i) {                             \
      reinterpret_cast<uint16_t*>(src_y + SOFF)[i] = (fastrand() & 0x3ff);   \
    }                                                                        \
    for (int i = 0; i < kSizeUV; ++i) {                                      \
      reinterpret_cast<uint16_t*>(src_u + SOFF)[i] = (fastrand() & 0x3ff);   \
      reinterpret_cast<uint16_t*>(src_v + SOFF)[i] = (fastrand() & 0x3ff);   \
    }                                                                        \
    memset(dst_argb_c + DOFF, 1, kStrideB * kHeight);                        \
    memset(dst_argb_opt + DOFF, 101, kStrideB * kHeight);                    \
    MaskCpuFlags(disable_cpu_flags_);                                        \
    FMT_PLANAR##To##FMT_B(                                                   \
        reinterpret_cast<uint16_t*>(src_y + SOFF), kWidth,                   \
        reinterpret_cast<uint16_t*>(src_u + SOFF), kStrideUV,                \
        reinterpret_cast<uint16_t*>(src_v + SOFF), kStrideUV,                \
        dst_argb_c + DOFF, kStrideB, kWidth, NEG kHeight);                   \
    MaskCpuFlags(benchmark_cpu_info_);                                       \
    for (int i = 0; i < benchmark_iterations_; ++i) {                        \
      FMT_PLANAR##To##FMT_B(                                                 \
          reinterpret_cast<uint16_t*>(src_y + SOFF), kWidth,                 \
          reinterpret_cast<uint16_t*>(src_u + SOFF), kStrideUV,              \
          reinterpret_cast<uint16_t*>(src_v + SOFF), kStrideUV,              \
          dst_argb_opt + DOFF, kStrideB, kWidth, NEG kHeight);               \
    }                                                                        \
    for (int i = 0; i < kWidth * BPP_B * kHeight; ++i) {                     \
      EXPECT_EQ(dst_argb_c[i + DOFF], dst_argb_opt[i + DOFF]);               \
    }                                                                        \
    free_aligned_buffer_page_end(src_y);                                     \
    free_aligned_buffer_page_end(src_u);                                     \
    free_aligned_buffer_page_end(src_v);                                     \
    free_aligned_buffer_page_end(dst_argb_c);                                \
    free_aligned_buffer_page_end(dst_argb_opt);                              \
  }
// Instantiate TESTPLANAR16TOBI in the standard four variants.
#define TESTPLANAR16TOB(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
                        YALIGN)                                               \
  TESTPLANAR16TOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN,     \
                   YALIGN, benchmark_width_ - 4, _Any, +, 0, 0)               \
  TESTPLANAR16TOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN,     \
                   YALIGN, benchmark_width_, _Unaligned, +, 1, 1)             \
  TESTPLANAR16TOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN,     \
                   YALIGN, benchmark_width_, _Invert, -, 0, 0)                \
  TESTPLANAR16TOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN,     \
                   YALIGN, benchmark_width_, _Opt, +, 0, 0)

// 10-bit planar sources to 8-bit RGB.
TESTPLANAR16TOB(I010, 2, 2, ARGB, 4, 4, 1)
TESTPLANAR16TOB(I010, 2, 2, ABGR, 4, 4, 1)
TESTPLANAR16TOB(H010, 2, 2, ARGB, 4, 4, 1)
TESTPLANAR16TOB(H010, 2, 2, ABGR, 4, 4, 1)
TESTPLANAR16TOB(U010, 2, 2, ARGB, 4, 4, 1)
TESTPLANAR16TOB(U010, 2, 2, ABGR, 4, 4, 1)
TESTPLANAR16TOB(I210, 2, 1, ARGB, 4, 4, 1)
TESTPLANAR16TOB(I210, 2, 1, ABGR, 4, 4, 1)
TESTPLANAR16TOB(H210, 2, 1, ARGB, 4, 4, 1)
TESTPLANAR16TOB(H210, 2, 1, ABGR, 4, 4, 1)
TESTPLANAR16TOB(U210, 2, 1, ARGB, 4, 4, 1)
TESTPLANAR16TOB(U210, 2, 1, ABGR, 4, 4, 1)
// 10-bit planar to 10-bit packed RGB; bit packing is endian dependent.
#ifdef LITTLE_ENDIAN_ONLY_TEST
TESTPLANAR16TOB(I010, 2, 2, AR30, 4, 4, 1)
TESTPLANAR16TOB(I010, 2, 2, AB30, 4, 4, 1)
TESTPLANAR16TOB(H010, 2, 2, AR30, 4, 4, 1)
TESTPLANAR16TOB(H010, 2, 2, AB30, 4, 4, 1)
TESTPLANAR16TOB(U010, 2, 2, AR30, 4, 4, 1)
TESTPLANAR16TOB(U010, 2, 2, AB30, 4, 4, 1)
TESTPLANAR16TOB(I210, 2, 1, AR30, 4, 4, 1)
TESTPLANAR16TOB(I210, 2, 1, AB30, 4, 4, 1)
TESTPLANAR16TOB(H210, 2, 1, AR30, 4, 4, 1)
TESTPLANAR16TOB(H210, 2, 1, AB30, 4, 4, 1)
TESTPLANAR16TOB(U210, 2, 1, AR30, 4, 4, 1)
TESTPLANAR16TOB(U210, 2, 1, AB30, 4, 4, 1)
#endif
  2642. static int Clamp(int y) {
  2643. if (y < 0) {
  2644. y = 0;
  2645. }
  2646. if (y > 255) {
  2647. y = 255;
  2648. }
  2649. return y;
  2650. }
  2651. static int Clamp10(int y) {
  2652. if (y < 0) {
  2653. y = 0;
  2654. }
  2655. if (y > 1023) {
  2656. y = 1023;
  2657. }
  2658. return y;
  2659. }
// Test 8 bit YUV to 8 bit RGB
// Feeds a 256-pixel grey ramp (U = V = 128, i.e. zero chroma) through
// H420ToARGB and checks each output channel against the limited-range luma
// expansion (y - 16) * 1.164, within +/-1.  Also prints how many unique
// output levels each channel produced.
TEST_F(LibYUVConvertTest, TestH420ToARGB) {
  const int kSize = 256;
  int histogram_b[256];
  int histogram_g[256];
  int histogram_r[256];
  memset(histogram_b, 0, sizeof(histogram_b));
  memset(histogram_g, 0, sizeof(histogram_g));
  memset(histogram_r, 0, sizeof(histogram_r));
  // One Y row plus half-size U and V rows, packed contiguously.
  align_buffer_page_end(orig_yuv, kSize + kSize / 2 * 2);
  align_buffer_page_end(argb_pixels, kSize * 4);
  uint8_t* orig_y = orig_yuv;
  uint8_t* orig_u = orig_y + kSize;
  uint8_t* orig_v = orig_u + kSize / 2;
  // Test grey scale
  for (int i = 0; i < kSize; ++i) {
    orig_y[i] = i;
  }
  for (int i = 0; i < kSize / 2; ++i) {
    orig_u[i] = 128;  // 128 is 0.
    orig_v[i] = 128;
  }
  // Single row conversion (height 1, all strides 0).
  H420ToARGB(orig_y, 0, orig_u, 0, orig_v, 0, argb_pixels, 0, kSize, 1);
  for (int i = 0; i < kSize; ++i) {
    int b = argb_pixels[i * 4 + 0];
    int g = argb_pixels[i * 4 + 1];
    int r = argb_pixels[i * 4 + 2];
    int a = argb_pixels[i * 4 + 3];
    ++histogram_b[b];
    ++histogram_g[g];
    ++histogram_r[r];
    // Grey input: every channel should equal the expanded luma.
    int expected_y = Clamp(static_cast<int>((i - 16) * 1.164f));
    EXPECT_NEAR(b, expected_y, 1);
    EXPECT_NEAR(g, expected_y, 1);
    EXPECT_NEAR(r, expected_y, 1);
    EXPECT_EQ(a, 255);
  }
  // Count distinct output levels per channel (diagnostic only).
  int count_b = 0;
  int count_g = 0;
  int count_r = 0;
  for (int i = 0; i < kSize; ++i) {
    if (histogram_b[i]) {
      ++count_b;
    }
    if (histogram_g[i]) {
      ++count_g;
    }
    if (histogram_r[i]) {
      ++count_r;
    }
  }
  printf("uniques: B %d, G, %d, R %d\n", count_b, count_g, count_r);
  free_aligned_buffer_page_end(orig_yuv);
  free_aligned_buffer_page_end(argb_pixels);
}
// Test 10 bit YUV to 8 bit RGB
// Feeds a 1024-level grey ramp (10-bit, U = V = 512 = zero chroma) through
// H010ToARGB and checks each 8-bit output channel against
// (y - 64) * 1.164 / 4 (limited-range expansion plus 10->8 bit scaling),
// within +/-1.
TEST_F(LibYUVConvertTest, TestH010ToARGB) {
  const int kSize = 1024;
  int histogram_b[1024];
  int histogram_g[1024];
  int histogram_r[1024];
  memset(histogram_b, 0, sizeof(histogram_b));
  memset(histogram_g, 0, sizeof(histogram_g));
  memset(histogram_r, 0, sizeof(histogram_r));
  // 16-bit samples: one Y row plus half-size U and V rows.
  align_buffer_page_end(orig_yuv, kSize * 2 + kSize / 2 * 2 * 2);
  align_buffer_page_end(argb_pixels, kSize * 4);
  uint16_t* orig_y = reinterpret_cast<uint16_t*>(orig_yuv);
  uint16_t* orig_u = orig_y + kSize;
  uint16_t* orig_v = orig_u + kSize / 2;
  // Test grey scale
  for (int i = 0; i < kSize; ++i) {
    orig_y[i] = i;
  }
  for (int i = 0; i < kSize / 2; ++i) {
    orig_u[i] = 512;  // 512 is 0.
    orig_v[i] = 512;
  }
  // Single row conversion (height 1, all strides 0).
  H010ToARGB(orig_y, 0, orig_u, 0, orig_v, 0, argb_pixels, 0, kSize, 1);
  for (int i = 0; i < kSize; ++i) {
    int b = argb_pixels[i * 4 + 0];
    int g = argb_pixels[i * 4 + 1];
    int r = argb_pixels[i * 4 + 2];
    int a = argb_pixels[i * 4 + 3];
    ++histogram_b[b];
    ++histogram_g[g];
    ++histogram_r[r];
    // Grey input: every channel should equal the expanded, downscaled luma.
    int expected_y = Clamp(static_cast<int>((i - 64) * 1.164f / 4));
    EXPECT_NEAR(b, expected_y, 1);
    EXPECT_NEAR(g, expected_y, 1);
    EXPECT_NEAR(r, expected_y, 1);
    EXPECT_EQ(a, 255);
  }
  // Count distinct output levels per channel (diagnostic only).
  int count_b = 0;
  int count_g = 0;
  int count_r = 0;
  for (int i = 0; i < kSize; ++i) {
    if (histogram_b[i]) {
      ++count_b;
    }
    if (histogram_g[i]) {
      ++count_g;
    }
    if (histogram_r[i]) {
      ++count_r;
    }
  }
  printf("uniques: B %d, G, %d, R %d\n", count_b, count_g, count_r);
  free_aligned_buffer_page_end(orig_yuv);
  free_aligned_buffer_page_end(argb_pixels);
}
// Test 10 bit YUV to 10 bit RGB
// Caveat: Result is near due to float rounding in expected
// result.
// AR30 packs, per 32-bit word: B in bits 0-9, G in 10-19, R in 20-29,
// 2-bit alpha in 30-31 (layout established by the unpacking below).
TEST_F(LibYUVConvertTest, TestH010ToAR30) {
  const int kSize = 1024;
  int histogram_b[1024];
  int histogram_g[1024];
  int histogram_r[1024];
  memset(histogram_b, 0, sizeof(histogram_b));
  memset(histogram_g, 0, sizeof(histogram_g));
  memset(histogram_r, 0, sizeof(histogram_r));
  // 16-bit samples: one Y row plus half-size U and V rows.
  align_buffer_page_end(orig_yuv, kSize * 2 + kSize / 2 * 2 * 2);
  align_buffer_page_end(ar30_pixels, kSize * 4);
  uint16_t* orig_y = reinterpret_cast<uint16_t*>(orig_yuv);
  uint16_t* orig_u = orig_y + kSize;
  uint16_t* orig_v = orig_u + kSize / 2;
  // Test grey scale
  for (int i = 0; i < kSize; ++i) {
    orig_y[i] = i;
  }
  for (int i = 0; i < kSize / 2; ++i) {
    orig_u[i] = 512;  // 512 is 0.
    orig_v[i] = 512;
  }
  // Single row conversion (height 1, all strides 0).
  H010ToAR30(orig_y, 0, orig_u, 0, orig_v, 0, ar30_pixels, 0, kSize, 1);
  for (int i = 0; i < kSize; ++i) {
    int b10 = reinterpret_cast<uint32_t*>(ar30_pixels)[i] & 1023;
    int g10 = (reinterpret_cast<uint32_t*>(ar30_pixels)[i] >> 10) & 1023;
    int r10 = (reinterpret_cast<uint32_t*>(ar30_pixels)[i] >> 20) & 1023;
    int a2 = (reinterpret_cast<uint32_t*>(ar30_pixels)[i] >> 30) & 3;
    ++histogram_b[b10];
    ++histogram_g[g10];
    ++histogram_r[r10];
    // Grey input: every channel should equal the 10-bit expanded luma.
    int expected_y = Clamp10(static_cast<int>((i - 64) * 1.164f));
    EXPECT_NEAR(b10, expected_y, 4);
    EXPECT_NEAR(g10, expected_y, 4);
    EXPECT_NEAR(r10, expected_y, 4);
    EXPECT_EQ(a2, 3);  // fully opaque in 2-bit alpha
  }
  // Count distinct output levels per channel (diagnostic only).
  int count_b = 0;
  int count_g = 0;
  int count_r = 0;
  for (int i = 0; i < kSize; ++i) {
    if (histogram_b[i]) {
      ++count_b;
    }
    if (histogram_g[i]) {
      ++count_g;
    }
    if (histogram_r[i]) {
      ++count_r;
    }
  }
  printf("uniques: B %d, G, %d, R %d\n", count_b, count_g, count_r);
  free_aligned_buffer_page_end(orig_yuv);
  free_aligned_buffer_page_end(ar30_pixels);
}
// Test 10 bit YUV to 10 bit RGB
// Caveat: Result is near due to float rounding in expected
// result.
// AB30 is the channel-swapped counterpart of AR30: R in bits 0-9,
// G in 10-19, B in 20-29, 2-bit alpha in 30-31 (layout established by
// the unpacking below).
TEST_F(LibYUVConvertTest, TestH010ToAB30) {
  const int kSize = 1024;
  int histogram_b[1024];
  int histogram_g[1024];
  int histogram_r[1024];
  memset(histogram_b, 0, sizeof(histogram_b));
  memset(histogram_g, 0, sizeof(histogram_g));
  memset(histogram_r, 0, sizeof(histogram_r));
  // 16-bit samples: one Y row plus half-size U and V rows.
  align_buffer_page_end(orig_yuv, kSize * 2 + kSize / 2 * 2 * 2);
  align_buffer_page_end(ab30_pixels, kSize * 4);
  uint16_t* orig_y = reinterpret_cast<uint16_t*>(orig_yuv);
  uint16_t* orig_u = orig_y + kSize;
  uint16_t* orig_v = orig_u + kSize / 2;
  // Test grey scale
  for (int i = 0; i < kSize; ++i) {
    orig_y[i] = i;
  }
  for (int i = 0; i < kSize / 2; ++i) {
    orig_u[i] = 512;  // 512 is 0.
    orig_v[i] = 512;
  }
  // Single row conversion (height 1, all strides 0).
  H010ToAB30(orig_y, 0, orig_u, 0, orig_v, 0, ab30_pixels, 0, kSize, 1);
  for (int i = 0; i < kSize; ++i) {
    int r10 = reinterpret_cast<uint32_t*>(ab30_pixels)[i] & 1023;
    int g10 = (reinterpret_cast<uint32_t*>(ab30_pixels)[i] >> 10) & 1023;
    int b10 = (reinterpret_cast<uint32_t*>(ab30_pixels)[i] >> 20) & 1023;
    int a2 = (reinterpret_cast<uint32_t*>(ab30_pixels)[i] >> 30) & 3;
    ++histogram_b[b10];
    ++histogram_g[g10];
    ++histogram_r[r10];
    // Grey input: every channel should equal the 10-bit expanded luma.
    int expected_y = Clamp10(static_cast<int>((i - 64) * 1.164f));
    EXPECT_NEAR(b10, expected_y, 4);
    EXPECT_NEAR(g10, expected_y, 4);
    EXPECT_NEAR(r10, expected_y, 4);
    EXPECT_EQ(a2, 3);  // fully opaque in 2-bit alpha
  }
  // Count distinct output levels per channel (diagnostic only).
  int count_b = 0;
  int count_g = 0;
  int count_r = 0;
  for (int i = 0; i < kSize; ++i) {
    if (histogram_b[i]) {
      ++count_b;
    }
    if (histogram_g[i]) {
      ++count_g;
    }
    if (histogram_r[i]) {
      ++count_r;
    }
  }
  printf("uniques: B %d, G, %d, R %d\n", count_b, count_g, count_r);
  free_aligned_buffer_page_end(orig_yuv);
  free_aligned_buffer_page_end(ab30_pixels);
}
  2884. // Test 8 bit YUV to 10 bit RGB
  2885. TEST_F(LibYUVConvertTest, TestH420ToAR30) {
  2886. const int kSize = 256;
  2887. const int kHistSize = 1024;
  2888. int histogram_b[kHistSize];
  2889. int histogram_g[kHistSize];
  2890. int histogram_r[kHistSize];
  2891. memset(histogram_b, 0, sizeof(histogram_b));
  2892. memset(histogram_g, 0, sizeof(histogram_g));
  2893. memset(histogram_r, 0, sizeof(histogram_r));
  2894. align_buffer_page_end(orig_yuv, kSize + kSize / 2 * 2);
  2895. align_buffer_page_end(ar30_pixels, kSize * 4);
  2896. uint8_t* orig_y = orig_yuv;
  2897. uint8_t* orig_u = orig_y + kSize;
  2898. uint8_t* orig_v = orig_u + kSize / 2;
  2899. // Test grey scale
  2900. for (int i = 0; i < kSize; ++i) {
  2901. orig_y[i] = i;
  2902. }
  2903. for (int i = 0; i < kSize / 2; ++i) {
  2904. orig_u[i] = 128; // 128 is 0.
  2905. orig_v[i] = 128;
  2906. }
  2907. H420ToAR30(orig_y, 0, orig_u, 0, orig_v, 0, ar30_pixels, 0, kSize, 1);
  2908. for (int i = 0; i < kSize; ++i) {
  2909. int b10 = reinterpret_cast<uint32_t*>(ar30_pixels)[i] & 1023;
  2910. int g10 = (reinterpret_cast<uint32_t*>(ar30_pixels)[i] >> 10) & 1023;
  2911. int r10 = (reinterpret_cast<uint32_t*>(ar30_pixels)[i] >> 20) & 1023;
  2912. int a2 = (reinterpret_cast<uint32_t*>(ar30_pixels)[i] >> 30) & 3;
  2913. ++histogram_b[b10];
  2914. ++histogram_g[g10];
  2915. ++histogram_r[r10];
  2916. int expected_y = Clamp10(static_cast<int>((i - 16) * 1.164f * 4.f));
  2917. EXPECT_NEAR(b10, expected_y, 4);
  2918. EXPECT_NEAR(g10, expected_y, 4);
  2919. EXPECT_NEAR(r10, expected_y, 4);
  2920. EXPECT_EQ(a2, 3);
  2921. }
  2922. int count_b = 0;
  2923. int count_g = 0;
  2924. int count_r = 0;
  2925. for (int i = 0; i < kHistSize; ++i) {
  2926. if (histogram_b[i]) {
  2927. ++count_b;
  2928. }
  2929. if (histogram_g[i]) {
  2930. ++count_g;
  2931. }
  2932. if (histogram_r[i]) {
  2933. ++count_r;
  2934. }
  2935. }
  2936. printf("uniques: B %d, G, %d, R %d\n", count_b, count_g, count_r);
  2937. free_aligned_buffer_page_end(orig_yuv);
  2938. free_aligned_buffer_page_end(ar30_pixels);
  2939. }
  2940. // Test RGB24 to ARGB and back to RGB24
  2941. TEST_F(LibYUVConvertTest, TestARGBToRGB24) {
  2942. const int kSize = 256;
  2943. align_buffer_page_end(orig_rgb24, kSize * 3);
  2944. align_buffer_page_end(argb_pixels, kSize * 4);
  2945. align_buffer_page_end(dest_rgb24, kSize * 3);
  2946. // Test grey scale
  2947. for (int i = 0; i < kSize * 3; ++i) {
  2948. orig_rgb24[i] = i;
  2949. }
  2950. RGB24ToARGB(orig_rgb24, 0, argb_pixels, 0, kSize, 1);
  2951. ARGBToRGB24(argb_pixels, 0, dest_rgb24, 0, kSize, 1);
  2952. for (int i = 0; i < kSize * 3; ++i) {
  2953. EXPECT_EQ(orig_rgb24[i], dest_rgb24[i]);
  2954. }
  2955. free_aligned_buffer_page_end(orig_rgb24);
  2956. free_aligned_buffer_page_end(argb_pixels);
  2957. free_aligned_buffer_page_end(dest_rgb24);
  2958. }
// Test I400 with jpeg matrix is same as J400
TEST_F(LibYUVConvertTest, TestI400) {
  const int kSize = 256;
  align_buffer_page_end(orig_i400, kSize);
  align_buffer_page_end(argb_pixels_i400, kSize * 4);
  align_buffer_page_end(argb_pixels_j400, kSize * 4);
  align_buffer_page_end(argb_pixels_jpeg_i400, kSize * 4);
  align_buffer_page_end(argb_pixels_h709_i400, kSize * 4);
  align_buffer_page_end(argb_pixels_2020_i400, kSize * 4);
  // Test grey scale: a 0..255 luma ramp, one sample per possible value.
  for (int i = 0; i < kSize; ++i) {
    orig_i400[i] = i;
  }
  // Convert the same ramp through each matrix variant.
  J400ToARGB(orig_i400, 0, argb_pixels_j400, 0, kSize, 1);
  I400ToARGB(orig_i400, 0, argb_pixels_i400, 0, kSize, 1);
  I400ToARGBMatrix(orig_i400, 0, argb_pixels_jpeg_i400, 0, &kYuvJPEGConstants,
                   kSize, 1);
  I400ToARGBMatrix(orig_i400, 0, argb_pixels_h709_i400, 0, &kYuvH709Constants,
                   kSize, 1);
  I400ToARGBMatrix(orig_i400, 0, argb_pixels_2020_i400, 0, &kYuv2020Constants,
                   kSize, 1);
  // Y = 0 maps to black under every variant.
  EXPECT_EQ(0, argb_pixels_i400[0]);
  EXPECT_EQ(0, argb_pixels_j400[0]);
  EXPECT_EQ(0, argb_pixels_jpeg_i400[0]);
  EXPECT_EQ(0, argb_pixels_h709_i400[0]);
  EXPECT_EQ(0, argb_pixels_2020_i400[0]);
  // Y = 16: full-range variants (J400, JPEG matrix) pass the value through;
  // limited-range variants treat 16 as the black level and clamp to 0.
  EXPECT_EQ(0, argb_pixels_i400[16 * 4]);
  EXPECT_EQ(16, argb_pixels_j400[16 * 4]);
  EXPECT_EQ(16, argb_pixels_jpeg_i400[16 * 4]);
  EXPECT_EQ(0, argb_pixels_h709_i400[16 * 4]);
  EXPECT_EQ(0, argb_pixels_2020_i400[16 * 4]);
  // Y = 128: limited-range variants expand mid grey to 130; full-range
  // variants keep 128 unchanged.
  EXPECT_EQ(130, argb_pixels_i400[128 * 4]);
  EXPECT_EQ(128, argb_pixels_j400[128 * 4]);
  EXPECT_EQ(128, argb_pixels_jpeg_i400[128 * 4]);
  EXPECT_EQ(130, argb_pixels_h709_i400[128 * 4]);
  EXPECT_EQ(130, argb_pixels_2020_i400[128 * 4]);
  // Y = 255 saturates to white for every variant.
  EXPECT_EQ(255, argb_pixels_i400[255 * 4]);
  EXPECT_EQ(255, argb_pixels_j400[255 * 4]);
  EXPECT_EQ(255, argb_pixels_jpeg_i400[255 * 4]);
  EXPECT_EQ(255, argb_pixels_h709_i400[255 * 4]);
  EXPECT_EQ(255, argb_pixels_2020_i400[255 * 4]);
  // J400 output is grey with opaque alpha: byte i / 4 for B, G, R and 255
  // for A.  The JPEG-matrix I400 result must match J400 byte for byte.
  for (int i = 0; i < kSize * 4; ++i) {
    if ((i & 3) == 3) {
      EXPECT_EQ(255, argb_pixels_j400[i]);
    } else {
      EXPECT_EQ(i / 4, argb_pixels_j400[i]);
    }
    EXPECT_EQ(argb_pixels_jpeg_i400[i], argb_pixels_j400[i]);
  }
  free_aligned_buffer_page_end(orig_i400);
  free_aligned_buffer_page_end(argb_pixels_i400);
  free_aligned_buffer_page_end(argb_pixels_j400);
  free_aligned_buffer_page_end(argb_pixels_jpeg_i400);
  free_aligned_buffer_page_end(argb_pixels_h709_i400);
  free_aligned_buffer_page_end(argb_pixels_2020_i400);
}
  3015. } // namespace libyuv