diff --git a/SharedContent/models/DenseNet121_fp16.onnx b/SharedContent/models/DenseNet121_fp16.onnx new file mode 100644 index 00000000..6411d7f1 Binary files /dev/null and b/SharedContent/models/DenseNet121_fp16.onnx differ diff --git a/SharedContent/models/DenseNet121_fp32.onnx b/SharedContent/models/DenseNet121_fp32.onnx new file mode 100644 index 00000000..dec88535 Binary files /dev/null and b/SharedContent/models/DenseNet121_fp32.onnx differ diff --git a/Testing/WinMLRunnerTest/OutputTensorData/DenseNet121_fp16_kitten_224_input_CPU.csv b/Testing/WinMLRunnerTest/OutputTensorData/DenseNet121_fp16_kitten_224_input_CPU.csv new file mode 100644 index 00000000..16109251 --- /dev/null +++ b/Testing/WinMLRunnerTest/OutputTensorData/DenseNet121_fp16_kitten_224_input_CPU.csv @@ -0,0 +1,1001 @@ +Index,Value +0,-1.91699 +1,-0.90918 +2,-0.0957031 +3,-0.886719 +4,-1.21875 +5,0.0318298 +6,-2.37109 +7,-1.63477 +8,-2.58984 +9,-2.0957 +10,-1.04492 +11,-3.46875 +12,-1.69531 +13,-1.2041 +14,-2.87305 +15,-0.90625 +16,-1.98242 +17,-2.8418 +18,-2.0957 +19,-4.15234 +20,-0.740723 +21,-0.580566 +22,0.0559082 +23,-2.56055 +24,-0.73877 +25,-3.17773 +26,-1.69531 +27,-0.966797 +28,-2.42969 +29,-0.354736 +30,-1.55078 +31,-2.88477 +32,-1.56641 +33,-3.41797 +34,-1.69434 +35,-1.82227 +36,-0.108276 +37,-2.3125 +38,0.194946 +39,-1.64746 +40,-2.86719 +41,-1.23633 +42,-2.54102 +43,1.14453 +44,-0.117737 +45,-0.779297 +46,-1.4668 +47,-1.33301 +48,-1.21777 +49,-0.118286 +50,0.679688 +51,0.0326538 +52,2.16797 +53,-1.05176 +54,0.754395 +55,-3.02148 +56,-0.225098 +57,-1.58789 +58,-0.72168 +59,-1.10449 +60,1.27344 +61,-0.361572 +62,0.630859 +63,0.809082 +64,-2.29883 +65,-1.39746 +66,1.44238 +67,3.08984 +68,0.384033 +69,-0.0054245 +70,-3.00195 +71,-1.53711 +72,-4.35938 +73,-1.68066 +74,-4.30469 +75,-2.18359 +76,-1.66504 +77,-3.13281 +78,0.624512 +79,0.171875 +80,-1.35254 +81,-1.98828 +82,-0.943359 +83,-1.29883 +84,-1.62891 +85,-3.28125 +86,-2.53711 +87,-1.32422 +88,-2.74805 +89,-3.50977 +90,-1.69629 +91,-3.24219 +92,-4.69922 +93,-6.24219 +94,-1.37891 +95,-2.30664 +96,-3.93555 +97,-2.33789 +98,-2.09766 +99,-2.07227 +100,-0.307129 +101,-2.79297 +102,-0.238403 +103,1.00781 +104,1.15625 +105,0.230957 +106,1.12695 +107,-1.70996 +108,-1.00098 +109,-1.38574 +110,-0.495361 +111,1.74316 +112,0.843262 +113,0.60791 +114,1.36328 +115,-1.80664 +116,0.168457 +117,2.10156 +118,-1.04688 +119,-2.78711 +120,-3.57031 +121,-0.888672 +122,-0.399658 +123,-1.72168 +124,0.128052 +125,-1.44043 +126,-0.178345 +127,-3.33789 +128,-2.21289 +129,-2.22461 +130,-3.80664 +131,-4.11719 +132,-2.71094 +133,-0.523438 +134,-3.31445 +135,-3.27344 +136,-2.28711 +137,-3.2793 +138,-1.77832 +139,-0.479004 +140,-2.59375 +141,0.103455 +142,-2.47266 +143,-2.85352 +144,-2.35156 +145,-2.60547 +146,-1.59668 +147,1.2666 +148,-0.312012 +149,-2.99219 +150,0.41333 +151,2.79883 +152,0.381104 +153,-1.28516 +154,-0.665527 +155,-0.131836 +156,-1.58594 +157,-0.382568 +158,0.00324631 +159,-1.04883 +160,-1.30762 +161,-0.395752 +162,0.114014 +163,0.803223 +164,-0.774414 +165,-1.27832 +166,-1.8916 +167,-2.27734 +168,0.55957 +169,-1.05762 +170,-1.53418 +171,0.188477 +172,0.878418 +173,2.54297 +174,0.70752 +175,-1.18164 +176,-0.737305 +177,-1.41406 +178,0.44043 +179,1.40723 +180,0.655273 +181,-1.70508 +182,-1.56641 +183,-1.16113 +184,-1.12305 +185,0.974121 +186,1.98438 +187,1.44531 +188,0.672363 +189,-0.474854 +190,-0.452393 +191,-0.510254 +192,-2.58398 +193,2.54102 +194,-1.94238 +195,3.44531 +196,1.90234 +197,-0.937988 +198,0.143799 +199,1.25098 +200,1.43945 +201,-1.06152 +202,-1.62012 
+203,-0.246704 +204,0.166504 +205,-0.609863 +206,-0.171265 +207,-0.262695 +208,0.914551 +209,0.341309 +210,-0.0346375 +211,0.897461 +212,-1.51465 +213,-1.26855 +214,-1.64746 +215,-0.525391 +216,0.101135 +217,-0.455811 +218,-1.3623 +219,-0.378906 +220,0.348145 +221,-1.69922 +222,-0.879883 +223,0.304688 +224,-0.533203 +225,-0.504883 +226,-1.36133 +227,3.22266 +228,-1.03027 +229,1.89258 +230,-0.389893 +231,0.944824 +232,2.45313 +233,0.150757 +234,-0.154419 +235,2.57422 +236,0.808594 +237,0.431885 +238,-0.237061 +239,0.76416 +240,1.47168 +241,-0.663086 +242,0.320801 +243,0.591309 +244,-0.724121 +245,2.19336 +246,1.20605 +247,-0.270508 +248,3.62305 +249,1.82813 +250,3.30078 +251,0.220703 +252,-0.582031 +253,2.35547 +254,0.109375 +255,-2.95508 +256,-0.581543 +257,-1.13184 +258,-1.40918 +259,-0.92334 +260,-1.36719 +261,-2.8418 +262,0.751465 +263,2.2832 +264,3.72656 +265,-0.165771 +266,-1.22559 +267,-1.36816 +268,-0.953613 +269,0.229492 +270,-0.65332 +271,-0.757324 +272,0.734375 +273,1.06738 +274,0.627441 +275,-2.16992 +276,-1.57227 +277,-0.144775 +278,-1.55859 +279,-0.0563049 +280,1.00098 +281,13.1328 +282,9.79688 +283,6.875 +284,4.57422 +285,9.73438 +286,1.27051 +287,4.26563 +288,2.16992 +289,0.719238 +290,0.355713 +291,-2.22852 +292,3.375 +293,-1.62598 +294,-0.586914 +295,-0.432373 +296,-2.59961 +297,-2.32031 +298,-1.06348 +299,-0.46875 +300,-2.50586 +301,-1.13867 +302,-1.10742 +303,-0.944336 +304,-3.5625 +305,-3.86133 +306,-0.0374756 +307,-1.75098 +308,-3.61914 +309,-1.58496 +310,1.11035 +311,-2.25586 +312,-1.59961 +313,-2.3125 +314,-0.428223 +315,-2.41992 +316,-0.606934 +317,-2.85156 +318,-0.947266 +319,-2.99609 +320,-4.31641 +321,-3.21484 +322,-2.03516 +323,-4.23828 +324,-2.83203 +325,-3.80273 +326,-2.32422 +327,0.684082 +328,0.44043 +329,0.836426 +330,2.67383 +331,2.16211 +332,2.01172 +333,1.87988 +334,-1.42578 +335,0.204346 +336,-4.125 +337,0.569336 +338,0.866211 +339,-3.33789 +340,-0.970215 +341,-1.94824 +342,-3 +343,-4.38672 +344,-2.36914 +345,-3.69141 +346,-3.48242 +347,-5.27344 +348,-4.40234 +349,-4.60547 +350,-2.96875 +351,-3.81445 +352,-3.02148 +353,-3.04492 +354,-4.99609 +355,-2.85352 +356,1.72852 +357,1.75293 +358,0.921387 +359,1.46094 +360,2.36328 +361,0.121338 +362,-0.699707 +363,-0.0753174 +364,-2.48438 +365,-4.79297 +366,-6.07813 +367,-4.26172 +368,-3.83789 +369,-3.26172 +370,-2.68359 +371,-2.64648 +372,-3.73828 +373,-1.81641 +374,-4.67188 +375,-5.4375 +376,-4.16797 +377,-0.436523 +378,-1.44531 +379,-4.35156 +380,-0.496582 +381,-4.83594 +382,-2.08008 +383,-2.2793 +384,-2.97266 +385,-2.5 +386,-3.53906 +387,-0.942383 +388,-4.08594 +389,0.643066 +390,-1.11328 +391,-0.112122 +392,-2.06641 +393,-3.30859 +394,0.171631 +395,0.0499268 +396,-3.29297 +397,-2.05859 +398,-0.0167389 +399,0.386719 +400,2.44727 +401,0.115173 +402,0.788086 +403,-3.47461 +404,-2.71094 +405,-1.36719 +406,-0.440918 +407,-1.48438 +408,0.176392 +409,2.86133 +410,-1.60449 +411,0.643555 +412,4.64844 +413,-1.64551 +414,3.20898 +415,-1.19727 +416,2.48438 +417,0.967285 +418,1.36133 +419,2.50195 +420,0.408936 +421,-1.12012 +422,1.65137 +423,1.60156 +424,-1.12988 +425,-3.25391 +426,0.409912 +427,1.68652 +428,4.55859 +429,-0.72168 +430,-1.87402 +431,3.7207 +432,-0.423096 +433,0.599609 +434,2.99023 +435,4.27344 +436,-2.0957 +437,-1.57324 +438,2.13672 +439,-0.250977 +440,1.98242 +441,1.36035 +442,-0.993652 +443,2.34961 +444,-0.811523 +445,-1.31738 +446,0.76123 +447,-0.0315247 +448,0.796875 +449,-0.930176 +450,-3.04297 +451,0.0389709 +452,2.17773 +453,3.35547 +454,1.19434 +455,1.70215 +456,0.594727 +457,3.99023 +458,-1.00391 
+459,1.83887 +460,0.300293 +461,-0.228149 +462,3.43359 +463,4.80469 +464,1.4707 +465,0.698242 +466,-1.85645 +467,-1.44727 +468,-1.84961 +469,3.21289 +470,1.33594 +471,-2.18164 +472,-0.902344 +473,0.450439 +474,0.946777 +475,0.851563 +476,-3.15039 +477,2.99414 +478,5.19141 +479,1.48242 +480,-0.960449 +481,0.743164 +482,-1.35938 +483,-0.837891 +484,-1.72363 +485,-0.118164 +486,0.278564 +487,1.79883 +488,-0.0309906 +489,-0.765137 +490,-1.83008 +491,-0.927246 +492,0.0228577 +493,-0.041748 +494,-0.322266 +495,-0.464844 +496,4.21094 +497,-2.36328 +498,-1.77148 +499,1.53711 +500,-2.80273 +501,2.44141 +502,0.740234 +503,1.45898 +504,2.52148 +505,0.891113 +506,0.757813 +507,0.776367 +508,6.00391 +509,-0.669922 +510,-1.5752 +511,-3.34766 +512,1.6123 +513,-1.02637 +514,2.16797 +515,4.13672 +516,2.6582 +517,-1.2168 +518,3.6875 +519,2.27734 +520,0.164673 +521,2.68359 +522,1.84961 +523,1.34961 +524,-0.575684 +525,-2.79297 +526,2.3418 +527,2.07031 +528,0.307129 +529,2.18359 +530,1.69238 +531,0.675293 +532,0.288086 +533,-0.462891 +534,2.37109 +535,0.839844 +536,-0.387451 +537,-0.155273 +538,-3.36133 +539,5.00781 +540,-2.84766 +541,1.87891 +542,0.89502 +543,3.67773 +544,0.820313 +545,1.19434 +546,1.03809 +547,-2.13281 +548,-1.77441 +549,1.43457 +550,-0.503418 +551,2.25 +552,3.58594 +553,3.25586 +554,-2.4082 +555,-2.59766 +556,-0.419678 +557,-0.0878296 +558,-1.2334 +559,2.05078 +560,-3.53125 +561,-1.44727 +562,-1.01953 +563,-0.111755 +564,-1.18652 +565,-0.252197 +566,-0.69043 +567,3.55469 +568,3.28906 +569,-0.848145 +570,1.79297 +571,-3.65625 +572,2.12891 +573,-2.4707 +574,1.83984 +575,-2.13477 +576,-0.861328 +577,1.21875 +578,3.05859 +579,0.160767 +580,-0.711426 +581,-2.44336 +582,-0.230347 +583,0.24585 +584,1.6084 +585,1.91309 +586,-1.06152 +587,3.08008 +588,6.86328 +589,1.92578 +590,1.62012 +591,-0.0619202 +592,-0.123962 +593,0.519043 +594,0.147827 +595,-1.7627 +596,1.83105 +597,-0.459229 +598,-2.21484 +599,1.37695 +600,1.87305 +601,2.17578 +602,0.637207 +603,-3.82227 +604,0.765137 +605,1.98438 +606,2.61328 +607,-0.526367 +608,2.66016 +609,-1.79102 +610,1.0918 +611,2.65234 +612,-0.987793 +613,1.4502 +614,0.597656 +615,1.82324 +616,-0.697266 +617,1.96582 +618,2.39453 +619,1.36328 +620,2.65039 +621,-0.276855 +622,4.49219 +623,2.06836 +624,0.669434 +625,-2.12891 +626,0.687012 +627,-0.747559 +628,-2.06836 +629,1.24707 +630,0.545898 +631,2.42188 +632,0.80957 +633,2.14844 +634,-2.45703 +635,1.73047 +636,3.43359 +637,-0.731445 +638,1.03516 +639,0.246704 +640,-0.468506 +641,2.39844 +642,-1.50098 +643,1.75586 +644,2.26367 +645,-0.848633 +646,-0.741699 +647,0.675781 +648,1.35059 +649,-2.07227 +650,0.765137 +651,1.44922 +652,0.244629 +653,-0.467529 +654,-0.649414 +655,1.27539 +656,-0.406006 +657,-2.66797 +658,3.00391 +659,3.99414 +660,-0.416992 +661,-1.32324 +662,0.312744 +663,-2.63672 +664,1.95508 +665,1.7666 +666,2.0957 +667,2.87305 +668,-3.6543 +669,-1.58496 +670,1.97656 +671,-1.41895 +672,-2.33008 +673,6.07813 +674,1.96973 +675,-0.894043 +676,2.81836 +677,1.7998 +678,1.93359 +679,-0.0203247 +680,3.74805 +681,3.74609 +682,-0.559082 +683,-0.883301 +684,0.807617 +685,-1.51563 +686,1.22266 +687,-3.29492 +688,0.313965 +689,1.30371 +690,-3.66602 +691,1.28906 +692,2.58008 +693,0.633301 +694,-1.87109 +695,-0.448242 +696,2.11133 +697,1.35645 +698,-1.39453 +699,1.29395 +700,4.47266 +701,-0.412598 +702,-1.33887 +703,-0.421143 +704,0.373291 +705,-1.25684 +706,-0.946777 +707,-0.361084 +708,2.80469 +709,0.958984 +710,0.897461 +711,1.14551 +712,-0.286377 +713,3.30469 +714,3.66602 +715,-1.12402 +716,1.02051 +717,-1.31934 
+718,-1.68652 +719,0.318359 +720,2.31445 +721,1.62793 +722,5.25391 +723,1.82227 +724,-1.76953 +725,2.66992 +726,-0.136719 +727,-3.23047 +728,6.78125 +729,0.658203 +730,-1.8457 +731,3.23047 +732,-0.150513 +733,0.931152 +734,-2.69141 +735,1.05371 +736,1.03125 +737,0.626953 +738,1.52734 +739,0.822266 +740,0.63916 +741,0.776367 +742,5.4375 +743,-0.689941 +744,-1.16602 +745,0.260986 +746,2.31641 +747,3.35547 +748,1.37793 +749,-0.303223 +750,3.16602 +751,-2.24219 +752,-0.926758 +753,3.6543 +754,1.74414 +755,-0.951172 +756,0.566406 +757,-0.00231552 +758,0.97168 +759,1.47852 +760,2.80859 +761,7.07031 +762,-1.06055 +763,-0.120728 +764,-0.662598 +765,1.81348 +766,-0.70166 +767,1.35156 +768,1.21484 +769,2.58203 +770,1.92773 +771,-0.927734 +772,1.75488 +773,-0.144897 +774,0.387451 +775,1.25879 +776,-0.249268 +777,0.915039 +778,2.20703 +779,-1.9209 +780,-0.812012 +781,-2.70313 +782,3.27539 +783,1.56445 +784,2.42383 +785,1.38281 +786,-0.536133 +787,0.970703 +788,-0.279541 +789,2.30469 +790,3.41992 +791,0.711914 +792,3.48438 +793,4.76953 +794,2.22461 +795,0.717773 +796,2.69531 +797,5.46094 +798,-1.18652 +799,1.62207 +800,-0.813965 +801,-0.513184 +802,0.229736 +803,0.887207 +804,2.64844 +805,1.34277 +806,3.88086 +807,0.794922 +808,1.67773 +809,2.43555 +810,2.63477 +811,4.91797 +812,-3.82031 +813,2.86133 +814,0.770996 +815,-1.66211 +816,-0.129639 +817,-2.26758 +818,4.00781 +819,-0.225708 +820,-2.97266 +821,-3.21094 +822,-0.0430603 +823,2.24023 +824,4.26172 +825,-0.069458 +826,0.9375 +827,1.66895 +828,2.16016 +829,-2.91797 +830,0.286621 +831,2.72852 +832,-3.0293 +833,-2.79883 +834,1.72168 +835,-0.970215 +836,1.49219 +837,1.83398 +838,3.5957 +839,-3.25586 +840,2.80469 +841,2.90625 +842,2.20703 +843,-0.837891 +844,2.54883 +845,2.6582 +846,1.72656 +847,-1.51172 +848,-1.19336 +849,-0.419189 +850,1.51953 +851,3.24219 +852,4.34375 +853,-0.374268 +854,0.949707 +855,2.99414 +856,-2.82422 +857,-1.31641 +858,-1.16895 +859,0.299072 +860,0.521973 +861,5.37109 +862,-0.0744629 +863,-1.87402 +864,-4.28906 +865,-1.80078 +866,-0.67627 +867,-3.47852 +868,1.36816 +869,1.30176 +870,2.21484 +871,-1.88574 +872,0.0296631 +873,-0.959961 +874,-2.88281 +875,-0.38501 +876,4.07813 +877,-1.11035 +878,1.18457 +879,0.949219 +880,1.00293 +881,1.69727 +882,2.33594 +883,3.38477 +884,-2.36914 +885,5.08203 +886,2.37891 +887,0.30249 +888,-1.19922 +889,1.6582 +890,-3.57617 +891,0.468994 +892,3.22852 +893,0.623047 +894,0.80957 +895,-2.0332 +896,6.14063 +897,5.68359 +898,3.96484 +899,4.35938 +900,-2.30469 +901,0.80957 +902,0.844727 +903,2.52148 +904,3.82813 +905,2.42773 +906,0.318848 +907,-0.117188 +908,-2.0332 +909,4.37891 +910,3.17969 +911,4.28516 +912,-1.28613 +913,-1.37988 +914,-1.55664 +915,-3.95313 +916,3.97852 +917,-0.964844 +918,2.81445 +919,0.335449 +920,-0.731934 +921,-0.499756 +922,-0.303955 +923,1.52637 +924,0.853027 +925,0.438721 +926,0.267334 +927,0.0336914 +928,-0.0375671 +929,3.00977 +930,0.0627441 +931,0.94043 +932,0.0706787 +933,-1.86523 +934,-1.07617 +935,-0.587402 +936,2.78516 +937,2.77344 +938,0.0526733 +939,1.38965 +940,2.13672 +941,-1.78027 +942,1.86133 +943,-0.42749 +944,-2.23438 +945,-0.0325623 +946,-1.16309 +947,0.112061 +948,1.34277 +949,1.32324 +950,1.43848 +951,1.32324 +952,0.171875 +953,0.883301 +954,1.93555 +955,0.0570374 +956,-1.24219 +957,1.01855 +958,-0.294434 +959,-0.0189209 +960,0.877441 +961,0.531738 +962,0.194946 +963,1.13867 +964,0.791504 +965,0.38501 +966,2.29492 +967,0.203735 +968,4.875 +969,1.65234 +970,-0.302734 +971,1.29297 +972,-0.339844 +973,-2.86523 +974,-2.75586 +975,0.644531 +976,-1.08203 
+977,-0.0139389 +978,1.29492 +979,-0.507813 +980,-0.800781 +981,-3.76758 +982,-1.27246 +983,-1.74316 +984,-1.19727 +985,-0.493652 +986,-1.19922 +987,3.36719 +988,-0.61084 +989,-1.16699 +990,1.29102 +991,-2.70508 +992,-3.16602 +993,-2.99219 +994,-3.16992 +995,-2.24414 +996,0.438232 +997,-1.7793 +998,2.81641 +999,2.87695 \ No newline at end of file diff --git a/Testing/WinMLRunnerTest/OutputTensorData/DenseNet121_fp16_kitten_224_input_GPU.csv b/Testing/WinMLRunnerTest/OutputTensorData/DenseNet121_fp16_kitten_224_input_GPU.csv new file mode 100644 index 00000000..dbbc8134 --- /dev/null +++ b/Testing/WinMLRunnerTest/OutputTensorData/DenseNet121_fp16_kitten_224_input_GPU.csv @@ -0,0 +1,1001 @@ +Index,Value +0,-1.94141 +1,-0.911133 +2,-0.105896 +3,-0.876953 +4,-1.2168 +5,0.0284576 +6,-2.35156 +7,-1.6377 +8,-2.61719 +9,-2.0918 +10,-1.06738 +11,-3.48633 +12,-1.70508 +13,-1.22461 +14,-2.87305 +15,-0.92627 +16,-2.00391 +17,-2.83398 +18,-2.0957 +19,-4.15234 +20,-0.75 +21,-0.601563 +22,0.0649414 +23,-2.57813 +24,-0.740723 +25,-3.1875 +26,-1.70508 +27,-0.969727 +28,-2.45703 +29,-0.323242 +30,-1.56348 +31,-2.88672 +32,-1.57129 +33,-3.42969 +34,-1.69531 +35,-1.81934 +36,-0.107178 +37,-2.30859 +38,0.191162 +39,-1.64941 +40,-2.86133 +41,-1.24219 +42,-2.5293 +43,1.14746 +44,-0.115845 +45,-0.775391 +46,-1.45703 +47,-1.32617 +48,-1.23242 +49,-0.121277 +50,0.696289 +51,0.0428162 +52,2.16797 +53,-1.05957 +54,0.76416 +55,-3.0293 +56,-0.221313 +57,-1.59277 +58,-0.734375 +59,-1.1084 +60,1.27539 +61,-0.351807 +62,0.630859 +63,0.802734 +64,-2.31641 +65,-1.40039 +66,1.45703 +67,3.11328 +68,0.388428 +69,-0.00834656 +70,-3.0293 +71,-1.51074 +72,-4.36719 +73,-1.68945 +74,-4.3125 +75,-2.18359 +76,-1.66504 +77,-3.13867 +78,0.630371 +79,0.170898 +80,-1.36523 +81,-2.00781 +82,-0.939941 +83,-1.31445 +84,-1.62598 +85,-3.28906 +86,-2.56055 +87,-1.32129 +88,-2.74414 +89,-3.50977 +90,-1.70996 +91,-3.24609 +92,-4.73047 +93,-6.23438 +94,-1.3916 +95,-2.30273 +96,-3.91406 +97,-2.34961 +98,-2.10352 +99,-2.08008 +100,-0.309082 +101,-2.80664 +102,-0.23877 +103,1.01074 +104,1.15723 +105,0.21521 +106,1.12012 +107,-1.71289 +108,-1.00488 +109,-1.38574 +110,-0.500977 +111,1.74023 +112,0.839355 +113,0.611328 +114,1.34961 +115,-1.80957 +116,0.16394 +117,2.11523 +118,-1.03613 +119,-2.80273 +120,-3.58789 +121,-0.880371 +122,-0.387695 +123,-1.72656 +124,0.136719 +125,-1.44629 +126,-0.18103 +127,-3.33984 +128,-2.20703 +129,-2.21094 +130,-3.80664 +131,-4.10938 +132,-2.69531 +133,-0.540039 +134,-3.31445 +135,-3.27539 +136,-2.30469 +137,-3.2793 +138,-1.79395 +139,-0.494629 +140,-2.59766 +141,0.108948 +142,-2.47266 +143,-2.85742 +144,-2.36133 +145,-2.64063 +146,-1.60742 +147,1.2627 +148,-0.320313 +149,-2.96875 +150,0.392334 +151,2.81641 +152,0.376709 +153,-1.26953 +154,-0.661621 +155,-0.138428 +156,-1.58984 +157,-0.373291 +158,0.0158997 +159,-1.05859 +160,-1.30469 +161,-0.397949 +162,0.126465 +163,0.808594 +164,-0.775391 +165,-1.2832 +166,-1.88184 +167,-2.28711 +168,0.557129 +169,-1.0625 +170,-1.53516 +171,0.184204 +172,0.861816 +173,2.54492 +174,0.71875 +175,-1.17383 +176,-0.755371 +177,-1.41602 +178,0.443115 +179,1.41016 +180,0.663086 +181,-1.69629 +182,-1.55176 +183,-1.17285 +184,-1.12012 +185,0.978027 +186,1.99121 +187,1.44727 +188,0.683105 +189,-0.474365 +190,-0.456055 +191,-0.51416 +192,-2.56055 +193,2.54297 +194,-1.93262 +195,3.45898 +196,1.91016 +197,-0.947266 +198,0.14563 +199,1.24609 +200,1.44043 +201,-1.05566 +202,-1.6123 +203,-0.225464 +204,0.176392 +205,-0.627441 +206,-0.187988 +207,-0.269287 +208,0.90918 +209,0.333252 +210,-0.0360107 
+211,0.89502 +212,-1.51855 +213,-1.27246 +214,-1.66113 +215,-0.518555 +216,0.0996704 +217,-0.461914 +218,-1.37207 +219,-0.384521 +220,0.3396 +221,-1.69238 +222,-0.885742 +223,0.296875 +224,-0.540527 +225,-0.517578 +226,-1.35547 +227,3.20313 +228,-1.02637 +229,1.89355 +230,-0.387451 +231,0.940918 +232,2.45508 +233,0.147705 +234,-0.168213 +235,2.55859 +236,0.800293 +237,0.428711 +238,-0.235474 +239,0.755859 +240,1.45605 +241,-0.664063 +242,0.320801 +243,0.590332 +244,-0.731934 +245,2.19531 +246,1.18848 +247,-0.280762 +248,3.62305 +249,1.81445 +250,3.30078 +251,0.223389 +252,-0.570801 +253,2.35156 +254,0.107849 +255,-2.96289 +256,-0.593262 +257,-1.12598 +258,-1.41504 +259,-0.916504 +260,-1.37109 +261,-2.84766 +262,0.753906 +263,2.27539 +264,3.71484 +265,-0.154663 +266,-1.21289 +267,-1.36523 +268,-0.935059 +269,0.213501 +270,-0.660156 +271,-0.781738 +272,0.733887 +273,1.04492 +274,0.615723 +275,-2.18359 +276,-1.58789 +277,-0.150635 +278,-1.57617 +279,-0.0640259 +280,0.988281 +281,13.1094 +282,9.8125 +283,6.89844 +284,4.59375 +285,9.71094 +286,1.25098 +287,4.30078 +288,2.15039 +289,0.713867 +290,0.343506 +291,-2.23438 +292,3.3457 +293,-1.62012 +294,-0.599121 +295,-0.442627 +296,-2.60742 +297,-2.32813 +298,-1.06934 +299,-0.482422 +300,-2.51563 +301,-1.1377 +302,-1.11426 +303,-0.946777 +304,-3.56445 +305,-3.89063 +306,-0.0444031 +307,-1.76563 +308,-3.64063 +309,-1.59277 +310,1.09766 +311,-2.25586 +312,-1.60645 +313,-2.31445 +314,-0.414795 +315,-2.42969 +316,-0.612305 +317,-2.86523 +318,-0.96582 +319,-2.99609 +320,-4.32422 +321,-3.24414 +322,-2.04102 +323,-4.23828 +324,-2.85352 +325,-3.83008 +326,-2.33008 +327,0.691406 +328,0.452637 +329,0.852539 +330,2.66406 +331,2.1582 +332,2.01172 +333,1.88086 +334,-1.43262 +335,0.19812 +336,-4.14844 +337,0.562988 +338,0.887207 +339,-3.34766 +340,-0.974121 +341,-1.94434 +342,-3 +343,-4.41016 +344,-2.4043 +345,-3.69531 +346,-3.46875 +347,-5.28125 +348,-4.39063 +349,-4.62891 +350,-2.9707 +351,-3.81641 +352,-3.02734 +353,-3.04102 +354,-4.99219 +355,-2.84766 +356,1.72168 +357,1.75 +358,0.914551 +359,1.4668 +360,2.34961 +361,0.123169 +362,-0.69873 +363,-0.0761108 +364,-2.49805 +365,-4.79297 +366,-6.05469 +367,-4.25781 +368,-3.8457 +369,-3.26953 +370,-2.68555 +371,-2.65234 +372,-3.72266 +373,-1.81055 +374,-4.66016 +375,-5.45313 +376,-4.17578 +377,-0.436279 +378,-1.43945 +379,-4.35156 +380,-0.490234 +381,-4.82813 +382,-2.08594 +383,-2.29102 +384,-2.99414 +385,-2.50781 +386,-3.54883 +387,-0.961914 +388,-4.11328 +389,0.633301 +390,-1.0918 +391,-0.126465 +392,-2.0625 +393,-3.33008 +394,0.164551 +395,0.0529175 +396,-3.25781 +397,-2.05859 +398,-0.00420761 +399,0.374023 +400,2.45898 +401,0.102295 +402,0.775879 +403,-3.47461 +404,-2.72461 +405,-1.3584 +406,-0.42334 +407,-1.4707 +408,0.193726 +409,2.88086 +410,-1.61621 +411,0.642578 +412,4.65234 +413,-1.64746 +414,3.19727 +415,-1.1875 +416,2.48438 +417,0.967773 +418,1.35938 +419,2.51563 +420,0.416016 +421,-1.13086 +422,1.66113 +423,1.60938 +424,-1.12305 +425,-3.25977 +426,0.429443 +427,1.67676 +428,4.57422 +429,-0.734375 +430,-1.88574 +431,3.73047 +432,-0.433838 +433,0.615234 +434,2.99609 +435,4.26563 +436,-2.0918 +437,-1.57422 +438,2.13086 +439,-0.237793 +440,1.97266 +441,1.35449 +442,-0.970215 +443,2.35547 +444,-0.810547 +445,-1.32324 +446,0.775879 +447,-0.0395813 +448,0.804199 +449,-0.916504 +450,-3.04688 +451,0.0646973 +452,2.17969 +453,3.36328 +454,1.20605 +455,1.70898 +456,0.584473 +457,4.01953 +458,-0.993164 +459,1.85059 +460,0.300049 +461,-0.242432 +462,3.42773 +463,4.82813 +464,1.49512 +465,0.69043 +466,-1.86914 
+467,-1.43945 +468,-1.83594 +469,3.19922 +470,1.34668 +471,-2.17773 +472,-0.910156 +473,0.467529 +474,0.955566 +475,0.876465 +476,-3.14063 +477,2.9707 +478,5.19531 +479,1.47656 +480,-0.956543 +481,0.748535 +482,-1.35352 +483,-0.840332 +484,-1.72168 +485,-0.103516 +486,0.272217 +487,1.79492 +488,-0.019989 +489,-0.758301 +490,-1.82422 +491,-0.925781 +492,0.0241241 +493,-0.0485535 +494,-0.322998 +495,-0.467529 +496,4.21484 +497,-2.35742 +498,-1.76074 +499,1.53125 +500,-2.8125 +501,2.4375 +502,0.748047 +503,1.44727 +504,2.52539 +505,0.891602 +506,0.749023 +507,0.776855 +508,6.04297 +509,-0.663086 +510,-1.56152 +511,-3.33203 +512,1.62012 +513,-1.0293 +514,2.14648 +515,4.17188 +516,2.67188 +517,-1.21191 +518,3.70117 +519,2.2832 +520,0.174927 +521,2.71094 +522,1.83105 +523,1.3584 +524,-0.585449 +525,-2.78906 +526,2.36914 +527,2.07227 +528,0.302979 +529,2.20703 +530,1.7041 +531,0.671387 +532,0.282227 +533,-0.451172 +534,2.37695 +535,0.841797 +536,-0.389648 +537,-0.169189 +538,-3.35742 +539,5 +540,-2.84375 +541,1.86133 +542,0.899414 +543,3.67773 +544,0.826172 +545,1.20508 +546,1.03125 +547,-2.11328 +548,-1.78906 +549,1.46484 +550,-0.513672 +551,2.25195 +552,3.58594 +553,3.26563 +554,-2.40625 +555,-2.58008 +556,-0.423828 +557,-0.0914917 +558,-1.24707 +559,2.04883 +560,-3.51953 +561,-1.44531 +562,-1.00977 +563,-0.106079 +564,-1.19434 +565,-0.24939 +566,-0.705078 +567,3.5625 +568,3.28711 +569,-0.854492 +570,1.79395 +571,-3.65625 +572,2.11719 +573,-2.46094 +574,1.82715 +575,-2.11914 +576,-0.846191 +577,1.2334 +578,3.04102 +579,0.152588 +580,-0.716797 +581,-2.44336 +582,-0.225464 +583,0.238159 +584,1.62402 +585,1.91504 +586,-1.04199 +587,3.06055 +588,6.86328 +589,1.92578 +590,1.60449 +591,-0.0466309 +592,-0.128662 +593,0.523926 +594,0.143921 +595,-1.78223 +596,1.8291 +597,-0.468506 +598,-2.2168 +599,1.39551 +600,1.88184 +601,2.18359 +602,0.637207 +603,-3.83203 +604,0.760742 +605,2.00391 +606,2.64844 +607,-0.526367 +608,2.65234 +609,-1.78516 +610,1.0957 +611,2.67578 +612,-0.977051 +613,1.44434 +614,0.570801 +615,1.8252 +616,-0.709961 +617,1.97559 +618,2.39063 +619,1.37598 +620,2.64648 +621,-0.283447 +622,4.47266 +623,2.08789 +624,0.6875 +625,-2.13281 +626,0.685059 +627,-0.728516 +628,-2.08203 +629,1.23145 +630,0.53125 +631,2.43164 +632,0.807129 +633,2.16211 +634,-2.46484 +635,1.73438 +636,3.42578 +637,-0.722656 +638,1.03418 +639,0.24585 +640,-0.462158 +641,2.41016 +642,-1.50293 +643,1.76074 +644,2.26563 +645,-0.850098 +646,-0.750488 +647,0.69043 +648,1.35352 +649,-2.09375 +650,0.779297 +651,1.46973 +652,0.247925 +653,-0.475342 +654,-0.640625 +655,1.28223 +656,-0.398438 +657,-2.66602 +658,3.00391 +659,4.01172 +660,-0.414795 +661,-1.30371 +662,0.321045 +663,-2.63477 +664,1.96289 +665,1.78027 +666,2.07813 +667,2.88281 +668,-3.64648 +669,-1.56641 +670,1.98828 +671,-1.41797 +672,-2.3457 +673,6.11719 +674,1.97266 +675,-0.884766 +676,2.82031 +677,1.78809 +678,1.95313 +679,-0.0108566 +680,3.75977 +681,3.75781 +682,-0.557129 +683,-0.887207 +684,0.79834 +685,-1.49805 +686,1.23242 +687,-3.30664 +688,0.30249 +689,1.30371 +690,-3.67188 +691,1.29883 +692,2.58008 +693,0.63623 +694,-1.83789 +695,-0.426758 +696,2.10156 +697,1.375 +698,-1.38965 +699,1.29785 +700,4.47266 +701,-0.415039 +702,-1.33887 +703,-0.414551 +704,0.370605 +705,-1.24707 +706,-0.952637 +707,-0.361816 +708,2.79883 +709,0.953613 +710,0.895996 +711,1.14453 +712,-0.282959 +713,3.29102 +714,3.64648 +715,-1.12793 +716,1.02051 +717,-1.30762 +718,-1.68555 +719,0.30835 +720,2.32617 +721,1.61816 +722,5.23047 +723,1.83203 +724,-1.76953 +725,2.67383 +726,-0.133667 
+727,-3.23242 +728,6.76563 +729,0.663086 +730,-1.85059 +731,3.22852 +732,-0.170166 +733,0.918457 +734,-2.68164 +735,1.0498 +736,1.02441 +737,0.621094 +738,1.52246 +739,0.807129 +740,0.622559 +741,0.779297 +742,5.41797 +743,-0.694824 +744,-1.16895 +745,0.27002 +746,2.33594 +747,3.36523 +748,1.38672 +749,-0.30835 +750,3.1543 +751,-2.24805 +752,-0.925781 +753,3.6582 +754,1.73438 +755,-0.966797 +756,0.565918 +757,-0.0030365 +758,0.977051 +759,1.48145 +760,2.82617 +761,7.10938 +762,-1.06445 +763,-0.125732 +764,-0.665527 +765,1.8125 +766,-0.703613 +767,1.36719 +768,1.20996 +769,2.5918 +770,1.92871 +771,-0.910156 +772,1.76465 +773,-0.154785 +774,0.386719 +775,1.23926 +776,-0.255615 +777,0.915039 +778,2.25 +779,-1.92969 +780,-0.817383 +781,-2.70703 +782,3.28711 +783,1.56055 +784,2.4043 +785,1.41406 +786,-0.536133 +787,0.983887 +788,-0.268555 +789,2.30664 +790,3.43164 +791,0.723633 +792,3.48438 +793,4.79688 +794,2.22266 +795,0.70752 +796,2.68359 +797,5.44531 +798,-1.17383 +799,1.62402 +800,-0.822266 +801,-0.512207 +802,0.222534 +803,0.88623 +804,2.64258 +805,1.3457 +806,3.87305 +807,0.787598 +808,1.69141 +809,2.45117 +810,2.64844 +811,4.92188 +812,-3.81055 +813,2.88477 +814,0.779297 +815,-1.65723 +816,-0.129761 +817,-2.27148 +818,3.98242 +819,-0.226685 +820,-2.95898 +821,-3.21484 +822,-0.0496826 +823,2.25977 +824,4.26953 +825,-0.072937 +826,0.943848 +827,1.67578 +828,2.1543 +829,-2.89063 +830,0.281494 +831,2.72852 +832,-3.03516 +833,-2.80664 +834,1.70508 +835,-0.967773 +836,1.47852 +837,1.83594 +838,3.61523 +839,-3.24609 +840,2.79297 +841,2.91016 +842,2.21484 +843,-0.836914 +844,2.54297 +845,2.66016 +846,1.7334 +847,-1.50977 +848,-1.20605 +849,-0.411133 +850,1.5293 +851,3.25391 +852,4.34375 +853,-0.389648 +854,0.957031 +855,2.99609 +856,-2.82422 +857,-1.30762 +858,-1.16406 +859,0.301758 +860,0.527832 +861,5.39063 +862,-0.0621948 +863,-1.89258 +864,-4.28906 +865,-1.78711 +866,-0.674316 +867,-3.47656 +868,1.39258 +869,1.29102 +870,2.21289 +871,-1.88184 +872,0.011322 +873,-0.955566 +874,-2.88867 +875,-0.388184 +876,4.07031 +877,-1.11914 +878,1.18555 +879,0.936523 +880,1.00781 +881,1.68848 +882,2.32031 +883,3.37109 +884,-2.36133 +885,5.07031 +886,2.36914 +887,0.302002 +888,-1.2002 +889,1.65137 +890,-3.58203 +891,0.475342 +892,3.25586 +893,0.624512 +894,0.812988 +895,-2.03125 +896,6.13281 +897,5.70313 +898,3.96094 +899,4.36328 +900,-2.29688 +901,0.816895 +902,0.877441 +903,2.51758 +904,3.83789 +905,2.45313 +906,0.309082 +907,-0.118225 +908,-1.99902 +909,4.39844 +910,3.17969 +911,4.32031 +912,-1.2832 +913,-1.3877 +914,-1.56641 +915,-3.95117 +916,3.98438 +917,-0.963867 +918,2.82227 +919,0.350098 +920,-0.72168 +921,-0.48291 +922,-0.303467 +923,1.52637 +924,0.859375 +925,0.442383 +926,0.276855 +927,0.0311737 +928,-0.0423584 +929,2.99609 +930,0.0568237 +931,0.925293 +932,0.0690918 +933,-1.85059 +934,-1.06738 +935,-0.57373 +936,2.76758 +937,2.76172 +938,0.0513916 +939,1.40918 +940,2.1543 +941,-1.7627 +942,1.88086 +943,-0.408691 +944,-2.24609 +945,-0.0343933 +946,-1.17383 +947,0.113586 +948,1.35938 +949,1.3291 +950,1.45117 +951,1.32227 +952,0.166138 +953,0.894531 +954,1.95508 +955,0.0471802 +956,-1.24609 +957,1.03027 +958,-0.304443 +959,-0.0187836 +960,0.868164 +961,0.539063 +962,0.201294 +963,1.14746 +964,0.801758 +965,0.38623 +966,2.29883 +967,0.207397 +968,4.88672 +969,1.63965 +970,-0.303955 +971,1.2793 +972,-0.350586 +973,-2.86133 +974,-2.76367 +975,0.648926 +976,-1.08008 +977,-0.00375366 +978,1.30762 +979,-0.501953 +980,-0.809082 +981,-3.76367 +982,-1.28027 +983,-1.75879 +984,-1.2041 +985,-0.499023 
+986,-1.21191 +987,3.36523 +988,-0.624023 +989,-1.14941 +990,1.2998 +991,-2.71484 +992,-3.16602 +993,-3.00586 +994,-3.17188 +995,-2.26172 +996,0.439697 +997,-1.7959 +998,2.83398 +999,2.88086 \ No newline at end of file diff --git a/Testing/WinMLRunnerTest/OutputTensorData/DenseNet121_fp32_kitten_224_input_CPU.csv b/Testing/WinMLRunnerTest/OutputTensorData/DenseNet121_fp32_kitten_224_input_CPU.csv new file mode 100644 index 00000000..14964894 --- /dev/null +++ b/Testing/WinMLRunnerTest/OutputTensorData/DenseNet121_fp32_kitten_224_input_CPU.csv @@ -0,0 +1,1001 @@ +Index,Value +0,-1.91606 +1,-0.905249 +2,-0.0867284 +3,-0.882166 +4,-1.21631 +5,0.0379311 +6,-2.36279 +7,-1.63114 +8,-2.58456 +9,-2.09799 +10,-1.0558 +11,-3.48017 +12,-1.69516 +13,-1.19998 +14,-2.87957 +15,-0.908859 +16,-1.9843 +17,-2.84587 +18,-2.09693 +19,-4.1625 +20,-0.725434 +21,-0.590097 +22,0.053395 +23,-2.56425 +24,-0.737564 +25,-3.1768 +26,-1.69523 +27,-0.963239 +28,-2.42826 +29,-0.353437 +30,-1.53978 +31,-2.87875 +32,-1.56672 +33,-3.42582 +34,-1.69567 +35,-1.81892 +36,-0.0992224 +37,-2.31653 +38,0.19193 +39,-1.6505 +40,-2.86796 +41,-1.24237 +42,-2.53785 +43,1.14519 +44,-0.112008 +45,-0.781623 +46,-1.46385 +47,-1.33402 +48,-1.22221 +49,-0.112601 +50,0.699213 +51,0.0413103 +52,2.16638 +53,-1.05753 +54,0.759585 +55,-3.02008 +56,-0.222634 +57,-1.58715 +58,-0.710625 +59,-1.10785 +60,1.27667 +61,-0.364929 +62,0.635029 +63,0.808093 +64,-2.30073 +65,-1.40456 +66,1.4404 +67,3.0896 +68,0.382814 +69,0.00424534 +70,-2.99935 +71,-1.53491 +72,-4.36506 +73,-1.68652 +74,-4.31438 +75,-2.18416 +76,-1.67295 +77,-3.13755 +78,0.636146 +79,0.182761 +80,-1.35404 +81,-1.99378 +82,-0.946274 +83,-1.29763 +84,-1.63141 +85,-3.2835 +86,-2.53778 +87,-1.32172 +88,-2.74977 +89,-3.52056 +90,-1.69814 +91,-3.24118 +92,-4.70068 +93,-6.23999 +94,-1.3672 +95,-2.30657 +96,-3.94059 +97,-2.32913 +98,-2.09191 +99,-2.08211 +100,-0.29678 +101,-2.80316 +102,-0.246208 +103,1.01586 +104,1.15834 +105,0.222449 +106,1.12247 +107,-1.70558 +108,-0.997156 +109,-1.38944 +110,-0.49443 +111,1.74204 +112,0.841824 +113,0.612719 +114,1.36953 +115,-1.81339 +116,0.181316 +117,2.1097 +118,-1.04499 +119,-2.78759 +120,-3.56816 +121,-0.890338 +122,-0.391347 +123,-1.71094 +124,0.137863 +125,-1.43321 +126,-0.177382 +127,-3.34594 +128,-2.21415 +129,-2.22575 +130,-3.81039 +131,-4.11833 +132,-2.70923 +133,-0.522449 +134,-3.31302 +135,-3.26697 +136,-2.27509 +137,-3.26818 +138,-1.78638 +139,-0.477856 +140,-2.58207 +141,0.113008 +142,-2.46954 +143,-2.85111 +144,-2.35603 +145,-2.60421 +146,-1.6003 +147,1.27253 +148,-0.309361 +149,-3.00004 +150,0.41338 +151,2.81164 +152,0.371395 +153,-1.28592 +154,-0.674069 +155,-0.141232 +156,-1.59301 +157,-0.380119 +158,0.00621204 +159,-1.05372 +160,-1.31206 +161,-0.399168 +162,0.113584 +163,0.80035 +164,-0.766685 +165,-1.28776 +166,-1.89365 +167,-2.27387 +168,0.555824 +169,-1.06589 +170,-1.53725 +171,0.189953 +172,0.880422 +173,2.56043 +174,0.713877 +175,-1.18002 +176,-0.742424 +177,-1.4137 +178,0.441834 +179,1.40009 +180,0.652328 +181,-1.70928 +182,-1.56574 +183,-1.15887 +184,-1.12583 +185,0.975679 +186,2.00473 +187,1.44885 +188,0.672709 +189,-0.477761 +190,-0.460921 +191,-0.51254 +192,-2.57591 +193,2.55202 +194,-1.94449 +195,3.44925 +196,1.90518 +197,-0.939112 +198,0.137988 +199,1.26284 +200,1.43325 +201,-1.0555 +202,-1.63007 +203,-0.238386 +204,0.163825 +205,-0.604585 +206,-0.169243 +207,-0.26425 +208,0.914737 +209,0.348523 +210,-0.0265459 +211,0.897771 +212,-1.51765 +213,-1.26897 +214,-1.64825 +215,-0.526051 +216,0.105087 +217,-0.453204 +218,-1.36355 
+219,-0.385476 +220,0.345838 +221,-1.70141 +222,-0.884092 +223,0.310773 +224,-0.522081 +225,-0.497735 +226,-1.35751 +227,3.23484 +228,-1.03509 +229,1.88976 +230,-0.387604 +231,0.945491 +232,2.46138 +233,0.146372 +234,-0.16357 +235,2.58579 +236,0.803393 +237,0.43301 +238,-0.233263 +239,0.763235 +240,1.47391 +241,-0.661304 +242,0.311539 +243,0.584794 +244,-0.734899 +245,2.20196 +246,1.1956 +247,-0.277042 +248,3.6375 +249,1.8378 +250,3.30869 +251,0.221028 +252,-0.586488 +253,2.36563 +254,0.101866 +255,-2.96305 +256,-0.58911 +257,-1.13579 +258,-1.40487 +259,-0.925016 +260,-1.37039 +261,-2.84886 +262,0.742018 +263,2.30111 +264,3.74745 +265,-0.168756 +266,-1.22273 +267,-1.37082 +268,-0.953986 +269,0.239227 +270,-0.644133 +271,-0.753054 +272,0.749482 +273,1.07466 +274,0.636035 +275,-2.16813 +276,-1.5721 +277,-0.129151 +278,-1.54885 +279,-0.0554669 +280,1.01759 +281,13.141 +282,9.80613 +283,6.87191 +284,4.58034 +285,9.75746 +286,1.26755 +287,4.27051 +288,2.17699 +289,0.721271 +290,0.354685 +291,-2.23643 +292,3.37039 +293,-1.62486 +294,-0.588247 +295,-0.426578 +296,-2.60045 +297,-2.32561 +298,-1.06071 +299,-0.470834 +300,-2.49715 +301,-1.14143 +302,-1.106 +303,-0.941481 +304,-3.56411 +305,-3.86208 +306,-0.0369265 +307,-1.7545 +308,-3.60791 +309,-1.59302 +310,1.11367 +311,-2.25821 +312,-1.5902 +313,-2.31117 +314,-0.428193 +315,-2.42562 +316,-0.60292 +317,-2.85117 +318,-0.948686 +319,-2.99872 +320,-4.32247 +321,-3.22123 +322,-2.04551 +323,-4.25169 +324,-2.83752 +325,-3.80456 +326,-2.32319 +327,0.686995 +328,0.439239 +329,0.839418 +330,2.67618 +331,2.16541 +332,2.00835 +333,1.88336 +334,-1.42355 +335,0.199188 +336,-4.13739 +337,0.573329 +338,0.865215 +339,-3.344 +340,-0.966377 +341,-1.94621 +342,-2.99668 +343,-4.38761 +344,-2.35951 +345,-3.70153 +346,-3.48128 +347,-5.28067 +348,-4.40523 +349,-4.60571 +350,-2.9724 +351,-3.8212 +352,-3.02265 +353,-3.04911 +354,-5.00967 +355,-2.85844 +356,1.73895 +357,1.76015 +358,0.923337 +359,1.45929 +360,2.36823 +361,0.110966 +362,-0.701255 +363,-0.0793139 +364,-2.48603 +365,-4.80638 +366,-6.08569 +367,-4.2676 +368,-3.85256 +369,-3.269 +370,-2.68882 +371,-2.64796 +372,-3.73309 +373,-1.81992 +374,-4.68106 +375,-5.44761 +376,-4.16538 +377,-0.436954 +378,-1.44925 +379,-4.35825 +380,-0.501376 +381,-4.84513 +382,-2.08458 +383,-2.28374 +384,-2.98171 +385,-2.51493 +386,-3.54691 +387,-0.947644 +388,-4.09568 +389,0.647466 +390,-1.11853 +391,-0.10017 +392,-2.0754 +393,-3.30833 +394,0.173513 +395,0.0531768 +396,-3.2948 +397,-2.0546 +398,-0.0179479 +399,0.38584 +400,2.43649 +401,0.114888 +402,0.783349 +403,-3.47379 +404,-2.70834 +405,-1.36699 +406,-0.440062 +407,-1.47779 +408,0.183518 +409,2.85646 +410,-1.60138 +411,0.640649 +412,4.63954 +413,-1.64934 +414,3.20527 +415,-1.19964 +416,2.47761 +417,0.970359 +418,1.3595 +419,2.50419 +420,0.411327 +421,-1.12114 +422,1.64827 +423,1.59733 +424,-1.13499 +425,-3.25601 +426,0.401926 +427,1.68188 +428,4.55787 +429,-0.715596 +430,-1.86561 +431,3.72594 +432,-0.428388 +433,0.58863 +434,2.99024 +435,4.28582 +436,-2.10221 +437,-1.57713 +438,2.13402 +439,-0.254877 +440,1.98789 +441,1.36903 +442,-1.00218 +443,2.3492 +444,-0.822481 +445,-1.31977 +446,0.75691 +447,-0.0422113 +448,0.791564 +449,-0.936276 +450,-3.04269 +451,0.0474846 +452,2.17971 +453,3.35822 +454,1.19236 +455,1.71125 +456,0.597461 +457,3.99598 +458,-1.00467 +459,1.84724 +460,0.299725 +461,-0.220255 +462,3.43401 +463,4.80236 +464,1.47725 +465,0.697727 +466,-1.8532 +467,-1.44979 +468,-1.84985 +469,3.21514 +470,1.34001 +471,-2.18369 +472,-0.903525 +473,0.450736 +474,0.945778 
+475,0.865018 +476,-3.15112 +477,2.98632 +478,5.20532 +479,1.47973 +480,-0.964329 +481,0.74251 +482,-1.36595 +483,-0.836668 +484,-1.72503 +485,-0.118794 +486,0.273875 +487,1.79512 +488,-0.0426795 +489,-0.76686 +490,-1.82816 +491,-0.925629 +492,0.0207857 +493,-0.0436539 +494,-0.335253 +495,-0.46233 +496,4.21855 +497,-2.37299 +498,-1.76886 +499,1.541 +500,-2.8017 +501,2.44362 +502,0.744078 +503,1.4571 +504,2.5279 +505,0.894255 +506,0.752984 +507,0.767749 +508,6.00689 +509,-0.672177 +510,-1.56431 +511,-3.35193 +512,1.60584 +513,-1.02447 +514,2.17595 +515,4.15234 +516,2.66378 +517,-1.22391 +518,3.6949 +519,2.28389 +520,0.170292 +521,2.68822 +522,1.8424 +523,1.35123 +524,-0.56969 +525,-2.79718 +526,2.33587 +527,2.07065 +528,0.310783 +529,2.17929 +530,1.69388 +531,0.673913 +532,0.29049 +533,-0.464572 +534,2.38161 +535,0.837488 +536,-0.385438 +537,-0.146416 +538,-3.36926 +539,4.99681 +540,-2.85517 +541,1.8765 +542,0.895901 +543,3.6698 +544,0.824615 +545,1.19181 +546,1.03693 +547,-2.13662 +548,-1.78113 +549,1.43859 +550,-0.509029 +551,2.24176 +552,3.58876 +553,3.24814 +554,-2.40285 +555,-2.60312 +556,-0.421825 +557,-0.0852488 +558,-1.23606 +559,2.05074 +560,-3.52361 +561,-1.45234 +562,-1.00759 +563,-0.111572 +564,-1.18737 +565,-0.257686 +566,-0.689726 +567,3.56563 +568,3.29082 +569,-0.861249 +570,1.79249 +571,-3.66609 +572,2.13762 +573,-2.46991 +574,1.8372 +575,-2.14414 +576,-0.859271 +577,1.22245 +578,3.06392 +579,0.150279 +580,-0.725912 +581,-2.44156 +582,-0.234968 +583,0.23626 +584,1.6036 +585,1.90402 +586,-1.07298 +587,3.07891 +588,6.85513 +589,1.92081 +590,1.61465 +591,-0.0654475 +592,-0.132772 +593,0.514308 +594,0.146493 +595,-1.77144 +596,1.83016 +597,-0.459755 +598,-2.21367 +599,1.37204 +600,1.86382 +601,2.17567 +602,0.63769 +603,-3.82384 +604,0.765516 +605,1.98936 +606,2.62766 +607,-0.521949 +608,2.65816 +609,-1.79352 +610,1.08443 +611,2.65525 +612,-0.981261 +613,1.45001 +614,0.599856 +615,1.82149 +616,-0.700877 +617,1.96954 +618,2.40256 +619,1.36609 +620,2.65006 +621,-0.287354 +622,4.49172 +623,2.06578 +624,0.657885 +625,-2.13471 +626,0.67632 +627,-0.754166 +628,-2.06118 +629,1.24893 +630,0.551652 +631,2.41911 +632,0.802067 +633,2.14179 +634,-2.46775 +635,1.72241 +636,3.42929 +637,-0.730969 +638,1.03761 +639,0.244734 +640,-0.472867 +641,2.39207 +642,-1.51454 +643,1.76192 +644,2.26767 +645,-0.862059 +646,-0.744783 +647,0.678205 +648,1.34701 +649,-2.06914 +650,0.764452 +651,1.45471 +652,0.25177 +653,-0.470542 +654,-0.64593 +655,1.27738 +656,-0.405339 +657,-2.67172 +658,3.02277 +659,4.00241 +660,-0.427193 +661,-1.33006 +662,0.314888 +663,-2.64669 +664,1.95958 +665,1.76008 +666,2.10254 +667,2.86341 +668,-3.65951 +669,-1.57904 +670,1.97378 +671,-1.42031 +672,-2.32954 +673,6.08096 +674,1.96901 +675,-0.907172 +676,2.82112 +677,1.79752 +678,1.93552 +679,-0.0262457 +680,3.75737 +681,3.74677 +682,-0.564623 +683,-0.887312 +684,0.81555 +685,-1.51792 +686,1.20776 +687,-3.31239 +688,0.319768 +689,1.30024 +690,-3.67327 +691,1.28979 +692,2.57712 +693,0.634678 +694,-1.86756 +695,-0.4635 +696,2.11373 +697,1.35721 +698,-1.40013 +699,1.28439 +700,4.46772 +701,-0.40675 +702,-1.33635 +703,-0.431013 +704,0.371427 +705,-1.26746 +706,-0.950144 +707,-0.363945 +708,2.80213 +709,0.956198 +710,0.889312 +711,1.14723 +712,-0.285241 +713,3.30197 +714,3.66356 +715,-1.12727 +716,1.01621 +717,-1.32181 +718,-1.693 +719,0.328016 +720,2.30824 +721,1.63587 +722,5.25727 +723,1.81472 +724,-1.76752 +725,2.68063 +726,-0.147058 +727,-3.23122 +728,6.78859 +729,0.665028 +730,-1.84608 +731,3.23236 +732,-0.160082 +733,0.92795 
+734,-2.69275 +735,1.05049 +736,1.02664 +737,0.633376 +738,1.51382 +739,0.821492 +740,0.627798 +741,0.777682 +742,5.43022 +743,-0.696332 +744,-1.16717 +745,0.255916 +746,2.33138 +747,3.35423 +748,1.37415 +749,-0.303649 +750,3.17047 +751,-2.24685 +752,-0.926331 +753,3.65205 +754,1.74512 +755,-0.948427 +756,0.553077 +757,-0.00452093 +758,0.968067 +759,1.47745 +760,2.81092 +761,7.07178 +762,-1.0592 +763,-0.122721 +764,-0.664518 +765,1.81316 +766,-0.703827 +767,1.34542 +768,1.22458 +769,2.58456 +770,1.93575 +771,-0.937133 +772,1.75695 +773,-0.13477 +774,0.389383 +775,1.25563 +776,-0.249664 +777,0.918251 +778,2.20454 +779,-1.92332 +780,-0.811451 +781,-2.70414 +782,3.27942 +783,1.55489 +784,2.41949 +785,1.38601 +786,-0.532347 +787,0.977482 +788,-0.276558 +789,2.30738 +790,3.41956 +791,0.712186 +792,3.47707 +793,4.77185 +794,2.23102 +795,0.725238 +796,2.70315 +797,5.47313 +798,-1.19102 +799,1.62433 +800,-0.821411 +801,-0.51498 +802,0.236786 +803,0.89071 +804,2.64701 +805,1.34608 +806,3.88814 +807,0.785301 +808,1.68689 +809,2.45297 +810,2.62732 +811,4.92052 +812,-3.82417 +813,2.86908 +814,0.775503 +815,-1.6677 +816,-0.127956 +817,-2.26967 +818,4.00756 +819,-0.227122 +820,-2.98073 +821,-3.21862 +822,-0.0508108 +823,2.24119 +824,4.26391 +825,-0.0673175 +826,0.930901 +827,1.66573 +828,2.16628 +829,-2.91885 +830,0.289731 +831,2.73797 +832,-3.02852 +833,-2.80114 +834,1.72181 +835,-0.972265 +836,1.49743 +837,1.84037 +838,3.59088 +839,-3.2549 +840,2.79797 +841,2.90805 +842,2.19606 +843,-0.845528 +844,2.54546 +845,2.66328 +846,1.72761 +847,-1.50837 +848,-1.19992 +849,-0.406037 +850,1.51716 +851,3.2494 +852,4.34673 +853,-0.378114 +854,0.951921 +855,2.99571 +856,-2.84057 +857,-1.31137 +858,-1.17655 +859,0.306228 +860,0.512488 +861,5.37306 +862,-0.0752091 +863,-1.87033 +864,-4.3011 +865,-1.8103 +866,-0.682876 +867,-3.48853 +868,1.37623 +869,1.30874 +870,2.21094 +871,-1.87806 +872,0.0261972 +873,-0.968912 +874,-2.885 +875,-0.381717 +876,4.08916 +877,-1.12278 +878,1.17655 +879,0.950572 +880,1.00028 +881,1.67716 +882,2.33014 +883,3.38959 +884,-2.37311 +885,5.08916 +886,2.37815 +887,0.30689 +888,-1.20633 +889,1.65275 +890,-3.57546 +891,0.465135 +892,3.23399 +893,0.616869 +894,0.807644 +895,-2.02836 +896,6.14744 +897,5.69066 +898,3.9785 +899,4.36713 +900,-2.31014 +901,0.818092 +902,0.834508 +903,2.51939 +904,3.82208 +905,2.42871 +906,0.320352 +907,-0.121049 +908,-2.02944 +909,4.39181 +910,3.18796 +911,4.27932 +912,-1.28884 +913,-1.37342 +914,-1.55542 +915,-3.95259 +916,3.98201 +917,-0.966499 +918,2.81322 +919,0.33457 +920,-0.725598 +921,-0.496225 +922,-0.292705 +923,1.53581 +924,0.859475 +925,0.449415 +926,0.269882 +927,0.0375257 +928,-0.0325534 +929,3.00702 +930,0.0656589 +931,0.947039 +932,0.0739413 +933,-1.85573 +934,-1.06271 +935,-0.578699 +936,2.79573 +937,2.79183 +938,0.0577565 +939,1.3942 +940,2.14155 +941,-1.77846 +942,1.8653 +943,-0.423057 +944,-2.23958 +945,-0.0213172 +946,-1.1703 +947,0.119139 +948,1.34218 +949,1.3334 +950,1.44451 +951,1.33316 +952,0.174211 +953,0.888419 +954,1.94004 +955,0.0659385 +956,-1.24016 +957,1.02164 +958,-0.293981 +959,-0.00740861 +960,0.881002 +961,0.540011 +962,0.204585 +963,1.14904 +964,0.79566 +965,0.397019 +966,2.30606 +967,0.210156 +968,4.88998 +969,1.66129 +970,-0.303616 +971,1.29067 +972,-0.342649 +973,-2.87391 +974,-2.75477 +975,0.641559 +976,-1.08903 +977,-0.0146732 +978,1.29263 +979,-0.504065 +980,-0.806904 +981,-3.76551 +982,-1.27592 +983,-1.74248 +984,-1.20251 +985,-0.498472 +986,-1.20171 +987,3.36917 +988,-0.602464 +989,-1.17178 +990,1.29307 +991,-2.70877 
+992,-3.15857 +993,-2.99487 +994,-3.16664 +995,-2.24433 +996,0.434152 +997,-1.77759 +998,2.81283 +999,2.88125 \ No newline at end of file diff --git a/Testing/WinMLRunnerTest/OutputTensorData/DenseNet121_fp32_kitten_224_input_GPU.csv b/Testing/WinMLRunnerTest/OutputTensorData/DenseNet121_fp32_kitten_224_input_GPU.csv new file mode 100644 index 00000000..c69ef46a --- /dev/null +++ b/Testing/WinMLRunnerTest/OutputTensorData/DenseNet121_fp32_kitten_224_input_GPU.csv @@ -0,0 +1,1001 @@ +Index,Value +0,-1.91606 +1,-0.90525 +2,-0.086729 +3,-0.882168 +4,-1.21631 +5,0.0379299 +6,-2.36279 +7,-1.63114 +8,-2.58456 +9,-2.09799 +10,-1.0558 +11,-3.48017 +12,-1.69515 +13,-1.19998 +14,-2.87957 +15,-0.908859 +16,-1.9843 +17,-2.84587 +18,-2.09693 +19,-4.1625 +20,-0.725436 +21,-0.590097 +22,0.0533921 +23,-2.56425 +24,-0.737564 +25,-3.1768 +26,-1.69523 +27,-0.963241 +28,-2.42826 +29,-0.353434 +30,-1.53978 +31,-2.87875 +32,-1.56672 +33,-3.42582 +34,-1.69568 +35,-1.81892 +36,-0.0992221 +37,-2.31653 +38,0.191929 +39,-1.6505 +40,-2.86796 +41,-1.24237 +42,-2.53785 +43,1.14519 +44,-0.112009 +45,-0.781622 +46,-1.46385 +47,-1.33402 +48,-1.22221 +49,-0.112604 +50,0.699211 +51,0.0413078 +52,2.16637 +53,-1.05753 +54,0.759582 +55,-3.02008 +56,-0.222635 +57,-1.58716 +58,-0.710626 +59,-1.10786 +60,1.27667 +61,-0.364929 +62,0.635031 +63,0.808093 +64,-2.30073 +65,-1.40456 +66,1.44039 +67,3.0896 +68,0.382815 +69,0.00424403 +70,-2.99935 +71,-1.53491 +72,-4.36506 +73,-1.68652 +74,-4.31438 +75,-2.18416 +76,-1.67295 +77,-3.13755 +78,0.636145 +79,0.182758 +80,-1.35404 +81,-1.99379 +82,-0.946276 +83,-1.29763 +84,-1.63141 +85,-3.2835 +86,-2.53778 +87,-1.32172 +88,-2.74977 +89,-3.52056 +90,-1.69814 +91,-3.24118 +92,-4.70068 +93,-6.24 +94,-1.3672 +95,-2.30657 +96,-3.94059 +97,-2.32913 +98,-2.09191 +99,-2.08211 +100,-0.296781 +101,-2.80316 +102,-0.246207 +103,1.01586 +104,1.15834 +105,0.222452 +106,1.12247 +107,-1.70558 +108,-0.997157 +109,-1.38944 +110,-0.49443 +111,1.74204 +112,0.841823 +113,0.612718 +114,1.36953 +115,-1.81339 +116,0.181315 +117,2.1097 +118,-1.04499 +119,-2.7876 +120,-3.56816 +121,-0.890339 +122,-0.391348 +123,-1.71094 +124,0.137861 +125,-1.43321 +126,-0.177383 +127,-3.34594 +128,-2.21415 +129,-2.22575 +130,-3.81039 +131,-4.11834 +132,-2.70923 +133,-0.522448 +134,-3.31302 +135,-3.26697 +136,-2.27509 +137,-3.26818 +138,-1.78638 +139,-0.477857 +140,-2.58207 +141,0.113007 +142,-2.46954 +143,-2.85111 +144,-2.35603 +145,-2.60421 +146,-1.6003 +147,1.27253 +148,-0.309362 +149,-3.00004 +150,0.413377 +151,2.81164 +152,0.371395 +153,-1.28592 +154,-0.674069 +155,-0.141231 +156,-1.59301 +157,-0.38012 +158,0.00621201 +159,-1.05372 +160,-1.31206 +161,-0.399167 +162,0.113585 +163,0.80035 +164,-0.766684 +165,-1.28776 +166,-1.89365 +167,-2.27387 +168,0.555825 +169,-1.06588 +170,-1.53724 +171,0.189951 +172,0.88042 +173,2.56043 +174,0.713875 +175,-1.18002 +176,-0.742423 +177,-1.41369 +178,0.441834 +179,1.40009 +180,0.652327 +181,-1.70928 +182,-1.56574 +183,-1.15887 +184,-1.12583 +185,0.975681 +186,2.00473 +187,1.44885 +188,0.67271 +189,-0.47776 +190,-0.460921 +191,-0.512539 +192,-2.57591 +193,2.55202 +194,-1.94449 +195,3.44925 +196,1.90518 +197,-0.939113 +198,0.137988 +199,1.26285 +200,1.43325 +201,-1.0555 +202,-1.63007 +203,-0.238382 +204,0.163825 +205,-0.604585 +206,-0.169244 +207,-0.26425 +208,0.914735 +209,0.348522 +210,-0.0265466 +211,0.897769 +212,-1.51765 +213,-1.26897 +214,-1.64825 +215,-0.526051 +216,0.105086 +217,-0.453203 +218,-1.36355 +219,-0.385475 +220,0.345837 +221,-1.70141 +222,-0.884091 +223,0.310774 +224,-0.522081 
+225,-0.497735 +226,-1.35751 +227,3.23484 +228,-1.03509 +229,1.88976 +230,-0.387604 +231,0.945491 +232,2.46138 +233,0.146373 +234,-0.163571 +235,2.58579 +236,0.803394 +237,0.43301 +238,-0.233263 +239,0.763235 +240,1.47391 +241,-0.661304 +242,0.31154 +243,0.584793 +244,-0.7349 +245,2.20196 +246,1.1956 +247,-0.277043 +248,3.6375 +249,1.8378 +250,3.30869 +251,0.221029 +252,-0.586487 +253,2.36563 +254,0.101866 +255,-2.96305 +256,-0.589111 +257,-1.13579 +258,-1.40487 +259,-0.925015 +260,-1.37039 +261,-2.84886 +262,0.742017 +263,2.30111 +264,3.74744 +265,-0.168756 +266,-1.22273 +267,-1.37082 +268,-0.953985 +269,0.239227 +270,-0.644132 +271,-0.753055 +272,0.749483 +273,1.07466 +274,0.636038 +275,-2.16813 +276,-1.5721 +277,-0.129149 +278,-1.54885 +279,-0.0554658 +280,1.01759 +281,13.141 +282,9.80613 +283,6.87191 +284,4.58034 +285,9.75746 +286,1.26755 +287,4.27051 +288,2.17699 +289,0.721271 +290,0.354685 +291,-2.23643 +292,3.37039 +293,-1.62486 +294,-0.588245 +295,-0.426577 +296,-2.60045 +297,-2.32561 +298,-1.06071 +299,-0.470833 +300,-2.49715 +301,-1.14144 +302,-1.106 +303,-0.941483 +304,-3.56411 +305,-3.86208 +306,-0.0369293 +307,-1.7545 +308,-3.60791 +309,-1.59302 +310,1.11367 +311,-2.25822 +312,-1.5902 +313,-2.31117 +314,-0.428193 +315,-2.42562 +316,-0.60292 +317,-2.85118 +318,-0.948686 +319,-2.99873 +320,-4.32247 +321,-3.22123 +322,-2.04551 +323,-4.25169 +324,-2.83752 +325,-3.80456 +326,-2.32319 +327,0.686994 +328,0.439235 +329,0.839417 +330,2.67618 +331,2.16541 +332,2.00836 +333,1.88336 +334,-1.42355 +335,0.199189 +336,-4.13738 +337,0.573334 +338,0.865216 +339,-3.344 +340,-0.966378 +341,-1.94621 +342,-2.99668 +343,-4.38761 +344,-2.3595 +345,-3.70153 +346,-3.48128 +347,-5.28067 +348,-4.40523 +349,-4.60571 +350,-2.9724 +351,-3.8212 +352,-3.02265 +353,-3.04911 +354,-5.00967 +355,-2.85844 +356,1.73895 +357,1.76015 +358,0.923339 +359,1.45929 +360,2.36823 +361,0.110968 +362,-0.70125 +363,-0.0793113 +364,-2.48602 +365,-4.80638 +366,-6.08569 +367,-4.2676 +368,-3.85255 +369,-3.269 +370,-2.68882 +371,-2.64795 +372,-3.73309 +373,-1.81991 +374,-4.68106 +375,-5.4476 +376,-4.16538 +377,-0.436953 +378,-1.44924 +379,-4.35825 +380,-0.501372 +381,-4.84513 +382,-2.08458 +383,-2.28373 +384,-2.98171 +385,-2.51493 +386,-3.54691 +387,-0.947641 +388,-4.09568 +389,0.647465 +390,-1.11854 +391,-0.100173 +392,-2.0754 +393,-3.30833 +394,0.173512 +395,0.0531761 +396,-3.2948 +397,-2.0546 +398,-0.0179486 +399,0.385839 +400,2.43649 +401,0.114887 +402,0.783345 +403,-3.47379 +404,-2.70834 +405,-1.36699 +406,-0.440062 +407,-1.47779 +408,0.183516 +409,2.85646 +410,-1.60137 +411,0.640649 +412,4.63954 +413,-1.64934 +414,3.20527 +415,-1.19964 +416,2.47761 +417,0.970361 +418,1.35949 +419,2.50419 +420,0.411326 +421,-1.12114 +422,1.64827 +423,1.59733 +424,-1.13499 +425,-3.25601 +426,0.401928 +427,1.68188 +428,4.55787 +429,-0.715596 +430,-1.86561 +431,3.72594 +432,-0.428388 +433,0.588628 +434,2.99024 +435,4.28582 +436,-2.10221 +437,-1.57713 +438,2.13402 +439,-0.254876 +440,1.98789 +441,1.36903 +442,-1.00218 +443,2.3492 +444,-0.822482 +445,-1.31977 +446,0.756911 +447,-0.0422127 +448,0.791565 +449,-0.936276 +450,-3.04269 +451,0.0474834 +452,2.17971 +453,3.35822 +454,1.19236 +455,1.71125 +456,0.597459 +457,3.99598 +458,-1.00467 +459,1.84724 +460,0.299724 +461,-0.220255 +462,3.43402 +463,4.80236 +464,1.47725 +465,0.697725 +466,-1.8532 +467,-1.44979 +468,-1.84985 +469,3.21514 +470,1.34001 +471,-2.18369 +472,-0.903525 +473,0.450735 +474,0.945779 +475,0.865017 +476,-3.15112 +477,2.98632 +478,5.20532 +479,1.47972 +480,-0.964329 +481,0.74251 
+482,-1.36595 +483,-0.836669 +484,-1.72503 +485,-0.118793 +486,0.273874 +487,1.79512 +488,-0.04268 +489,-0.76686 +490,-1.82816 +491,-0.925627 +492,0.0207878 +493,-0.0436526 +494,-0.335252 +495,-0.46233 +496,4.21855 +497,-2.37299 +498,-1.76886 +499,1.541 +500,-2.8017 +501,2.44362 +502,0.744078 +503,1.4571 +504,2.5279 +505,0.894251 +506,0.752984 +507,0.767749 +508,6.00689 +509,-0.672178 +510,-1.56431 +511,-3.35193 +512,1.60584 +513,-1.02447 +514,2.17595 +515,4.15233 +516,2.66378 +517,-1.22391 +518,3.6949 +519,2.28389 +520,0.170295 +521,2.68823 +522,1.8424 +523,1.35123 +524,-0.56969 +525,-2.79718 +526,2.33587 +527,2.07065 +528,0.310784 +529,2.17929 +530,1.69388 +531,0.673913 +532,0.290493 +533,-0.464574 +534,2.38161 +535,0.837486 +536,-0.385441 +537,-0.146417 +538,-3.36926 +539,4.99681 +540,-2.85517 +541,1.87651 +542,0.8959 +543,3.66979 +544,0.824613 +545,1.19181 +546,1.03693 +547,-2.13662 +548,-1.78113 +549,1.43859 +550,-0.509031 +551,2.24177 +552,3.58876 +553,3.24814 +554,-2.40285 +555,-2.60312 +556,-0.421823 +557,-0.0852482 +558,-1.23606 +559,2.05074 +560,-3.52361 +561,-1.45234 +562,-1.00759 +563,-0.111574 +564,-1.18737 +565,-0.257686 +566,-0.689726 +567,3.56563 +568,3.29082 +569,-0.861249 +570,1.79249 +571,-3.66609 +572,2.13762 +573,-2.46991 +574,1.8372 +575,-2.14414 +576,-0.859272 +577,1.22245 +578,3.06392 +579,0.150279 +580,-0.72591 +581,-2.44156 +582,-0.234968 +583,0.236261 +584,1.60359 +585,1.90402 +586,-1.07298 +587,3.07891 +588,6.85513 +589,1.92081 +590,1.61465 +591,-0.0654491 +592,-0.132772 +593,0.514306 +594,0.146493 +595,-1.77144 +596,1.83016 +597,-0.459756 +598,-2.21366 +599,1.37204 +600,1.86382 +601,2.17567 +602,0.637686 +603,-3.82384 +604,0.765517 +605,1.98936 +606,2.62766 +607,-0.521948 +608,2.65816 +609,-1.79352 +610,1.08443 +611,2.65525 +612,-0.981262 +613,1.45001 +614,0.599857 +615,1.82149 +616,-0.700878 +617,1.96954 +618,2.40256 +619,1.36609 +620,2.65006 +621,-0.287351 +622,4.49172 +623,2.06578 +624,0.657885 +625,-2.13471 +626,0.676318 +627,-0.754166 +628,-2.06118 +629,1.24893 +630,0.551654 +631,2.41911 +632,0.802069 +633,2.1418 +634,-2.46776 +635,1.72241 +636,3.42929 +637,-0.730968 +638,1.03761 +639,0.244736 +640,-0.472867 +641,2.39207 +642,-1.51454 +643,1.76192 +644,2.26767 +645,-0.862059 +646,-0.744783 +647,0.678203 +648,1.34701 +649,-2.06913 +650,0.764449 +651,1.45471 +652,0.25177 +653,-0.470541 +654,-0.645929 +655,1.27738 +656,-0.405337 +657,-2.67172 +658,3.02277 +659,4.00241 +660,-0.427192 +661,-1.33006 +662,0.314889 +663,-2.64669 +664,1.95959 +665,1.76008 +666,2.10254 +667,2.86341 +668,-3.6595 +669,-1.57904 +670,1.97378 +671,-1.42031 +672,-2.32954 +673,6.08096 +674,1.96902 +675,-0.90717 +676,2.82112 +677,1.79752 +678,1.93552 +679,-0.0262455 +680,3.75737 +681,3.74677 +682,-0.564625 +683,-0.887312 +684,0.815549 +685,-1.51792 +686,1.20775 +687,-3.31239 +688,0.319768 +689,1.30024 +690,-3.67328 +691,1.28979 +692,2.57712 +693,0.634677 +694,-1.86756 +695,-0.463498 +696,2.11373 +697,1.35722 +698,-1.40014 +699,1.28439 +700,4.46772 +701,-0.406752 +702,-1.33635 +703,-0.431014 +704,0.371427 +705,-1.26746 +706,-0.950141 +707,-0.363946 +708,2.80213 +709,0.956196 +710,0.889309 +711,1.14723 +712,-0.28524 +713,3.30197 +714,3.66356 +715,-1.12727 +716,1.01621 +717,-1.32181 +718,-1.693 +719,0.328014 +720,2.30824 +721,1.63587 +722,5.25727 +723,1.81472 +724,-1.76752 +725,2.68063 +726,-0.147058 +727,-3.23122 +728,6.78859 +729,0.665027 +730,-1.84609 +731,3.23235 +732,-0.160082 +733,0.927949 +734,-2.69275 +735,1.05049 +736,1.02664 +737,0.633375 +738,1.51382 +739,0.82149 +740,0.627798 
+741,0.777681 +742,5.43022 +743,-0.69633 +744,-1.16717 +745,0.255919 +746,2.33137 +747,3.35423 +748,1.37415 +749,-0.30365 +750,3.17047 +751,-2.24686 +752,-0.926332 +753,3.65205 +754,1.74512 +755,-0.948426 +756,0.553078 +757,-0.00452085 +758,0.968065 +759,1.47745 +760,2.81092 +761,7.07178 +762,-1.0592 +763,-0.122723 +764,-0.664519 +765,1.81317 +766,-0.703826 +767,1.34542 +768,1.22458 +769,2.58456 +770,1.93575 +771,-0.937132 +772,1.75694 +773,-0.134771 +774,0.389385 +775,1.25563 +776,-0.249664 +777,0.91825 +778,2.20454 +779,-1.92332 +780,-0.811451 +781,-2.70414 +782,3.27942 +783,1.55489 +784,2.41949 +785,1.38601 +786,-0.532349 +787,0.977483 +788,-0.276558 +789,2.30738 +790,3.41956 +791,0.712185 +792,3.47707 +793,4.77185 +794,2.23102 +795,0.725237 +796,2.70315 +797,5.47313 +798,-1.19102 +799,1.62433 +800,-0.821411 +801,-0.51498 +802,0.236786 +803,0.890709 +804,2.64701 +805,1.34608 +806,3.88814 +807,0.785303 +808,1.68689 +809,2.45297 +810,2.62732 +811,4.92052 +812,-3.82418 +813,2.86908 +814,0.775504 +815,-1.6677 +816,-0.127956 +817,-2.26967 +818,4.00756 +819,-0.227123 +820,-2.98073 +821,-3.21862 +822,-0.0508096 +823,2.24119 +824,4.26391 +825,-0.0673167 +826,0.9309 +827,1.66573 +828,2.16628 +829,-2.91885 +830,0.289731 +831,2.73797 +832,-3.02852 +833,-2.80115 +834,1.72181 +835,-0.972265 +836,1.49743 +837,1.84037 +838,3.59088 +839,-3.2549 +840,2.79797 +841,2.90805 +842,2.19606 +843,-0.845527 +844,2.54546 +845,2.66328 +846,1.72761 +847,-1.50837 +848,-1.19992 +849,-0.406039 +850,1.51716 +851,3.2494 +852,4.34673 +853,-0.378115 +854,0.951923 +855,2.99571 +856,-2.84057 +857,-1.31137 +858,-1.17655 +859,0.306228 +860,0.512488 +861,5.37306 +862,-0.0752091 +863,-1.87033 +864,-4.3011 +865,-1.8103 +866,-0.682875 +867,-3.48853 +868,1.37623 +869,1.30874 +870,2.21094 +871,-1.87807 +872,0.0261958 +873,-0.968914 +874,-2.885 +875,-0.381717 +876,4.08916 +877,-1.12278 +878,1.17655 +879,0.950573 +880,1.00028 +881,1.67716 +882,2.33015 +883,3.38959 +884,-2.37311 +885,5.08916 +886,2.37815 +887,0.306892 +888,-1.20633 +889,1.65275 +890,-3.57546 +891,0.465137 +892,3.23399 +893,0.616868 +894,0.807645 +895,-2.02836 +896,6.14744 +897,5.69066 +898,3.9785 +899,4.36713 +900,-2.31014 +901,0.818092 +902,0.834507 +903,2.51939 +904,3.82208 +905,2.42871 +906,0.320348 +907,-0.121047 +908,-2.02944 +909,4.39181 +910,3.18795 +911,4.27932 +912,-1.28884 +913,-1.37342 +914,-1.55542 +915,-3.95259 +916,3.98202 +917,-0.9665 +918,2.81322 +919,0.33457 +920,-0.725599 +921,-0.496225 +922,-0.292705 +923,1.53581 +924,0.859473 +925,0.449414 +926,0.269884 +927,0.0375253 +928,-0.0325544 +929,3.00702 +930,0.0656582 +931,0.947039 +932,0.0739418 +933,-1.85573 +934,-1.06271 +935,-0.5787 +936,2.79573 +937,2.79183 +938,0.0577568 +939,1.3942 +940,2.14155 +941,-1.77846 +942,1.8653 +943,-0.423055 +944,-2.23958 +945,-0.0213181 +946,-1.1703 +947,0.11914 +948,1.34218 +949,1.3334 +950,1.44451 +951,1.33316 +952,0.174213 +953,0.888418 +954,1.94004 +955,0.0659381 +956,-1.24016 +957,1.02164 +958,-0.293979 +959,-0.0074103 +960,0.881 +961,0.540009 +962,0.204584 +963,1.14904 +964,0.795661 +965,0.397016 +966,2.30606 +967,0.210155 +968,4.88998 +969,1.66129 +970,-0.303616 +971,1.29067 +972,-0.342651 +973,-2.87392 +974,-2.75477 +975,0.641559 +976,-1.08903 +977,-0.0146722 +978,1.29263 +979,-0.504064 +980,-0.806906 +981,-3.76551 +982,-1.27591 +983,-1.74248 +984,-1.20251 +985,-0.498473 +986,-1.20172 +987,3.36917 +988,-0.602464 +989,-1.17178 +990,1.29307 +991,-2.70878 +992,-3.15857 +993,-2.99487 +994,-3.16664 +995,-2.24433 +996,0.434152 +997,-1.77758 +998,2.81283 +999,2.88125 \ 
No newline at end of file diff --git a/Testing/WinMLRunnerTest/WinMLRunnerTest.cpp b/Testing/WinMLRunnerTest/WinMLRunnerTest.cpp index 1e250d96..6357acd1 100644 --- a/Testing/WinMLRunnerTest/WinMLRunnerTest.cpp +++ b/Testing/WinMLRunnerTest/WinMLRunnerTest.cpp @@ -121,6 +121,7 @@ namespace WinMLRunnerTest { Assert::Fail(L"Failed to open tensor files\n"); } + bool isFirstRow = true; while (!tensorFileStream.eof()) { @@ -554,6 +555,30 @@ namespace WinMLRunnerTest Assert::AreEqual(S_OK, RunProc((wchar_t *)command.c_str())); } + TEST_METHOD(ProvidedImageInputFolder) + { + // Make test_folder_input folder before starting the tests + std::string mkFolderCommand = "mkdir " + std::string(INPUT_FOLDER_PATH.begin(), INPUT_FOLDER_PATH.end()); + system(mkFolderCommand.c_str()); + + std::vector<std::string> images = { "fish.png", "kitten_224.png" }; + + // Copy images from list to test_folder_input + for (auto image : images) + { + std::string copyCommand = "Copy "; + copyCommand += image; + copyCommand += ' ' + std::string(INPUT_FOLDER_PATH.begin(), INPUT_FOLDER_PATH.end()); + system(copyCommand.c_str()); + } + const std::wstring command = BuildCommand({ EXE_PATH, L"-model", L"SqueezeNet.onnx", L"-InputImageFolder", INPUT_FOLDER_PATH }); + Assert::AreEqual(S_OK, RunProc((wchar_t*)command.c_str())); + + std::string removeCommand = "rd /s /q "; + removeCommand += std::string(INPUT_FOLDER_PATH.begin(), INPUT_FOLDER_PATH.end()); + system(removeCommand.c_str()); + } + TEST_METHOD(AutoScaleImage) { const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet.onnx"; @@ -563,15 +588,18 @@ namespace WinMLRunnerTest Assert::AreEqual(S_OK, RunProc((wchar_t*)command.c_str())); } - TEST_METHOD_WITH_NAME(ProvidedImageInputOnlyGpuSaveTensor) + TEST_METHOD_WITH_NAME(ProvidedImageInputOnlyCpuPerIterationPerformance) const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet.onnx"; const std::wstring inputPath = CURRENT_PATH + L"fish.png"; const std::wstring tensorDataPath = TENSOR_DATA_PATH + L"\\" + METHOD_NAME; - const std::wstring command = BuildCommand({ EXE_PATH, L"-model ", modelPath, L"-input", inputPath, - L"-SaveTensorData", L"First", L"-PerIterationPath", tensorDataPath, L"-GPU" }); + const std::wstring command = + BuildCommand({ EXE_PATH, L"-model", modelPath, L"-input", inputPath, L"-PerfOutput", OUTPUT_PATH, L"-perf", + L"-SavePerIterationPerf", L"-BaseOutputPath", tensorDataPath, + L"-PerIterationPath PerIterationData", L"-CPU" }); Assert::AreEqual(S_OK, RunProc((wchar_t*)command.c_str())); - Assert::AreEqual(true, CompareTensors(L"OutputTensorData\\Squeezenet_fish_input_GPU.csv", - tensorDataPath + L"\\softmaxout_1GpuIteration1.csv")); + + // We need to expect one more line because of the header + Assert::AreEqual(static_cast<size_t>(2), GetOutputCSVLineCount(tensorDataPath + L"\\PerIterationData\\Summary.csv")); } TEST_METHOD_WITH_NAME(ProvidedImageInputOnlyCpuSaveTensor) @@ -585,15 +613,15 @@ namespace WinMLRunnerTest tensorDataPath + L"\\softmaxout_1CpuIteration1.csv")); } - TEST_METHOD_WITH_NAME(ProvidedImageInputOnlyGpuSaveTensorImageDenotation) - const std::wstring modelPath = CURRENT_PATH + L"mnist.onnx"; - const std::wstring inputPath = CURRENT_PATH + L"mnist_28.png"; + TEST_METHOD_WITH_NAME(ProvidedImageInputOnlyGpuSaveTensor) + const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet.onnx"; + const std::wstring inputPath = CURRENT_PATH + L"fish.png"; const std::wstring tensorDataPath = TENSOR_DATA_PATH + L"\\" + METHOD_NAME; const std::wstring command = BuildCommand({ EXE_PATH, L"-model ", modelPath, L"-input", 
inputPath, L"-SaveTensorData", L"First", L"-PerIterationPath", tensorDataPath, L"-GPU" }); Assert::AreEqual(S_OK, RunProc((wchar_t*)command.c_str())); - Assert::AreEqual(true, CompareTensors(L"OutputTensorData\\Mnist_8_input_GPU.csv", - tensorDataPath + L"\\Plus214_Output_0GpuIteration1.csv")); + Assert::AreEqual(true, CompareTensors(L"OutputTensorData\\Squeezenet_fish_input_GPU.csv", + tensorDataPath + L"\\softmaxout_1GpuIteration1.csv")); } TEST_METHOD_WITH_NAME(ProvidedImageInputOnlyCpuSaveTensorImageDenotation) @@ -607,15 +635,15 @@ namespace WinMLRunnerTest tensorDataPath + L"\\Plus214_Output_0CpuIteration1.csv")); } - TEST_METHOD_WITH_NAME(ProvidedImageInputOnlyGpuSaveTensorFp16) - const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet_fp16.onnx"; - const std::wstring inputPath = CURRENT_PATH + L"fish.png"; + TEST_METHOD_WITH_NAME(ProvidedImageInputOnlyGpuSaveTensorImageDenotation) + const std::wstring modelPath = CURRENT_PATH + L"mnist.onnx"; + const std::wstring inputPath = CURRENT_PATH + L"mnist_28.png"; const std::wstring tensorDataPath = TENSOR_DATA_PATH + L"\\" + METHOD_NAME; const std::wstring command = BuildCommand({ EXE_PATH, L"-model ", modelPath, L"-input", inputPath, L"-SaveTensorData", L"First", L"-PerIterationPath", tensorDataPath, L"-GPU" }); Assert::AreEqual(S_OK, RunProc((wchar_t*)command.c_str())); - Assert::AreEqual(true, CompareTensorsFP16(L"OutputTensorData\\Squeezenet_fp16_fish_input_GPU.csv", - tensorDataPath + L"\\softmaxout_1GpuIteration1.csv")); + Assert::AreEqual(true, CompareTensors(L"OutputTensorData\\Mnist_8_input_GPU.csv", + tensorDataPath + L"\\Plus214_Output_0GpuIteration1.csv")); } TEST_METHOD_WITH_NAME(ProvidedImageInputOnlyCpuSaveTensorFp16) @@ -629,41 +657,87 @@ namespace WinMLRunnerTest tensorDataPath + L"\\softmaxout_1CpuIteration1.csv")); } - TEST_METHOD_WITH_NAME(ProvidedImageInputOnlyCpuPerIterationPerformance) + TEST_METHOD_WITH_NAME(ProvidedImageInputOnlyGpuSaveTensorFp16) + const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet_fp16.onnx"; + const std::wstring inputPath = CURRENT_PATH + L"fish.png"; + const std::wstring tensorDataPath = TENSOR_DATA_PATH + L"\\" + METHOD_NAME; + const std::wstring command = BuildCommand({ EXE_PATH, L"-model ", modelPath, L"-input", inputPath, + L"-SaveTensorData", L"First", L"-PerIterationPath", tensorDataPath, L"-GPU" }); + Assert::AreEqual(S_OK, RunProc((wchar_t*)command.c_str())); + Assert::AreEqual(true, CompareTensorsFP16(L"OutputTensorData\\Squeezenet_fp16_fish_input_GPU.csv", + tensorDataPath + L"\\softmaxout_1GpuIteration1.csv")); + } + + TEST_METHOD_WITH_NAME(ProvidedImageInputOnlyCpuSaveTensorTensorizeIdentity) const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet.onnx"; + const std::wstring inputPath = CURRENT_PATH + L"fish.png"; const std::wstring tensorDataPath = TENSOR_DATA_PATH + L"\\" + METHOD_NAME; - const std::wstring command = - BuildCommand({ EXE_PATH, L"-model", modelPath, L"-PerfOutput", OUTPUT_PATH, L"-perf", - L"-SavePerIterationPerf", L"-BaseOutputPath", tensorDataPath, - L"-PerIterationPath PerIterationData", L"-CPU" }); + const std::wstring command = BuildCommand({ EXE_PATH, L"-model ", modelPath, L"-input", inputPath, + L"-SaveTensorData", L"First", L"-PerIterationPath", tensorDataPath, L"-CPU", + L"-Tensor" }); Assert::AreEqual(S_OK, RunProc((wchar_t*)command.c_str())); + Assert::AreEqual(true, CompareTensors(L"OutputTensorData\\Squeezenet_fish_input_CPU.csv", + tensorDataPath + L"\\softmaxout_1CpuIteration1.csv")); + } - // We need to expect one more line because 
of the header - Assert::AreEqual(static_cast(2), GetOutputCSVLineCount(tensorDataPath + L"\\PerIterationData\\Summary.csv")); + TEST_METHOD_WITH_NAME(ProvidedImageInputOnlyGpuSaveTensorTensorizeIdentity) + const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet.onnx"; + const std::wstring inputPath = CURRENT_PATH + L"fish.png"; + const std::wstring tensorDataPath = TENSOR_DATA_PATH + L"\\" + METHOD_NAME; + const std::wstring command = BuildCommand({ EXE_PATH, L"-model ", modelPath, L"-input", inputPath, + L"-SaveTensorData", L"First", L"-PerIterationPath", tensorDataPath, L"-GPU", + L"-Tensor Identity" }); + Assert::AreEqual(S_OK, RunProc((wchar_t*)command.c_str())); + Assert::AreEqual(true, CompareTensors(L"OutputTensorData\\Squeezenet_fish_input_GPU.csv", + tensorDataPath + L"\\softmaxout_1GpuIteration1.csv")); } - TEST_METHOD(ProvidedImageInputFolder) - { - // Make test_folder_input folder before starting the tests - std::string mkFolderCommand = "mkdir " + std::string(INPUT_FOLDER_PATH.begin(), INPUT_FOLDER_PATH.end()); - system(mkFolderCommand.c_str()); + TEST_METHOD_WITH_NAME(ProvidedImageInputOnlyCpuSaveTensorTensorizeScaleMeanStdDev) + const std::wstring modelPath = CURRENT_PATH + L"DenseNet121_fp32.onnx"; + const std::wstring inputPath = CURRENT_PATH + L"kitten_224.png"; + const std::wstring tensorDataPath = TENSOR_DATA_PATH + L"\\" + METHOD_NAME; + const std::wstring command = BuildCommand({ EXE_PATH, L"-model ", modelPath, L"-input", inputPath, + L"-SaveTensorData", L"First", L"-PerIterationPath", tensorDataPath, L"-CPU", + L"-Tensor Normalize 255 0.485,0.456,0.406 0.229,0.224,0.225" }); + Assert::AreEqual(S_OK, RunProc((wchar_t*)command.c_str())); + Assert::AreEqual(true, CompareTensors(L"OutputTensorData\\DenseNet121_fp32_kitten_224_input_CPU.csv", + tensorDataPath + L"\\fc6_1CpuIteration1.csv")); + } - std::vector images = { "fish.png", "kitten_224.png" }; + TEST_METHOD_WITH_NAME(ProvidedImageInputOnlyGpuSaveTensorTensorizeScaleMeanStdDev) + const std::wstring modelPath = CURRENT_PATH + L"DenseNet121_fp32.onnx"; + const std::wstring inputPath = CURRENT_PATH + L"kitten_224.png"; + const std::wstring tensorDataPath = TENSOR_DATA_PATH + L"\\" + METHOD_NAME; + const std::wstring command = BuildCommand({ EXE_PATH, L"-model ", modelPath, L"-input", inputPath, + L"-SaveTensorData", L"First", L"-PerIterationPath", tensorDataPath, L"-GPU", + L"-Tensor Normalize 255 0.485,0.456,0.406 0.229,0.224,0.225" }); + Assert::AreEqual(S_OK, RunProc((wchar_t*)command.c_str())); + Assert::AreEqual(true, CompareTensors(L"OutputTensorData\\DenseNet121_fp32_kitten_224_input_GPU.csv", + tensorDataPath + L"\\fc6_1GpuIteration1.csv")); + } - // Copy images from list to test_folder_input - for (auto image : images) - { - std::string copyCommand = "Copy "; - copyCommand += image; - copyCommand += ' ' + std::string(INPUT_FOLDER_PATH.begin(), INPUT_FOLDER_PATH.end()); - system(copyCommand.c_str()); - } - const std::wstring command = BuildCommand({ EXE_PATH, L"-model", L"SqueezeNet.onnx", L"-InputImageFolder", INPUT_FOLDER_PATH }); + TEST_METHOD_WITH_NAME(ProvidedImageInputOnlyCpuSaveTensorTensorizeScaleMeanStdDevFP16) + const std::wstring modelPath = CURRENT_PATH + L"DenseNet121_fp16.onnx"; + const std::wstring inputPath = CURRENT_PATH + L"kitten_224.png"; + const std::wstring tensorDataPath = TENSOR_DATA_PATH + L"\\" + METHOD_NAME; + const std::wstring command = BuildCommand({ EXE_PATH, L"-model ", modelPath, L"-input", inputPath, + L"-SaveTensorData", L"First", L"-PerIterationPath", tensorDataPath, 
L"-CPU", + L"-Tensor Normalize 255 0.485,0.456,0.406 0.229,0.224,0.225" }); Assert::AreEqual(S_OK, RunProc((wchar_t*)command.c_str())); + Assert::AreEqual(true, CompareTensors(L"OutputTensorData\\DenseNet121_fp16_kitten_224_input_CPU.csv", + tensorDataPath + L"\\fc6_1CpuIteration1.csv")); + } - std::string removeCommand = "rd /s /q "; - removeCommand += std::string(INPUT_FOLDER_PATH.begin(), INPUT_FOLDER_PATH.end()); - system(removeCommand.c_str()); + TEST_METHOD_WITH_NAME(ProvidedImageInputOnlyGpuSaveTensorTensorizeScaleMeanStdDevFP16) + const std::wstring modelPath = CURRENT_PATH + L"DenseNet121_fp16.onnx"; + const std::wstring inputPath = CURRENT_PATH + L"kitten_224.png"; + const std::wstring tensorDataPath = TENSOR_DATA_PATH + L"\\" + METHOD_NAME; + const std::wstring command = BuildCommand({ EXE_PATH, L"-model ", modelPath, L"-input", inputPath, + L"-SaveTensorData", L"First", L"-PerIterationPath", tensorDataPath, L"-GPU", + L"-Tensor Normalize 255 0.485,0.456,0.406 0.229,0.224,0.225" }); + Assert::AreEqual(S_OK, RunProc((wchar_t*)command.c_str())); + Assert::AreEqual(true, CompareTensorsFP16(L"OutputTensorData\\DenseNet121_fp16_kitten_224_input_GPU.csv", + tensorDataPath + L"\\fc6_1GpuIteration1.csv")); } }; @@ -823,7 +897,7 @@ namespace WinMLRunnerTest TEST_METHOD(TestTopK) { - const std::wstring command = BuildCommand({ EXE_PATH, L"-model", L"SqueezeNet.onnx", L"-TopK", L"5" }); + const std::wstring command = BuildCommand({ EXE_PATH, L"-model", CURRENT_PATH + L"SqueezeNet.onnx", L"-TopK", L"5" }); Assert::AreEqual(S_OK, RunProc((wchar_t*)command.c_str())); } diff --git a/Testing/WinMLRunnerTest/WinMLRunnerTest.vcxproj b/Testing/WinMLRunnerTest/WinMLRunnerTest.vcxproj index 1a8e4edc..47028fba 100644 --- a/Testing/WinMLRunnerTest/WinMLRunnerTest.vcxproj +++ b/Testing/WinMLRunnerTest/WinMLRunnerTest.vcxproj @@ -390,6 +390,72 @@ true PreserveNewest + + false + true + false + true + false + true + false + true + PreserveNewest + + + false + true + false + true + false + true + false + true + PreserveNewest + + + false + true + false + true + false + true + false + true + PreserveNewest + + + false + true + false + true + false + true + false + true + PreserveNewest + + + false + true + false + true + false + true + false + true + PreserveNewest + + + false + true + false + true + false + true + false + true + PreserveNewest + diff --git a/Tools/WinMLRunner/README.md b/Tools/WinMLRunner/README.md index ad3dc2e9..dc3f3b8c 100644 --- a/Tools/WinMLRunner/README.md +++ b/Tools/WinMLRunner/README.md @@ -42,7 +42,10 @@ Required command-Line arguments: -GPUBoundInput : bind the input to the GPU -RGB : load the input as an RGB image -BGR : load the input as a BGR image --Tensor : load the input as a tensor +-Tensor [function] : load the input as a tensor, with optional function for input preprocessing + Optional function arguments: + Identity(default) : No input transformations will be performed. + Normalize : float scale factor and comma separated per channel means and stddev for normalization. -Perf [all]: capture performance measurements such as timing and memory usage. Specifying "all" will output all measurements -Iterations : # times perf measurements will be run/averaged. 
(maximum: 1024 times) -Input : binds image or CSV to model diff --git a/Tools/WinMLRunner/src/BindingUtilities.h b/Tools/WinMLRunner/src/BindingUtilities.h index 98319ed6..c045b0c9 100644 --- a/Tools/WinMLRunner/src/BindingUtilities.h +++ b/Tools/WinMLRunner/src/BindingUtilities.h @@ -4,6 +4,7 @@ #include "Common.h" #include "Windows.AI.Machinelearning.Native.h" #include "d3dx12.h" +#include "MemoryBuffer.h" using namespace winrt::Windows::Media; using namespace winrt::Windows::Storage; using namespace winrt::Windows::Storage::Streams; @@ -14,63 +15,6 @@ using namespace winrt::Windows::Graphics::Imaging; using namespace winrt::Windows::Graphics::DirectX::Direct3D11; using namespace DirectX::PackedVector; -template struct TensorKindToArithmeticType -{ - static_assert(true, "No TensorKind mapped for given type!"); -}; -template <> struct TensorKindToArithmeticType -{ - typedef uint8_t Type; -}; -template <> struct TensorKindToArithmeticType -{ - typedef uint8_t Type; -}; -template <> struct TensorKindToArithmeticType -{ - typedef uint16_t Type; -}; -template <> struct TensorKindToArithmeticType -{ - typedef int16_t Type; -}; -template <> struct TensorKindToArithmeticType -{ - typedef uint32_t Type; -}; -template <> struct TensorKindToArithmeticType -{ - typedef int32_t Type; -}; -template <> struct TensorKindToArithmeticType -{ - typedef uint64_t Type; -}; -template <> struct TensorKindToArithmeticType -{ - typedef int64_t Type; -}; -template <> struct TensorKindToArithmeticType -{ - typedef boolean Type; -}; -template <> struct TensorKindToArithmeticType -{ - typedef double Type; -}; -template <> struct TensorKindToArithmeticType -{ - typedef float Type; -}; -template <> struct TensorKindToArithmeticType -{ - typedef float Type; -}; -template <> struct TensorKindToArithmeticType -{ - typedef winrt::hstring Type; -}; - template struct TensorKindToPointerType { static_assert(true, "No TensorKind mapped for given type!"); @@ -81,7 +25,7 @@ template <> struct TensorKindToPointerType }; template <> struct TensorKindToPointerType { - typedef uint8_t Type; + typedef int8_t Type; }; template <> struct TensorKindToPointerType { @@ -185,59 +129,61 @@ template <> struct TensorKindToValue typedef TensorString Type; }; -template PointerType ConvertArithmeticTypeToPointerType(ArithmeticType value) +template +ToType ConvertToPointerType(FromType value) { static_assert(true, "No TensorKind mapped for given type!"); + return 0; }; -template <> uint8_t ConvertArithmeticTypeToPointerType(uint8_t value) +template <> uint8_t ConvertToPointerType(float value) { return static_cast(value); }; -template <> uint8_t ConvertArithmeticTypeToPointerType(uint8_t value) +template <> int8_t ConvertToPointerType(float value) { - return static_cast(value); + return static_cast(value); }; -template <> uint16_t ConvertArithmeticTypeToPointerType(uint16_t value) +template <> uint16_t ConvertToPointerType(float value) { return static_cast(value); }; -template <> int16_t ConvertArithmeticTypeToPointerType(int16_t value) +template <> int16_t ConvertToPointerType(float value) { return static_cast(value); }; -template <> uint32_t ConvertArithmeticTypeToPointerType(uint32_t value) +template <> uint32_t ConvertToPointerType(float value) { return static_cast(value); }; -template <> int32_t ConvertArithmeticTypeToPointerType(int32_t value) +template <> int32_t ConvertToPointerType(float value) { return static_cast(value); }; -template <> uint64_t ConvertArithmeticTypeToPointerType(uint64_t value) +template <> uint64_t 
ConvertToPointerType(float value) { return static_cast(value); }; -template <> int64_t ConvertArithmeticTypeToPointerType(int64_t value) +template <> int64_t ConvertToPointerType(float value) { return static_cast(value); }; -template <> boolean ConvertArithmeticTypeToPointerType(boolean value) +template <> boolean ConvertToPointerType(float value) { return static_cast(value); }; -template <> double ConvertArithmeticTypeToPointerType(double value) +template <> double ConvertToPointerType(double value) { return static_cast(value); }; -template <> float ConvertArithmeticTypeToPointerType(float value) +template <> float ConvertToPointerType(float value) { return static_cast(value); }; -template <> HALF ConvertArithmeticTypeToPointerType(float value) +template <> HALF ConvertToPointerType(float value) { return XMConvertFloatToHalf(value); }; -template <> winrt::hstring ConvertArithmeticTypeToPointerType(winrt::hstring value) +template <> winrt::hstring ConvertToPointerType(winrt::hstring value) { return static_cast(value); }; @@ -307,8 +253,6 @@ namespace BindingUtilities const InputDataType inputDataType, const hstring& filePath, const CommandLineArgs& args, uint32_t iterationNum) { - assert(inputDataType != InputDataType::Tensor); - // We assume NCHW and NCDHW uint64_t width = 0; uint64_t height = 0; @@ -321,6 +265,9 @@ namespace BindingUtilities auto stream = file.OpenAsync(FileAccessMode::Read).get(); // Create the decoder from the stream BitmapDecoder decoder = BitmapDecoder::CreateAsync(stream).get(); + BitmapPixelFormat format = inputDataType == InputDataType::Tensor + ? decoder.BitmapPixelFormat() + : TypeHelper::GetBitmapPixelFormat(inputDataType); // If input dimensions are different from tensor input, then scale / crop while reading if (args.IsAutoScale() && (decoder.PixelHeight() != height || decoder.PixelWidth() != width)) @@ -337,23 +284,21 @@ namespace BindingUtilities // get the bitmap return decoder - .GetSoftwareBitmapAsync(TypeHelper::GetBitmapPixelFormat(inputDataType), BitmapAlphaMode::Ignore, - transform, ExifOrientationMode::RespectExifOrientation, + .GetSoftwareBitmapAsync(format, BitmapAlphaMode::Ignore, transform, + ExifOrientationMode::RespectExifOrientation, ColorManagementMode::DoNotColorManage) .get(); } else { // get the bitmap - return decoder - .GetSoftwareBitmapAsync(TypeHelper::GetBitmapPixelFormat(inputDataType), BitmapAlphaMode::Ignore) - .get(); + return decoder.GetSoftwareBitmapAsync(format, BitmapAlphaMode::Ignore).get(); } } catch (...) { - std::cout << "BindingUtilities: could not open image file, make sure you are using fully qualified paths." - << std::endl; + std::wcout << L"BindingUtilities: could not open image file (" << std::wstring(filePath) << L"), " + << L"make sure you are using fully qualified paths." << std::endl; return nullptr; } } @@ -382,27 +327,24 @@ namespace BindingUtilities return inputImage; } - std::vector ReadCsvLine(std::ifstream& fileStream) + struct InputBufferDesc { - std::vector elementStrings; - // Read next line. 
- std::string line; - if (!std::getline(fileStream, line)) + uint8_t* elements; + uint32_t totalSizeInBytes; + uint32_t numChannelsPerElement; + uint32_t elementStrideInBytes; + bool isPlanar; + TensorKind channelFormat; + BitmapPixelFormat elementFormat; + + InputBufferDesc() + : elements(nullptr), totalSizeInBytes(0), numChannelsPerElement(0), elementStrideInBytes(0), isPlanar(0), + channelFormat(TensorKind::Undefined), elementFormat(BitmapPixelFormat::Unknown) { - ThrowFailure(L"BindingUtilities: expected more input rows."); } + }; - // Split the line into strings for each value. - std::istringstream elementsString(line); - std::string elementString; - while (std::getline(elementsString, elementString, ',')) - { - elementStrings.push_back(elementString); - } - return elementStrings; - } - - std::vector ParseCSVElementStrings(const std::wstring& csvFilePath) + void ReadCSVIntoBuffer(const std::wstring& csvFilePath, InputBufferDesc& inputBufferDesc) { std::ifstream fileStream; fileStream.open(csvFilePath); @@ -411,74 +353,144 @@ namespace BindingUtilities ThrowFailure(L"BindingUtilities: could not open data file."); } - std::vector elementStrings = ReadCsvLine(fileStream); + uint32_t pos = 0; + std::string line; + float_t* pData = (float_t*)inputBufferDesc.elements; + while (std::getline(fileStream, line, ',')) + { + *pData = std::stof(line); + ++pData; + + ++pos; + if (pos >= inputBufferDesc.totalSizeInBytes) + break; + } + + // Check to see if csv didn't fill in entire buffer and throw or fill with zeros? + if (pos != (inputBufferDesc.totalSizeInBytes * inputBufferDesc.numChannelsPerElement) / inputBufferDesc.elementStrideInBytes) + { + throw hresult_invalid_argument(L"CSV input size/shape is different from what model expects!"); + } - return elementStrings; } - template - static ITensor CreateTensor(const CommandLineArgs& args, const std::vector& tensorStringInput, - const IVectorView& tensorShape, const InputBindingType inputBindingType) + // Roll the array correctly for the tensor + template + void CopyTensorFromBuffer(void* actualData, uint32_t tensorHeight, uint32_t tensorWidth, + const InputBufferDesc& inputBufferDesc, float scale, + const std::vector& means, const std::vector& stddevs) { - using TensorValue = typename TensorKindToValue::Type; - using ArithmeticType = typename TensorKindToArithmeticType::Type; - using PointerType = typename TensorKindToPointerType::Type; + using WriteType = typename TensorKindToPointerType::Type; - std::vector vecShape = {}; - for (UINT dim = 0; dim < tensorShape.Size(); dim++) + WriteType* pDataOut = static_cast(actualData); + InputType* pDataIn = (InputType*)inputBufferDesc.elements; + uint32_t elementOffsetMultiplier = inputBufferDesc.isPlanar ? inputBufferDesc.numChannelsPerElement : 1; + uint32_t channelOffsetMultiplier = inputBufferDesc.isPlanar ? 1 : tensorHeight * tensorWidth; + for (uint32_t element = 0; element < tensorHeight * tensorWidth; ++element) { - INT64 dimSize = tensorShape.GetAt(dim); - if (dimSize > 0) // If the dimension is greater than 0, then it is known. - { - vecShape.push_back(dimSize); - } - else // otherwise, make sure that the dimension is -1, representing free dimension. If not, then it's an - // invalid model. 
+ for (uint32_t channel = 0; channel < inputBufferDesc.numChannelsPerElement; ++channel) { - if (dimSize == -1) - { - vecShape.push_back(1); - } - else - { - throw hresult_invalid_argument(L"Failed to create a tensor with an unknown dimension of: " + - dimSize); - } + pDataOut[element * elementOffsetMultiplier + channel * channelOffsetMultiplier] = + ConvertToPointerType( + ((pDataIn[channel] / scale) - means[channel]) / stddevs[channel]); } + pDataIn += inputBufferDesc.elementStrideInBytes / sizeof(InputType); } + } + + template + static ITensor CreateTensor(const CommandLineArgs& args, const std::vector& tensorShape, + const InputBindingType inputBindingType, const InputBufferDesc& inputBufferDesc) + { + using TensorValue = typename TensorKindToValue::Type; + using WriteType = typename TensorKindToPointerType::Type; // Map the incoming Tensor as a TensorNative to get the actual data buffer. - auto tensorValue = TensorValue::Create(vecShape); + auto tensorValue = TensorValue::Create(tensorShape); com_ptr spTensorValueNative; tensorValue.as(spTensorValueNative); - PointerType* actualData; + WriteType* actualData; uint32_t actualSizeInBytes; - spTensorValueNative->GetBuffer(reinterpret_cast(&actualData), - &actualSizeInBytes); + THROW_IF_FAILED(spTensorValueNative->GetBuffer(reinterpret_cast(&actualData), &actualSizeInBytes)); - if (args.IsCSVInput()) + if (args.IsCSVInput() || args.IsImageInput()) { - if (tensorStringInput.size() != actualSizeInBytes / sizeof(PointerType)) + // Assumes NCHW + uint32_t channels = static_cast(tensorShape[1]); + uint32_t tensorHeight = static_cast(tensorShape[2]); + uint32_t tensorWidth = static_cast(tensorShape[3]); + + // Check to make sure the sizes are right + uint32_t inputElementCount = inputBufferDesc.totalSizeInBytes / inputBufferDesc.elementStrideInBytes; + uint32_t outputElementCount = actualSizeInBytes / (channels * sizeof(WriteType)); + if (inputElementCount != outputElementCount) { - throw hresult_invalid_argument(L"CSV input size/shape is different from what model expects"); + throw hresult_invalid_argument(L"Input size / shape is different from what the model expects"); } - // Write the elementStrings into the iTensorNative - PointerType* dataPtr = actualData; - for (const auto &tensorString : tensorStringInput) + float scale; + std::vector means = {}; + std::vector stddevs = {}; + + const auto& tensorizeArgs = args.TensorizeArgs(); + const auto& normalizeParams = tensorizeArgs.Normalize; + switch (tensorizeArgs.Func) { - ArithmeticType value; - std::stringstream(tensorString) >> value; - *dataPtr = ConvertArithmeticTypeToPointerType(value); - dataPtr++; + case TensorizeFuncs::Identity: + scale = 1.0f; + means.resize(channels, 0.0f); + stddevs.resize(channels, 1.0f); + break; + case TensorizeFuncs::Normalize: + switch (inputBufferDesc.elementFormat) + { + case BitmapPixelFormat::Gray8: + case BitmapPixelFormat::Gray16: + case BitmapPixelFormat::Rgba8: + case BitmapPixelFormat::Rgba16: + scale = normalizeParams.Scale; + means.resize(channels); + stddevs.resize(channels); + for (uint32_t i = 0; i < channels; ++i) + { + means[i] = normalizeParams.Means[i]; + stddevs[i] = normalizeParams.StdDevs[i]; + } + break; + case BitmapPixelFormat::Bgra8: + scale = normalizeParams.Scale; + means.resize(channels); + stddevs.resize(channels); + for (uint32_t i = 0; i < channels; ++i) + { + means[channels - 1 - i] = normalizeParams.Means[i]; + stddevs[channels - 1 - i] = normalizeParams.StdDevs[i]; + } + break; + + default: + throw 
hresult_invalid_argument(L"CreateTensor: Unhandled SoftwareBitmap pixel format"); + } + break; + default: + throw hresult_invalid_argument(L"CreateTensor: Unknown Tensorize Function"); + } + + switch (inputBufferDesc.channelFormat) + { + case TensorKind::UInt8: + CopyTensorFromBuffer(actualData, tensorHeight, tensorWidth, inputBufferDesc, scale, + means, stddevs); + break; + case TensorKind::Float: + CopyTensorFromBuffer(actualData, tensorHeight, tensorWidth, inputBufferDesc, scale, + means, stddevs); + break; + default: + throw hresult_not_implemented(L"Creating Tensors for Input Images with unhandled channel format!"); } - } - else if (args.IsImageInput()) - { - // Creating Tensors for Input Images haven't been added. - throw hresult_not_implemented(L"Creating Tensors for Input Images haven't been implemented!"); } if (inputBindingType == InputBindingType::CPU) @@ -494,15 +506,11 @@ namespace BindingUtilities com_ptr pD3D12Device = nullptr; D3D12CreateDevice(nullptr, D3D_FEATURE_LEVEL::D3D_FEATURE_LEVEL_11_0, __uuidof(ID3D12Device), reinterpret_cast(&pD3D12Device)); - + pD3D12Device->CreateCommittedResource( - &CD3DX12_HEAP_PROPERTIES(D3D12_HEAP_TYPE_DEFAULT), - D3D12_HEAP_FLAG_NONE, - &CD3DX12_RESOURCE_DESC::Buffer( - actualSizeInBytes, - D3D12_RESOURCE_FLAG_ALLOW_UNORDERED_ACCESS), - D3D12_RESOURCE_STATE_COMMON, nullptr, - __uuidof(ID3D12Resource), pGPUResource.put_void()); + &CD3DX12_HEAP_PROPERTIES(D3D12_HEAP_TYPE_DEFAULT), D3D12_HEAP_FLAG_NONE, + &CD3DX12_RESOURCE_DESC::Buffer(actualSizeInBytes, D3D12_RESOURCE_FLAG_ALLOW_UNORDERED_ACCESS), + D3D12_RESOURCE_STATE_COMMON, nullptr, __uuidof(ID3D12Resource), pGPUResource.put_void()); if (!args.IsGarbageInput()) { com_ptr imageUploadHeap; @@ -570,126 +578,217 @@ namespace BindingUtilities } com_ptr tensorfactory = get_activation_factory(); com_ptr<::IUnknown> spUnkTensor; - tensorfactory->CreateFromD3D12Resource(pGPUResource.get(), vecShape.data(), static_cast(vecShape.size()), spUnkTensor.put()); + tensorfactory->CreateFromD3D12Resource(pGPUResource.get(), const_cast(tensorShape.data()), + static_cast(tensorShape.size()), spUnkTensor.put()); TensorValue returnTensor(nullptr); spUnkTensor.try_as(returnTensor); return returnTensor; } } - // Binds tensor floats, ints, doubles from CSV data. 
- ITensor CreateBindableTensor(const ILearningModelFeatureDescriptor& description, const CommandLineArgs& args, - const InputBindingType inputBindingType) + // Process the descriptor to gather and normalize the shape + void ProcessDescriptor(const ILearningModelFeatureDescriptor& description, std::vector& shape, + TensorKind& tensorKind, InputBufferDesc& inputBufferDesc) { - std::vector elementStrings; - if (!args.CsvPath().empty()) - { - elementStrings = ParseCSVElementStrings(args.CsvPath()); - } - // Try Image Feature Descriptor auto imageFeatureDescriptor = description.try_as(); if (imageFeatureDescriptor) { int64_t channels; - if (imageFeatureDescriptor.BitmapPixelFormat() == BitmapPixelFormat::Gray16 || - imageFeatureDescriptor.BitmapPixelFormat() == BitmapPixelFormat::Gray8) - { - channels = 1; - } - else if (imageFeatureDescriptor.BitmapPixelFormat() == BitmapPixelFormat::Bgra8 || - imageFeatureDescriptor.BitmapPixelFormat() == BitmapPixelFormat::Rgba16 || - imageFeatureDescriptor.BitmapPixelFormat() == BitmapPixelFormat::Rgba8) + inputBufferDesc.elementFormat = imageFeatureDescriptor.BitmapPixelFormat(); + switch (inputBufferDesc.elementFormat) { - channels = 3; - } - else - { - throw hresult_not_implemented(L"BitmapPixel format not handled by WinMLRunner."); + case BitmapPixelFormat::Gray8: + case BitmapPixelFormat::Gray16: + channels = 1; + break; + case BitmapPixelFormat::Bgra8: + case BitmapPixelFormat::Rgba16: + case BitmapPixelFormat::Rgba8: + channels = 3; + break; + default: + throw hresult_not_implemented(L"BitmapPixel format not yet handled by WinMLRunner."); } - std::vector shape = { 1, channels, imageFeatureDescriptor.Height(), - imageFeatureDescriptor.Width() }; - IVectorView shapeVectorView = single_threaded_vector(std::move(shape)).GetView(); - return CreateTensor(args, elementStrings, shapeVectorView, inputBindingType); + + tensorKind = TensorKind::Float; + shape.push_back(1); + shape.push_back(channels); + shape.push_back(static_cast(imageFeatureDescriptor.Height())); + shape.push_back(static_cast(imageFeatureDescriptor.Width())); + return; } auto tensorDescriptor = description.try_as(); if (tensorDescriptor) { - switch (tensorDescriptor.TensorKind()) + IVectorView tensorShape = tensorDescriptor.Shape(); + for (uint32_t dim = 0; dim < tensorShape.Size(); dim++) { - case TensorKind::Undefined: + int64_t dimSize = tensorShape.GetAt(dim); + if (dimSize > 0) // If the dimension is greater than 0, then it is known. { - std::cout << "BindingUtilities: TensorKind is undefined." << std::endl; - throw hresult_invalid_argument(); + shape.push_back(dimSize); } - case TensorKind::Float: - { - return CreateTensor(args, elementStrings, tensorDescriptor.Shape(), - inputBindingType); - } - break; - case TensorKind::Float16: - { - return CreateTensor(args, elementStrings, tensorDescriptor.Shape(), - inputBindingType); - } - break; - case TensorKind::Double: + else // otherwise, make sure that the dimension is -1, representing free dimension. If not, then it's an + // invalid model. 
{ - return CreateTensor(args, elementStrings, tensorDescriptor.Shape(), - inputBindingType); - } - break; - case TensorKind::Int8: - { - return CreateTensor(args, elementStrings, tensorDescriptor.Shape(), - inputBindingType); - } - break; - case TensorKind::UInt8: - { - return CreateTensor(args, elementStrings, tensorDescriptor.Shape(), - inputBindingType); - } - break; - case TensorKind::Int16: - { - return CreateTensor(args, elementStrings, tensorDescriptor.Shape(), - inputBindingType); - } - break; - case TensorKind::UInt16: - { - return CreateTensor(args, elementStrings, tensorDescriptor.Shape(), - inputBindingType); - } - break; - case TensorKind::Int32: - { - return CreateTensor(args, elementStrings, tensorDescriptor.Shape(), - inputBindingType); - } - break; - case TensorKind::UInt32: - { - return CreateTensor(args, elementStrings, tensorDescriptor.Shape(), - inputBindingType); - } - break; - case TensorKind::Int64: - { - return CreateTensor(args, elementStrings, tensorDescriptor.Shape(), - inputBindingType); - } - break; - case TensorKind::UInt64: - { - return CreateTensor(args, elementStrings, tensorDescriptor.Shape(), - inputBindingType); + if (dimSize == -1) + { + shape.push_back(1); + } + else + { + throw hresult_invalid_argument(L"Failed to create a tensor with an unknown dimension of: " + + dimSize); + } } - break; } + + tensorKind = tensorDescriptor.TensorKind(); + return; + } + + throw hresult_invalid_argument(L"ProcessDescriptor: Unknown desription type!"); + } // namespace BindingUtilities + + // Binds tensor floats, ints, doubles from CSV data. + ITensor CreateBindableTensor(const ILearningModelFeatureDescriptor& description, const std::wstring &imagePath, + const InputBindingType inputBindingType, const InputDataType inputDataType, + const CommandLineArgs& args, uint32_t iterationNum) + { + InputBufferDesc inputBufferDesc = {}; + + std::vector shape = {}; + TensorKind tensorKind = TensorKind::Undefined; + ProcessDescriptor(description, shape, tensorKind, inputBufferDesc); + + SoftwareBitmap softwareBitmap(nullptr); + if (args.IsCSVInput()) + { + inputBufferDesc.channelFormat = TensorKind::Float; + inputBufferDesc.isPlanar = true; + + // Assumes shape is in the format of 'NCHW' + inputBufferDesc.numChannelsPerElement = static_cast(shape[1]); + + // Assumes no gaps in the input csv file + inputBufferDesc.elementStrideInBytes = inputBufferDesc.numChannelsPerElement * sizeof(float_t); + + inputBufferDesc.totalSizeInBytes = sizeof(float_t); + for (uint32_t i = 0; i < shape.size(); ++i) + inputBufferDesc.totalSizeInBytes *= static_cast(shape[i]); + + inputBufferDesc.elements = new uint8_t[inputBufferDesc.totalSizeInBytes]; + + ReadCSVIntoBuffer(args.CsvPath(), inputBufferDesc); + } + else if (args.IsImageInput()) + { + softwareBitmap = LoadImageFile(description, inputDataType, imagePath.c_str(), args, iterationNum); + + // Get Pointers to the SoftwareBitmap data buffers + const BitmapBuffer sbBitmapBuffer(softwareBitmap.LockBuffer(BitmapBufferAccessMode::Read)); + winrt::Windows::Foundation::IMemoryBufferReference sbReference = sbBitmapBuffer.CreateReference(); + auto sbByteAccess = sbReference.as<::Windows::Foundation::IMemoryBufferByteAccess>(); + winrt::check_hresult(sbByteAccess->GetBuffer(&inputBufferDesc.elements, &inputBufferDesc.totalSizeInBytes)); + + inputBufferDesc.isPlanar = false; + inputBufferDesc.elementFormat = softwareBitmap.BitmapPixelFormat(); + switch (inputBufferDesc.elementFormat) + { + case BitmapPixelFormat::Gray8: + inputBufferDesc.channelFormat = 
TensorKind::UInt8; + inputBufferDesc.numChannelsPerElement = 1; + inputBufferDesc.elementStrideInBytes = sizeof(uint8_t); + break; + case BitmapPixelFormat::Gray16: + inputBufferDesc.channelFormat = TensorKind::UInt16; + inputBufferDesc.numChannelsPerElement = 1; + inputBufferDesc.elementStrideInBytes = sizeof(uint16_t); + break; + case BitmapPixelFormat::Bgra8: + inputBufferDesc.channelFormat = TensorKind::UInt8; + inputBufferDesc.numChannelsPerElement = 3; + inputBufferDesc.elementStrideInBytes = 4 * sizeof(uint8_t); + break; + case BitmapPixelFormat::Rgba8: + inputBufferDesc.channelFormat = TensorKind::UInt8; + inputBufferDesc.numChannelsPerElement = 3; + inputBufferDesc.elementStrideInBytes = 4 * sizeof(uint8_t); + break; + case BitmapPixelFormat::Rgba16: + inputBufferDesc.channelFormat = TensorKind::UInt16; + inputBufferDesc.numChannelsPerElement = 3; + inputBufferDesc.elementStrideInBytes = 4 * sizeof(uint16_t); + break; + default: + throw hresult_invalid_argument(L"Unknown BitmapPixelFormat in input image."); + } + } + + switch (tensorKind) + { + case TensorKind::Undefined: + { + std::cout << "BindingUtilities: TensorKind is undefined." << std::endl; + throw hresult_invalid_argument(); + } + case TensorKind::Float: + { + return CreateTensor(args, shape, inputBindingType, inputBufferDesc); + } + break; + case TensorKind::Float16: + { + return CreateTensor(args, shape, inputBindingType, inputBufferDesc); + } + break; + case TensorKind::Double: + { + return CreateTensor(args, shape, inputBindingType, inputBufferDesc); + } + break; + case TensorKind::Int8: + { + return CreateTensor(args, shape, inputBindingType, inputBufferDesc); + } + break; + case TensorKind::UInt8: + { + return CreateTensor(args, shape, inputBindingType, inputBufferDesc); + } + break; + case TensorKind::Int16: + { + return CreateTensor(args, shape, inputBindingType, inputBufferDesc); + } + break; + case TensorKind::UInt16: + { + return CreateTensor(args, shape, inputBindingType, inputBufferDesc); + } + break; + case TensorKind::Int32: + { + return CreateTensor(args, shape, inputBindingType, inputBufferDesc); + } + break; + case TensorKind::UInt32: + { + return CreateTensor(args, shape, inputBindingType, inputBufferDesc); + } + break; + case TensorKind::Int64: + { + return CreateTensor(args, shape, inputBindingType, inputBufferDesc); + } + break; + case TensorKind::UInt64: + { + return CreateTensor(args, shape, inputBindingType, inputBufferDesc); + } + break; } std::cout << "BindingUtilities: TensorKind has not been implemented." << std::endl; throw hresult_not_implemented(); diff --git a/Tools/WinMLRunner/src/CommandLineArgs.cpp b/Tools/WinMLRunner/src/CommandLineArgs.cpp index 3a38dd9c..d5d43634 100644 --- a/Tools/WinMLRunner/src/CommandLineArgs.cpp +++ b/Tools/WinMLRunner/src/CommandLineArgs.cpp @@ -5,6 +5,7 @@ #include #include #include +#include #include "Filehelper.h" using namespace Windows::AI::MachineLearning; @@ -22,22 +23,31 @@ void CommandLineArgs::PrintUsage() std::cout << " -GPUHighPerformance : run model on GPU with highest performance" << std::endl; std::cout << " -GPUMinPower : run model on GPU with the least power" << std::endl; #ifdef DXCORE_SUPPORTED_BUILD - std::cout << " -GPUAdapterName : run model on GPU specified by its name. NOTE: Please only use this flag on DXCore supported machines." + std::cout << " -GPUAdapterName : run model on GPU specified by its name. NOTE: Please " + "only use this flag on DXCore supported machines." 
<< std::endl; #endif - std::cout << " -CreateDeviceOnClient : create the D3D device on the client and pass it to WinML to create session" << std::endl; + std::cout << " -CreateDeviceOnClient : create the D3D device on the client and pass it to WinML to create session" + << std::endl; std::cout << " -CreateDeviceInWinML : create the device inside WinML" << std::endl; std::cout << " -CPUBoundInput : bind the input to the CPU" << std::endl; std::cout << " -GPUBoundInput : bind the input to the GPU" << std::endl; std::cout << " -RGB : load the input as an RGB image" << std::endl; std::cout << " -BGR : load the input as a BGR image" << std::endl; - std::cout << " -Tensor : load the input as a tensor" << std::endl; + std::cout << " -Tensor [function] : load the input as a tensor, with optional function for input preprocessing" + << std::endl; + std::cout << " Optional function arguments:" << std::endl; + std::cout << " Identity(default) : No input transformations will be performed." << std::endl; + std::cout << " Normalize : float scale factor and comma separated per channel " + "means and stddev for normalization." + << std::endl; std::cout << " -Perf [all]: capture performance measurements such as timing and memory usage. Specifying \"all\" " "will output all measurements" << std::endl; std::cout << " -Iterations : # times perf measurements will be run/averaged. (maximum: 1024 times)" << std::endl; std::cout << " -Input : binds image or CSV to model" << std::endl; - std::cout << " -InputImageFolder : specify folder of images to bind to model" << std::endl; + std::cout << " -InputImageFolder : specify folder of images to bind to model" + << std::endl; std::cout << " -TopK : print top values in the result. Default to 1" << std::endl; std::cout << " -BaseOutputPath [] : base output directory path for results, default to cwd" << std::endl; @@ -53,7 +63,7 @@ void CommandLineArgs::PrintUsage() std::cout << " -DebugEvaluate: Print evaluation debug output to debug console if debugger is present." << std::endl; std::cout << " -Terse: Terse Mode (suppresses repetitive console output)" << std::endl; - std::cout << " -AutoScale : Enable image autoscaling and set the interpolation mode [Nearest, " + std::cout << " -AutoScale : Enable image autoscaling and set the interpolation mode [Nearest, " "Linear, Cubic, Fant]" << std::endl; std::cout << std::endl; @@ -77,6 +87,9 @@ void CheckAPICall(int return_value) } } +#pragma warning(push) +#pragma warning(disable : 4996) + CommandLineArgs::CommandLineArgs(const std::vector& args) { std::wstring sPerfOutputPath; @@ -112,7 +125,7 @@ CommandLineArgs::CommandLineArgs(const std::vector& args) throw hresult_invalid_argument( L"ERROR: DXCORE isn't supported on this machine. 
" L"GpuAdapterName flag should only be used with DXCore supported machines."); - } + } m_adapterName = args[++i]; m_useGPU = true; } @@ -132,7 +145,7 @@ CommandLineArgs::CommandLineArgs(const std::vector& args) else if ((_wcsicmp(args[i].c_str(), L"-Model") == 0)) { CheckNextArgument(args, i); - m_modelPath = args[++i]; + m_modelPath = FileHelper::GetAbsolutePath(args[++i]); } else if ((_wcsicmp(args[i].c_str(), L"-Folder") == 0)) { @@ -165,9 +178,46 @@ CommandLineArgs::CommandLineArgs(const std::vector& args) { m_useBGR = true; } - else if ((_wcsicmp(args[i].c_str(), L"-Tensor") == 0)) + else if (_wcsicmp(args[i].c_str(), L"-Tensor") == 0) { m_useTensor = true; + m_tensorizeArgs.Func = TensorizeFuncs::Identity; + if (i + 1 < args.size() && args[i + 1][0] != L'-') + { + if (_wcsicmp(args[++i].c_str(), L"Identity") == 0) + { + } + else if (_wcsicmp(args[i].c_str(), L"Normalize") == 0) + { + CheckNextArgument(args, i, i + 1); + CheckNextArgument(args, i, i + 2); + CheckNextArgument(args, i, i + 3); + + m_tensorizeArgs.Func = TensorizeFuncs::Normalize; + m_tensorizeArgs.Normalize.Scale = (float)_wtof(args[++i].c_str()); + + std::wstring_convert> converter; + std::istringstream means(converter.to_bytes(args[++i])); + std::string mean; + while (std::getline(means, mean, ',')) + m_tensorizeArgs.Normalize.Means.push_back((float)std::stof(mean.c_str())); + + std::istringstream stddevs(converter.to_bytes(args[++i])); + std::string stddev; + while (std::getline(stddevs, stddev, ',')) + m_tensorizeArgs.Normalize.StdDevs.push_back((float)std::stof(stddev.c_str())); + + if (m_tensorizeArgs.Normalize.Means.size() != m_tensorizeArgs.Normalize.StdDevs.size()) + throw hresult_invalid_argument( + L"-Tensor Normalize: must be the same number of mean and stddev arguments!"); + } + else + { + std::wstring msg = L"-Tensor unknown option "; + msg += args[i].c_str(); + throw hresult_invalid_argument(msg.c_str()); + } + } } else if ((_wcsicmp(args[i].c_str(), L"-CPUBoundInput") == 0)) { @@ -257,7 +307,7 @@ CommandLineArgs::CommandLineArgs(const std::vector& args) throw hresult_invalid_argument(L"Unknown SaveTensorData Mode[" + m_saveTensorMode + L"]!"); } } - else if (_wcsicmp(args[i].c_str(), L"-version") == 0) + else if (_wcsicmp(args[i].c_str(), L"-Version") == 0) { TCHAR szExeFileName[MAX_PATH]; auto ret = GetModuleFileName(NULL, szExeFileName, MAX_PATH); @@ -356,6 +406,8 @@ CommandLineArgs::CommandLineArgs(const std::vector& args) CheckForInvalidArguments(); } +#pragma warning(pop) + void CommandLineArgs::PopulateInputImagePaths() { for (auto& it : std::filesystem::directory_iterator(m_inputImageFolderPath)) @@ -372,8 +424,7 @@ void CommandLineArgs::PopulateInputImagePaths() } } -void CommandLineArgs::SetupOutputDirectories(const std::wstring& sBaseOutputPath, - const std::wstring& sPerfOutputPath, +void CommandLineArgs::SetupOutputDirectories(const std::wstring& sBaseOutputPath, const std::wstring& sPerfOutputPath, const std::wstring& sPerIterationDataPath) { std::filesystem::path PerfOutputPath(sPerfOutputPath); @@ -431,12 +482,13 @@ void CommandLineArgs::SetupOutputDirectories(const std::wstring& sBaseOutputPath } } -void CommandLineArgs::CheckNextArgument(const std::vector& args, UINT i) +void CommandLineArgs::CheckNextArgument(const std::vector& args, UINT argIdx, UINT checkIdx) { - if (i + 1 >= args.size() || args[i + 1][0] == L'-') + UINT localCheckIdx = checkIdx == 0 ? 
argIdx + 1 : checkIdx; + if (localCheckIdx >= args.size() || args[localCheckIdx][0] == L'-') { std::wstring msg = L"Invalid parameter for "; - msg += args[i].c_str(); + msg += args[argIdx].c_str(); throw hresult_invalid_argument(msg.c_str()); } } diff --git a/Tools/WinMLRunner/src/CommandLineArgs.h b/Tools/WinMLRunner/src/CommandLineArgs.h index 51a6ca00..3f09b3d7 100644 --- a/Tools/WinMLRunner/src/CommandLineArgs.h +++ b/Tools/WinMLRunner/src/CommandLineArgs.h @@ -1,6 +1,29 @@ #pragma once #include "Common.h" +enum TensorizeFuncs +{ + Identity = 0, + Normalize +}; + +class TensorizeArgs +{ +public: + TensorizeFuncs Func; + struct _Normalize + { + float Scale; + std::vector Means; + std::vector StdDevs; + } Normalize; + + TensorizeArgs() : Func(TensorizeFuncs::Identity) + { + Normalize.Scale = 1.0f; + }; +}; + class CommandLineArgs { public: @@ -35,6 +58,8 @@ class CommandLineArgs const std::wstring& GetGPUAdapterName() const { return m_adapterName; } #endif + const TensorizeArgs& TensorizeArgs() const { return m_tensorizeArgs; } + bool UseRGB() const { // If an image is specified without flags, we load it as a BGR image by default @@ -152,6 +177,7 @@ class CommandLineArgs bool m_saveTensor = false; bool m_timeLimitIterations = false; std::wstring m_saveTensorMode = L"First"; + ::TensorizeArgs m_tensorizeArgs; std::wstring m_modelFolderPath; std::wstring m_modelPath; @@ -173,7 +199,7 @@ class CommandLineArgs uint32_t m_topK = 1; std::vector> m_perfFileMetadata; - void CheckNextArgument(const std::vector& args, UINT i); + void CheckNextArgument(const std::vector& args, UINT argIdx, UINT checkIdx = 0); void CheckForInvalidArguments(); void SetupOutputDirectories(const std::wstring& sBaseOutputPath, const std::wstring& sPerfOutputPath, const std::wstring& sPerIterationDataPath); diff --git a/Tools/WinMLRunner/src/OutputHelper.h b/Tools/WinMLRunner/src/OutputHelper.h index af3aa4b2..9972052b 100644 --- a/Tools/WinMLRunner/src/OutputHelper.h +++ b/Tools/WinMLRunner/src/OutputHelper.h @@ -452,18 +452,12 @@ class OutputHelper static std::wstring FeatureDescriptorToString(const ILearningModelFeatureDescriptor& descriptor) { - // IMPORTANT: This tensorKinds array needs to match the "enum class TensorKind" idl in - // Windows.AI.MachineLearning.0.h - const std::wstring tensorKind[] = { - L"Undefined", L"Float", L"UInt8", L"Int8", L"UInt16", L"Int16", L"Int32", L"Int64", - L"String", L"Boolean", L"Float16", L"Double", L"UInt32", L"UInt64", L"Complex64", L"Complex128", - }; switch (descriptor.Kind()) { case LearningModelFeatureKind::Tensor: { auto tensorDescriptor = descriptor.as(); - return tensorKind[(int)tensorDescriptor.TensorKind()]; + return TypeHelper::Stringify(tensorDescriptor.TensorKind()); } case LearningModelFeatureKind::Image: { @@ -475,7 +469,7 @@ class OutputHelper case LearningModelFeatureKind::Map: { auto mapDescriptor = descriptor.as(); - std::wstring str = L"Map<" + tensorKind[(int)mapDescriptor.KeyKind()] + L","; + std::wstring str = L"Map<" + TypeHelper::Stringify(mapDescriptor.KeyKind()) + L","; str += FeatureDescriptorToString(mapDescriptor.ValueDescriptor()); str += L">"; return str; diff --git a/Tools/WinMLRunner/src/Run.cpp b/Tools/WinMLRunner/src/Run.cpp index caa27bdd..ca434e44 100644 --- a/Tools/WinMLRunner/src/Run.cpp +++ b/Tools/WinMLRunner/src/Run.cpp @@ -19,14 +19,15 @@ std::vector GenerateInputFeatures(const LearningMode { std::wcout << L"Generating input feature(s) with image: " << imagePath << std::endl; } - for (uint32_t i = 0; i < model.InputFeatures().Size(); 
i++) + for (uint32_t inputNum = 0; inputNum < model.InputFeatures().Size(); inputNum++) { - auto&& description = model.InputFeatures().GetAt(i); + auto&& description = model.InputFeatures().GetAt(inputNum); if (inputDataType == InputDataType::Tensor) { // If CSV data is provided, then every input will contain the same CSV data - auto tensorFeature = BindingUtilities::CreateBindableTensor(description, args, inputBindingType); + auto tensorFeature = BindingUtilities::CreateBindableTensor(description, imagePath, inputBindingType, inputDataType, + args, iterationNum); inputFeatures.push_back(tensorFeature); } else diff --git a/Tools/WinMLRunner/src/TypeHelper.h b/Tools/WinMLRunner/src/TypeHelper.h index b17da804..db18a684 100644 --- a/Tools/WinMLRunner/src/TypeHelper.h +++ b/Tools/WinMLRunner/src/TypeHelper.h @@ -111,18 +111,61 @@ class TypeHelper throw "No name found for this DeviceCreationLocation."; } + static std::wstring Stringify(TensorKind tensorKind) + { + // IMPORTANT: This tensorKinds array needs to match the "enum class TensorKind" idl in + // Windows.AI.MachineLearning.0.h + switch (tensorKind) + { + case TensorKind::Undefined: + return L"Undefined"; + case TensorKind::Float: + return L"Float"; + case TensorKind::UInt8: + return L"UInt8"; + case TensorKind::Int8: + return L"Int8"; + case TensorKind::UInt16: + return L"UInt16"; + case TensorKind::Int16: + return L"Int16"; + case TensorKind::Int32: + return L"Int32"; + case TensorKind::Int64: + return L"Int64"; + case TensorKind::String: + return L"String"; + case TensorKind::Boolean: + return L"Boolean"; + case TensorKind::Float16: + return L"Float16"; + case TensorKind::Double: + return L"Double"; + case TensorKind::UInt32: + return L"UInt32"; + case TensorKind::UInt64: + return L"UInt64"; + case TensorKind::Complex64: + return L"Complex64"; + case TensorKind::Complex128: + return L"Complex128"; + }; + + throw "No name found for this TensorKind."; + } + static LearningModelDeviceKind GetWinmlDeviceKind(DeviceType deviceType) { switch (deviceType) { case DeviceType::CPU: - return LearningModelDeviceKind::Cpu; - case DeviceType::DefaultGPU: - return LearningModelDeviceKind::DirectX; - case DeviceType::MinPowerGPU: - return LearningModelDeviceKind::DirectXMinPower; - case DeviceType::HighPerfGPU: - return LearningModelDeviceKind::DirectXHighPerformance; + return LearningModelDeviceKind::Cpu; + case DeviceType::DefaultGPU: + return LearningModelDeviceKind::DirectX; + case DeviceType::MinPowerGPU: + return LearningModelDeviceKind::DirectXMinPower; + case DeviceType::HighPerfGPU: + return LearningModelDeviceKind::DirectXHighPerformance; } throw "No LearningModelDeviceKind found for this DeviceType.";
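
For context on the tensorization change above: when the new "-Tensor Normalize <scale> <means> <stddevs>" option is used, CopyTensorFromBuffer transforms every source channel value v into ((v / scale) - mean[c]) / stddev[c] before writing it into the bound tensor. The standalone snippet below is only an illustrative sketch of that arithmetic, not code from this diff; the helper name NormalizePixel and the sample pixel values are hypothetical, while the scale/mean/stddev arguments are the ones the new DenseNet121 tests pass on the command line.

    // Sketch (not part of this change set): the per-channel normalization applied by
    // the new -Tensor Normalize path, i.e. ((value / scale) - mean) / stddev per channel.
    #include <cstdint>
    #include <cstdio>
    #include <vector>

    static std::vector<float> NormalizePixel(const std::vector<uint8_t>& channels, float scale,
                                             const std::vector<float>& means,
                                             const std::vector<float>& stddevs)
    {
        std::vector<float> out(channels.size());
        for (size_t c = 0; c < channels.size(); ++c)
        {
            out[c] = ((static_cast<float>(channels[c]) / scale) - means[c]) / stddevs[c];
        }
        return out;
    }

    int main()
    {
        // Parameters matching the invocation used by the new DenseNet121 tests:
        //   WinMLRunner.exe -model DenseNet121_fp32.onnx -input kitten_224.png -CPU
        //                   -Tensor Normalize 255 0.485,0.456,0.406 0.229,0.224,0.225
        const std::vector<uint8_t> pixel = { 124, 116, 104 }; // hypothetical RGB sample
        const std::vector<float> result =
            NormalizePixel(pixel, 255.0f, { 0.485f, 0.456f, 0.406f }, { 0.229f, 0.224f, 0.225f });
        for (float v : result)
        {
            std::printf("%f\n", v);
        }
        return 0;
    }

Note that for Bgra8 inputs the change set reverses the order in which the means and standard deviations are applied, so the per-channel parameters still line up with the red, green, and blue planes of the decoded image.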