Commit

Add -onClientTensorize [Identity|ScaleMeanStdDev] functionality + uni… (#223)
* Add -tensor preprocessing function options [Identity|ScaleMeanStdDev].

* Unify the CSV and image reading mechanisms so that they share the preprocessing code.
* Eliminate unnecessary copies of the CSV data, except for the preprocessing pass.
* Update unit tests, including a new DenseNet121 test that functionally verifies the preprocessing.

* -Tensor Normalize: update the command to accept actual means and std devs on the command line (a sketch of the arithmetic follows this list). Fix up tests and target CSVs with the new values.

* Tensorize edits to update spacing, and add a throw if fetching the native tensor buffer fails.
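
For orientation, here is a minimal sketch of the per-channel arithmetic that the ScaleMeanStdDev (Normalize) option implies, assuming interleaved 8-bit RGB input and the convention normalized = (pixel / scale - mean) / stddev. The function name and buffer layout are illustrative assumptions, not WinMLRunner's actual implementation.

```cpp
#include <array>
#include <cstddef>
#include <cstdint>
#include <vector>

// Hypothetical helper (not the WinMLRunner code): tensorizes interleaved 8-bit
// RGB pixels into floats using the scale, means, and std devs that the
// "-Tensor Normalize <scale> <means> <stddevs>" option takes on the command line.
std::vector<float> ScaleMeanStdDevTensorize(const std::vector<std::uint8_t>& pixels,
                                            float scale,
                                            const std::array<float, 3>& means,
                                            const std::array<float, 3>& stdDevs)
{
    std::vector<float> tensor(pixels.size());
    for (std::size_t i = 0; i < pixels.size(); ++i)
    {
        const std::size_t channel = i % 3; // R, G, B for interleaved data
        tensor[i] = (pixels[i] / scale - means[channel]) / stdDevs[channel];
    }
    return tensor;
}

// Example with the values used by the DenseNet121 tests below:
//   ScaleMeanStdDevTensorize(pixels, 255.0f,
//                            { 0.485f, 0.456f, 0.406f },
//                            { 0.229f, 0.224f, 0.225f });
```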
pmbrown1055 authored and Ryan Lai committed Jul 24, 2019
1 parent 1e210e7 commit ed0b176
Showing 15 changed files with 4,684 additions and 322 deletions.
Binary file added SharedContent/models/DenseNet121_fp16.onnx
Binary file added SharedContent/models/DenseNet121_fp32.onnx

Large diffs are not rendered by default (4 files).

156 changes: 115 additions & 41 deletions Testing/WinMLRunnerTest/WinMLRunnerTest.cpp
@@ -121,6 +121,7 @@ namespace WinMLRunnerTest
{
Assert::Fail(L"Failed to open tensor files\n");
}

bool isFirstRow = true;
while (!tensorFileStream.eof())
{
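
For context on the comparison above, here is a minimal sketch of a tensor-CSV reader of this shape: skip one header row via isFirstRow, then parse rows of comma-separated floats. The name, signature, and error handling are illustrative assumptions rather than the test helper's actual code.

```cpp
#include <fstream>
#include <sstream>
#include <stdexcept>
#include <string>
#include <vector>

// Illustrative sketch only: reads a tensor CSV that has one header row followed
// by rows of comma-separated values, returning the values as a flat float vector.
std::vector<float> ReadTensorCsv(const std::string& path)
{
    std::ifstream tensorFileStream(path);
    if (!tensorFileStream.is_open())
    {
        throw std::runtime_error("Failed to open tensor file");
    }

    std::vector<float> values;
    std::string line;
    bool isFirstRow = true;
    while (std::getline(tensorFileStream, line))
    {
        if (isFirstRow) // skip the header row
        {
            isFirstRow = false;
            continue;
        }
        std::stringstream lineStream(line);
        std::string cell;
        while (std::getline(lineStream, cell, ','))
        {
            values.push_back(std::stof(cell));
        }
    }
    return values;
}
```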
@@ -554,6 +555,30 @@ namespace WinMLRunnerTest
Assert::AreEqual(S_OK, RunProc((wchar_t *)command.c_str()));
}

TEST_METHOD(ProvidedImageInputFolder)
{
// Make test_folder_input folder before starting the tests
std::string mkFolderCommand = "mkdir " + std::string(INPUT_FOLDER_PATH.begin(), INPUT_FOLDER_PATH.end());
system(mkFolderCommand.c_str());

std::vector<std::string> images = { "fish.png", "kitten_224.png" };

// Copy images from list to test_folder_input
for (auto image : images)
{
std::string copyCommand = "Copy ";
copyCommand += image;
copyCommand += ' ' + std::string(INPUT_FOLDER_PATH.begin(), INPUT_FOLDER_PATH.end());
system(copyCommand.c_str());
}
const std::wstring command = BuildCommand({ EXE_PATH, L"-model", L"SqueezeNet.onnx", L"-InputImageFolder", INPUT_FOLDER_PATH });
Assert::AreEqual(S_OK, RunProc((wchar_t*)command.c_str()));

std::string removeCommand = "rd /s /q ";
removeCommand += std::string(INPUT_FOLDER_PATH.begin(), INPUT_FOLDER_PATH.end());
system(removeCommand.c_str());
}

TEST_METHOD(AutoScaleImage)
{
const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet.onnx";
@@ -563,15 +588,18 @@
Assert::AreEqual(S_OK, RunProc((wchar_t*)command.c_str()));
}

TEST_METHOD_WITH_NAME(ProvidedImageInputOnlyGpuSaveTensor)
TEST_METHOD_WITH_NAME(ProvidedImageInputOnlyCpuPerIterationPerformance)
const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet.onnx";
const std::wstring inputPath = CURRENT_PATH + L"fish.png";
const std::wstring tensorDataPath = TENSOR_DATA_PATH + L"\\" + METHOD_NAME;
const std::wstring command = BuildCommand({ EXE_PATH, L"-model ", modelPath, L"-input", inputPath,
L"-SaveTensorData", L"First", L"-PerIterationPath", tensorDataPath, L"-GPU" });
const std::wstring command =
BuildCommand({ EXE_PATH, L"-model", modelPath, L"-input", inputPath, L"-PerfOutput", OUTPUT_PATH, L"-perf",
L"-SavePerIterationPerf", L"-BaseOutputPath", tensorDataPath,
L"-PerIterationPath PerIterationData", L"-CPU" });
Assert::AreEqual(S_OK, RunProc((wchar_t*)command.c_str()));
Assert::AreEqual(true, CompareTensors(L"OutputTensorData\\Squeezenet_fish_input_GPU.csv",
tensorDataPath + L"\\softmaxout_1GpuIteration1.csv"));

// We need to expect one more line because of the header
Assert::AreEqual(static_cast<size_t>(2), GetOutputCSVLineCount(tensorDataPath + L"\\PerIterationData\\Summary.csv"));
}
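
The Summary.csv assertion above expects two rows: the header plus one data row. A line-count helper of the shape this test calls could look roughly like the sketch below (the real helper takes a wide path; this narrow-path version is illustrative only).

```cpp
#include <cstddef>
#include <fstream>
#include <string>

// Illustrative sketch only: counts the rows (header included) in an output CSV.
std::size_t GetOutputCSVLineCount(const std::string& csvPath)
{
    std::ifstream csvStream(csvPath);
    std::size_t lineCount = 0;
    std::string line;
    while (std::getline(csvStream, line))
    {
        ++lineCount;
    }
    return lineCount;
}
```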

TEST_METHOD_WITH_NAME(ProvidedImageInputOnlyCpuSaveTensor)
@@ -585,15 +613,15 @@
tensorDataPath + L"\\softmaxout_1CpuIteration1.csv"));
}

TEST_METHOD_WITH_NAME(ProvidedImageInputOnlyGpuSaveTensorImageDenotation)
const std::wstring modelPath = CURRENT_PATH + L"mnist.onnx";
const std::wstring inputPath = CURRENT_PATH + L"mnist_28.png";
TEST_METHOD_WITH_NAME(ProvidedImageInputOnlyGpuSaveTensor)
const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet.onnx";
const std::wstring inputPath = CURRENT_PATH + L"fish.png";
const std::wstring tensorDataPath = TENSOR_DATA_PATH + L"\\" + METHOD_NAME;
const std::wstring command = BuildCommand({ EXE_PATH, L"-model ", modelPath, L"-input", inputPath,
L"-SaveTensorData", L"First", L"-PerIterationPath", tensorDataPath, L"-GPU" });
Assert::AreEqual(S_OK, RunProc((wchar_t*)command.c_str()));
Assert::AreEqual(true, CompareTensors(L"OutputTensorData\\Mnist_8_input_GPU.csv",
tensorDataPath + L"\\Plus214_Output_0GpuIteration1.csv"));
Assert::AreEqual(true, CompareTensors(L"OutputTensorData\\Squeezenet_fish_input_GPU.csv",
tensorDataPath + L"\\softmaxout_1GpuIteration1.csv"));
}

TEST_METHOD_WITH_NAME(ProvidedImageInputOnlyCpuSaveTensorImageDenotation)
@@ -607,15 +635,15 @@
tensorDataPath + L"\\Plus214_Output_0CpuIteration1.csv"));
}

TEST_METHOD_WITH_NAME(ProvidedImageInputOnlyGpuSaveTensorFp16)
const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet_fp16.onnx";
const std::wstring inputPath = CURRENT_PATH + L"fish.png";
TEST_METHOD_WITH_NAME(ProvidedImageInputOnlyGpuSaveTensorImageDenotation)
const std::wstring modelPath = CURRENT_PATH + L"mnist.onnx";
const std::wstring inputPath = CURRENT_PATH + L"mnist_28.png";
const std::wstring tensorDataPath = TENSOR_DATA_PATH + L"\\" + METHOD_NAME;
const std::wstring command = BuildCommand({ EXE_PATH, L"-model ", modelPath, L"-input", inputPath,
L"-SaveTensorData", L"First", L"-PerIterationPath", tensorDataPath, L"-GPU" });
Assert::AreEqual(S_OK, RunProc((wchar_t*)command.c_str()));
Assert::AreEqual(true, CompareTensorsFP16(L"OutputTensorData\\Squeezenet_fp16_fish_input_GPU.csv",
tensorDataPath + L"\\softmaxout_1GpuIteration1.csv"));
Assert::AreEqual(true, CompareTensors(L"OutputTensorData\\Mnist_8_input_GPU.csv",
tensorDataPath + L"\\Plus214_Output_0GpuIteration1.csv"));
}

TEST_METHOD_WITH_NAME(ProvidedImageInputOnlyCpuSaveTensorFp16)
@@ -629,41 +657,87 @@
tensorDataPath + L"\\softmaxout_1CpuIteration1.csv"));
}

TEST_METHOD_WITH_NAME(ProvidedImageInputOnlyCpuPerIterationPerformance)
TEST_METHOD_WITH_NAME(ProvidedImageInputOnlyGpuSaveTensorFp16)
const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet_fp16.onnx";
const std::wstring inputPath = CURRENT_PATH + L"fish.png";
const std::wstring tensorDataPath = TENSOR_DATA_PATH + L"\\" + METHOD_NAME;
const std::wstring command = BuildCommand({ EXE_PATH, L"-model ", modelPath, L"-input", inputPath,
L"-SaveTensorData", L"First", L"-PerIterationPath", tensorDataPath, L"-GPU" });
Assert::AreEqual(S_OK, RunProc((wchar_t*)command.c_str()));
Assert::AreEqual(true, CompareTensorsFP16(L"OutputTensorData\\Squeezenet_fp16_fish_input_GPU.csv",
tensorDataPath + L"\\softmaxout_1GpuIteration1.csv"));
}

TEST_METHOD_WITH_NAME(ProvidedImageInputOnlyCpuSaveTensorTensorizeIdentity)
const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet.onnx";
const std::wstring inputPath = CURRENT_PATH + L"fish.png";
const std::wstring tensorDataPath = TENSOR_DATA_PATH + L"\\" + METHOD_NAME;
const std::wstring command =
BuildCommand({ EXE_PATH, L"-model", modelPath, L"-PerfOutput", OUTPUT_PATH, L"-perf",
L"-SavePerIterationPerf", L"-BaseOutputPath", tensorDataPath,
L"-PerIterationPath PerIterationData", L"-CPU" });
const std::wstring command = BuildCommand({ EXE_PATH, L"-model ", modelPath, L"-input", inputPath,
L"-SaveTensorData", L"First", L"-PerIterationPath", tensorDataPath, L"-CPU",
L"-Tensor" });
Assert::AreEqual(S_OK, RunProc((wchar_t*)command.c_str()));
Assert::AreEqual(true, CompareTensors(L"OutputTensorData\\Squeezenet_fish_input_CPU.csv",
tensorDataPath + L"\\softmaxout_1CpuIteration1.csv"));
}

// We need to expect one more line because of the header
Assert::AreEqual(static_cast<size_t>(2), GetOutputCSVLineCount(tensorDataPath + L"\\PerIterationData\\Summary.csv"));
TEST_METHOD_WITH_NAME(ProvidedImageInputOnlyGpuSaveTensorTensorizeIdentity)
const std::wstring modelPath = CURRENT_PATH + L"SqueezeNet.onnx";
const std::wstring inputPath = CURRENT_PATH + L"fish.png";
const std::wstring tensorDataPath = TENSOR_DATA_PATH + L"\\" + METHOD_NAME;
const std::wstring command = BuildCommand({ EXE_PATH, L"-model ", modelPath, L"-input", inputPath,
L"-SaveTensorData", L"First", L"-PerIterationPath", tensorDataPath, L"-GPU",
L"-Tensor Identity" });
Assert::AreEqual(S_OK, RunProc((wchar_t*)command.c_str()));
Assert::AreEqual(true, CompareTensors(L"OutputTensorData\\Squeezenet_fish_input_GPU.csv",
tensorDataPath + L"\\softmaxout_1GpuIteration1.csv"));
}
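
Outside the test harness, the Identity pass exercised above maps to a WinMLRunner invocation of roughly this shape (the executable name and output folder are placeholders; the flags are the ones used in the test commands):

```
WinMLRunner.exe -model SqueezeNet.onnx -input fish.png -CPU -SaveTensorData First -PerIterationPath <output_folder> -Tensor Identity
```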

TEST_METHOD(ProvidedImageInputFolder)
{
// Make test_folder_input folder before starting the tests
std::string mkFolderCommand = "mkdir " + std::string(INPUT_FOLDER_PATH.begin(), INPUT_FOLDER_PATH.end());
system(mkFolderCommand.c_str());
TEST_METHOD_WITH_NAME(ProvidedImageInputOnlyCpuSaveTensorTensorizeScaleMeanStdDev)
const std::wstring modelPath = CURRENT_PATH + L"DenseNet121_fp32.onnx";
const std::wstring inputPath = CURRENT_PATH + L"kitten_224.png";
const std::wstring tensorDataPath = TENSOR_DATA_PATH + L"\\" + METHOD_NAME;
const std::wstring command = BuildCommand({ EXE_PATH, L"-model ", modelPath, L"-input", inputPath,
L"-SaveTensorData", L"First", L"-PerIterationPath", tensorDataPath, L"-CPU",
L"-Tensor Normalize 255 0.485,0.456,0.406 0.229,0.224,0.225" });
Assert::AreEqual(S_OK, RunProc((wchar_t*)command.c_str()));
Assert::AreEqual(true, CompareTensors(L"OutputTensorData\\DenseNet121_fp32_kitten_224_input_CPU.csv",
tensorDataPath + L"\\fc6_1CpuIteration1.csv"));
}

std::vector<std::string> images = { "fish.png", "kitten_224.png" };
TEST_METHOD_WITH_NAME(ProvidedImageInputOnlyGpuSaveTensorTensorizeScaleMeanStdDev)
const std::wstring modelPath = CURRENT_PATH + L"DenseNet121_fp32.onnx";
const std::wstring inputPath = CURRENT_PATH + L"kitten_224.png";
const std::wstring tensorDataPath = TENSOR_DATA_PATH + L"\\" + METHOD_NAME;
const std::wstring command = BuildCommand({ EXE_PATH, L"-model ", modelPath, L"-input", inputPath,
L"-SaveTensorData", L"First", L"-PerIterationPath", tensorDataPath, L"-GPU",
L"-Tensor Normalize 255 0.485,0.456,0.406 0.229,0.224,0.225" });
Assert::AreEqual(S_OK, RunProc((wchar_t*)command.c_str()));
Assert::AreEqual(true, CompareTensors(L"OutputTensorData\\DenseNet121_fp32_kitten_224_input_GPU.csv",
tensorDataPath + L"\\fc6_1GpuIteration1.csv"));
}

// Copy images from list to test_folder_input
for (auto image : images)
{
std::string copyCommand = "Copy ";
copyCommand += image;
copyCommand += ' ' + std::string(INPUT_FOLDER_PATH.begin(), INPUT_FOLDER_PATH.end());
system(copyCommand.c_str());
}
const std::wstring command = BuildCommand({ EXE_PATH, L"-model", L"SqueezeNet.onnx", L"-InputImageFolder", INPUT_FOLDER_PATH });
TEST_METHOD_WITH_NAME(ProvidedImageInputOnlyCpuSaveTensorTensorizeScaleMeanStdDevFP16)
const std::wstring modelPath = CURRENT_PATH + L"DenseNet121_fp16.onnx";
const std::wstring inputPath = CURRENT_PATH + L"kitten_224.png";
const std::wstring tensorDataPath = TENSOR_DATA_PATH + L"\\" + METHOD_NAME;
const std::wstring command = BuildCommand({ EXE_PATH, L"-model ", modelPath, L"-input", inputPath,
L"-SaveTensorData", L"First", L"-PerIterationPath", tensorDataPath, L"-CPU",
L"-Tensor Normalize 255 0.485,0.456,0.406 0.229,0.224,0.225" });
Assert::AreEqual(S_OK, RunProc((wchar_t*)command.c_str()));
Assert::AreEqual(true, CompareTensors(L"OutputTensorData\\DenseNet121_fp16_kitten_224_input_CPU.csv",
tensorDataPath + L"\\fc6_1CpuIteration1.csv"));
}

std::string removeCommand = "rd /s /q ";
removeCommand += std::string(INPUT_FOLDER_PATH.begin(), INPUT_FOLDER_PATH.end());
system(removeCommand.c_str());
TEST_METHOD_WITH_NAME(ProvidedImageInputOnlyGpuSaveTensorTensorizeScaleMeanStdDevFP16)
const std::wstring modelPath = CURRENT_PATH + L"DenseNet121_fp16.onnx";
const std::wstring inputPath = CURRENT_PATH + L"kitten_224.png";
const std::wstring tensorDataPath = TENSOR_DATA_PATH + L"\\" + METHOD_NAME;
const std::wstring command = BuildCommand({ EXE_PATH, L"-model ", modelPath, L"-input", inputPath,
L"-SaveTensorData", L"First", L"-PerIterationPath", tensorDataPath, L"-GPU",
L"-Tensor Normalize 255 0.485,0.456,0.406 0.229,0.224,0.225" });
Assert::AreEqual(S_OK, RunProc((wchar_t*)command.c_str()));
Assert::AreEqual(true, CompareTensorsFP16(L"OutputTensorData\\DenseNet121_fp16_kitten_224_input_GPU.csv",
tensorDataPath + L"\\fc6_1GpuIteration1.csv"));
}
};
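
Likewise, the ScaleMeanStdDev path exercised by the DenseNet121 tests maps to roughly this command line (executable name and output folder are placeholders; the scale, means, and std devs are the values used above, and the fp16 GPU variant compares results with CompareTensorsFP16 rather than CompareTensors):

```
WinMLRunner.exe -model DenseNet121_fp32.onnx -input kitten_224.png -CPU -SaveTensorData First -PerIterationPath <output_folder> -Tensor Normalize 255 0.485,0.456,0.406 0.229,0.224,0.225
```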

@@ -823,7 +897,7 @@ namespace WinMLRunnerTest

TEST_METHOD(TestTopK)
{
const std::wstring command = BuildCommand({ EXE_PATH, L"-model", L"SqueezeNet.onnx", L"-TopK", L"5" });
const std::wstring command = BuildCommand({ EXE_PATH, L"-model", CURRENT_PATH + L"SqueezeNet.onnx", L"-TopK", L"5" });
Assert::AreEqual(S_OK, RunProc((wchar_t*)command.c_str()));
}
