Skip to content

Commit

Permalink
comments from pr
Browse files Browse the repository at this point in the history
  • Loading branch information
michaelgsharp committed Jan 23, 2025
1 parent fc37290 commit 5e8e8ab
Show file tree
Hide file tree
Showing 2 changed files with 14 additions and 19 deletions.
2 changes: 1 addition & 1 deletion csharp/src/Microsoft.ML.OnnxRuntime/OrtValue.shared.cs
Original file line number Diff line number Diff line change
Expand Up @@ -685,7 +685,7 @@ public static OrtValue CreateTensorValueFromMemory<T>(T[] data, long[] shape) wh

#if NET8_0_OR_GREATER
/// <summary>
/// This is a factory method creates a native Onnxruntime OrtValue containing a tensor.
/// This is a factory method that creates a native Onnxruntime OrtValue containing a tensor on top of the existing tensor managed memory.
/// The method will attempt to pin managed memory so no copying occurs when data is passed down
/// to native code.
/// </summary>
Expand Down
Original file line number Diff line number Diff line change
@@ -1,17 +1,14 @@
using Microsoft.ML.OnnxRuntime.Tensors;
using Microsoft.VisualStudio.TestPlatform.Utilities;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices;
using System.Text.RegularExpressions;
using System.Xml.Linq;
using Xunit;

#if NET8_0_OR_GREATER
using DotnetTensors = System.Numerics.Tensors;
using TensorPrimitives = System.Numerics.Tensors.TensorPrimitives;
using SystemNumericsTensors = System.Numerics.Tensors;
#endif

namespace Microsoft.ML.OnnxRuntime.Tests
Expand Down Expand Up @@ -91,28 +88,27 @@ private void CanRunInferenceOnAModelDotnetTensors(GraphOptimizationLevel graphOp
SessionOptions options = new SessionOptions();
cleanUp.Add(options);
options.GraphOptimizationLevel = graphOptimizationLevel;
if (enableParallelExecution) options.ExecutionMode = ExecutionMode.ORT_PARALLEL;

var session = new InferenceSession(model, options);
cleanUp.Add(session);

using var runOptions = new RunOptions();
using var inputOrtValues = new DisposableListTest<DisposableTestPair<OrtValue>>(session.InputMetadata.Count);
var inputMeta = session.InputMetadata;
var outputMeta = session.OutputMetadata;

float[] expectedOutput = TestDataLoader.LoadTensorFromEmbeddedResource("bench.expected_out");
long[] expectedDimensions = { 1, 1000, 1, 1 }; // hardcoded for now for the test data
ReadOnlySpan<long> expectedOutputDimensions = expectedDimensions;
string[] expectedOutputNames = new string[] { "softmaxout_1" };

float[] inputData = TestDataLoader.LoadTensorFromEmbeddedResource("bench.in"); // this is the data for only one input tensor for this model

using var inputOrtValues = new DisposableListTest<DisposableTestPair<OrtValue>>(session.InputMetadata.Count);

foreach (var name in inputMeta.Keys)
{
Assert.Equal(typeof(float), inputMeta[name].ElementType);
Assert.True(inputMeta[name].IsTensor);
var tensor = DotnetTensors.Tensor.Create<float>(inputData, inputMeta[name].Dimensions.Select(x => (nint)x).ToArray());
var tensor = SystemNumericsTensors.Tensor.Create<float>(inputData, inputMeta[name].Dimensions.Select(x => (nint)x).ToArray());
inputOrtValues.Add(new DisposableTestPair<OrtValue>(name, OrtValue.CreateTensorValueFromSystemNumericsTensorObject<float>(tensor)));

}
Expand All @@ -131,8 +127,6 @@ private void CanRunInferenceOnAModelDotnetTensors(GraphOptimizationLevel graphOp
ValidateRunResult(r, expectedOutput, expectedDimensions);
}
}

session.Dispose();
}
}

Expand Down Expand Up @@ -162,7 +156,7 @@ public void InferenceSessionDisposedDotnetTensors()
{
Assert.Equal(typeof(float), inputMeta[name].ElementType);
Assert.True(inputMeta[name].IsTensor);
var tensor = DotnetTensors.Tensor.Create<float>(inputData, inputMeta[name].Dimensions.Select(x => (nint) x).ToArray());
var tensor = SystemNumericsTensors.Tensor.Create<float>(inputData, inputMeta[name].Dimensions.Select(x => (nint) x).ToArray());
inputOrtValues.Add(new DisposableTestPair<OrtValue>(name, OrtValue.CreateTensorValueFromSystemNumericsTensorObject<float>(tensor)));
}

Expand Down Expand Up @@ -201,7 +195,7 @@ private void ThrowWrongOutputNameDotnetTensors()
using (var inputOrtValues = new DisposableListTest<DisposableTestPair<OrtValue>>(session.InputMetadata.Count))
using (var outputOrtValues = new DisposableListTest<DisposableTestPair<OrtValue>>(session.OutputMetadata.Count))
{
var tensor = DotnetTensors.Tensor.Create<float>(inputData, Array.ConvertAll<int, nint>(inputTensor.Dimensions.ToArray(), x => (nint)x));
var tensor = SystemNumericsTensors.Tensor.Create<float>(inputData, Array.ConvertAll<int, nint>(inputTensor.Dimensions.ToArray(), x => (nint)x));

inputOrtValues.Add(new DisposableTestPair<OrtValue>("data_0", OrtValue.CreateTensorValueFromSystemNumericsTensorObject<float>(tensor)));
outputOrtValues.Add(new DisposableTestPair<OrtValue>("bad_output_name", OrtValue.CreateTensorValueFromSystemNumericsTensorObject(tensor)));
Expand All @@ -220,13 +214,13 @@ private void ThrowWrongOutputDimensionDotnetTensors()
var session = tuple.Item1;
var inputData = tuple.Item2;
var inputTensor = tuple.Item3;
var outputTensor = DotnetTensors.Tensor.Create<float>([1, 1001, 1, 1]);
var outputTensor = SystemNumericsTensors.Tensor.Create<float>([1, 1001, 1, 1]);

using (var runOptions = new RunOptions())
using (var inputOrtValues = new DisposableListTest<DisposableTestPair<OrtValue>>(session.InputMetadata.Count))
using (var outputOrtValues = new DisposableListTest<DisposableTestPair<OrtValue>>(session.OutputMetadata.Count))
{
var tensor = DotnetTensors.Tensor.Create<float>(inputData, Array.ConvertAll<int, nint>(inputTensor.Dimensions.ToArray(), x => (nint)x));
var tensor = SystemNumericsTensors.Tensor.Create<float>(inputData, Array.ConvertAll<int, nint>(inputTensor.Dimensions.ToArray(), x => (nint)x));

inputOrtValues.Add(new DisposableTestPair<OrtValue>("data_0", OrtValue.CreateTensorValueFromSystemNumericsTensorObject<float>(tensor)));
outputOrtValues.Add(new DisposableTestPair<OrtValue>("softmaxout_1", OrtValue.CreateTensorValueFromSystemNumericsTensorObject(outputTensor)));
Expand All @@ -241,24 +235,25 @@ private void ThrowWrongOutputDimensionDotnetTensors()
private void ThrowInconsistentPinnedOutputsDotnetTensors()
{
var tuple = OpenSessionSqueezeNet();
using var cleanUp = new DisposableListTest<IDisposable>();
cleanUp.Add(tuple.Item1);
var session = tuple.Item1;
var inputData = tuple.Item2;
var inputTensor = tuple.Item3;
var outputTensor = DotnetTensors.Tensor.Create([1, 1001, 1, 1], [4]);
var outputTensor = SystemNumericsTensors.Tensor.Create([1, 1001, 1, 1], [4]);

using (var runOptions = new RunOptions())
using (var inputOrtValues = new DisposableListTest<DisposableTestPair<OrtValue>>(session.InputMetadata.Count))
using (var outputOrtValues = new DisposableListTest<DisposableTestPair<OrtValue>>(session.OutputMetadata.Count))
{
var tensor = DotnetTensors.Tensor.Create<float>(inputData, Array.ConvertAll<int, nint>(inputTensor.Dimensions.ToArray(), x => (nint)x));
var tensor = SystemNumericsTensors.Tensor.Create<float>(inputData, Array.ConvertAll<int, nint>(inputTensor.Dimensions.ToArray(), x => (nint)x));

inputOrtValues.Add(new DisposableTestPair<OrtValue>("data_0", OrtValue.CreateTensorValueFromSystemNumericsTensorObject<float>(tensor)));
outputOrtValues.Add(new DisposableTestPair<OrtValue>("softmaxout_1", OrtValue.CreateTensorValueFromSystemNumericsTensorObject(outputTensor)));
OrtValue[] outputs = [];
var ex = Assert.Throws<ArgumentException>(() => session.Run(runOptions, ["data_0"], [inputOrtValues[0].Value], ["softmaxout_1"], outputs));
Assert.StartsWith("Length of outputNames (1) must match that of outputValues (0).", ex.Message);
}
session.Dispose();
}
#pragma warning restore SYSLIB5001 // System.Numerics.Tensors is only in preview so we can continue receiving API feedback
#endif
Expand Down Expand Up @@ -1615,7 +1610,7 @@ private void VerifyNativeMethodsExist()

#if NET8_0_OR_GREATER
#pragma warning disable SYSLIB5001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
private void ValidateRunResultData(DotnetTensors.Tensor<float> resultTensor, float[] expectedOutput, int[] expectedDimensions)
private void ValidateRunResultData(SystemNumericsTensors.Tensor<float> resultTensor, float[] expectedOutput, int[] expectedDimensions)
{
Assert.Equal(expectedDimensions.Length, resultTensor.Rank);

Expand Down

0 comments on commit 5e8e8ab

Please sign in to comment.