
Update to Onnxruntime 1.2 and reenable its support for GPU #4919

Merged: 22 commits, merged on Mar 10, 2020

Commits (22):
47ef905  Update Onnx version to 1.2 (antoniovs1029, Mar 3, 2020)
7f5c219  Added onnx 1.2 private feed (antoniovs1029, Mar 3, 2020)
e09c233  Added dependencies to GPU package only (antoniovs1029, Mar 3, 2020)
68e719f  Actually use gpuDeviceId when applying ONNX model (antoniovs1029, Mar 3, 2020)
94e1967  Add gpuDeviceId parameters to OnnxConversionTests (antoniovs1029, Mar 3, 2020)
c91dd19  Use OnnxRuntime.Managed in OnnxTransformer (antoniovs1029, Mar 4, 2020)
5092119  Added AI Infra for ONNX 1.2 nightly nugets (antoniovs1029, Mar 4, 2020)
75427b7  Added onnxruntime new references for experiment (antoniovs1029, Mar 4, 2020)
45b5709  Updated ML.Tests dependency for experiment on CI (antoniovs1029, Mar 4, 2020)
477bdfe  Reverted mistake I made in Directory.Build.props (antoniovs1029, Mar 5, 2020)
0cc8006  Update ML.Samples dependency (antoniovs1029, Mar 5, 2020)
081b91c  Update ort feed (antoniovs1029, Mar 5, 2020)
fa5a2ce  Second experiment (antoniovs1029, Mar 5, 2020)
2ecb7b5  Update ML.Test to depend on Onnxruntime (no GPU) (antoniovs1029, Mar 5, 2020)
94741fe  Revert "Add gpuDeviceId parameters to OnnxConversionTests" (antoniovs1029, Mar 5, 2020)
d4501a0  Update ORT feed (antoniovs1029, Mar 10, 2020)
dd9f56d  Revert "Update ML.Test to depend on Onnxruntime (no GPU)" (antoniovs1029, Mar 10, 2020)
68b5489  Revert "Revert "Add gpuDeviceId parameters to OnnxConversionTests"" (antoniovs1029, Mar 10, 2020)
60b899b  Revert "Revert "Update ML.Test to depend on Onnxruntime (no GPU)"" (antoniovs1029, Mar 10, 2020)
932d6ed  Revert "Revert "Revert "Add gpuDeviceId parameters to OnnxConversionTests""" (antoniovs1029, Mar 10, 2020)
2f30fd5  Removed ORT custom feed (antoniovs1029, Mar 10, 2020)
98c0dff  Removed whitespaces (antoniovs1029, Mar 10, 2020)
Revert "Revert "Revert "Add gpuDeviceId parameters to OnnxConversionT…
…ests"""

This reverts commit 68b5489.
  • Loading branch information
antoniovs1029 committed Mar 10, 2020
commit 932d6ed3d123ca53a03b6cebb22e52f6745df75a
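This commit removes the explicit gpuDeviceId argument from the tests' ApplyOnnxModel calls, so the tests go back to scoring through OnnxRuntime on the CPU by default. For reference, a minimal sketch of the two call shapes touched by the diff below; the model path and input data here are placeholders, and only the ApplyOnnxModel calls mirror the actual test code.

    using Microsoft.ML;
    using Microsoft.ML.Data;

    // Minimal sketch, not the test code itself: the model path and input type are
    // placeholders; only the ApplyOnnxModel calls mirror the diff below.
    var mlContext = new MLContext();
    var onnxModelPath = "model.onnx";                  // placeholder path
    var data = mlContext.Data.LoadFromEnumerable(new[] // placeholder input
    {
        new ModelInput { Features = new float[] { 0f, 1f, 2f } }
    });

    // Form being reverted here: pin ONNX scoring to GPU device 0 (needs the
    // GPU-enabled OnnxRuntime package and a compatible device).
    var gpuEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath, gpuDeviceId: 0);

    // Form the tests return to: no device id, so OnnxRuntime runs on CPU.
    var cpuEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath);

    var onnxTransformer = cpuEstimator.Fit(data);
    var onnxResult = onnxTransformer.Transform(data);

    class ModelInput
    {
        [VectorType(3)]
        public float[] Features { get; set; }
    }

ApplyOnnxModel also has overloads that take explicit output and input column name arrays, which several tests in this diff use (for example OptionalColumnOnnxTest and SelectColumnsOnnxTest).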
56 changes: 27 additions & 29 deletions test/Microsoft.ML.Tests/OnnxConversionTest.cs
@@ -35,8 +35,6 @@ namespace Microsoft.ML.Tests
{
public class OnnxConversionTest : BaseTestBaseline
{
- private int _gpuid = 0;
-
private class AdultData
{
[LoadColumn(0, 10), ColumnName("FeatureVector")]
@@ -91,7 +89,7 @@ public void SimpleEndToEndOnnxConversionTest()
if (IsOnnxRuntimeSupported())
{
// Step 3: Evaluate the saved ONNX model using the data used to train the ML.NET pipeline.
- var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath, gpuDeviceId: _gpuid);
+ var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath);
var onnxTransformer = onnxEstimator.Fit(data);
var onnxResult = onnxTransformer.Transform(data);

@@ -182,7 +180,7 @@ public void KmeansOnnxConversionTest()
if (IsOnnxRuntimeSupported())
{
// Evaluate the saved ONNX model using the data used to train the ML.NET pipeline.
- var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath, gpuDeviceId: _gpuid);
+ var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath);
var onnxTransformer = onnxEstimator.Fit(data);
var onnxResult = onnxTransformer.Transform(data);
CompareSelectedColumns<float>("Score", "Score", transformedData, onnxResult, 3);
@@ -237,7 +235,7 @@ public void RegressionTrainersOnnxConversionTest()
var onnxModelPath = GetOutputPath(onnxFileName);
SaveOnnxModel(onnxModel, onnxModelPath, null);

- var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath, gpuDeviceId: _gpuid);
+ var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath);
var onnxTransformer = onnxEstimator.Fit(dataView);
var onnxResult = onnxTransformer.Transform(dataView);
CompareSelectedColumns<float>("Score", "Score", transformedData, onnxResult, 3);
@@ -298,7 +296,7 @@ public void BinaryClassificationTrainersOnnxConversionTest()
if (IsOnnxRuntimeSupported())
{
// Evaluate the saved ONNX model using the data used to train the ML.NET pipeline.
- var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath, gpuDeviceId: _gpuid);
+ var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath);
var onnxTransformer = onnxEstimator.Fit(dataView);
var onnxResult = onnxTransformer.Transform(dataView);
CompareSelectedColumns<float>("Score", "Score", transformedData, onnxResult, 3); //compare scores
@@ -331,7 +329,7 @@ public void TestVectorWhiteningOnnxConversionTest()

if (IsOnnxRuntimeSupported())
{
- var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath, gpuDeviceId: _gpuid);
+ var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath);
var onnxTransformer = onnxEstimator.Fit(dataView);
var onnxResult = onnxTransformer.Transform(dataView);
CompareSelectedColumns<float>("whitened1", "whitened1", transformedData, onnxResult);
@@ -384,7 +382,7 @@ public void PlattCalibratorOnnxConversionTest()
if (IsOnnxRuntimeSupported())
{
// Evaluate the saved ONNX model using the data used to train the ML.NET pipeline.
- var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath, gpuDeviceId: _gpuid);
+ var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath);
var onnxTransformer = onnxEstimator.Fit(dataView);
var onnxResult = onnxTransformer.Transform(dataView);
CompareSelectedColumns<float>("Score", "Score", transformedData, onnxResult, 3);
@@ -431,7 +429,7 @@ public void PlattCalibratorOnnxConversionTest2()
// Compare model scores produced by ML.NET and ONNX's runtime.
if (IsOnnxRuntimeSupported())
{
- var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath, gpuDeviceId: _gpuid);
+ var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath);
var onnxTransformer = onnxEstimator.Fit(data);
var onnxResult = onnxTransformer.Transform(data);
CompareSelectedColumns<float>("Probability", "Probability", transformedData, onnxResult, 3); //compare probabilities
@@ -464,7 +462,7 @@ public void TextNormalizingOnnxConversionTest()
if (IsOnnxRuntimeSupported() && !RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
{
// Evaluate the saved ONNX model using the data used to train the ML.NET pipeline.
- var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath, gpuDeviceId: _gpuid);
+ var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath);
var onnxTransformer = onnxEstimator.Fit(dataView);
var onnxResult = onnxTransformer.Transform(dataView);
CompareSelectedColumns<ReadOnlyMemory<char>>("NormText", "NormText", transformedData, onnxResult);
@@ -513,7 +511,7 @@ public void LpNormOnnxConversionTest(
if (IsOnnxRuntimeSupported())
{
// Evaluate the saved ONNX model using the data used to train the ML.NET pipeline.
- var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath, gpuDeviceId: _gpuid);
+ var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath);
var onnxTransformer = onnxEstimator.Fit(dataView);
var onnxResult = onnxTransformer.Transform(dataView);
CompareSelectedColumns<float>("Features", "Features", transformedData, onnxResult, 3);
@@ -580,7 +578,7 @@ public void KeyToVectorWithBagOnnxConversionTest()
if (IsOnnxRuntimeSupported())
{
// Evaluate the saved ONNX model using the data used to train the ML.NET pipeline.
- var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath, gpuDeviceId: _gpuid);
+ var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath);
var onnxTransformer = onnxEstimator.Fit(data);
var onnxResult = onnxTransformer.Transform(data);
CompareSelectedColumns<float>("Score", "Score", transformedData, onnxResult);
@@ -901,7 +899,7 @@ public void ConcatenateOnnxConversionTest()
var onnxModelPath = GetOutputPath(onnxModelName);
SaveOnnxModel(onnxModel, onnxModelPath, null);
// Evaluate the saved ONNX model using the data used to train the ML.NET pipeline.
- var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath, gpuDeviceId: _gpuid);
+ var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath);
var onnxTransformer = onnxEstimator.Fit(data);
var onnxResult = onnxTransformer.Transform(data);
CompareSelectedColumns<double>("Features", "Features", transformedData, onnxResult);
@@ -953,7 +951,7 @@ public void RemoveVariablesInPipelineTest()
if (IsOnnxRuntimeSupported())
{
// Evaluate the saved ONNX model using the data used to train the ML.NET pipeline.
- var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath, gpuDeviceId: _gpuid);
+ var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath);
var onnxTransformer = onnxEstimator.Fit(data);
var onnxResult = onnxTransformer.Transform(data);
CompareSelectedColumns<float>("Score", "Score", transformedData, onnxResult);
@@ -1018,7 +1016,7 @@ public void TokenizingByCharactersOnnxConversionTest(bool useMarkerCharacters)
if (IsOnnxRuntimeSupported())
{
// Evaluate the saved ONNX model using the data used to train the ML.NET pipeline.
- var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath, gpuDeviceId: _gpuid);
+ var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath);
var onnxTransformer = onnxEstimator.Fit(dataView);
var onnxResult = onnxTransformer.Transform(dataView);
CompareSelectedColumns<ushort>("TokenizedText", "TokenizedText", transformedData, onnxResult);
@@ -1093,7 +1091,7 @@ public void OnnxTypeConversionTest(DataKind fromKind, DataKind toKind)

if (IsOnnxRuntimeSupported())
{
- var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath, gpuDeviceId: _gpuid);
+ var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath);
var onnxTransformer = onnxEstimator.Fit(dataView);
var onnxResult = onnxTransformer.Transform(dataView);

@@ -1130,7 +1128,7 @@ public void PcaOnnxConversionTest()
if (IsOnnxRuntimeSupported())
{
// Evaluate the saved ONNX model using the data used to train the ML.NET pipeline.
- var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath, gpuDeviceId: _gpuid);
+ var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath);
var onnxTransformer = onnxEstimator.Fit(dataView);
var onnxResult = onnxTransformer.Transform(dataView);
CompareSelectedColumns<float>("pca", "pca", transformedData, onnxResult);
@@ -1189,7 +1187,7 @@ public void IndicateMissingValuesOnnxConversionTest()
if (IsOnnxRuntimeSupported())
{
// Evaluate the saved ONNX model using the data used to train the ML.NET pipeline.
- var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath, gpuDeviceId: _gpuid);
+ var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath);
var onnxTransformer = onnxEstimator.Fit(dataView);
var onnxResult = onnxTransformer.Transform(dataView);
CompareSelectedColumns<int>("MissingIndicator", "MissingIndicator", transformedData, onnxResult);
@@ -1232,7 +1230,7 @@ public void ValueToKeyMappingOnnxConversionTest(DataKind valueType)

if (IsOnnxRuntimeSupported())
{
- var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath, gpuDeviceId: _gpuid);
+ var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath);
var onnxTransformer = onnxEstimator.Fit(dataView);
var onnxResult = onnxTransformer.Transform(dataView);
CompareSelectedColumns<uint>("Key", "Key", mlnetResult, onnxResult);
@@ -1281,7 +1279,7 @@ public void KeyToValueMappingOnnxConversionTest(DataKind valueType)

if (IsOnnxRuntimeSupported())
{
- var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath, gpuDeviceId: _gpuid);
+ var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath);
var onnxTransformer = onnxEstimator.Fit(dataView);
var onnxResult = onnxTransformer.Transform(dataView);
CompareResults("Value", "Value", mlnetResult, onnxResult);
@@ -1322,7 +1320,7 @@ public void WordTokenizerOnnxConversionTest()
if (IsOnnxRuntimeSupported())
{
// Evaluate the saved ONNX model using the data used to train the ML.NET pipeline.
- var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxFilePath, gpuDeviceId: _gpuid);
+ var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxFilePath);
var onnxTransformer = onnxEstimator.Fit(dataView);
var onnxResult = onnxTransformer.Transform(dataView);
CompareSelectedColumns<ReadOnlyMemory<char>>("Tokens", "Tokens", transformedData, onnxResult);
@@ -1386,7 +1384,7 @@ public void NgramOnnxConversionTest(

if (IsOnnxRuntimeSupported())
{
- var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxFilePath, gpuDeviceId: _gpuid);
+ var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxFilePath);
var onnxTransformer = onnxEstimator.Fit(dataView);
var onnxResult = onnxTransformer.Transform(dataView);
var columnName = i == pipelines.Length - 1 ? "Tokens" : "NGrams";
@@ -1454,7 +1452,7 @@ public void OptionalColumnOnnxTest(DataKind dataKind)
{
string[] inputNames = onnxModel.Graph.Input.Select(valueInfoProto => valueInfoProto.Name).ToArray();
string[] outputNames = onnxModel.Graph.Output.Select(valueInfoProto => valueInfoProto.Name).ToArray();
- var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(outputNames, inputNames, onnxModelPath, gpuDeviceId: _gpuid);
+ var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(outputNames, inputNames, onnxModelPath);
var onnxTransformer = onnxEstimator.Fit(dataView);
var onnxResult = onnxTransformer.Transform(dataView);
CompareResults("Label", "Label", outputData, onnxResult);
@@ -1521,7 +1519,7 @@ public void MulticlassTrainersOnnxConversionTest()
if (IsOnnxRuntimeSupported())
{
// Evaluate the saved ONNX model using the data used to train the ML.NET pipeline.
- var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath, gpuDeviceId: _gpuid);
+ var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath);
var onnxTransformer = onnxEstimator.Fit(dataView);
var onnxResult = onnxTransformer.Transform(dataView);
CompareSelectedColumns<uint>("PredictedLabel", "PredictedLabel", transformedData, onnxResult);
@@ -1554,7 +1552,7 @@ public void CopyColumnsOnnxTest()
if (IsOnnxRuntimeSupported())
{
// Evaluate the saved ONNX model using the data used to train the ML.NET pipeline.
- var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath, gpuDeviceId: _gpuid);
+ var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath);
var onnxTransformer = onnxEstimator.Fit(dataView);
var onnxResult = onnxTransformer.Transform(dataView);
CompareSelectedColumns<float>("Target", "Target1", transformedData, onnxResult);
@@ -1615,7 +1613,7 @@ public void UseKeyDataViewTypeAsUInt32InOnnxInput()
if (IsOnnxRuntimeSupported())
{
// Step 5: Apply Onnx Model
- var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(outputNames, inputNames, onnxModelPath, gpuDeviceId: _gpuid);
+ var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(outputNames, inputNames, onnxModelPath);
var onnxResult = onnxEstimator.Fit(reloadedData).Transform(reloadedData);

// Step 6: Compare results to an onnx model created using the mappedData IDataView
@@ -1627,7 +1625,7 @@ public void UseKeyDataViewTypeAsUInt32InOnnxInput()
string onnxModelPath2 = GetOutputPath("onnxmodel2-kdvt-as-uint32.onnx");
using (FileStream stream = new FileStream(onnxModelPath2, FileMode.Create))
mlContext.Model.ConvertToOnnx(model, mappedData, stream);
- var onnxEstimator2 = mlContext.Transforms.ApplyOnnxModel(outputNames, inputNames, onnxModelPath2, gpuDeviceId: _gpuid);
+ var onnxEstimator2 = mlContext.Transforms.ApplyOnnxModel(outputNames, inputNames, onnxModelPath2);
var onnxResult2 = onnxEstimator2.Fit(originalData).Transform(originalData);

var stdSuffix = ".output";
@@ -1680,7 +1678,7 @@ public void FeatureSelectionOnnxTest()
if (IsOnnxRuntimeSupported())
{
// Evaluate the saved ONNX model using the data used to train the ML.NET pipeline.
- var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath, gpuDeviceId: _gpuid);
+ var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(onnxModelPath);
var onnxTransformer = onnxEstimator.Fit(dataView);
var onnxResult = onnxTransformer.Transform(dataView);
CompareSelectedColumns<float>("FeatureSelectMIScalarFloat", "FeatureSelectMIScalarFloat", transformedData, onnxResult);
@@ -1728,7 +1726,7 @@ public void SelectColumnsOnnxTest()
// Evaluate the saved ONNX model using the data used to train the ML.NET pipeline.
string[] inputNames = onnxModel.Graph.Input.Select(valueInfoProto => valueInfoProto.Name).ToArray();
string[] outputNames = onnxModel.Graph.Output.Select(valueInfoProto => valueInfoProto.Name).ToArray();
- var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(outputNames, inputNames, onnxModelPath, gpuDeviceId: _gpuid);
+ var onnxEstimator = mlContext.Transforms.ApplyOnnxModel(outputNames, inputNames, onnxModelPath);
var onnxTransformer = onnxEstimator.Fit(dataView);
var onnxResult = onnxTransformer.Transform(dataView);
