Merge pull request #35 from asus4/ort-v1.20.0
Upgrade OnnxRuntime to v1.20.0
asus4 authored Nov 21, 2024
2 parents 25369c7 + c7b0c4a commit 74b59f5
Showing 34 changed files with 318 additions and 144 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/publish-packages.yml
@@ -4,7 +4,7 @@ on:
types: [published]
workflow_dispatch:
env:
- ORT_VERSION: 1.19.2
+ ORT_VERSION: 1.20.0
ORT_EXTENSIONS_VERSION: 0.12.0
# Dry-run on workflow_dispatch
NPM_OPTS: ${{ github.event_name == 'workflow_dispatch' && '--dry-run' || '' }}
10 changes: 5 additions & 5 deletions README.md
@@ -66,11 +66,11 @@ Pre-built libraries are available on [NPM](https://www.npmjs.com/package/com.git
}
]
"dependencies": {
"com.github.asus4.onnxruntime": "0.2.1",
"com.github.asus4.onnxruntime.unity": "0.2.1",
"com.github.asus4.onnxruntime.win-x64-gpu": "0.2.1",
"com.github.asus4.onnxruntime.linux-x64-gpu": "0.2.1",
"com.github.asus4.onnxruntime-extensions": "0.2.1",
"com.github.asus4.onnxruntime": "0.2.2",
"com.github.asus4.onnxruntime.unity": "0.2.2",
"com.github.asus4.onnxruntime.win-x64-gpu": "0.2.2",
"com.github.asus4.onnxruntime.linux-x64-gpu": "0.2.2",
"com.github.asus4.onnxruntime-extensions": "0.2.2",
... other dependencies
}
```
10 changes: 5 additions & 5 deletions com.github.asus4.onnxruntime-extensions/README.md
@@ -60,11 +60,11 @@ Pre-built libraries are available on [NPM](https://www.npmjs.com/package/com.git
}
]
"dependencies": {
"com.github.asus4.onnxruntime": "0.2.1",
"com.github.asus4.onnxruntime.unity": "0.2.1",
"com.github.asus4.onnxruntime.win-x64-gpu": "0.2.1",
"com.github.asus4.onnxruntime.linux-x64-gpu": "0.2.1",
"com.github.asus4.onnxruntime.extensions": "0.2.1",
"com.github.asus4.onnxruntime": "0.2.2",
"com.github.asus4.onnxruntime.unity": "0.2.2",
"com.github.asus4.onnxruntime.win-x64-gpu": "0.2.2",
"com.github.asus4.onnxruntime.linux-x64-gpu": "0.2.2",
"com.github.asus4.onnxruntime.extensions": "0.2.2",
... other dependencies
}
```
4 changes: 2 additions & 2 deletions com.github.asus4.onnxruntime-extensions/package.json
@@ -1,6 +1,6 @@
{
"name": "com.github.asus4.onnxruntime-extensions",
"version": "0.2.1",
"version": "0.2.2",
"displayName": "ONNX Runtime Extensions",
"description": "ONNX Runtime Extensions for Unity",
"keywords": [
@@ -10,7 +10,7 @@
"unity": "2022.3",
"unityRelease": "0f1",
"dependencies": {
"com.github.asus4.onnxruntime": "0.2.1"
"com.github.asus4.onnxruntime": "0.2.2"
},
"repository": {
"type": "git",
10 changes: 5 additions & 5 deletions com.github.asus4.onnxruntime.linux-x64-gpu/README.md
@@ -60,11 +60,11 @@ Pre-built libraries are available on [NPM](https://www.npmjs.com/package/com.git
}
]
"dependencies": {
"com.github.asus4.onnxruntime": "0.2.1",
"com.github.asus4.onnxruntime.unity": "0.2.1",
"com.github.asus4.onnxruntime.win-x64-gpu": "0.2.1",
"com.github.asus4.onnxruntime.linux-x64-gpu": "0.2.1",
"com.github.asus4.onnxruntime.extensions": "0.2.1",
"com.github.asus4.onnxruntime": "0.2.2",
"com.github.asus4.onnxruntime.unity": "0.2.2",
"com.github.asus4.onnxruntime.win-x64-gpu": "0.2.2",
"com.github.asus4.onnxruntime.linux-x64-gpu": "0.2.2",
"com.github.asus4.onnxruntime.extensions": "0.2.2",
... other dependencies
}
```
4 changes: 2 additions & 2 deletions com.github.asus4.onnxruntime.linux-x64-gpu/package.json
@@ -1,6 +1,6 @@
{
"name": "com.github.asus4.onnxruntime.linux-x64-gpu",
"version": "0.2.1",
"version": "0.2.2",
"displayName": "ONNX Runtime - Linux x64 GPU",
"description": "ONNX Runtime for Unity - Linux x64 GPU Provider",
"keywords": [
@@ -10,7 +10,7 @@
"unity": "2022.3",
"unityRelease": "0f1",
"dependencies": {
"com.github.asus4.onnxruntime": "0.2.1"
"com.github.asus4.onnxruntime": "0.2.2"
},
"repository": {
"type": "git",
10 changes: 5 additions & 5 deletions com.github.asus4.onnxruntime.unity/README.md
@@ -60,11 +60,11 @@ Pre-built libraries are available on [NPM](https://www.npmjs.com/package/com.git
}
]
"dependencies": {
"com.github.asus4.onnxruntime": "0.2.1",
"com.github.asus4.onnxruntime.unity": "0.2.1",
"com.github.asus4.onnxruntime.win-x64-gpu": "0.2.1",
"com.github.asus4.onnxruntime.linux-x64-gpu": "0.2.1",
"com.github.asus4.onnxruntime.extensions": "0.2.1",
"com.github.asus4.onnxruntime": "0.2.2",
"com.github.asus4.onnxruntime.unity": "0.2.2",
"com.github.asus4.onnxruntime.win-x64-gpu": "0.2.2",
"com.github.asus4.onnxruntime.linux-x64-gpu": "0.2.2",
"com.github.asus4.onnxruntime.extensions": "0.2.2",
... other dependencies
}
```
4 changes: 2 additions & 2 deletions com.github.asus4.onnxruntime.unity/package.json
@@ -1,6 +1,6 @@
{
"name": "com.github.asus4.onnxruntime.unity",
"version": "0.2.1",
"version": "0.2.2",
"displayName": "ONNX Runtime Unity Utilities",
"description": "ONNX Runtime Utilities for Unity",
"keywords": [
@@ -10,7 +10,7 @@
"unity": "2022.3",
"unityRelease": "0f1",
"dependencies": {
"com.github.asus4.onnxruntime": "0.2.1"
"com.github.asus4.onnxruntime": "0.2.2"
},
"repository": {
"type": "git",
10 changes: 5 additions & 5 deletions com.github.asus4.onnxruntime.win-x64-gpu/README.md
@@ -60,11 +60,11 @@ Pre-built libraries are available on [NPM](https://www.npmjs.com/package/com.git
}
]
"dependencies": {
"com.github.asus4.onnxruntime": "0.2.1",
"com.github.asus4.onnxruntime.unity": "0.2.1",
"com.github.asus4.onnxruntime.win-x64-gpu": "0.2.1",
"com.github.asus4.onnxruntime.linux-x64-gpu": "0.2.1",
"com.github.asus4.onnxruntime.extensions": "0.2.1",
"com.github.asus4.onnxruntime": "0.2.2",
"com.github.asus4.onnxruntime.unity": "0.2.2",
"com.github.asus4.onnxruntime.win-x64-gpu": "0.2.2",
"com.github.asus4.onnxruntime.linux-x64-gpu": "0.2.2",
"com.github.asus4.onnxruntime.extensions": "0.2.2",
... other dependencies
}
```
4 changes: 2 additions & 2 deletions com.github.asus4.onnxruntime.win-x64-gpu/package.json
@@ -1,6 +1,6 @@
{
"name": "com.github.asus4.onnxruntime.win-x64-gpu",
"version": "0.2.1",
"version": "0.2.2",
"displayName": "ONNX Runtime - Windows x64 GPU",
"description": "ONNX Runtime for Unity - Windows x64 GPU Provider",
"keywords": [
@@ -10,7 +10,7 @@
"unity": "2022.3",
"unityRelease": "0f1",
"dependencies": {
"com.github.asus4.onnxruntime": "0.2.1"
"com.github.asus4.onnxruntime": "0.2.2"
},
"repository": {
"type": "git",
10 changes: 5 additions & 5 deletions com.github.asus4.onnxruntime/README.md
@@ -60,11 +60,11 @@ Pre-built libraries are available on [NPM](https://www.npmjs.com/package/com.git
}
]
"dependencies": {
"com.github.asus4.onnxruntime": "0.2.1",
"com.github.asus4.onnxruntime.unity": "0.2.1",
"com.github.asus4.onnxruntime.win-x64-gpu": "0.2.1",
"com.github.asus4.onnxruntime.linux-x64-gpu": "0.2.1",
"com.github.asus4.onnxruntime.extensions": "0.2.1",
"com.github.asus4.onnxruntime": "0.2.2",
"com.github.asus4.onnxruntime.unity": "0.2.2",
"com.github.asus4.onnxruntime.win-x64-gpu": "0.2.2",
"com.github.asus4.onnxruntime.linux-x64-gpu": "0.2.2",
"com.github.asus4.onnxruntime.extensions": "0.2.2",
... other dependencies
}
```
3 changes: 1 addition & 2 deletions com.github.asus4.onnxruntime/Runtime/AssemblyInfo.shared.cs
@@ -17,6 +17,5 @@

// Making these assembly's internals visible to the internal Test assembly
[assembly: System.Runtime.CompilerServices.InternalsVisibleTo("Microsoft.ML.OnnxRuntime.Tests.Common, PublicKey=002400000480000094000000060200000024000052534131000400000100010059013e94e4bc70136ca4c35f33acd6b62974536b698f9c7a21cee18d805c7ad860ad9eebfdc47a96ba2f8d03f4cf1c36b9d30787e276c7b9833b5bf2a6eba7e919e6b90083078a352262aed1d842e5f70a3085cbcf4c56ae851b161137920961c23fcc246598d61d258ccc615c927b2441359eea666a99ce1c3c07dca18fb0e1")]
- [assembly: System.Runtime.CompilerServices.InternalsVisibleTo("Microsoft.ML.OnnxRuntime.Tests.Droid, PublicKey=002400000480000094000000060200000024000052534131000400000100010059013e94e4bc70136ca4c35f33acd6b62974536b698f9c7a21cee18d805c7ad860ad9eebfdc47a96ba2f8d03f4cf1c36b9d30787e276c7b9833b5bf2a6eba7e919e6b90083078a352262aed1d842e5f70a3085cbcf4c56ae851b161137920961c23fcc246598d61d258ccc615c927b2441359eea666a99ce1c3c07dca18fb0e1")]
- [assembly: System.Runtime.CompilerServices.InternalsVisibleTo("Microsoft.ML.OnnxRuntime.Tests.iOS, PublicKey=002400000480000094000000060200000024000052534131000400000100010059013e94e4bc70136ca4c35f33acd6b62974536b698f9c7a21cee18d805c7ad860ad9eebfdc47a96ba2f8d03f4cf1c36b9d30787e276c7b9833b5bf2a6eba7e919e6b90083078a352262aed1d842e5f70a3085cbcf4c56ae851b161137920961c23fcc246598d61d258ccc615c927b2441359eea666a99ce1c3c07dca18fb0e1")]
[assembly: System.Runtime.CompilerServices.InternalsVisibleTo("Microsoft.ML.OnnxRuntime.Tests.NetCoreApp, PublicKey=002400000480000094000000060200000024000052534131000400000100010059013e94e4bc70136ca4c35f33acd6b62974536b698f9c7a21cee18d805c7ad860ad9eebfdc47a96ba2f8d03f4cf1c36b9d30787e276c7b9833b5bf2a6eba7e919e6b90083078a352262aed1d842e5f70a3085cbcf4c56ae851b161137920961c23fcc246598d61d258ccc615c927b2441359eea666a99ce1c3c07dca18fb0e1")]
+ [assembly: System.Runtime.CompilerServices.InternalsVisibleTo("Microsoft.ML.OnnxRuntime.Tests.MAUI, PublicKey=002400000480000094000000060200000024000052534131000400000100010059013e94e4bc70136ca4c35f33acd6b62974536b698f9c7a21cee18d805c7ad860ad9eebfdc47a96ba2f8d03f4cf1c36b9d30787e276c7b9833b5bf2a6eba7e919e6b90083078a352262aed1d842e5f70a3085cbcf4c56ae851b161137920961c23fcc246598d61d258ccc615c927b2441359eea666a99ce1c3c07dca18fb0e1")]
@@ -83,7 +83,7 @@ public class DisposableNamedOnnxValue : NamedOnnxValue, IDisposable
/// Ctor
/// </summary>
/// <param name="name">Name of the output value</param>
/// <param name="value">Managed object created to represent output value, such as DenseTensor<T>
/// <param name="value">Managed object created to represent output value, such as DenseTensor{T};
/// List or Dictionary
/// </param>
/// <param name="elementType">Tensor element type if value type is a Tensor</param>
@@ -133,7 +133,7 @@ private DisposableNamedOnnxValue(string name, Object value, MapHelper mapHelper,
public TensorElementType ElementType { get; }

/// <summary>
- /// Overrides the base class method. With respect to pinnedMemoryHandle, it has no operation
+ /// Overrides the base class method. With respect to memoryHolder, it has no operation
/// to do, as this class maintains a native buffer via _ortValueHolder and the memory will be
/// disposed by it. This is the case when we are dealing with an OrtValue that is backed by native memory
/// and not by pinned managed memory.
@@ -142,15 +142,15 @@ private DisposableNamedOnnxValue(string name, Object value, MapHelper mapHelper,
/// but the interface (derived from NamedOnnxValue) allows it to be passed as output and one of the test
/// cases does it. Unless we deprecate and re-do the interface, we must support it.
/// </summary>
/// <param name="pinnedMemoryHandle">always set to null</param>
/// <param name="memoryHolder">always set to null</param>
/// <returns>Native OrtValue handle</returns>
internal override IntPtr InputToOrtValueHandle(NodeMetadata metadata, out IDisposable memoryHolder)
{
if (_ortValueHolder == null)
{
throw new InvalidOperationException("The instance of this class does not own an OrtValue");
}
- // PinnedMemoryHandle holds the default value as DisposableNamedOnnxValue
+ // memoryHolder holds the default value as DisposableNamedOnnxValue
// doesn't hold any managed buffer (that needs to be pinned)
memoryHolder = null;
// Return non-owning instance of OrtValue
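For context on the ownership semantics described in the doc comments above: values returned by InferenceSession.Run are DisposableNamedOnnxValue instances backed by native OrtValue memory, so the caller is expected to dispose them. The following is a minimal sketch, not part of this diff; the model path, input name, and shape are placeholder assumptions.

```csharp
using System.Linq;
using Microsoft.ML.OnnxRuntime;
using Microsoft.ML.OnnxRuntime.Tensors;

// Sketch only: "model.onnx" and the input name "input" are placeholders.
using var session = new InferenceSession("model.onnx");

var input = new DenseTensor<float>(new[] { 1, 3, 224, 224 });
var inputs = new[] { NamedOnnxValue.CreateFromTensor("input", input) };

// Run returns DisposableNamedOnnxValue items that own native OrtValue memory,
// so the result collection (and therefore its items) must be disposed.
using var results = session.Run(inputs);
var output = results.First().AsTensor<float>();
```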
@@ -76,9 +76,9 @@ public static FixedBufferOnnxValue CreateFromTensor<T>(Tensor<T> value)
///
/// var memInfo = OrtMemoryInfo.DefaultInstance; // CPU
///
- /// using(var fixedBufferInput = FixedBufferOnnxvalue.CreateFromMemory<Half>(memInfo,
+ /// using(var fixedBufferInput = FixedBufferOnnxvalue.CreateFromMemory{Half}(memInfo,
/// input, TensorElementType.Float16, input_shape, input.Length * sizeof(ushort))
- /// using(var fixedBufferOutput = FixedBufferOnnxvalue.CreateFromMemory<Half>(memInfo,
+ /// using(var fixedBufferOutput = FixedBufferOnnxvalue.CreateFromMemory{Half}(memInfo,
/// output, TensorElementType.Float16, output_shape, output.Length * sizeof(ushort))
/// {
/// FixedBufferOnnxvalue[] inputValues = new FixedBufferOnnxvalue[]{fixedBufferInput};
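The XML-doc example above pins raw Half buffers with FixedBufferOnnxValue.CreateFromMemory. A simpler, hedged sketch of the same buffer-reuse pattern via CreateFromTensor is shown below; the model path, tensor shapes, and input/output names are illustrative assumptions, not taken from this repository.

```csharp
using Microsoft.ML.OnnxRuntime;
using Microsoft.ML.OnnxRuntime.Tensors;

// Sketch only: the model path, shapes, and I/O names are placeholders.
using var session = new InferenceSession("model.onnx");

var inputTensor = new DenseTensor<float>(new[] { 1, 3 });
var outputTensor = new DenseTensor<float>(new[] { 1, 2 });

// Each FixedBufferOnnxValue pins a managed buffer; dispose it when done.
using var inputValue = FixedBufferOnnxValue.CreateFromTensor(inputTensor);
using var outputValue = FixedBufferOnnxValue.CreateFromTensor(outputTensor);

// The same pinned buffers can be reused across repeated Run calls,
// writing the output in place instead of allocating new results.
session.Run(new[] { "input" }, new[] { inputValue },
            new[] { "output" }, new[] { outputValue });
```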
@@ -908,7 +908,7 @@ private static IntPtr ExtractOrtValueHandleForOutput(NamedOnnxValue output, Node
/// </summary>
/// <param name="values">names to convert to zero terminated utf8 and pin</param>
/// <param name="nameExtractor">extractor functor that helps extracting names from inputs</param>
/// <param name="metaDict">inputs/outputs metadata</param>
/// <param name="metaLookup">inputs/outputs metadata</param>
/// <returns></returns>
private static IntPtr[] LookupUtf8Names<T>(IReadOnlyCollection<T> values, NameExtractor<T> nameExtractor,
MetadataLookup metaLookup)
@@ -1222,7 +1222,6 @@ private void Init(byte[] modelData, SessionOptions options,
/// Initializes the session object with a native session handle
/// </summary>
/// <param name="session">Value of a native session object</param>
/// <param name="options">Session options</param>
private void InitWithSessionHandle(IntPtr session)
{
_nativeHandle = session;
@@ -2075,7 +2074,7 @@ public long Version
/// <summary>
/// Custom metadata key/value pairs
/// </summary>
/// <value>An instance of a Dictionary<string,string></value>
/// <value>An instance of a Dictionary{string,string}</value>
public Dictionary<string, string> CustomMetadataMap
{
get
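The CustomMetadataMap hunk above refers to the Dictionary{string,string} exposed through the session's ModelMetadata. A small, hedged sketch of reading it follows; the model path is a placeholder.

```csharp
using System;
using Microsoft.ML.OnnxRuntime;

// Sketch only: "model.onnx" is a placeholder path.
using var session = new InferenceSession("model.onnx");
ModelMetadata meta = session.ModelMetadata;

Console.WriteLine($"Graph: {meta.GraphName}, Version: {meta.Version}");

// CustomMetadataMap is the Dictionary<string, string> mentioned in the
// doc-comment change above.
foreach (var pair in meta.CustomMetadataMap)
{
    Console.WriteLine($"{pair.Key} = {pair.Value}");
}
```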
@@ -65,7 +65,7 @@ internal static OrtValue CreateProjection(NamedOnnxValue namedOnnxValue, NodeMet
/// The function creates OrtValue objects for each element of the sequence
/// and then creates an OrtValue for the whole sequence.
/// </summary>
/// <param name="namedOnnxValue">NamedOnnxValue containing a IEnumerable<NameOnnValue></param>
/// <param name="namedOnnxValue">NamedOnnxValue containing a IEnumerable{NamedOnnxValue}</param>
/// <param name="metadata">sequence metadata</param>
/// <returns>OrtValue that represents a sequence</returns>
/// <exception cref="OnnxRuntimeException"></exception>
23 changes: 6 additions & 17 deletions com.github.asus4.onnxruntime/Runtime/NamedOnnxValue.shared.cs
@@ -53,13 +53,13 @@ internal MapHelper(TensorBase keys, TensorBase values)
/// Other sequences and maps. Although the OnnxValueType is exposed,
/// the caller is supposed to know the actual data type contained.
///
- /// The convention is that for tensors, it would contain a DenseTensor<T> instance or
- /// anything derived from Tensor<T>.
+ /// The convention is that for tensors, it would contain a DenseTensor{T} instance or
+ /// anything derived from Tensor{T}.
///
- /// For sequences, it would contain a IList<T> where T is an instance of NamedOnnxValue that
+ /// For sequences, it would contain a IList{T} where T is an instance of NamedOnnxValue that
/// would contain a tensor or another type.
///
- /// For Maps, it would contain a IDictionary<K, V> where K,V are primitive types or strings.
+ /// For Maps, it would contain a IDictionary{K, V} where K,V are primitive types or strings.
///
/// </summary>
public class NamedOnnxValue
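The class summary above spells out which managed types a NamedOnnxValue is expected to wrap. The sketch below illustrates those conventions through the public factory methods; the value names, shapes, and map key/value types are illustrative assumptions, not taken from this diff.

```csharp
using System.Collections.Generic;
using Microsoft.ML.OnnxRuntime;
using Microsoft.ML.OnnxRuntime.Tensors;

// Tensor: wraps a DenseTensor<T> (or anything derived from Tensor<T>).
var tensor = new DenseTensor<float>(new[] { 1, 4 });
var tensorValue = NamedOnnxValue.CreateFromTensor("features", tensor);

// Sequence: wraps an IEnumerable<T> of NamedOnnxValue elements.
var sequenceValue = NamedOnnxValue.CreateFromSequence(
    "sequence", new List<NamedOnnxValue> { tensorValue });

// Map: wraps an IDictionary<K, V> with primitive or string keys/values.
var mapValue = NamedOnnxValue.CreateFromMap(
    "map", new Dictionary<long, float> { { 1L, 0.5f } });
```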
@@ -153,7 +153,7 @@ public static NamedOnnxValue CreateFromSequence<T>(string name, IEnumerable<T> v
}

/// <summary>
- /// Instantiates NamedOnnxValue that contains IDictionary<K, V>
+ /// Instantiates NamedOnnxValue that contains IDictionary{K, V}
/// </summary>
/// <typeparam name="K">Keys type</typeparam>
/// <typeparam name="V">Values type</typeparam>
@@ -225,7 +225,7 @@ public IDictionary<K, V> AsDictionary<K, V>()
/// based on the pinned managed memory. The caller is responsible for Disposing
/// both OrtValue and pinnedMemoryHandle
/// </summary>
/// <param name="pinnedMemoryHandle">dispose after returned OrtValus is disposed</param>
/// <param name="memoryOwner">dispose after returned OrtValue is disposed</param>
/// <returns>The native OrtValue handle</returns>
internal virtual IntPtr InputToOrtValueHandle(NodeMetadata metadata, out IDisposable memoryOwner)
{
@@ -272,12 +272,6 @@ internal virtual IntPtr OutputToOrtValueHandle(NodeMetadata metadata, out IDispo
$" Use Run() overloads that return DisposableNamedOnnxValue to get access to all Onnx value types that may be returned as output.");
}

/// <summary>
/// This method is used internally to feed dictionary keys
/// to create an OrtValue for map keys
/// </summary>
/// <typeparam name="K"></typeparam>
/// <returns>DenseTensor<K>"</returns>
internal TensorBase GetDictionaryKeys()
{
if (ValueType != OnnxValueType.ONNX_TYPE_MAP)
@@ -289,11 +283,6 @@ internal TensorBase GetDictionaryKeys()
return _mapHelper.Keys;
}

/// <summary>
///
/// </summary>
/// <typeparam name="V"></typeparam>
/// <returns>DenseTensor<V>"</returns>
internal TensorBase GetDictionaryValues()
{
if (ValueType != OnnxValueType.ONNX_TYPE_MAP)