@@ -16,7 +16,7 @@ public CpuExecutionProviderFactory(bool useArena=true)
int useArenaInt = useArena ? 1 : 0;
try
{
-NativeApiStatus.VerifySuccess(NativeMethods.ONNXRuntimeCreateCpuExecutionProviderFactory(useArenaInt, out handle));
+NativeApiStatus.VerifySuccess(NativeMethods.OrtCreateCpuExecutionProviderFactory(useArenaInt, out handle));
}
catch(OnnxRuntimeException e)
{
@@ -48,7 +48,7 @@ public MklDnnExecutionProviderFactory(bool useArena = true)
int useArenaInt = useArena ? 1 : 0;
try
{
-NativeApiStatus.VerifySuccess(NativeMethods.ONNXRuntimeCreateMkldnnExecutionProviderFactory(useArenaInt, out handle));
+NativeApiStatus.VerifySuccess(NativeMethods.OrtCreateMkldnnExecutionProviderFactory(useArenaInt, out handle));
}
catch (OnnxRuntimeException e)
{
30 changes: 15 additions & 15 deletions csharp/src/Microsoft.ML.OnnxRuntime/InferenceSession.cs
@@ -44,15 +44,15 @@ public InferenceSession(string modelPath, SessionOptions options)
_nativeHandle = IntPtr.Zero;
try
{
-NativeApiStatus.VerifySuccess(NativeMethods.ONNXRuntimeCreateInferenceSession(envHandle, modelPath, options.NativeHandle, out _nativeHandle));
+NativeApiStatus.VerifySuccess(NativeMethods.OrtCreateInferenceSession(envHandle, modelPath, options.NativeHandle, out _nativeHandle));

// Initialize input/output metadata
_inputMetadata = new Dictionary<string, NodeMetadata>();
_outputMetadata = new Dictionary<string, NodeMetadata>();

// get input count
ulong inputCount = 0;
-NativeApiStatus.VerifySuccess(NativeMethods.ONNXRuntimeInferenceSessionGetInputCount(_nativeHandle, out inputCount));
+NativeApiStatus.VerifySuccess(NativeMethods.OrtInferenceSessionGetInputCount(_nativeHandle, out inputCount));

// get all the output names
for (ulong i = 0; i < inputCount; i++)
@@ -62,7 +62,7 @@ public InferenceSession(string modelPath, SessionOptions options)

// get output count
ulong outputCount = 0;
-NativeApiStatus.VerifySuccess(NativeMethods.ONNXRuntimeInferenceSessionGetOutputCount(_nativeHandle, out outputCount));
+NativeApiStatus.VerifySuccess(NativeMethods.OrtInferenceSessionGetOutputCount(_nativeHandle, out outputCount));

// get all the output names
for (ulong i = 0; i < outputCount; i++)
@@ -148,7 +148,7 @@ internal IReadOnlyCollection<NamedOnnxValue> Run(IReadOnlyCollection<NamedOnnxVa
string[] outputNamesArray = outputNames.ToArray();
IntPtr[] outputValueArray = new IntPtr[outputNames.Count];

-IntPtr status = NativeMethods.ONNXRuntimeRunInference(
+IntPtr status = NativeMethods.OrtRunInference(
this._nativeHandle,
IntPtr.Zero, // TODO: use Run options when Run options creation API is available
// Passing null uses the default run options in the C-api
@@ -212,7 +212,7 @@ private string GetOutputName(ulong index)
IntPtr nameHandle = IntPtr.Zero;
string str = null;

-IntPtr status = NativeMethods.ONNXRuntimeInferenceSessionGetOutputName(
+IntPtr status = NativeMethods.OrtInferenceSessionGetOutputName(
_nativeHandle,
index,
NativeMemoryAllocator.DefaultInstance.Handle,
@@ -238,7 +238,7 @@ private string GetInputName(ulong index)
IntPtr nameHandle = IntPtr.Zero;
string str = null;

-IntPtr status = NativeMethods.ONNXRuntimeInferenceSessionGetInputName(
+IntPtr status = NativeMethods.OrtInferenceSessionGetInputName(
_nativeHandle,
index,
NativeMemoryAllocator.DefaultInstance.Handle,
@@ -265,14 +265,14 @@ private NodeMetadata GetInputMetadata(ulong index)
IntPtr typeInfo = IntPtr.Zero;
try
{
-NativeApiStatus.VerifySuccess(NativeMethods.ONNXRuntimeInferenceSessionGetInputTypeInfo(_nativeHandle, index, out typeInfo));
+NativeApiStatus.VerifySuccess(NativeMethods.OrtInferenceSessionGetInputTypeInfo(_nativeHandle, index, out typeInfo));
return GetMetadataFromTypeInfo(typeInfo);
}
finally
{
if (typeInfo != IntPtr.Zero)
{
-NativeMethods.ONNXRuntimeReleaseObject(typeInfo);
+NativeMethods.OrtReleaseObject(typeInfo);
}
}
}
@@ -282,30 +282,30 @@ private NodeMetadata GetOutputMetadata(ulong index)
IntPtr typeInfo = IntPtr.Zero;
try
{
-NativeApiStatus.VerifySuccess(NativeMethods.ONNXRuntimeInferenceSessionGetOutputTypeInfo(_nativeHandle, index, out typeInfo));
+NativeApiStatus.VerifySuccess(NativeMethods.OrtInferenceSessionGetOutputTypeInfo(_nativeHandle, index, out typeInfo));
return GetMetadataFromTypeInfo(typeInfo);
}
finally
{
if (typeInfo != IntPtr.Zero)
{
-NativeMethods.ONNXRuntimeReleaseObject(typeInfo);
+NativeMethods.OrtReleaseObject(typeInfo);
}
}
}

private NodeMetadata GetMetadataFromTypeInfo(IntPtr typeInfo)
{
-IntPtr tensorInfo = NativeMethods.ONNXRuntimeCastTypeInfoToTensorInfo(typeInfo);
-// Convert the newly introduced ONNXRuntimeTypeInfo* to the older ONNXRuntimeTypeAndShapeInfo*
+IntPtr tensorInfo = NativeMethods.OrtCastTypeInfoToTensorInfo(typeInfo);
+// Convert the newly introduced OrtTypeInfo* to the older OrtTypeAndShapeInfo*

-TensorElementType type = NativeMethods.ONNXRuntimeGetTensorElementType(tensorInfo);
+TensorElementType type = NativeMethods.OrtGetTensorElementType(tensorInfo);
Type dotnetType = null;
int width = 0;
TensorElementTypeConverter.GetTypeAndWidth(type, out dotnetType, out width);
-ulong numDimensions = NativeMethods.ONNXRuntimeGetNumOfDimensions(tensorInfo);
+ulong numDimensions = NativeMethods.OrtGetNumOfDimensions(tensorInfo);
long[] dimensions = new long[(int)numDimensions];
-NativeMethods.ONNXRuntimeGetDimensions(tensorInfo, dimensions, numDimensions);
+NativeMethods.OrtGetDimensions(tensorInfo, dimensions, numDimensions);
int[] intDimensions = new int[(int)numDimensions];
for (ulong i = 0; i < numDimensions; i++)
{
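The InferenceSession changes above are part of the same mechanical rename of native entry points from the ONNXRuntime* prefix to Ort*. For orientation, the following is a sketch of what the matching P/Invoke declaration for OrtCreateInferenceSession could look like, inferred purely from the call site in this diff; the library name, calling convention, and string marshaling in the actual NativeMethods.cs may differ.

using System;
using System.Runtime.InteropServices;

internal static class NativeMethodsSketch // hypothetical stand-in for NativeMethods
{
    // Inferred from the call site: returns an OrtStatus* (IntPtr.Zero on success)
    // and hands the session back through an out parameter. The attributes below
    // are assumptions, not the repository's actual declaration.
    [DllImport("onnxruntime", CallingConvention = CallingConvention.Cdecl, CharSet = CharSet.Ansi)]
    internal static extern IntPtr /*(OrtStatus*)*/ OrtCreateInferenceSession(
        IntPtr environment,
        string modelPath,
        IntPtr sessionOptions,
        out IntPtr session);
}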
8 changes: 4 additions & 4 deletions csharp/src/Microsoft.ML.OnnxRuntime/NamedOnnxValue.cs
@@ -161,7 +161,7 @@ out nativeElementType
longShape[i] = (ulong)shape[i];
}

-IntPtr status = NativeMethods.ONNXRuntimeCreateTensorWithDataAsONNXValue(
+IntPtr status = NativeMethods.OrtCreateTensorWithDataAsONNXValue(
NativeMemoryAllocatorInfo.DefaultInstance.Handle,
dataBufferPointer,
(ulong)(dataBufferLength),
@@ -191,14 +191,14 @@ internal static NamedOnnxValue CreateFromOnnxValue(string name, IntPtr nativeOnn
TensorElementType elemType = TensorElementType.DataTypeMax;
try
{
-NativeApiStatus.VerifySuccess(NativeMethods.ONNXRuntimeGetTensorShapeAndType(nativeOnnxValue, out typeAndShape));
-elemType = NativeMethods.ONNXRuntimeGetTensorElementType(typeAndShape);
+NativeApiStatus.VerifySuccess(NativeMethods.OrtGetTensorShapeAndType(nativeOnnxValue, out typeAndShape));
+elemType = NativeMethods.OrtGetTensorElementType(typeAndShape);
}
finally
{
if (typeAndShape != IntPtr.Zero)
{
-NativeMethods.ONNXRuntimeReleaseObject(typeAndShape);
+NativeMethods.OrtReleaseObject(typeAndShape);
}
}

4 changes: 2 additions & 2 deletions csharp/src/Microsoft.ML.OnnxRuntime/NativeApiStatus.cs
@@ -10,7 +10,7 @@ class NativeApiStatus
{
private static string GetErrorMessage(IntPtr /*(ONNXStatus*)*/status)
{
-IntPtr nativeString = NativeMethods.ONNXRuntimeGetErrorMessage(status);
+IntPtr nativeString = NativeMethods.OrtGetErrorMessage(status);
string str = Marshal.PtrToStringAnsi(nativeString); //assumes charset = ANSI
return str;
}
@@ -25,7 +25,7 @@ public static void VerifySuccess(IntPtr nativeStatus)
{
if (nativeStatus != IntPtr.Zero)
{
-ErrorCode statusCode = NativeMethods.ONNXRuntimeGetErrorCode(nativeStatus);
+ErrorCode statusCode = NativeMethods.OrtGetErrorCode(nativeStatus);
string errorMessage = GetErrorMessage(nativeStatus);
NativeMethods.ReleaseONNXStatus(nativeStatus);
throw new OnnxRuntimeException(statusCode, errorMessage);
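Every renamed native call in this PR follows the convention that VerifySuccess encodes: the function returns an OrtStatus* where IntPtr.Zero means success, and a non-null status carries an error code and message and must be released. A minimal caller-side sketch of that convention, using only names that appear in this diff (it assumes ANSI strings, as GetErrorMessage above does):

// Sketch of the status convention that VerifySuccess wraps.
IntPtr allocator = IntPtr.Zero;
IntPtr status = NativeMethods.OrtCreateDefaultAllocator(out allocator);
if (status != IntPtr.Zero)
{
    ErrorCode statusCode = NativeMethods.OrtGetErrorCode(status);
    string errorMessage = Marshal.PtrToStringAnsi(NativeMethods.OrtGetErrorMessage(status));
    NativeMethods.ReleaseONNXStatus(status); // release the status before throwing
    throw new OnnxRuntimeException(statusCode, errorMessage);
}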
10 changes: 5 additions & 5 deletions csharp/src/Microsoft.ML.OnnxRuntime/NativeMemoryAllocator.cs
@@ -17,7 +17,7 @@ private static NativeMemoryAllocatorInfo CreateCpuAllocatorInfo()
IntPtr allocInfo = IntPtr.Zero;
try
{
-IntPtr status = NativeMethods.ONNXRuntimeCreateCpuAllocatorInfo(NativeMethods.AllocatorType.DeviceAllocator, NativeMethods.MemoryType.Cpu, out allocInfo);
+IntPtr status = NativeMethods.OrtCreateCpuAllocatorInfo(NativeMethods.AllocatorType.DeviceAllocator, NativeMethods.MemoryType.Cpu, out allocInfo);
NativeApiStatus.VerifySuccess(status);
}
catch (Exception e)
@@ -64,7 +64,7 @@ private NativeMemoryAllocatorInfo(IntPtr allocInfo)

private static void Delete(IntPtr nativePtr)
{
-NativeMethods.ReleaseONNXRuntimeAllocatorInfo(nativePtr);
+NativeMethods.ReleaseOrtAllocatorInfo(nativePtr);
}

protected override bool ReleaseHandle()
@@ -84,7 +84,7 @@ private static NativeMemoryAllocator CreateDefaultCpuAllocator()
IntPtr allocator = IntPtr.Zero;
try
{
-IntPtr status = NativeMethods.ONNXRuntimeCreateDefaultAllocator(out allocator);
+IntPtr status = NativeMethods.OrtCreateDefaultAllocator(out allocator);
NativeApiStatus.VerifySuccess(status);
}
catch (Exception e)
@@ -113,7 +113,7 @@ private static NativeMemoryAllocator CreateDefaultCpuAllocator()
/// <param name="memory"></param>
internal void FreeMemory(IntPtr memory)
{
-NativeMethods.ONNXRuntimeAllocatorFree(handle, memory);
+NativeMethods.OrtAllocatorFree(handle, memory);
}

public override bool IsInvalid
@@ -141,7 +141,7 @@ protected NativeMemoryAllocator(IntPtr allocator)

protected static void Delete(IntPtr allocator)
{
-NativeMethods.ONNXRuntimeReleaseObject(allocator);
+NativeMethods.OrtReleaseObject(allocator);
}

protected override bool ReleaseHandle()
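The allocator wrappers above follow the SafeHandle pattern: the native pointer lives inside a SafeHandle subclass, IsInvalid compares it against IntPtr.Zero, and ReleaseHandle releases it exactly once through the renamed OrtReleaseObject entry point. A simplified illustration of that shape follows; it is not the repository's actual class, which also carries a default-instance singleton and the FreeMemory helper.

using System;
using System.Runtime.InteropServices;

// Simplified illustration of the SafeHandle pattern used by NativeMemoryAllocator.
class NativeAllocatorSketch : SafeHandle
{
    internal NativeAllocatorSketch(IntPtr allocator)
        : base(IntPtr.Zero, true) // ownsHandle: true, so ReleaseHandle runs once
    {
        SetHandle(allocator);
    }

    public override bool IsInvalid
    {
        get { return handle == IntPtr.Zero; }
    }

    protected override bool ReleaseHandle()
    {
        NativeMethods.OrtReleaseObject(handle); // matches Delete() in the diff
        return true;
    }
}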