Rename OX->ORT #31

Merged
merged 3 commits on Sep 23, 2023
2 changes: 1 addition & 1 deletion Project.toml
@@ -1,7 +1,7 @@
name = "ONNXRunTime"
uuid = "e034b28e-924e-41b2-b98f-d2bbeb830c6a"
authors = ["Jan Weidner <[email protected]> and contributors"]
version = "0.3.2"
version = "0.3.3"

[deps]
ArgCheck = "dce04be8-c92d-5529-be00-80e4d2c0e197"
8 changes: 4 additions & 4 deletions README.md
@@ -14,11 +14,11 @@ Contributions are welcome.
The high level API works as follows:
```julia

-julia> import ONNXRunTime as OX
+julia> import ONNXRunTime as ORT

-julia> path = OX.testdatapath("increment2x3.onnx"); # path to a toy model
+julia> path = ORT.testdatapath("increment2x3.onnx"); # path to a toy model

-julia> model = OX.load_inference(path);
+julia> model = ORT.load_inference(path);

julia> input = Dict("input" => randn(Float32,2,3))
Dict{String, Matrix{Float32}} with 1 entry:
@@ -34,7 +34,7 @@ pkg> add CUDA

julia> import CUDA

-julia> OX.load_inference(path, execution_provider=:cuda)
+julia> ORT.load_inference(path, execution_provider=:cuda)
```

The low level API mirrors the official [C-API](https://github.com/microsoft/onnxruntime/blob/v1.8.1/include/onnxruntime/core/session/onnxruntime_c_api.h#L347). The above example looks like this:
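For reference, here is a minimal sketch of what that low-level usage looks like after the rename, assembled only from the CAPI calls exercised in the tests in this PR; the session Run step and output handling are omitted here, so see the README's full example for those:

```julia
using ONNXRunTime.CAPI
import ONNXRunTime as ORT

api = GetApi()
env = CreateEnv(api, name="myenv")
path = ORT.testdatapath("increment2x3.onnx")
session_options = CreateSessionOptions(api)
session = CreateSession(api, env, path, session_options)

# Query input/output names through an allocator.
mem = CreateCpuMemoryInfo(api)
allocator = CreateAllocator(api, session, mem)
SessionGetInputName(api, session, 0, allocator)   # "input"
SessionGetOutputName(api, session, 0, allocator)  # "output"

# Wrap a Julia array as an OrtValue tensor; reversedims converts between
# Julia's column-major layout and the row-major layout ONNX expects.
data = randn(Float32, 2, 3)
tensor = CreateTensorWithDataAsOrtValue(api, mem, vec(ORT.reversedims(data)), size(data))
GetTensorMutableData(api, tensor)  # read the tensor contents back as a Julia array
```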
14 changes: 7 additions & 7 deletions test/test_capi.jl
@@ -1,13 +1,13 @@
module TestCAPI
using Test
using ONNXRunTime.CAPI
-import ONNXRunTime as OX
+import ONNXRunTime as ORT

@testset "Session" begin
@testset "increment2x3" begin
api = GetApi()
env = CreateEnv(api, name="myenv")
path = OX.testdatapath("increment2x3.onnx")
path = ORT.testdatapath("increment2x3.onnx")
session_options = CreateSessionOptions(api)
@test (sprint(show, session_options); true)
@test_throws Exception CreateSession(api, env, "does_not_exits.onnx", session_options)
@@ -20,9 +20,9 @@ import ONNXRunTime as OX
allocator = CreateAllocator(api, session, mem)
@test (sprint(show, allocator); true)
@test SessionGetInputName(api, session, 0, allocator) == "input"
-@test_throws OX.OrtException SessionGetInputName(api, session, 1, allocator)
+@test_throws ORT.OrtException SessionGetInputName(api, session, 1, allocator)
@test SessionGetOutputName(api, session, 0, allocator) == "output"
-@test_throws OX.OrtException SessionGetOutputName(api, session, 1, allocator)
+@test_throws ORT.OrtException SessionGetOutputName(api, session, 1, allocator)
input_vec = randn(Float32, 6)
input_array = [
input_vec[1] input_vec[2] input_vec[3];
@@ -46,7 +46,7 @@ import ONNXRunTime as OX
@testset "increment2x3 ModelMetadata" begin
api = GetApi()
env = CreateEnv(api, name="myenv")
path = OX.testdatapath("increment2x3.onnx")
path = ORT.testdatapath("increment2x3.onnx")
session_options = CreateSessionOptions(api)
@test (sprint(show, session_options); true)
@test_throws Exception CreateSession(api, env, "does_not_exits.onnx", session_options)
@@ -67,12 +67,12 @@ end
api = GetApi()
mem = CreateCpuMemoryInfo(api)
data = randn(2,3)
-tensor = CreateTensorWithDataAsOrtValue(api, mem, vec(OX.reversedims(data)), size(data))
+tensor = CreateTensorWithDataAsOrtValue(api, mem, vec(ORT.reversedims(data)), size(data))
@test IsTensor(api, tensor)
info = GetTensorTypeAndShape(api, tensor)
onnxelty = GetTensorElementType(api, info)
@test onnxelty isa ONNXTensorElementDataType
-@test OX.juliatype(onnxelty) == eltype(data)
+@test ORT.juliatype(onnxelty) == eltype(data)
@test GetDimensionsCount(api, info) == 2
@test GetDimensions(api, info) == [2,3]
data2 = GetTensorMutableData(api, tensor)
14 changes: 7 additions & 7 deletions test/test_cuda.jl
@@ -2,18 +2,18 @@ module TestCUDA
import CUDA
using Test
using ONNXRunTime
-const OX = ONNXRunTime
+const ORT = ONNXRunTime
using ONNXRunTime: SessionOptionsAppendExecutionProvider_CUDA

#using Libdl
#Libdl.dlopen("/home/jan/.julia/artifacts/e2fd6cdf04b830a1d802fb35a6193788d0a3811a/lib/libcudart.so.11.0")

@testset "CUDA high level" begin
@testset "increment2x3.onnx" begin
path = OX.testdatapath("increment2x3.onnx")
model = OX.load_inference(path, execution_provider=:cuda)
@test OX.input_names(model) == ["input"]
@test OX.output_names(model) == ["output"]
path = ORT.testdatapath("increment2x3.onnx")
model = ORT.load_inference(path, execution_provider=:cuda)
@test ORT.input_names(model) == ["input"]
@test ORT.output_names(model) == ["output"]
input = randn(Float32, 2,3)
y = model((;input=input,), ["output"])
@test y == (output=input .+ 1f0,)
@@ -34,9 +34,9 @@ using ONNXRunTime.CAPI
mem = CreateCpuMemoryInfo(api)
allocator = CreateAllocator(api, session, mem)
@test SessionGetInputName(api, session, 0, allocator) == "input"
-@test_throws OX.OrtException SessionGetInputName(api, session, 1, allocator)
+@test_throws ORT.OrtException SessionGetInputName(api, session, 1, allocator)
@test SessionGetOutputName(api, session, 0, allocator) == "output"
-@test_throws OX.OrtException SessionGetOutputName(api, session, 1, allocator)
+@test_throws ORT.OrtException SessionGetOutputName(api, session, 1, allocator)
input_vec = randn(Float32, 6)
input_array = [
input_vec[1] input_vec[2] input_vec[3];
84 changes: 42 additions & 42 deletions test/test_highlevel.jl
@@ -2,57 +2,57 @@ module TestHighlevel

using Test
using ONNXRunTime
-const OX = ONNXRunTime
+const ORT = ONNXRunTime
using ONNXRunTime: juliatype

@testset "high level" begin
@testset "increment2x3.onnx" begin
path = OX.testdatapath("increment2x3.onnx")
model = OX.load_inference(path, execution_provider=:cpu)
@test OX.input_names(model) == ["input"]
@test OX.output_names(model) == ["output"]
@test OX.input_names(model) === model.input_names
@test OX.output_names(model) === model.output_names
path = ORT.testdatapath("increment2x3.onnx")
model = ORT.load_inference(path, execution_provider=:cpu)
@test ORT.input_names(model) == ["input"]
@test ORT.output_names(model) == ["output"]
@test ORT.input_names(model) === model.input_names
@test ORT.output_names(model) === model.output_names
input = randn(Float32, 2,3)
#= this works =# model(Dict("input" => randn(Float32, 2,3)), ["output"])
@test_throws OX.ArgumentError model(Dict("nonsense" => input), ["output"])
@test_throws OX.ArgumentError model(Dict("input" => input), ["nonsense"])
@test_throws OX.OrtException model(Dict("input" => input), String[])
@test_throws OX.ArgumentError model(Dict("input" => input, "unused"=>input), ["output"])
@test_throws OX.ArgumentError model(Dict("input" => input, "unused"=>input), ["output"])
@test_throws OX.OrtException model(Dict("input" => randn(Float32, 3,2)), ["output"])
@test_throws ORT.ArgumentError model(Dict("nonsense" => input), ["output"])
@test_throws ORT.ArgumentError model(Dict("input" => input), ["nonsense"])
@test_throws ORT.OrtException model(Dict("input" => input), String[])
@test_throws ORT.ArgumentError model(Dict("input" => input, "unused"=>input), ["output"])
@test_throws ORT.ArgumentError model(Dict("input" => input, "unused"=>input), ["output"])
@test_throws ORT.OrtException model(Dict("input" => randn(Float32, 3,2)), ["output"])
@test_throws Exception model(Dict("input" => randn(Int, 2,3) ), ["output"])
@test_throws OX.OrtException model(Dict("input" => randn(Float64, 2,3)), ["output"])
@test_throws ORT.OrtException model(Dict("input" => randn(Float64, 2,3)), ["output"])
y = model(Dict("input" => input), ["output"])
@test y == Dict("output" => input .+ 1f0)
y = model(Dict("input" => input))
@test y == Dict("output" => input .+ 1f0)
end
@testset "adder1x2x3.onnx" begin
path = OX.testdatapath("adder1x2x3.onnx")
model = OX.load_inference(path)
@test OX.input_names(model) == ["x", "y"]
@test OX.output_names(model) == ["sum"]
path = ORT.testdatapath("adder1x2x3.onnx")
model = ORT.load_inference(path)
@test ORT.input_names(model) == ["x", "y"]
@test ORT.output_names(model) == ["sum"]
x = randn(Float32, 1,2,3)
y = randn(Float32, 1,2,3)
d = model(Dict("x" => x, "y"=>y))
@test d isa AbstractDict
@test d == Dict("sum" => x+y)
end
@testset "diagonal1x2x3x4.onnx" begin
path = OX.testdatapath("diagonal1x2x3x4.onnx")
model = OX.load_inference(path)
@test OX.input_names(model) == ["in"]
@test OX.output_names(model) == ["out1", "out2"]
path = ORT.testdatapath("diagonal1x2x3x4.onnx")
model = ORT.load_inference(path)
@test ORT.input_names(model) == ["in"]
@test ORT.output_names(model) == ["out1", "out2"]
x = randn(Float64, 1,2,3,4)
d = model(Dict("in" => x))
@test d == Dict("out1" => x, "out2" => x)
end
@testset "swap_x_.onnx" begin
path = OX.testdatapath("swap_x_.onnx")
model = OX.load_inference(path)
@test OX.input_names(model) == ["in1", "in2"]
@test OX.output_names(model) == ["out1", "out2"]
path = ORT.testdatapath("swap_x_.onnx")
model = ORT.load_inference(path)
@test ORT.input_names(model) == ["in1", "in2"]
@test ORT.output_names(model) == ["out1", "out2"]
in1 = randn(Float32, 2,3)
in2 = randn(Float32, 4,5)
res = model((;in1, in2))
@@ -68,22 +68,22 @@ using ONNXRunTime: juliatype
@test occursin("out2", s)
end
@testset "getindex_12.onnx" begin
path = OX.testdatapath("getindex_12.onnx")
model = OX.load_inference(path)
path = ORT.testdatapath("getindex_12.onnx")
model = ORT.load_inference(path)
inputs = (input=collect(reshape(1f0:20, 4,5)),)
out = model(inputs).output
@test inputs.input[2,3] == only(out)
end
@testset "copy2d.onnx" begin
path = OX.testdatapath("copy2d.onnx")
model = OX.load_inference(path)
path = ORT.testdatapath("copy2d.onnx")
model = ORT.load_inference(path)
inputs = (input=randn(Float32,3,4),)
out = model(inputs).output
@test inputs.input == out
end
@testset "matmul.onnx" begin
path = OX.testdatapath("matmul.onnx")
model = OX.load_inference(path)
path = ORT.testdatapath("matmul.onnx")
model = ORT.load_inference(path)
inputs = (
input1 = randn(Float32, 2,3),
input2 = randn(Float32, 3,4),
@@ -92,8 +92,8 @@ using ONNXRunTime: juliatype
@test out ≈ inputs.input1 * inputs.input2
end
@testset "xyz_3x4x5.onnx" begin
path = OX.testdatapath("xyz_3x4x5.onnx")
model = OX.load_inference(path)
path = ORT.testdatapath("xyz_3x4x5.onnx")
model = ORT.load_inference(path)
inputs = (input=randn(Float32,4,10),)
out = model(inputs)
@test out.identity == inputs.input
@@ -108,17 +108,17 @@ using ONNXRunTime: juliatype
end
end
@testset "Conv1d1.onnx" begin
path = OX.testdatapath("Conv1d1.onnx")
model = OX.load_inference(path)
path = ORT.testdatapath("Conv1d1.onnx")
model = ORT.load_inference(path)
inputs = (input=randn(Float32,4,2,10),)
out = model(inputs)
expected = fill(0f0, 4,3,8)
expected[:,2,:] .= 1
@test out.output == expected
end
@testset "Conv1d2.onnx" begin
path = OX.testdatapath("Conv1d2.onnx")
model = OX.load_inference(path)
path = ORT.testdatapath("Conv1d2.onnx")
model = ORT.load_inference(path)
input = Array{Float32,3}(undef, (1,2,3))
input[1,1,1] = 1
input[1,1,2] = 2
@@ -136,10 +136,10 @@ using ONNXRunTime: juliatype
@test out[1,2,3] == 0
end
@testset "Dict2Dict.onnx" begin
path = OX.testdatapath("Dict2Dict.onnx")
model = OX.load_inference(path, execution_provider=:cpu)
@test OX.input_names(model) == ["x", "y"]
@test OX.output_names(model) == ["x_times_y", "x_plus_y", "x_minus_y", "x_plus_1", "y_plus_2"]
path = ORT.testdatapath("Dict2Dict.onnx")
model = ORT.load_inference(path, execution_provider=:cpu)
@test ORT.input_names(model) == ["x", "y"]
@test ORT.output_names(model) == ["x_times_y", "x_plus_y", "x_minus_y", "x_plus_1", "y_plus_2"]
nb = rand(1:10)
x = randn(Float32, nb,3)
y = randn(Float32, nb,3)