Commit dfb67d6

Merge pull request #31 from jw3126/up
Rename OX->ORT
2 parents c442596 + e0bb88b commit dfb67d6

File tree (5 files changed, +61 −61 lines changed):

  Project.toml
  README.md
  test/test_capi.jl
  test/test_cuda.jl
  test/test_highlevel.jl

Project.toml (+1 −1)

@@ -1,7 +1,7 @@
 name = "ONNXRunTime"
 uuid = "e034b28e-924e-41b2-b98f-d2bbeb830c6a"
 authors = ["Jan Weidner <[email protected]> and contributors"]
-version = "0.3.2"
+version = "0.3.3"

 [deps]
 ArgCheck = "dce04be8-c92d-5529-be00-80e4d2c0e197"

README.md (+4 −4)

@@ -14,11 +14,11 @@ Contributions are welcome.
 The high level API works as follows:
 ```julia

-julia> import ONNXRunTime as OX
+julia> import ONNXRunTime as ORT

-julia> path = OX.testdatapath("increment2x3.onnx"); # path to a toy model
+julia> path = ORT.testdatapath("increment2x3.onnx"); # path to a toy model

-julia> model = OX.load_inference(path);
+julia> model = ORT.load_inference(path);

 julia> input = Dict("input" => randn(Float32,2,3))
 Dict{String, Matrix{Float32}} with 1 entry:
@@ -34,7 +34,7 @@ pkg> add CUDA

 julia> import CUDA

-julia> OX.load_inference(path, execution_provider=:cuda)
+julia> ORT.load_inference(path, execution_provider=:cuda)
 ```

 The low level API mirrors the offical [C-API](https://github.com/microsoft/onnxruntime/blob/v1.8.1/include/onnxruntime/core/session/onnxruntime_c_api.h#L347). The above example looks like this:
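
For context on the rename, a minimal end-to-end run of the high-level API under the new ORT alias might look like the following sketch (assuming the toy increment2x3.onnx model shipped with the package, as in the README hunk above):

```julia
# Sketch only: mirrors the README example after the OX -> ORT rename.
import ONNXRunTime as ORT

path = ORT.testdatapath("increment2x3.onnx")   # toy model bundled with the package
model = ORT.load_inference(path)               # CPU execution provider by default

input = Dict("input" => randn(Float32, 2, 3))
output = model(input)                          # Dict("output" => input .+ 1f0)
```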

test/test_capi.jl (+7 −7)

@@ -1,13 +1,13 @@
 module TestCAPI
 using Test
 using ONNXRunTime.CAPI
-import ONNXRunTime as OX
+import ONNXRunTime as ORT

 @testset "Session" begin
 @testset "increment2x3" begin
 api = GetApi()
 env = CreateEnv(api, name="myenv")
-path = OX.testdatapath("increment2x3.onnx")
+path = ORT.testdatapath("increment2x3.onnx")
 session_options = CreateSessionOptions(api)
 @test (sprint(show, session_options); true)
 @test_throws Exception CreateSession(api, env, "does_not_exits.onnx", session_options)
@@ -20,9 +20,9 @@ import ONNXRunTime as OX
 allocator = CreateAllocator(api, session, mem)
 @test (sprint(show, allocator); true)
 @test SessionGetInputName(api, session, 0, allocator) == "input"
-@test_throws OX.OrtException SessionGetInputName(api, session, 1, allocator)
+@test_throws ORT.OrtException SessionGetInputName(api, session, 1, allocator)
 @test SessionGetOutputName(api, session, 0, allocator) == "output"
-@test_throws OX.OrtException SessionGetOutputName(api, session, 1, allocator)
+@test_throws ORT.OrtException SessionGetOutputName(api, session, 1, allocator)
 input_vec = randn(Float32, 6)
 input_array = [
 input_vec[1] input_vec[2] input_vec[3];
@@ -46,7 +46,7 @@ import ONNXRunTime as OX
 @testset "increment2x3 ModelMetadata" begin
 api = GetApi()
 env = CreateEnv(api, name="myenv")
-path = OX.testdatapath("increment2x3.onnx")
+path = ORT.testdatapath("increment2x3.onnx")
 session_options = CreateSessionOptions(api)
 @test (sprint(show, session_options); true)
 @test_throws Exception CreateSession(api, env, "does_not_exits.onnx", session_options)
@@ -67,12 +67,12 @@ end
 api = GetApi()
 mem = CreateCpuMemoryInfo(api)
 data = randn(2,3)
-tensor = CreateTensorWithDataAsOrtValue(api, mem, vec(OX.reversedims(data)), size(data))
+tensor = CreateTensorWithDataAsOrtValue(api, mem, vec(ORT.reversedims(data)), size(data))
 @test IsTensor(api, tensor)
 info = GetTensorTypeAndShape(api, tensor)
 onnxelty = GetTensorElementType(api, info)
 @test onnxelty isa ONNXTensorElementDataType
-@test OX.juliatype(onnxelty) == eltype(data)
+@test ORT.juliatype(onnxelty) == eltype(data)
 @test GetDimensionsCount(api, info) == 2
 @test GetDimensions(api, info) == [2,3]
 data2 = GetTensorMutableData(api, tensor)
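
The hunks above touch the low-level C-API wrappers. A minimal sketch of how those calls fit together under the new ORT alias, assembled only from calls that appear in this diff (the actual Run step is elided here):

```julia
# Sketch only: low-level CAPI session setup with the renamed ORT alias,
# using the calls visible in test/test_capi.jl above.
using ONNXRunTime.CAPI
import ONNXRunTime as ORT

api = GetApi()
env = CreateEnv(api, name="myenv")
path = ORT.testdatapath("increment2x3.onnx")
session_options = CreateSessionOptions(api)
session = CreateSession(api, env, path, session_options)

mem = CreateCpuMemoryInfo(api)
allocator = CreateAllocator(api, session, mem)
SessionGetInputName(api, session, 0, allocator)    # "input"
SessionGetOutputName(api, session, 0, allocator)   # "output"

# Wrap a Julia array as an OrtValue tensor; the column-major data is
# reversed first, as in the test above.
data = randn(2, 3)
tensor = CreateTensorWithDataAsOrtValue(api, mem, vec(ORT.reversedims(data)), size(data))
@assert IsTensor(api, tensor)
```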

test/test_cuda.jl (+7 −7)

@@ -2,18 +2,18 @@ module TestCUDA
 import CUDA
 using Test
 using ONNXRunTime
-const OX = ONNXRunTime
+const ORT = ONNXRunTime
 using ONNXRunTime: SessionOptionsAppendExecutionProvider_CUDA

 #using Libdl
 #Libdl.dlopen("/home/jan/.julia/artifacts/e2fd6cdf04b830a1d802fb35a6193788d0a3811a/lib/libcudart.so.11.0")

 @testset "CUDA high level" begin
 @testset "increment2x3.onnx" begin
-path = OX.testdatapath("increment2x3.onnx")
-model = OX.load_inference(path, execution_provider=:cuda)
-@test OX.input_names(model) == ["input"]
-@test OX.output_names(model) == ["output"]
+path = ORT.testdatapath("increment2x3.onnx")
+model = ORT.load_inference(path, execution_provider=:cuda)
+@test ORT.input_names(model) == ["input"]
+@test ORT.output_names(model) == ["output"]
 input = randn(Float32, 2,3)
 y = model((;input=input,), ["output"])
 @test y == (output=input .+ 1f0,)
@@ -34,9 +34,9 @@ using ONNXRunTime.CAPI
 mem = CreateCpuMemoryInfo(api)
 allocator = CreateAllocator(api, session, mem)
 @test SessionGetInputName(api, session, 0, allocator) == "input"
-@test_throws OX.OrtException SessionGetInputName(api, session, 1, allocator)
+@test_throws ORT.OrtException SessionGetInputName(api, session, 1, allocator)
 @test SessionGetOutputName(api, session, 0, allocator) == "output"
-@test_throws OX.OrtException SessionGetOutputName(api, session, 1, allocator)
+@test_throws ORT.OrtException SessionGetOutputName(api, session, 1, allocator)
 input_vec = randn(Float32, 6)
 input_array = [
 input_vec[1] input_vec[2] input_vec[3];
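
As in the README's CUDA snippet, switching the renamed high-level API to the GPU is just a keyword-argument change. A minimal sketch, assuming CUDA.jl is installed and a working GPU is available (as the test above assumes):

```julia
# Sketch only: CUDA execution provider with the renamed ORT alias.
import CUDA                      # CUDA.jl must be available for :cuda
import ONNXRunTime as ORT

path = ORT.testdatapath("increment2x3.onnx")
model = ORT.load_inference(path, execution_provider=:cuda)

input = randn(Float32, 2, 3)
y = model((; input=input,), ["output"])   # NamedTuple in, NamedTuple out
# For the toy increment model, y.output == input .+ 1f0
```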

test/test_highlevel.jl (+42 −42)

@@ -2,57 +2,57 @@ module TestHighlevel

 using Test
 using ONNXRunTime
-const OX = ONNXRunTime
+const ORT = ONNXRunTime
 using ONNXRunTime: juliatype

 @testset "high level" begin
 @testset "increment2x3.onnx" begin
-path = OX.testdatapath("increment2x3.onnx")
-model = OX.load_inference(path, execution_provider=:cpu)
-@test OX.input_names(model) == ["input"]
-@test OX.output_names(model) == ["output"]
-@test OX.input_names(model) === model.input_names
-@test OX.output_names(model) === model.output_names
+path = ORT.testdatapath("increment2x3.onnx")
+model = ORT.load_inference(path, execution_provider=:cpu)
+@test ORT.input_names(model) == ["input"]
+@test ORT.output_names(model) == ["output"]
+@test ORT.input_names(model) === model.input_names
+@test ORT.output_names(model) === model.output_names
 input = randn(Float32, 2,3)
 #= this works =# model(Dict("input" => randn(Float32, 2,3)), ["output"])
-@test_throws OX.ArgumentError model(Dict("nonsense" => input), ["output"])
-@test_throws OX.ArgumentError model(Dict("input" => input), ["nonsense"])
-@test_throws OX.OrtException model(Dict("input" => input), String[])
-@test_throws OX.ArgumentError model(Dict("input" => input, "unused"=>input), ["output"])
-@test_throws OX.ArgumentError model(Dict("input" => input, "unused"=>input), ["output"])
-@test_throws OX.OrtException model(Dict("input" => randn(Float32, 3,2)), ["output"])
+@test_throws ORT.ArgumentError model(Dict("nonsense" => input), ["output"])
+@test_throws ORT.ArgumentError model(Dict("input" => input), ["nonsense"])
+@test_throws ORT.OrtException model(Dict("input" => input), String[])
+@test_throws ORT.ArgumentError model(Dict("input" => input, "unused"=>input), ["output"])
+@test_throws ORT.ArgumentError model(Dict("input" => input, "unused"=>input), ["output"])
+@test_throws ORT.OrtException model(Dict("input" => randn(Float32, 3,2)), ["output"])
 @test_throws Exception model(Dict("input" => randn(Int, 2,3) ), ["output"])
-@test_throws OX.OrtException model(Dict("input" => randn(Float64, 2,3)), ["output"])
+@test_throws ORT.OrtException model(Dict("input" => randn(Float64, 2,3)), ["output"])
 y = model(Dict("input" => input), ["output"])
 @test y == Dict("output" => input .+ 1f0)
 y = model(Dict("input" => input))
 @test y == Dict("output" => input .+ 1f0)
 end
 @testset "adder1x2x3.onnx" begin
-path = OX.testdatapath("adder1x2x3.onnx")
-model = OX.load_inference(path)
-@test OX.input_names(model) == ["x", "y"]
-@test OX.output_names(model) == ["sum"]
+path = ORT.testdatapath("adder1x2x3.onnx")
+model = ORT.load_inference(path)
+@test ORT.input_names(model) == ["x", "y"]
+@test ORT.output_names(model) == ["sum"]
 x = randn(Float32, 1,2,3)
 y = randn(Float32, 1,2,3)
 d = model(Dict("x" => x, "y"=>y))
 @test d isa AbstractDict
 @test d == Dict("sum" => x+y)
 end
 @testset "diagonal1x2x3x4.onnx" begin
-path = OX.testdatapath("diagonal1x2x3x4.onnx")
-model = OX.load_inference(path)
-@test OX.input_names(model) == ["in"]
-@test OX.output_names(model) == ["out1", "out2"]
+path = ORT.testdatapath("diagonal1x2x3x4.onnx")
+model = ORT.load_inference(path)
+@test ORT.input_names(model) == ["in"]
+@test ORT.output_names(model) == ["out1", "out2"]
 x = randn(Float64, 1,2,3,4)
 d = model(Dict("in" => x))
 @test d == Dict("out1" => x, "out2" => x)
 end
 @testset "swap_x_.onnx" begin
-path = OX.testdatapath("swap_x_.onnx")
-model = OX.load_inference(path)
-@test OX.input_names(model) == ["in1", "in2"]
-@test OX.output_names(model) == ["out1", "out2"]
+path = ORT.testdatapath("swap_x_.onnx")
+model = ORT.load_inference(path)
+@test ORT.input_names(model) == ["in1", "in2"]
+@test ORT.output_names(model) == ["out1", "out2"]
 in1 = randn(Float32, 2,3)
 in2 = randn(Float32, 4,5)
 res = model((;in1, in2))
@@ -68,22 +68,22 @@ using ONNXRunTime: juliatype
 @test occursin("out2", s)
 end
 @testset "getindex_12.onnx" begin
-path = OX.testdatapath("getindex_12.onnx")
-model = OX.load_inference(path)
+path = ORT.testdatapath("getindex_12.onnx")
+model = ORT.load_inference(path)
 inputs = (input=collect(reshape(1f0:20, 4,5)),)
 out = model(inputs).output
 @test inputs.input[2,3] == only(out)
 end
 @testset "copy2d.onnx" begin
-path = OX.testdatapath("copy2d.onnx")
-model = OX.load_inference(path)
+path = ORT.testdatapath("copy2d.onnx")
+model = ORT.load_inference(path)
 inputs = (input=randn(Float32,3,4),)
 out = model(inputs).output
 @test inputs.input == out
 end
 @testset "matmul.onnx" begin
-path = OX.testdatapath("matmul.onnx")
-model = OX.load_inference(path)
+path = ORT.testdatapath("matmul.onnx")
+model = ORT.load_inference(path)
 inputs = (
 input1 = randn(Float32, 2,3),
 input2 = randn(Float32, 3,4),
@@ -92,8 +92,8 @@ using ONNXRunTime: juliatype
 @test out ≈ inputs.input1 * inputs.input2
 end
 @testset "xyz_3x4x5.onnx" begin
-path = OX.testdatapath("xyz_3x4x5.onnx")
-model = OX.load_inference(path)
+path = ORT.testdatapath("xyz_3x4x5.onnx")
+model = ORT.load_inference(path)
 inputs = (input=randn(Float32,4,10),)
 out = model(inputs)
 @test out.identity == inputs.input
@@ -108,17 +108,17 @@ using ONNXRunTime: juliatype
 end
 end
 @testset "Conv1d1.onnx" begin
-path = OX.testdatapath("Conv1d1.onnx")
-model = OX.load_inference(path)
+path = ORT.testdatapath("Conv1d1.onnx")
+model = ORT.load_inference(path)
 inputs = (input=randn(Float32,4,2,10),)
 out = model(inputs)
 expected = fill(0f0, 4,3,8)
 expected[:,2,:] .= 1
 @test out.output == expected
 end
 @testset "Conv1d2.onnx" begin
-path = OX.testdatapath("Conv1d2.onnx")
-model = OX.load_inference(path)
+path = ORT.testdatapath("Conv1d2.onnx")
+model = ORT.load_inference(path)
 input = Array{Float32,3}(undef, (1,2,3))
 input[1,1,1] = 1
 input[1,1,2] = 2
@@ -136,10 +136,10 @@ using ONNXRunTime: juliatype
 @test out[1,2,3] == 0
 end
 @testset "Dict2Dict.onnx" begin
-path = OX.testdatapath("Dict2Dict.onnx")
-model = OX.load_inference(path, execution_provider=:cpu)
-@test OX.input_names(model) == ["x", "y"]
-@test OX.output_names(model) == ["x_times_y", "x_plus_y", "x_minus_y", "x_plus_1", "y_plus_2"]
+path = ORT.testdatapath("Dict2Dict.onnx")
+model = ORT.load_inference(path, execution_provider=:cpu)
+@test ORT.input_names(model) == ["x", "y"]
+@test ORT.output_names(model) == ["x_times_y", "x_plus_y", "x_minus_y", "x_plus_1", "y_plus_2"]
 nb = rand(1:10)
 x = randn(Float32, nb,3)
 y = randn(Float32, nb,3)
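
The high-level tests above also exercise multi-input, multi-output models. A minimal sketch of that calling convention under the ORT alias, following the adder1x2x3.onnx testset in this diff:

```julia
# Sketch only: multi-input / multi-output calling convention with ORT,
# as in the adder1x2x3.onnx testset above.
import ONNXRunTime as ORT

path = ORT.testdatapath("adder1x2x3.onnx")
model = ORT.load_inference(path)

ORT.input_names(model)    # ["x", "y"]
ORT.output_names(model)   # ["sum"]

x = randn(Float32, 1, 2, 3)
y = randn(Float32, 1, 2, 3)
d = model(Dict("x" => x, "y" => y))   # Dict("sum" => x + y)

# NamedTuples also work, as in the swap_x_ testset:
# res = model((; in1, in2))
```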
