Commit a33bc37

[Examples] Update the wasi-nn crate dependency to 0.4.0 (second-state#23)
Signed-off-by: yanghaku <1961882079@qq.com>
1 parent 11876b3 commit a33bc37

11 files changed: +71 additions, -145 deletions


openvino-mobilenet-image/README.md

Lines changed: 1 addition & 1 deletion

@@ -10,7 +10,7 @@ This crate depends on the `wasi-nn` in the `Cargo.toml`:
 
 ```toml
 [dependencies]
-wasi-nn = "0.3.0"
+wasi-nn = "0.4.0"
 ```
 
 ## Build

openvino-mobilenet-image/rust/Cargo.toml

Lines changed: 1 addition & 1 deletion

@@ -8,6 +8,6 @@ publish = false
 
 [dependencies]
 image = { version = "0.23.14", default-features = false, features = ["gif", "jpeg", "ico", "png", "pnm", "tga", "tiff", "webp", "bmp", "hdr", "dxt", "dds", "farbfeld"] }
-wasi-nn = { version = "0.3.0" }
+wasi-nn = { version = "0.4.0" }
 
 [workspace]

openvino-mobilenet-image/rust/src/main.rs

Lines changed: 14 additions & 32 deletions

@@ -1,6 +1,5 @@
 use image::io::Reader;
 use image::DynamicImage;
-use std::convert::TryInto;
 use std::env;
 use std::fs;
 use wasi_nn;
@@ -18,46 +17,29 @@ pub fn main() {
     let weights = fs::read(model_bin_name).unwrap();
     println!("Read graph weights, size in bytes: {}", weights.len());
 
-    let graph = unsafe {
-        wasi_nn::load(
-            &[&xml.into_bytes(), &weights],
-            wasi_nn::GRAPH_ENCODING_OPENVINO,
-            wasi_nn::EXECUTION_TARGET_CPU,
-        )
-        .unwrap()
-    };
-    println!("Loaded graph into wasi-nn with ID: {}", graph);
+    let graph = wasi_nn::GraphBuilder::new(
+        wasi_nn::GraphEncoding::Openvino,
+        wasi_nn::ExecutionTarget::CPU,
+    )
+    .build_from_bytes(&[xml.into_bytes(), weights])
+    .unwrap();
+    println!("Loaded graph into wasi-nn with ID: {:?}", graph);
 
-    let context = unsafe { wasi_nn::init_execution_context(graph).unwrap() };
-    println!("Created wasi-nn execution context with ID: {}", context);
+    let mut context = graph.init_execution_context().unwrap();
+    println!("Created wasi-nn execution context with ID: {:?}", context);
 
     // Load a tensor that precisely matches the graph input tensor (see
     let tensor_data = image_to_tensor(image_name.to_string(), 224, 224);
     println!("Read input tensor, size in bytes: {}", tensor_data.len());
-    let tensor = wasi_nn::Tensor {
-        dimensions: &[1, 3, 224, 224],
-        type_: wasi_nn::TENSOR_TYPE_F32,
-        data: &tensor_data,
-    };
-    unsafe {
-        wasi_nn::set_input(context, 0, tensor).unwrap();
-    }
+    context
+        .set_input(0, wasi_nn::TensorType::F32, &[1, 3, 224, 224], &tensor_data)
+        .unwrap();
     // Execute the inference.
-    unsafe {
-        wasi_nn::compute(context).unwrap();
-    }
+    context.compute().unwrap();
     println!("Executed graph inference");
     // Retrieve the output.
     let mut output_buffer = vec![0f32; 1001];
-    unsafe {
-        wasi_nn::get_output(
-            context,
-            0,
-            &mut output_buffer[..] as *mut [f32] as *mut u8,
-            (output_buffer.len() * 4).try_into().unwrap(),
-        )
-        .unwrap();
-    }
+    context.get_output(0, &mut output_buffer).unwrap();
 
     let results = sort_results(&output_buffer);
     for i in 0..5 {
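The migration pattern above is the same in every example in this commit: the unsafe `wasi_nn::load` / `set_input` / `compute` / `get_output` calls from 0.3.0 are replaced by the safe builder-and-context API of 0.4.0. For reference, the flow condenses to the sketch below; the model paths and the zeroed input tensor are placeholders, while every wasi-nn call mirrors the new lines in this diff.

```rust
use std::fs;

fn main() {
    // Placeholder paths; the examples read these from command-line arguments.
    let xml = fs::read_to_string("model.xml").unwrap();
    let weights = fs::read("model.bin").unwrap();

    // 0.4.0: build the graph with GraphBuilder instead of the unsafe wasi_nn::load().
    let graph = wasi_nn::GraphBuilder::new(
        wasi_nn::GraphEncoding::Openvino,
        wasi_nn::ExecutionTarget::CPU,
    )
    .build_from_bytes(&[xml.into_bytes(), weights])
    .unwrap();

    let mut context = graph.init_execution_context().unwrap();

    // Inputs are now passed as (index, type, dimensions, data) instead of a Tensor struct.
    // Placeholder data: a zeroed 1x3x224x224 f32 tensor (4 bytes per element).
    let tensor_data = vec![0u8; 3 * 224 * 224 * 4];
    context
        .set_input(0, wasi_nn::TensorType::F32, &[1, 3, 224, 224], &tensor_data)
        .unwrap();

    context.compute().unwrap();

    // get_output() fills a typed buffer and returns the number of bytes written.
    let mut output_buffer = vec![0f32; 1001];
    let bytes_written = context.get_output(0, &mut output_buffer).unwrap();
    println!("inference wrote {:?} bytes", bytes_written);
}
```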

openvino-mobilenet-raw/README.md

Lines changed: 1 addition & 1 deletion

@@ -10,7 +10,7 @@ This crate depends on the `wasi-nn` in the `Cargo.toml`:
 
 ```toml
 [dependencies]
-wasi-nn = "0.3.0"
+wasi-nn = "0.4.0"
 ```
 
 ## Build

openvino-mobilenet-raw/rust/Cargo.toml

Lines changed: 1 addition & 1 deletion

@@ -7,6 +7,6 @@ edition = "2021"
 publish = false
 
 [dependencies]
-wasi-nn = { version = "0.3.0" }
+wasi-nn = { version = "0.4.0" }
 
 [workspace]

openvino-mobilenet-raw/rust/src/main.rs

Lines changed: 14 additions & 32 deletions

@@ -1,4 +1,3 @@
-use std::convert::TryInto;
 use std::env;
 use std::fs;
 use wasi_nn;
@@ -16,18 +15,16 @@ pub fn main() {
     let weights = fs::read(model_bin_name).unwrap();
     println!("Read graph weights, size in bytes: {}", weights.len());
 
-    let graph = unsafe {
-        wasi_nn::load(
-            &[&xml.into_bytes(), &weights],
-            wasi_nn::GRAPH_ENCODING_OPENVINO,
-            wasi_nn::EXECUTION_TARGET_CPU,
-        )
-        .unwrap()
-    };
-    println!("Loaded graph into wasi-nn with ID: {}", graph);
+    let graph = wasi_nn::GraphBuilder::new(
+        wasi_nn::GraphEncoding::Openvino,
+        wasi_nn::ExecutionTarget::CPU,
+    )
+    .build_from_bytes(&[xml.into_bytes(), weights])
+    .unwrap();
+    println!("Loaded graph into wasi-nn with ID: {:?}", graph);
 
-    let context = unsafe { wasi_nn::init_execution_context(graph).unwrap() };
-    println!("Created wasi-nn execution context with ID: {}", context);
+    let mut context = graph.init_execution_context().unwrap();
+    println!("Created wasi-nn execution context with ID: {:?}", context);
 
     // Load a tensor that precisely matches the graph input tensor (see
     // `fixture/frozen_inference_graph.xml`).
@@ -36,30 +33,15 @@ pub fn main() {
     // for i in 0..10{
    //     println!("tensor -> {}", tensor_data[i]);
     // }
-    let tensor = wasi_nn::Tensor {
-        dimensions: &[1, 3, 224, 224],
-        type_: wasi_nn::TENSOR_TYPE_F32,
-        data: &tensor_data,
-    };
-    unsafe {
-        wasi_nn::set_input(context, 0, tensor).unwrap();
-    }
+    context
+        .set_input(0, wasi_nn::TensorType::F32, &[1, 3, 224, 224], &tensor_data)
+        .unwrap();
     // Execute the inference.
-    unsafe {
-        wasi_nn::compute(context).unwrap();
-    }
+    context.compute().unwrap();
     println!("Executed graph inference");
     // Retrieve the output.
     let mut output_buffer = vec![0f32; 1001];
-    unsafe {
-        wasi_nn::get_output(
-            context,
-            0,
-            &mut output_buffer[..] as *mut [f32] as *mut u8,
-            (output_buffer.len() * 4).try_into().unwrap(),
-        )
-        .unwrap();
-    }
+    context.get_output(0, &mut output_buffer).unwrap();
 
     let results = sort_results(&output_buffer);
     for i in 0..5 {

openvino-road-segmentation-adas/rust/openvino-road-segmentation-adas-basic/Cargo.toml

Lines changed: 1 addition & 1 deletion

@@ -5,4 +5,4 @@ name = "rust-road-segmentation-adas"
 version = "0.1.0"
 
 [dependencies]
-wasi-nn = "0.1.0"
+wasi-nn = "0.4.0"

openvino-road-segmentation-adas/rust/openvino-road-segmentation-adas-basic/src/main.rs

Lines changed: 22 additions & 34 deletions

@@ -6,7 +6,7 @@
 //!
 //! In the environment where WasmEdge is deployed, run `wasmedge --dir .:. <path to rust-road-segmentation-adas.wasm> ../model/road-segmentation-adas-0001.xml ../model/road-segmentation-adas-0001.bin <path to input tensor>`. After the inference, the result tensor `wasinn-openvino-inference-output-1x4x512x896xf32.tensor` will be returned. You can use opencv or other tools to visualize the inference result.
 
-use std::{convert::TryInto, env, fs};
+use std::{env, fs};
 use wasi_nn as nn;
 
 fn main() -> Result<(), Box<dyn std::error::Error>> {
@@ -24,14 +24,15 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
 
     let tensor_data = fs::read(tensor_name).unwrap();
     println!("Load input tensor, size in bytes: {}", tensor_data.len());
-    let tensor = nn::Tensor {
-        dimensions: &[1, 3, 512, 896],
-        r#type: nn::TENSOR_TYPE_F32,
-        data: &tensor_data,
-    };
 
     // do inference
-    let output_buffer = infer(xml_bytes.as_slice(), weights.as_slice(), tensor)?;
+    let output_buffer = infer(
+        xml_bytes.as_slice(),
+        weights.as_slice(),
+        nn::TensorType::F32,
+        &[1, 3, 512, 896],
+        &tensor_data,
+    )?;
 
     // dump result
     dump(
@@ -46,41 +47,28 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
 fn infer(
     xml_bytes: impl AsRef<[u8]>,
     weights: impl AsRef<[u8]>,
-    in_tensor: nn::Tensor,
+    in_tensor_type: nn::TensorType,
+    in_tensor_dimensions: &[usize],
+    in_tensor_data: &Vec<u8>,
 ) -> Result<Vec<f32>, Box<dyn std::error::Error>> {
-    let graph = unsafe {
-        wasi_nn::load(
-            &[xml_bytes.as_ref(), weights.as_ref()],
-            wasi_nn::GRAPH_ENCODING_OPENVINO,
-            wasi_nn::EXECUTION_TARGET_CPU,
-        )
-        .unwrap()
-    };
-    println!("Loaded graph into wasi-nn with ID: {}", graph);
+    let graph = nn::GraphBuilder::new(nn::GraphEncoding::Openvino, nn::ExecutionTarget::CPU)
+        .build_from_bytes(&[xml_bytes.as_ref(), weights.as_ref()])
+        .unwrap();
+    println!("Loaded graph into wasi-nn with ID: {:?}", graph);
 
-    let context = unsafe { wasi_nn::init_execution_context(graph).unwrap() };
-    println!("Created wasi-nn execution context with ID: {}", context);
+    let mut context = graph.init_execution_context().unwrap();
+    println!("Created wasi-nn execution context with ID: {:?}", context);
 
-    unsafe {
-        wasi_nn::set_input(context, 0, in_tensor).unwrap();
-    }
+    context
+        .set_input(0, in_tensor_type, in_tensor_dimensions, in_tensor_data)
+        .unwrap();
     // Execute the inference.
-    unsafe {
-        wasi_nn::compute(context).unwrap();
-    }
+    context.compute().unwrap();
     println!("Executed graph inference");
 
     // Retrieve the output.
     let mut output_buffer = vec![0f32; 1 * 4 * 512 * 896];
-    let bytes_written = unsafe {
-        wasi_nn::get_output(
-            context,
-            0,
-            &mut output_buffer[..] as *mut [f32] as *mut u8,
-            (output_buffer.len() * 4).try_into().unwrap(),
-        )
-        .unwrap()
-    };
+    let bytes_written = context.get_output(0, &mut output_buffer).unwrap();
 
     println!("bytes_written: {:?}", bytes_written);
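Besides replacing the unsafe calls, this example also changes its own `infer` helper: with the `nn::Tensor` struct gone in 0.4.0, the tensor type, dimensions, and raw bytes are now separate parameters, and `get_output` reports how many bytes it wrote into the typed `f32` buffer. A hypothetical sanity check (not part of the upstream example) that could follow the `get_output` call above:

```rust
// Hypothetical helper, not in the upstream example: verify that get_output()
// filled the whole 1x4x512x896 f32 output buffer. The 0.4.0 call returns the
// number of bytes written, so a full buffer means len() * size_of::<f32>() bytes.
fn check_full_output(bytes_written: usize, output_buffer: &[f32]) {
    assert_eq!(bytes_written, output_buffer.len() * std::mem::size_of::<f32>());
}
```

In the code above it would be invoked as `check_full_output(bytes_written as usize, &output_buffer)` right after the `println!("bytes_written: ...")` line.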

pytorch-mobilenet-image/README.md

Lines changed: 1 addition & 2 deletions

@@ -10,7 +10,7 @@ This crate depends on the `wasi-nn` in the `Cargo.toml`:
 
 ```toml
 [dependencies]
-wasi-nn = "0.3.0"
+wasi-nn = "0.4.0"
 ```
 
 ## Build
@@ -67,7 +67,6 @@ wasmedge --dir .:. wasmedge-wasinn-example-mobilenet-image.wasm mobilenet.pt inp
 You will get the output:
 
 ```console
-Read torchscript binaries, size in bytes: 14376924
 Loaded graph into wasi-nn with ID: 0
 Created wasi-nn execution context with ID: 0
 Read input tensor, size in bytes: 602112

pytorch-mobilenet-image/rust/Cargo.toml

Lines changed: 1 addition & 1 deletion

@@ -8,6 +8,6 @@ publish = false
 
 [dependencies]
 image = { version = "0.23.14", default-features = false, features = ["gif", "jpeg", "ico", "png", "pnm", "tga", "tiff", "webp", "bmp", "hdr", "dxt", "dds", "farbfeld"] }
-wasi-nn = { version = "0.3.0" }
+wasi-nn = { version = "0.4.0" }
 
 [workspace]
