ONE - On-device Neural Engine
dynamic_shape_inference.py
#!/usr/bin/env python3

import numpy as np
import random
import sys
from onert import infer


def main(nnpackage_path, backends="cpu"):
    # Create session and load nnpackage
    session = infer.session(nnpackage_path, backends)

    # Prepare input. Here we just allocate dummy input arrays.
    input_infos = session.get_inputs_tensorinfo()

    # Call infer() 10 times
    for i in range(10):
        dummy_inputs = []
        for info in input_infos:
            # Retrieve the dimensions list from tensorinfo property.
            dims = list(info.dims)
            # Replace -1 with a random value between 1 and 10
            dims = [random.randint(1, 10) if d == -1 else d for d in dims]
            # Build the shape tuple from tensorinfo dimensions.
            shape = tuple(dims[:info.rank])
            # Create a dummy numpy array filled with uniform random values in [0, 1).
            dummy_inputs.append(
                np.random.uniform(low=0.0, high=1.0, size=shape).astype(info.dtype))

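        # Run one inference on the randomly shaped dummy inputs; this example
        # does not inspect the returned outputs.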
        outputs = session.infer(dummy_inputs)
        print(f"Inference run {i+1}/10 completed.")

    print(f"nnpackage {nnpackage_path.split('/')[-1]} runs successfully.")
    return


if __name__ == "__main__":
    argv = sys.argv[1:]
    main(*argv)
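The script is invoked with the nnpackage path as its first command-line argument and an optional backend string as its second; when no backend is given, main() defaults to "cpu". As a minimal usage sketch, assuming the file is importable as a module and that path/to/nnpackage is a placeholder for a real nnpackage directory, the same entry point can also be called directly:

    from dynamic_shape_inference import main

    # "cpu" is the default backend in main(); pass another backend string if needed.
    main("path/to/nnpackage", backends="cpu")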