ONE - On-device Neural Engine
Namespaces
| namespace | infer |
Functions
| infer.extract_test_args (s) | Managing paths for the artifacts required by the test. |
| infer.check_for_errors () | |
| infer.error_checked (func) | |
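check_for_errors and error_checked read like a small error-handling layer around the C entry points the script loads later (see the Variables table below): each wrapped call is immediately followed by a check of the library's error state. The following is a minimal sketch of that wiring, not the actual implementation; check_for_errors is stubbed out here and assumed to raise when the native side reports a failure.

```python
def check_for_errors():
    # Stub for the check_for_errors() listed above; assumed to inspect the
    # native library's error state and raise RuntimeError on failure.
    pass


def error_checked(func):
    # Assumed behaviour: forward the call to the underlying C function and
    # run the error check right afterwards, so callers can use the binding
    # like an ordinary Python function.
    def wrapper(*args, **kwargs):
        result = func(*args, **kwargs)
        check_for_errors()
        return result
    return wrapper
```

Used this way, bindings such as Interpreter_new = error_checked(C.Interpreter_new) surface native failures at the call site instead of returning silently.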
Variables
| | infer.parser = argparse.ArgumentParser() |
| | infer.type |
| | infer.str |
| | infer.required |
| | infer.args = parser.parse_args() |
| list | infer.contents = [line.rstrip() for line in f] |
| list | infer.eval_lines = [line for line in contents if line.startswith('eval(')] |
| list | infer.test_args = [extract_test_args(line) for line in eval_lines] |
| list | infer.test_models = [Path(args.artifact_dir) / f'{arg}.circle' for arg in test_args] |
| list | infer.input_data |
| list | infer.expected_output_data |
| | infer.ffi = FFI() |
| | infer.C = ffi.dlopen(args.lib_path) |
| | infer.Interpreter_new = error_checked(C.Interpreter_new) |
| | infer.Interpreter_delete = error_checked(C.Interpreter_delete) |
| | infer.Interpreter_interpret = error_checked(C.Interpreter_interpret) |
| | infer.Interpreter_writeInputTensor = error_checked(C.Interpreter_writeInputTensor) |
| | infer.Interpreter_readOutputTensor = error_checked(C.Interpreter_readOutputTensor) |
| | infer.model_data = ffi.from_buffer(bytearray(f.read())) |
| | infer.intp = Interpreter_new(model_data, len(model_data)) |
| | infer.h5 = h5py.File(input_data[idx]) |
| | infer.input_values = h5.get('value') |
| | infer.input_num = len(input_values) |
| | infer.arr = np.array(input_values.get(str(input_idx))) |
| | infer.c_arr = ffi.from_buffer(arr) |
| | infer.output_values = h5.get('value') |
| | infer.output_num = len(output_values) |
| | infer.result = np.empty(arr.shape, dtype=arr.dtype) |

Taken together, these variables trace the test driver from argument parsing through model loading to inference and output comparison; two hedged sketches of that flow follow below.
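The argparse and list variables at the top of the table show the script turning a test list into per-model artifact paths; the bare type, str, and required entries appear to be fragments of the parser.add_argument(..., type=str, required=True) calls as picked up by the documentation generator. The sketch below reconstructs that step under stated assumptions: only lib_path and artifact_dir appear in the listing above, so the --test_list flag, the extract_test_args body, and the .h5 file suffixes are hypothetical.

```python
import argparse
from pathlib import Path


def extract_test_args(s):
    # Hypothetical stand-in for infer.extract_test_args(s): take the first
    # token inside an 'eval(...)' line as the test/model name.
    return s[s.find('(') + 1:s.rfind(')')].split()[0]


parser = argparse.ArgumentParser()
parser.add_argument('--lib_path', type=str, required=True)
parser.add_argument('--artifact_dir', type=str, required=True)
parser.add_argument('--test_list', type=str, required=True)  # assumed flag name
args = parser.parse_args()

# Collect the eval(...) lines that name the test cases.
with open(args.test_list) as f:
    contents = [line.rstrip() for line in f]
eval_lines = [line for line in contents if line.startswith('eval(')]
test_args = [extract_test_args(line) for line in eval_lines]

# Derive the per-test artifacts; the .h5 suffixes are assumptions.
test_models = [Path(args.artifact_dir) / f'{arg}.circle' for arg in test_args]
input_data = [Path(args.artifact_dir) / f'{arg}.circle.input.h5' for arg in test_args]
expected_output_data = [Path(args.artifact_dir) / f'{arg}.circle.expected.h5' for arg in test_args]
```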
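The remaining variables trace the cffi side: the shared library named by lib_path is opened, the Interpreter_* entry points are wrapped with error_checked, and each model is fed its HDF5 inputs, interpreted, and compared against the expected outputs. Continuing the sketch above, the block below shows one plausible shape of that loop; the cdef declarations, the argument order of Interpreter_writeInputTensor and Interpreter_readOutputTensor, and the np.allclose comparison are assumptions, not the library's confirmed API.

```python
import h5py
import numpy as np
from cffi import FFI

ffi = FFI()
# Hypothetical declarations; the real signatures come from the library's header.
ffi.cdef("""
    typedef struct Interpreter Interpreter;
    Interpreter *Interpreter_new(const char *data, size_t size);
    void Interpreter_delete(Interpreter *intp);
    void Interpreter_interpret(Interpreter *intp);
    void Interpreter_writeInputTensor(Interpreter *intp, int index,
                                      const void *data, size_t size);
    void Interpreter_readOutputTensor(Interpreter *intp, int index,
                                      void *data, size_t size);
""")
C = ffi.dlopen(args.lib_path)

Interpreter_new = error_checked(C.Interpreter_new)
Interpreter_delete = error_checked(C.Interpreter_delete)
Interpreter_interpret = error_checked(C.Interpreter_interpret)
Interpreter_writeInputTensor = error_checked(C.Interpreter_writeInputTensor)
Interpreter_readOutputTensor = error_checked(C.Interpreter_readOutputTensor)

for idx, model_path in enumerate(test_models):
    # Load the .circle model and hand it to the interpreter.
    with open(model_path, 'rb') as f:
        model_data = ffi.from_buffer(bytearray(f.read()))
    intp = Interpreter_new(model_data, len(model_data))

    # Write every input tensor stored under 'value' in the input HDF5 file.
    h5 = h5py.File(input_data[idx])
    input_values = h5.get('value')
    input_num = len(input_values)
    for input_idx in range(input_num):
        arr = np.array(input_values.get(str(input_idx)))
        c_arr = ffi.from_buffer(arr)
        Interpreter_writeInputTensor(intp, input_idx, c_arr, arr.nbytes)

    Interpreter_interpret(intp)

    # Read each output back and compare against the expected HDF5 values.
    h5 = h5py.File(expected_output_data[idx])
    output_values = h5.get('value')
    output_num = len(output_values)
    for output_idx in range(output_num):
        arr = np.array(output_values.get(str(output_idx)))
        result = np.empty(arr.shape, dtype=arr.dtype)
        Interpreter_readOutputTensor(intp, output_idx, ffi.from_buffer(result),
                                     result.nbytes)
        assert np.allclose(result, arr), f'{model_path}: output {output_idx} mismatch'

    Interpreter_delete(intp)
```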