import argparse
import glob
import os

import h5py as h5
import numpy as np

from circle.Model import Model
from circle.TensorType import TensorType
def chunks(lst, n):
    """Yield successive n-sized chunks from the list"""
    # The last chunk may be shorter than n when len(lst) is not a multiple of n.
    for i in range(0, len(lst), n):
        yield lst[i:i + n]
def to_numpy_type(circle_type):
    """Map a circle TensorType enum value to the matching numpy dtype.

    NOTE(review): the original function header and return lines were lost in
    this chunk; name and signature reconstructed from the visible comparisons
    and from the numpy conversion performed later in the script — confirm
    against the full file.
    """
    if circle_type == TensorType.UINT8:
        return np.uint8
    if circle_type == TensorType.FLOAT32:
        return np.float32
    if circle_type == TensorType.INT16:
        return np.int16
    # Fail loudly instead of silently returning None for an unsupported dtype.
    raise SystemExit(f'Unsupported circle dtype: {circle_type}')
# Command-line interface.
# NOTE(review): the add_argument call for --test_param was lost in this chunk;
# it is reconstructed from its surviving help string and from the later uses
# `args.test_param` and `test_param[0].split()` (a list whose first element is
# a whitespace-separated token string, hence nargs='+').
parser = argparse.ArgumentParser()
parser.add_argument('--output_dir', help='Output directory where the inputs are generated')
parser.add_argument('--artifact_dir', help='Artifact directory where test files exist')
parser.add_argument('--input_dir', help='Input directory where input text files exist')
parser.add_argument(
    '--test_param',
    nargs='+',
    help='All the param list to test. e.g. ${RECIPE_NAME_0} ${GRANULARITY_0} ${DTYPE_0} ${RECIPE_NAME_1} ${GRANULARITY_1} ${DTYPE_1}..'
)
parser.add_argument('--config', action='store_true', help='Generate inputs with config')
parser.add_argument('--mode', type=str, default='default', help='Mode to test')
args = parser.parse_args()

output_dir = args.output_dir
artifact_dir = args.artifact_dir
input_dir = args.input_dir
test_param = args.test_param
config = args.config
mode = args.mode
# Suffix of the generated h5 file, keyed by the --mode option.
modes_to_input_h5_suffix = {
    'default': 'input.h5',
    'mixed_quantization': 'mixed.input.h5',
}
# Each test is described by a triple of whitespace-separated tokens:
#   RECIPE_NAME GRANULARITY DTYPE
# NOTE(review): the PARAM_SET_SIZE definition was lost in this chunk; 3
# matches the three fields unpacked below — confirm against the full file.
PARAM_SET_SIZE = 3

test_params = test_param[0].split()
# The token list must contain complete triples.
assert (len(test_params) % PARAM_SET_SIZE) == 0
test_params = list(chunks(test_params, PARAM_SET_SIZE))

for param_set in test_params:
    model_name, granularity, dtype = param_set

    # Parse the circle model to find its input tensors.
    model = os.path.join(artifact_dir, model_name + '.circle')
    with open(model, 'rb') as f:
        buf = f.read()
        circle_model = Model.GetRootAsModel(buf, 0)

    # This script assumes a single-subgraph model.
    assert (circle_model.SubgraphsLength() == 1)
    graph = circle_model.Subgraphs(0)
    inputs = graph.InputsAsNumpy()

    testcase = f'{model_name}.{granularity}.{dtype}'
    output = os.path.join(output_dir, f'{testcase}.{modes_to_input_h5_suffix[args.mode]}')

    # Create the h5 file; `with` guarantees it is closed (the original never
    # visibly closed it, leaking the handle on every iteration).
    with h5.File(output, 'w') as h5_file:
        group = h5_file.create_group("value")
        group.attrs['desc'] = "Input data for " + model

        # Input text files live under <input_dir>/<model>[_config]/<granularity>/<dtype>
        if args.config:
            input_text_dir = os.path.join(input_dir,
                                          f'{model_name}_config/{granularity}/{dtype}')
        else:
            input_text_dir = os.path.join(input_dir, f'{model_name}/{granularity}/{dtype}')

        records = sorted(glob.glob(input_text_dir + "/*.txt"))
        for i, record in enumerate(records):
            sample = group.create_group(str(i))
            sample.attrs['desc'] = "Input data " + str(i)
            with open(record, 'r') as f:
                # Line j of each record holds comma-separated values for
                # the model's j-th input tensor.
                for j, line in enumerate(f.readlines()):
                    data = np.array(line.split(','))
                    input_index = inputs[j]
                    tensor = graph.Tensors(input_index)
                    # NOTE(review): the np_type assignment was lost in this
                    # chunk; reconstructed as the dtype-mapper defined above.
                    np_type = to_numpy_type(tensor.Type())
                    input_data = np.array(data.reshape(tensor.ShapeAsNumpy()), np_type)
                    sample.create_dataset(str(j), data=input_data)