ONE - On-device Neural Engine
Loading...
Searching...
No Matches
nnc::InterpreterBackend Class Reference (final)

#include <InterpreterBackend.h>

Public Member Functions

 InterpreterBackend (std::string input_dir, std::string output_dir)
 
void run (mir::Graph *data)
 

Detailed Description

Definition at line 27 of file InterpreterBackend.h.

Constructor & Destructor Documentation

◆ InterpreterBackend()

nnc::InterpreterBackend::InterpreterBackend ( std::string  input_dir,
std::string  output_dir 
)

Definition at line 132 of file InterpreterBackend.cpp.

133 : _input_dir(std::move(input_dir)), _output_dir(std::move(output_dir))
134{
135}

Member Function Documentation

◆ run()

void nnc::InterpreterBackend::run ( mir::Graph * data)

Definition at line 137 of file InterpreterBackend.cpp.

138{
139 assert(graph);
140
141 MIRInterpreter interpreter;
142
143 for (const auto *input_op : graph->getInputs())
144 {
145 const Operation::Output *input = input_op->getOutput(0);
146
147 std::string tensor_name = input->getName();
148 assert(!tensor_name.empty());
149 std::replace(tensor_name.begin(), tensor_name.end(), '/', '_');
150 std::string filename = _input_dir + "/" + tensor_name + ".dat";
151
152 TensorVariant tensor = readTensorFromFile(filename, input->getType());
153 interpreter.setTensor(input, std::move(tensor));
154 }
155
156 graph->accept(&interpreter);
157
158 for (const auto *output_op : graph->getOutputs())
159 {
160 const auto &output_name = output_op->getInput(0)->getName();
161
162#ifdef NNC_HDF5_SUPPORTED
163 const auto &tensor = interpreter.getTensor(output_op->getInput(0));
164 writeTensorToHDF5File(tensor, output_name, _output_dir);
165#else
166 std::cout << "Result <" << output_name << "> wasn't saved, due to lack of HDF5" << std::endl;
167#endif // NNC_HDF5_SUPPORTED
168 }
169}
const char * tensor_name(const circle::Tensor *tensor)

Referenced by package.infer.session::inference().


The documentation for this class was generated from the following files: