ONE - On-device Neural Engine
nnkit::support::moco::tf::Backend Class Reference (final)

#include <Backend.h>

[Collaboration diagram for nnkit::support::moco::tf::Backend]

Public Member Functions

 Backend (const char *pb_path, const char *info_path)
 
void setInputOutputFromGraph (const std::unique_ptr< loco::Graph > &loco_graph, ParsedTensors &parsed_tensors)
 
void prepare (const std::function< void(nnkit::TensorContext &)> &f) override
 
void run (void) override
 
void teardown (const std::function< void(nnkit::TensorContext &)> &f)
 
- Public Member Functions inherited from nnkit::Backend
virtual ~Backend ()=default
 
virtual void prepare (const std::function< void(TensorContext &)> &f)=0
 
virtual void teardown (const std::function< void(TensorContext &)> &f)=0
 

Detailed Description

Definition at line 39 of file Backend.h.
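
This class plugs a TensorFlow model, loaded through the moco frontend, into nnkit's backend interface and runs it with the locomotiv interpreter. For orientation, below is a minimal usage sketch based only on the constructor and the prepare()/run()/teardown() signatures listed above; the file names, callback bodies, and availability of <Backend.h> on the include path are assumptions, not taken from this page.

#include <Backend.h> // assumed to be reachable as shown in the include line above

int main(void)
{
  // "model.pb" (a frozen TensorFlow GraphDef) and "test.info" are placeholder paths.
  nnkit::support::moco::tf::Backend backend("model.pb", "test.info");

  // prepare() hands an input TensorContext to the callback so test inputs
  // can be written before inference.
  backend.prepare([](nnkit::TensorContext &ctx) {
    // fill input tensors here (details depend on the TensorContext accessors)
  });

  // run() forwards to locomotiv::Session::infer().
  backend.run();

  // teardown() hands an output TensorContext to the callback so results
  // can be read back after inference.
  backend.teardown([](nnkit::TensorContext &ctx) {
    // read output tensors here
  });

  return 0;
}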

Constructor & Destructor Documentation

◆ Backend()

nnkit::support::moco::tf::Backend::Backend (const char *pb_path, const char *info_path)

Definition at line 87 of file Backend.cpp.

{
  // read test.info
  ::moco::ModelSignature sig;

  auto parsed_tensors = nnkit::support::tftestinfo::parse(info_path);

  for (auto &parsed_tensor : parsed_tensors)
  {
    if (parsed_tensor->kind() == ParsedTensor::Kind::Input)
    {
      sig.add_input(::moco::TensorName(parsed_tensor->name()));
    }
    else
    {
      sig.add_output(::moco::TensorName(parsed_tensor->name()));
    }
    if (parsed_tensor->hasShape())
      sig.shape(parsed_tensor->name(), parsed_tensor->shape());
  }

  // get loco::Graph
  ::moco::tf::Frontend moco;

  // After converting, all shapes will be determined.
  auto loco_graph = moco.load(sig, pb_path, ::moco::tf::Frontend::FileType::Binary);

  // Set input and output from loco graph.
  setInputOutputFromGraph(loco_graph, parsed_tensors);

  // set member vars
  _loco_graph = std::move(loco_graph);
  _sess = std::make_unique<locomotiv::Session>(_loco_graph.get());
}

References moco::ModelSignature::add_input(), moco::ModelSignature::add_output(), moco::tf::Frontend::Binary, nnkit::support::tftestinfo::ParsedTensor::Input, nnkit::support::tftestinfo::parse(), setInputOutputFromGraph(), and moco::ModelSignature::shape().
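
The constructor essentially translates test.info into a ::moco::ModelSignature, loads the GraphDef through the moco TensorFlow frontend, and builds a locomotiv session for the resulting graph. The following hedged sketch illustrates that frontend path on its own; the header paths, the tensor and file names, and the assumption that TensorName accepts a "name:index" string are not confirmed by this page.

#include <moco/tf/Frontend.h>   // header paths are assumptions
#include <locomotiv/Session.h>

int main(void)
{
  // "graph.pb" is a placeholder for a frozen TensorFlow GraphDef;
  // the tensor names are hypothetical.
  ::moco::ModelSignature sig;
  sig.add_input(::moco::TensorName("input:0"));
  sig.add_output(::moco::TensorName("output:0"));

  ::moco::tf::Frontend frontend;
  auto graph = frontend.load(sig, "graph.pb", ::moco::tf::Frontend::FileType::Binary);

  // The backend wraps the resulting loco::Graph in a locomotiv::Session.
  locomotiv::Session session(graph.get());

  return 0;
}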

Member Function Documentation

◆ prepare()

void nnkit::support::moco::tf::Backend::prepare (const std::function< void(nnkit::TensorContext &)> &f) override

Definition at line 122 of file Backend.cpp.

{
  using nncc::core::ADT::tensor::Buffer;
  using nncc::core::ADT::tensor::LexicalLayout;
  using nncc::core::ADT::tensor::make_buffer;

  // allocate memory for inputs of loco interpreter
  std::vector<std::unique_ptr<Buffer<float>>> buf_list; // TODO Support more types other than float

  for (int n = 0; n < _inputs.size(); n++)
  {
    auto buf = make_buffer<float, LexicalLayout>(_inputs.at(n)->shape());
    buf_list.emplace_back(std::make_unique<nncc::core::ADT::tensor::Buffer<float>>(buf));
  }

  // fill test input values
  InputTensorContext ctx(_inputs, buf_list);
  f(ctx);

  // set input of locomotiv
  for (int n = 0; n < buf_list.size(); n++)
  {
    auto buf = buf_list.at(n).get();
    auto node_data = locomotiv::make_data(*buf);
    _sess->set_input(n, std::move(node_data));
  }
}

References nncc::core::ADT::tensor::make_buffer(), and locomotiv::make_data().
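
The two helpers referenced above do the heavy lifting in prepare(): make_buffer() allocates a host tensor and make_data() copies it into a locomotiv NodeData that the session can consume. A minimal sketch of that path follows; the header paths and the brace-initialized Shape/Index dimensions are assumptions, not taken from this page.

#include <nncc/core/ADT/tensor/Buffer.h>        // header paths are assumptions
#include <nncc/core/ADT/tensor/Shape.h>
#include <nncc/core/ADT/tensor/Index.h>
#include <nncc/core/ADT/tensor/LexicalLayout.h>
#include <locomotiv/NodeData.h>

int main(void)
{
  using nncc::core::ADT::tensor::Index;
  using nncc::core::ADT::tensor::LexicalLayout;
  using nncc::core::ADT::tensor::Shape;
  using nncc::core::ADT::tensor::make_buffer;

  // Allocate a 1x3 float buffer with lexical (row-major) layout and fill it.
  auto buf = make_buffer<float, LexicalLayout>(Shape{1, 3});
  buf.at(Index{0, 0}) = 0.0f;
  buf.at(Index{0, 1}) = 1.0f;
  buf.at(Index{0, 2}) = 2.0f;

  // Copy the buffer into a NodeData, as prepare() does before Session::set_input().
  auto node_data = locomotiv::make_data(buf);

  return 0;
}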

◆ run()

void nnkit::support::moco::tf::Backend::run (void) override

Implements nnkit::Backend.

Definition at line 150 of file Backend.cpp.

{ _sess->infer(); }

Referenced by package.infer.session::inference().

◆ setInputOutputFromGraph()

void nnkit::support::moco::tf::Backend::setInputOutputFromGraph (const std::unique_ptr< loco::Graph > &loco_graph, ParsedTensors &parsed_tensors)

Definition at line 45 of file Backend.cpp.

{
  auto inputs = loco_graph.get()->inputs();
  auto outputs = loco_graph.get()->outputs();
  uint32_t input_idx = 0;
  uint32_t output_idx = 0;
  for (auto &parsed_tensor : parsed_tensors)
  {
    if (parsed_tensor->kind() == ParsedTensor::Kind::Input)
    {
      if (!parsed_tensor->hasShape())
      {
        auto input_shape = inputs->at(input_idx++)->shape();

        uint32_t size = input_shape->rank();
        parsed_tensor->mutable_shape().resize(size);
        for (uint32_t d = 0; d < size; d++)
        {
          parsed_tensor->mutable_shape().dim(d) = input_shape->dim(d).value();
        }
      }
      _inputs.emplace_back(std::move(parsed_tensor));
    }
    else // Output
    {
      if (!parsed_tensor->hasShape())
      {
        auto output_shape = outputs->at(output_idx++)->shape();

        uint32_t size = output_shape->rank();
        parsed_tensor->mutable_shape().resize(size);
        for (uint32_t d = 0; d < size; d++)
        {
          parsed_tensor->mutable_shape().dim(d) = output_shape->dim(d).value();
        }
      }
      _outputs.emplace_back(std::move(parsed_tensor));
    }
  }
}

References nnkit::support::tftestinfo::ParsedTensor::Input.

Referenced by Backend().

◆ teardown()

void nnkit::support::moco::tf::Backend::teardown (const std::function< void(nnkit::TensorContext &)> &f)

Definition at line 152 of file Backend.cpp.

{
  // get output
  OutputTensorContext ctx(_outputs, _sess.get());
  f(ctx);
}

The documentation for this class was generated from the following files:
Backend.h
Backend.cpp