ONE - On-device Neural Engine
ConvBackend.h
/*
 * Copyright (c) 2018 Samsung Electronics Co., Ltd. All Rights Reserved
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef __CONV_BACKEND_H__
#define __CONV_BACKEND_H__

#include <nnsuite/conv/Model.h>
#include <nnkit/support/tflite/AbstractBackend.h>

#include <vector>

class ConvBackend final : public nnkit::support::tflite::AbstractBackend
{
public:
  explicit ConvBackend(const nnsuite::conv::Model &model);

public:
  ::tflite::Interpreter &interpreter(void) override { return _interp; }

private:
  // NOTE tflite interpreter just stores the pointer of its name
  const std::string _ifm_name;
  const std::string _ofm_name;

  // NOTE kernel data should live longer than tflite interpreter itself
  std::vector<float> _kernel;

  // NOTE bias is missing in conv sample model, but conv op kernel in
  //      tensorflow lite interpreter does not work without bias.
  //
  //      Let's feed zero-bias as a workaround
  std::vector<float> _bias;

private:
  ::tflite::Interpreter _interp;
};

#endif // __CONV_BACKEND_H__
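
The NOTE comments above capture two lifetime constraints that the constructor (implemented in ConvBackend.cpp, not shown here) has to respect: the TensorFlow Lite interpreter only stores raw pointers to tensor names and to read-only tensor data, so the name strings, the kernel buffer, and the zero-filled bias buffer must all outlive _interp. The sketch below illustrates that pattern with the legacy ::tflite::Interpreter graph-building API; it is a hypothetical illustration only, and the function name, tensor shapes, and include paths are assumptions rather than part of this header.

#include <tensorflow/lite/builtin_op_data.h>
#include <tensorflow/lite/interpreter.h>
#include <tensorflow/lite/kernels/register.h>

#include <cstdlib>
#include <string>
#include <vector>

// Hypothetical sketch: build a single CONV_2D node whose bias input is a
// zero-filled buffer owned by the caller (mirroring the _bias member above).
// The interpreter only keeps pointers, so the names, kernel, and bias passed
// here must stay alive for as long as the interpreter is used.
void build_conv_with_zero_bias(::tflite::Interpreter &interp,
                               const std::string &ifm_name, const std::string &ofm_name,
                               const std::vector<float> &kernel, const std::vector<float> &bias)
{
  // Tensor layout (placeholder shapes): 0 = IFM, 1 = kernel, 2 = bias, 3 = OFM
  interp.AddTensors(4);

  TfLiteQuantizationParams quant{}; // float32 tensors, no quantization

  interp.SetTensorParametersReadWrite(0, kTfLiteFloat32, ifm_name.c_str(), {1, 3, 3, 1}, quant);
  interp.SetTensorParametersReadOnly(1, kTfLiteFloat32, "kernel", {1, 1, 1, 1}, quant,
                                     reinterpret_cast<const char *>(kernel.data()),
                                     kernel.size() * sizeof(float));
  // The conv sample model has no bias, so a zero-filled buffer is registered instead.
  interp.SetTensorParametersReadOnly(2, kTfLiteFloat32, "bias", {1}, quant,
                                     reinterpret_cast<const char *>(bias.data()),
                                     bias.size() * sizeof(float));
  interp.SetTensorParametersReadWrite(3, kTfLiteFloat32, ofm_name.c_str(), {1, 3, 3, 1}, quant);

  // CONV_2D takes (input, filter, bias); builtin_data is heap-allocated because
  // the interpreter takes ownership and releases it with free().
  auto *param = static_cast<TfLiteConvParams *>(std::calloc(1, sizeof(TfLiteConvParams)));
  param->padding = kTfLitePaddingValid;
  param->stride_width = 1;
  param->stride_height = 1;
  param->dilation_width_factor = 1;
  param->dilation_height_factor = 1;
  param->activation = kTfLiteActNone;

  ::tflite::ops::builtin::BuiltinOpResolver resolver;
  interp.AddNodeWithParameters({0, 1, 2}, {3}, nullptr, 0, param,
                               resolver.FindOp(::tflite::BuiltinOperator_CONV_2D, 1));

  interp.SetInputs({0});
  interp.SetOutputs({3});
}

A caller would then obtain the interpreter through the interpreter() accessor, call AllocateTensors(), fill the input tensor, and run Invoke().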