ONE - On-device Neural Engine
OperationUtils.h
/*
 * Copyright (c) 2020 Samsung Electronics Co., Ltd. All Rights Reserved
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef __ONERT_BACKEND_RUY_OPS_OPERATION_UTILS_H__
#define __ONERT_BACKEND_RUY_OPS_OPERATION_UTILS_H__

#include <backend/IPortableTensor.h>
#include <ir/DataType.h>
#include <ir/Padding.h>
#include <util/CalculateActivationRange.h>

#include <ruy/Shape.h>
#include <ruy/Types.h>

#include <limits>

using OperandType = onert::ir::DataType;
using namespace onert::util;

namespace onert
{
namespace backend
{
namespace ruy
{
namespace ops
{

inline nnfw::ruy::Shape getTensorShape(const IPortableTensor *tensor)
{
  if (tensor == nullptr)
    return nnfw::ruy::Shape();

  const ir::Shape &shape = tensor->get_info().shape();
  auto rank = shape.rank();
  nnfw::ruy::Shape ret(rank);
  auto data = ret.DimsData();
  for (int i = 0; i < rank; ++i)
  {
    data[i] = shape.dim(i);
  }
  return ret;
}

inline nnfw::ruy::FusedActivationFunctionType convertActivationType(const ir::Activation activation)
{
  switch (activation)
  {
    case ir::Activation::NONE:
      return nnfw::ruy::FusedActivationFunctionType::kNone;
    case ir::Activation::RELU:
      return nnfw::ruy::FusedActivationFunctionType::kRelu;
    case ir::Activation::RELU1:
      return nnfw::ruy::FusedActivationFunctionType::kRelu1;
    case ir::Activation::RELU6:
      return nnfw::ruy::FusedActivationFunctionType::kRelu6;
    case ir::Activation::TANH:
      return nnfw::ruy::FusedActivationFunctionType::kTanh;
    case ir::Activation::SIGMOID:
      return nnfw::ruy::FusedActivationFunctionType::kSigmoid;
    default:
      throw std::runtime_error{"RUY backend: Cannot convert activation type"};
  }
}

nnfw::ruy::PaddingType getPaddingType(ir::PaddingType ir_padding_type);

} // namespace ops
} // namespace ruy
} // namespace backend
} // namespace onert

#endif // __ONERT_BACKEND_RUY_OPS_OPERATION_UTILS_H__
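
A minimal usage sketch of the two inline helpers defined above, assuming the surrounding onert and ruy headers are on the include path. The configureKernel function and its control flow are hypothetical illustrations, not part of the documented file.

#include "OperationUtils.h"

namespace onert::backend::ruy::ops
{

// Hypothetical configure step for a ruy backend kernel (illustrative only).
void configureKernel(const IPortableTensor *input, ir::Activation activation)
{
  // Convert the runtime tensor's IR shape into the shape type the ruy kernels expect;
  // a nullptr tensor yields an empty nnfw::ruy::Shape.
  const nnfw::ruy::Shape input_shape = getTensorShape(input);

  // Map the IR activation enum onto the fused-activation enum used by the kernels;
  // unsupported values make convertActivationType throw std::runtime_error.
  const auto fused_act = convertActivationType(activation);

  // The shape and fused activation would then be passed into the kernel's params.
  (void)input_shape;
  (void)fused_act;
}

} // namespace onert::backend::ruy::ops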