ONE - On-device Neural Engine
ElementwiseActivation.cc
/*
 * Copyright (c) 2020 Samsung Electronics Co., Ltd. All Rights Reserved
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "ir/operation/ElementwiseActivation.h"
#include "ir/OperationVisitor.h"

#include <limits> // std::numeric_limits, used for the static infinity member below
#include <unordered_map>

namespace onert
{
namespace ir
{
namespace operation
{

void ElementwiseActivation::accept(OperationVisitor &v) const { v.visit(*this); }

ElementwiseActivation::ElementwiseActivation(const OperandIndexSequence &inputs,
                                             const OperandIndexSequence &outputs,
                                             const Param &param)
  : Operation{OperandConstraint::createExact(1u), inputs, outputs}, _param{param}
{
  if (param.op_type == Type::LOGISTIC)
  {
    assert(param.alpha == 0.0f && param.beta == 0.0f &&
           "Logistic is supported only as the sigmoid function (L=1, k=1, x0=0), "
           "so do not use alpha and beta");
  }
  else if (param.op_type == Type::RELU)
  {
    assert(param.alpha >= param.beta && "ReLU's alpha must be greater than or equal to beta");
  }
  else if (param.op_type == Type::TANH)
  {
    assert(param.alpha == 1.0f && param.beta == 1.0f &&
           "f(x) = alpha * tanh(beta * x): Tanh is supported only when "
           "alpha and beta are both 1.f");
  }
}

std::string ElementwiseActivation::name() const
{
  using ElementwiseActivationType = onert::ir::operation::ElementwiseActivation::Type;
  static const std::unordered_map<Type, std::string> name_map{
    {ElementwiseActivationType::ELU, "ELU"},
    {ElementwiseActivationType::LOGISTIC, "Logistic"},
    {ElementwiseActivationType::RELU, "ReLU"},
    {ElementwiseActivationType::TANH, "Tanh"},
    {ElementwiseActivationType::LEAKY_RELU, "LeakyRelu"}};
  return name_map.at(_param.op_type);
}

float ElementwiseActivation::infinity = std::numeric_limits<float>::infinity();

} // namespace operation
} // namespace ir
} // namespace onert
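
The constructor's assertions hint at how the Param fields (alpha, beta) are meant to be read for each activation type. The following is a minimal standalone sketch, not onert API: it illustrates the functions those parameters appear to describe. The clamp reading of RELU (beta as lower bound, alpha as upper bound, with ElementwiseActivation::infinity for an unbounded ReLU) is an assumption inferred from the "alpha >= beta" check and the static infinity member, not something this file states explicitly.

// Hedged sketch of the element-wise functions implied by the assertions above.
#include <algorithm>
#include <cmath>
#include <limits>

// RELU family: e.g. alpha = 6.f, beta = 0.f behaves like ReLU6;
// alpha = +infinity, beta = 0.f like a plain ReLU (assumed interpretation).
float relu_like(float x, float alpha, float beta) { return std::min(std::max(x, beta), alpha); }

// LOGISTIC: the standard sigmoid (L = 1, k = 1, x0 = 0), so alpha/beta are unused.
float logistic(float x) { return 1.0f / (1.0f + std::exp(-x)); }

// TANH: f(x) = alpha * tanh(beta * x); the constructor only accepts alpha = beta = 1.f.
float tanh_act(float x, float alpha, float beta) { return alpha * std::tanh(beta * x); }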