ONE - On-device Neural Engine
onert::backend::train::ops::ElementwiseActivationLayer Class Reference

#include <ElementwiseActivationLayer.h>

Collaboration diagram for onert::backend::train::ops::ElementwiseActivationLayer:

Public Member Functions

 ElementwiseActivationLayer ()
 
void configureBackward (const IPortableTensor *input, IPortableTensor *back_prop_input, const IPortableTensor *back_prop_output, float alpha, float beta, ElementwiseActivationType op_type)
 
void forward (bool training) override
 
void backward () override
 
- Public Member Functions inherited from onert::exec::train::ITrainableFunction
virtual ~ITrainableFunction ()=default
 
virtual std::optional< backend::train::LayerScopeTensors > registerLayerScopeTensors ()
 
- Public Member Functions inherited from onert::backend::cpu::ops::ElementwiseActivationLayer
 ElementwiseActivationLayer ()
 
void configure (const IPortableTensor *input, IPortableTensor *output, float alpha, float beta, bool approximate, const ElementwiseActivationType op_type)
 
void run () override
 
void PopulateLookupTable (const ElementwiseActivationType op_type)
 
void EvalUsingLookupTable (const IPortableTensor *input, IPortableTensor *output)
 
- Public Member Functions inherited from onert::exec::IFunction
virtual ~IFunction ()=default
 
virtual void prepare ()
 

Additional Inherited Members

- Protected Attributes inherited from onert::backend::cpu::ops::ElementwiseActivationLayer
const IPortableTensor * _input
 
IPortableTensor * _output
 
uint8_t _table [256]
 
std::function< void(const IPortableTensor *input, IPortableTensor *output)> _kernel
 

Detailed Description

Definition at line 33 of file ElementwiseActivationLayer.h.
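
The generated documentation carries no brief for this class, so the following is a minimal usage sketch rather than code taken from the ONE runtime. The IPortableTensor pointers are placeholders for objects that the train backend's tensor registry would provide, the kReLU enumerators are assumed spellings of the cpu/train ElementwiseActivationType enums referenced on this page, and the exact wiring performed by the train backend's kernel generator may differ. Only member functions documented on this page are called.

#include <limits>
#include <memory>
// Plus the ONE runtime headers that declare the types used below
// (e.g. ElementwiseActivationLayer.h from this backend).

// Hypothetical helper: illustrates call order only.
void wire_relu_training_kernel(const onert::backend::IPortableTensor *input,
                               onert::backend::IPortableTensor *output,
                               onert::backend::IPortableTensor *back_prop_input,
                               const onert::backend::IPortableTensor *back_prop_output)
{
  auto act = std::make_unique<onert::backend::train::ops::ElementwiseActivationLayer>();

  // Plain ReLU: alpha = +infinity and beta = 0 (alpha = 6.0f would select ReLU6).
  // Forward path goes through the inherited cpu configure(), backward path
  // through configureBackward() documented below.
  act->configure(input, output, std::numeric_limits<float>::infinity(), 0.f,
                 /*approximate=*/false,
                 onert::backend::cpu::ops::ElementwiseActivationType::kReLU);
  act->configureBackward(input, back_prop_input, back_prop_output,
                         std::numeric_limits<float>::infinity(), 0.f,
                         onert::backend::train::ops::ElementwiseActivationType::kReLU);

  act->forward(/*training=*/true); // compute the activation output
  act->backward();                 // propagate back_prop_output into back_prop_input
}

In the runtime, the trainable function object would normally be retained and driven by the executor rather than called directly as in this sketch.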

Constructor & Destructor Documentation

◆ ElementwiseActivationLayer()

onert::backend::train::ops::ElementwiseActivationLayer::ElementwiseActivationLayer ( )

Definition at line 27 of file ElementwiseActivationLayer.cc.

27 : cpu::ops::ElementwiseActivationLayer()
28{
29 // DO NOTHING
30}

Member Function Documentation

◆ backward()

void onert::backend::train::ops::ElementwiseActivationLayer::backward ( )
override virtual

Implements onert::exec::train::ITrainableFunction.

Definition at line 88 of file ElementwiseActivationLayer.cc.

89{
90 _backward_kernel(_output, _back_prop_output, _back_prop_input);
91}

References onert::backend::cpu::ops::ElementwiseActivationLayer::_output.
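
backward() simply invokes the _backward_kernel installed by configureBackward(), passing the forward output, the incoming gradient tensor (_back_prop_output), and the outgoing gradient tensor (_back_prop_input). For the supported ReLU case that kernel is nnfw::cker::train::ReLUGrad; the element-wise rule it applies can be illustrated with the self-contained reference below, which is an illustration of the math only and omits the Shape arguments handled by the cker kernel.

#include <cstddef>
#include <iostream>
#include <vector>

// Reference-only ReLU backward rule: the gradient from the next layer passes
// through wherever the forward output was positive and is zeroed elsewhere,
// i.e. dL/dx = dL/dy * [y > 0].
void relu_grad_ref(const std::vector<float> &output,   // forward result y = max(x, 0)
                   const std::vector<float> &incoming, // dL/dy arriving from above
                   std::vector<float> &outgoing)       // dL/dx propagated backwards
{
  for (std::size_t i = 0; i < output.size(); ++i)
    outgoing[i] = output[i] > 0.f ? incoming[i] : 0.f;
}

int main()
{
  std::vector<float> output{0.f, 2.5f, 0.f, 1.0f};
  std::vector<float> incoming{0.1f, 0.2f, 0.3f, 0.4f};
  std::vector<float> outgoing(output.size());
  relu_grad_ref(output, incoming, outgoing);
  for (float g : outgoing)
    std::cout << g << ' '; // prints: 0 0.2 0 0.4
  std::cout << '\n';
}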

◆ configureBackward()

void onert::backend::train::ops::ElementwiseActivationLayer::configureBackward ( const IPortableTensor *  input,
IPortableTensor *  back_prop_input,
const IPortableTensor *  back_prop_output,
float  alpha,
float  beta,
ElementwiseActivationType  op_type 
)

Definition at line 32 of file ElementwiseActivationLayer.cc.

37{
38 assert(input != nullptr);
39 assert(back_prop_input != nullptr);
40 assert(back_prop_output != nullptr);
41
42 _back_prop_input = back_prop_input;
43 _back_prop_output = back_prop_output;
44
45 _op_type = op_type;
46
47 switch (op_type)
48 {
49 case ElementwiseActivationType::kReLU:
50 if (input->data_type() == OperandType::FLOAT32)
51 {
52 if ((alpha == std::numeric_limits<float>::infinity() || alpha == 6.0f) && beta == 0.f)
53 {
54 auto relu_cker = [&alpha]() {
55 if (alpha == std::numeric_limits<float>::infinity())
56 return nnfw::cker::train::ReLUGrad;
57 else if (alpha == 6.0f)
58 return nnfw::cker::train::ReLU6Grad;
59 else
60 throw std::runtime_error{"no supported relu kernel"};
61 }();
62
63 _backward_kernel = [relu_cker](const IPortableTensor *output,
64 const IPortableTensor *incoming,
65 IPortableTensor *outgoing) {
66 relu_cker(getShape(output), getBuffer<float>(output), getShape(incoming),
67 getBuffer<float>(incoming), getShape(outgoing), getBuffer<float>(outgoing));
68 };
69 }
70 else
71 {
72 throw std::runtime_error(
73 "train ElementwiseActivationLayer : Unsupported ReLU activation type");
74 }
75 }
76 else
77 {
78 throw std::runtime_error("train ElementwiseActivationLayer: Unsupported datatype");
79 }
80 break;
81 default:
82 throw std::runtime_error("train ElementwiseActivationLayer: Unsupported activation type yet");
83 }
84}
void ReLUGrad(const Shape &output_shape, const float *output_data, const Shape &incoming_shape, const float *incoming_data, const Shape &grad_shape, float *grad_data)
Definition ReLU.h:32
void ReLU6Grad(const Shape &output_shape, const float *output_data, const Shape &incoming_shape, const float *incoming_data, const Shape &grad_shape, float *grad_data)
Definition ReLU6.h:31
nnfw::cker::Shape getShape(const IPortableTensor *tensor)
Get shape of tensor.

References onert::backend::train::ops::getShape(), onert::backend::train::ops::kReLU, nnfw::cker::train::ReLU6Grad(), and nnfw::cker::train::ReLUGrad().
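
In short, configureBackward() stores the gradient tensors and installs a backward kernel chosen from (alpha, beta): alpha == +infinity with beta == 0 selects nnfw::cker::train::ReLUGrad, alpha == 6.0f with beta == 0 selects nnfw::cker::train::ReLU6Grad, and any other combination, a non-FLOAT32 input, or an activation type other than kReLU throws. As a companion to the ReLU reference above, here is a sketch of the ReLU6 rule, an illustration rather than the cker code, assuming the usual convention that the gradient is zero on the clamped regions.

#include <cstddef>
#include <vector>

// Reference-only ReLU6 backward rule: the incoming gradient passes where the
// forward output lies strictly inside the un-clamped region (0, 6), else 0.
void relu6_grad_ref(const std::vector<float> &output, const std::vector<float> &incoming,
                    std::vector<float> &outgoing)
{
  for (std::size_t i = 0; i < output.size(); ++i)
    outgoing[i] = (output[i] > 0.f && output[i] < 6.f) ? incoming[i] : 0.f;
}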

◆ forward()

void onert::backend::train::ops::ElementwiseActivationLayer::forward ( bool  training)
override virtual

Implements onert::exec::train::ITrainableFunction.

The documentation for this class was generated from the following files:

ElementwiseActivationLayer.h
ElementwiseActivationLayer.cc