39 _activation{ir::Activation::
NONE}, _act_back_prop_output{nullptr}
50 _back_prop_lhs = back_prop_lhs;
51 _back_prop_rhs = back_prop_rhs;
52 _back_prop_output = back_prop_output;
53 _arithmetic_type = arithmetic_type;
54 _activation = activation;
59 _act_back_prop_output->setBuffer(std::make_shared<basic::Allocator>(
_output->
total_size()));
68 if (_back_prop_output->
data_type() != OperandType::FLOAT32)
69 throw std::runtime_error{
"Unsupported Data Type"};
77 catch (
const std::exception &e)
79 throw std::runtime_error{
"train BinaryArithmeticLayer: " + std::string(e.what())};
81 assert(backprop_act !=
nullptr);
85 getShape(backprop_act), getBuffer<float>(backprop_act),
getShape(_back_prop_lhs),
86 getBuffer<float>(_back_prop_lhs),
getShape(_back_prop_rhs), getBuffer<float>(_back_prop_rhs),
A tensor class that is portable for other backends.
size_t total_size() const override final
const ir::OperandInfo & get_info() const
ir::DataType data_type() const override final
const IPortableTensor * _rhs
const IPortableTensor * _lhs
IPortableTensor * _output
void configureBackward(IPortableTensor *back_prop_lhs, IPortableTensor *back_prop_rhs, const IPortableTensor *back_prop_output, const ir::Activation activation, const ArithmeticType arithmetic_type)
void forward(bool training) override
void BinaryArithmeticGrad(const Shape &lhs_shape, const T *lhs_data, const Shape &rhs_shape, const T *rhs_data, const Shape &incoming_shape, const T *incoming_data, const Shape &lhs_grad_shape, T *lhs_grad_data, const Shape &rhs_grad_shape, T *rhs_grad_data, ArithmeticType arithmetic_type)
const IPortableTensor * backpropActivation(const ir::Activation &activation, const IPortableTensor *output, const IPortableTensor *input_backprop, IPortableTensor *output_backprop)
backpropagate activation
nnfw::cker::Shape getShape(const IPortableTensor *tensor)
Get the shape of the given tensor.