// Trailing parameters of the configuration routine (the enclosing signature is
// truncated in this excerpt).
  float alpha, float beta, /* ... */)
{
  assert(input != nullptr);
  assert(back_prop_input != nullptr);
  assert(back_prop_output != nullptr);

  _back_prop_input = back_prop_input;
  _back_prop_output = back_prop_output;

  if (input->data_type() == OperandType::FLOAT32)
  {
    // alpha == +inf with beta == 0.f selects plain ReLU; alpha == 6.0f with
    // beta == 0.f selects ReLU6. No other (alpha, beta) pair has a gradient
    // kernel here.
    if ((alpha == std::numeric_limits<float>::infinity() || alpha == 6.0f) && beta == 0.f)
    {
      auto relu_cker = [&alpha]() {
        if (alpha == std::numeric_limits<float>::infinity())
          return ReLUGrad;
        else if (alpha == 6.0f)
          return ReLU6Grad;
        else
          throw std::runtime_error{"no supported relu kernel"};
      }();

      // The selected kernel receives the forward output, the incoming gradient
      // and the outgoing gradient buffer. In the elided lines this call is
      // presumably stored as the layer's backward kernel rather than executed
      // at configure time.
      relu_cker(getShape(output), getBuffer<float>(output), getShape(incoming),
                getBuffer<float>(incoming), getShape(outgoing), getBuffer<float>(outgoing));
    }
    else
    {
      throw std::runtime_error(
        "train ElementwiseActivationLayer : Unsupported ReLU activation type");
    }
  }
  else
  {
    throw std::runtime_error("train ElementwiseActivationLayer: Unsupported datatype");
  }

  // Activation types other than ReLU are rejected by the enclosing dispatch,
  // which is elided in this excerpt:
  //   throw std::runtime_error("train ElementwiseActivationLayer: Unsupported activation type yet");
}

void ReLUGrad(const Shape &output_shape, const float *output_data,
              const Shape &incoming_shape, const float *incoming_data,
              const Shape &grad_shape, float *grad_data);

void ReLU6Grad(const Shape &output_shape, const float *output_data,
               const Shape &incoming_shape, const float *incoming_data,
               const Shape &grad_shape, float *grad_data);
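
// A minimal sketch of what the two gradient kernels are expected to compute,
// not the actual kernel implementation. It assumes Shape exposes a FlatSize()
// element count and that all three shapes match; the *Sketch names below are
// hypothetical. ReLU passes the incoming gradient through wherever the forward
// output was positive; ReLU6 additionally zeroes positions that were clamped
// at 6 in the forward pass.
inline void ReLUGradSketch(const Shape &output_shape, const float *output_data,
                           const Shape &incoming_shape, const float *incoming_data,
                           const Shape &grad_shape, float *grad_data)
{
  (void)output_shape;
  (void)incoming_shape;
  const int size = grad_shape.FlatSize();
  for (int i = 0; i < size; ++i)
    grad_data[i] = (output_data[i] > 0.0f) ? incoming_data[i] : 0.0f;
}

inline void ReLU6GradSketch(const Shape &output_shape, const float *output_data,
                            const Shape &incoming_shape, const float *incoming_data,
                            const Shape &grad_shape, float *grad_data)
{
  (void)output_shape;
  (void)incoming_shape;
  const int size = grad_shape.FlatSize();
  for (int i = 0; i < size; ++i)
    grad_data[i] = (output_data[i] > 0.0f && output_data[i] < 6.0f) ? incoming_data[i] : 0.0f;
}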