void ElementwiseActivationLayer::configureBackward(const IPortableTensor *input,
                                                   IPortableTensor *back_prop_input,
                                                   const IPortableTensor *back_prop_output,
                                                   float alpha, float beta, ElementwiseActivationType op_type)
{
  assert(input != nullptr);
  assert(back_prop_input != nullptr);
  assert(back_prop_output != nullptr);

  _back_prop_input = back_prop_input;
  _back_prop_output = back_prop_output;

  switch (op_type)
  {
    case ElementwiseActivationType::kReLU:
      if (input->data_type() == OperandType::FLOAT32)
      {
        if ((alpha == std::numeric_limits<float>::infinity() || alpha == 6.0f) && beta == 0.f)
        {
          // alpha == infinity encodes plain ReLU, alpha == 6.0f encodes ReLU6.
          auto relu_cker = [&alpha]() {
            if (alpha == std::numeric_limits<float>::infinity())
              return nnfw::cker::train::ReLUGrad;
            else if (alpha == 6.0f)
              return nnfw::cker::train::ReLU6Grad;
            else
              throw std::runtime_error{"no supported relu kernel"};
          }();
          _backward_kernel = [relu_cker](const IPortableTensor *output,
                                         const IPortableTensor *incoming,
                                         IPortableTensor *outgoing) {
            relu_cker(getShape(output), getBuffer<float>(output), getShape(incoming),
                      getBuffer<float>(incoming), getShape(outgoing), getBuffer<float>(outgoing));
          };
        }
        else
        {
          throw std::runtime_error(
            "train ElementwiseActivationLayer : Unsupported ReLU activation type");
        }
      }
      else
      {
        throw std::runtime_error("train ElementwiseActivationLayer: Unsupported datatype");
      }
      break;
    default:
      throw std::runtime_error("train ElementwiseActivationLayer: Unsupported activation type yet");
  }
}
// Signatures of the gradient kernels selected above. In each, output_* is the
// forward activation output, incoming_* is the gradient arriving from the next
// operation (dL/dy), and grad_* receives the gradient w.r.t. the activation
// input (dL/dx).
void ReLUGrad(const Shape &output_shape, const float *output_data,
              const Shape &incoming_shape, const float *incoming_data,
              const Shape &grad_shape, float *grad_data);

void ReLU6Grad(const Shape &output_shape, const float *output_data,
               const Shape &incoming_shape, const float *incoming_data,
               const Shape &grad_shape, float *grad_data);
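As a rough guide to what these kernels compute, here is a minimal, self-contained sketch of the element-wise gradient rules, written over flat arrays instead of the library's Shape type; the *Sketch names and flat-size signatures are illustrative assumptions, not the cker API.

#include <cstddef>

// ReLU backward: the incoming gradient passes through wherever the forward
// output was positive and is zeroed elsewhere.
void ReLUGradSketch(std::size_t size, const float *output_data,
                    const float *incoming_data, float *grad_data)
{
  for (std::size_t i = 0; i < size; ++i)
    grad_data[i] = output_data[i] > 0.0f ? incoming_data[i] : 0.0f;
}

// ReLU6 backward: the gradient passes through only on the linear segment
// 0 < y < 6; it is zero where the forward output was clipped at 0 or 6.
void ReLU6GradSketch(std::size_t size, const float *output_data,
                     const float *incoming_data, float *grad_data)
{
  for (std::size_t i = 0; i < size; ++i)
    grad_data[i] = (output_data[i] > 0.0f && output_data[i] < 6.0f) ? incoming_data[i] : 0.0f;
}

The separate Shape parameters in the real signatures presumably let the kernels check that the output, incoming gradient, and result buffers agree in size before applying the same element-wise rule.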