#ifndef ACTIVATION_HPP
#define ACTIVATION_HPP

#include <vector>

double relu(double z);

std::vector<double> softmax(const std::vector<double> &inputs);

std::vector<std::vector<double>> softmax_derivative(const std::vector<double> &inputs);

double tanh(double z);

double smht(double z);

double gelu(double z);

double elu(double z, double alpha = 1.0);

double selu(double z, double alpha = 1.67326, double scale = 1.0507);

double selu_derivative(double z, double alpha = 1.67326, double scale = 1.0507);

double leaky_relu(double z, double alpha = 0.01);

double prelu(double z, double alpha);

double silu(double z);
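For reference, below is a minimal sketch of how a few of the declared activations are conventionally defined (standard ReLU, Leaky ReLU, SELU, and a numerically stabilized softmax). The bodies follow the textbook formulas and are illustrative assumptions, not the actual openGPMP implementations.

#include <algorithm>
#include <cmath>
#include <vector>

// ReLU: max(0, z).
double relu(double z) { return z > 0.0 ? z : 0.0; }

// Leaky ReLU: z for z > 0, alpha * z otherwise.
double leaky_relu(double z, double alpha = 0.01) {
    return z > 0.0 ? z : alpha * z;
}

// SELU: scale * z for z > 0, scale * alpha * (exp(z) - 1) otherwise.
double selu(double z, double alpha = 1.67326, double scale = 1.0507) {
    return z > 0.0 ? scale * z : scale * alpha * (std::exp(z) - 1.0);
}

// Softmax: exp(z_i - max_z) / sum_j exp(z_j - max_z); subtracting the
// maximum keeps the exponentials from overflowing.
std::vector<double> softmax(const std::vector<double> &inputs) {
    double max_z = *std::max_element(inputs.begin(), inputs.end());
    std::vector<double> out;
    out.reserve(inputs.size());
    double sum = 0.0;
    for (double v : inputs) {
        out.push_back(std::exp(v - max_z));
        sum += out.back();
    }
    for (double &v : out)
        v /= sum;
    return out;
}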
std::vector< std::vector< double > > softmax_derivative(const std::vector< double > &inputs)
Computes the derivative of the softmax activation function.
double gelu(double z)
Computes the Gaussian Error Linear Unit (GELU) activation function.
double gelu_derivative(double z)
Computes the derivative of the Gaussian Error Linear Unit (GELU) activation function.
double binary_step(double z)
Computes the binary step activation function.
double sigmoid_derivative(double z)
Computes the derivative of the sigmoid activation function.
double silu_derivative(double z)
Computes the derivative of the Sigmoid Linear Unit (SiLU or Swish) activation function.
double smht_derivative(double z)
Computes the derivative of the Soboleva modified hyperbolic tangent (smht) activation function.
double relu_derivative(double z)
Computes the derivative of the ReLU (Rectified Linear Unit) activation function.
double tanh_derivative(double z)
Computes the derivative of the hyperbolic tangent (tanh) activation function.
double silu(double z)
Computes the Sigmoid Linear Unit (SiLU or Swish) activation function.
double softplus(double z)
Computes the softplus activation function.
double sigmoid(double z)
Computes the sigmoid activation function.
double leaky_relu(double z, double alpha=0.01)
Computes the Leaky Rectified Linear Unit (Leaky ReLU) activation function.
double leaky_relu_derivative(double z, double alpha=0.01)
Computes the derivative of the Leaky Rectified Linear Unit (Leaky ReLU) activation function.
double selu(double z, double alpha=1.67326, double scale=1.0507)
Computes the Scaled Exponential Linear Unit (SELU) activation function.
double prelu(double z, double alpha)
Computes the Parametric Rectified Linear Unit (PReLU) activation function.
double elu(double z, double alpha=1.0)
Computes the Exponential Linear Unit (ELU) activation function.
double selu_derivative(double z, double alpha=1.67326, double scale=1.0507)
Computes the derivative of the Scaled Exponential Linear Unit (SELU) activation function.
double smht(double z)
Computes the Soboleva modified hyperbolic tangent (smht) activation function.
double tanh(double z)
Computes the hyperbolic tangent (tanh) activation function.
std::vector< double > softmax(const std::vector< double > &inputs)
Computes the softmax activation function.
double relu(double z)
Computes the ReLU (Rectified Linear Unit) activation function.
double elu_derivative(double z, double alpha=1.0)
Computes the derivative of the Exponential Linear Unit (ELU) activation function.
double prelu_derivative(double z, double alpha)
Computes the derivative of the Parametric Rectified Linear Unit (PReLU) activation function.
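As a worked illustration of how each *_derivative entry above relates to its base function, here is a minimal sketch of sigmoid and its derivative using the standard identity sigmoid'(z) = sigmoid(z) * (1 - sigmoid(z)); this follows the textbook definition and is an assumption for illustration, not a copy of the openGPMP implementation.

#include <cmath>

// Standard logistic sigmoid: 1 / (1 + exp(-z)).
double sigmoid(double z) {
    return 1.0 / (1.0 + std::exp(-z));
}

// Derivative expressed through the function itself:
// sigmoid'(z) = sigmoid(z) * (1 - sigmoid(z)).
double sigmoid_derivative(double z) {
    double s = sigmoid(z);
    return s * (1.0 - s);
}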
These functions belong to the source C++ openGPMP namespace.
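A hypothetical usage sketch follows; the include path "activation.hpp" and the unqualified calls are assumptions, since only the header guard ACTIVATION_HPP and the openGPMP project name appear above, and the enclosing namespace (if any) may require qualification.

#include <iostream>
#include <vector>
// Hypothetical include path for the header guarded by ACTIVATION_HPP.
#include "activation.hpp"

int main() {
    // Scalar activations applied to a single pre-activation value.
    double z = -0.5;
    std::cout << "relu(z)       = " << relu(z) << '\n';
    std::cout << "leaky_relu(z) = " << leaky_relu(z) << '\n'; // default alpha = 0.01
    std::cout << "selu(z)       = " << selu(z) << '\n';       // default alpha and scale

    // Vector activation: softmax over a small logit vector.
    std::vector<double> logits = {1.0, 2.0, 3.0};
    std::vector<double> probs = softmax(logits);
    for (double p : probs)
        std::cout << p << ' ';
    std::cout << '\n';
    return 0;
}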