openGPMP
Open Source Mathematics Package
gpmp::ml::Activation Class Reference

Activation Methods.
#include <activators.hpp>
Public Member Functions

double sigmoid(double z)
    Computes the sigmoid activation function.
double sigmoid_derivative(double z)
    Computes the derivative of the sigmoid activation function.
double relu(double z)
    Computes the ReLU (Rectified Linear Unit) activation function.
double relu_derivative(double z)
    Computes the derivative of the ReLU (Rectified Linear Unit) activation function.
std::vector<double> softmax(const std::vector<double> &inputs)
    Computes the softmax activation function.
std::vector<std::vector<double>> softmax_derivative(const std::vector<double> &inputs)
    Computes the derivative of the softmax activation function.
double binary_step(double z)
    Computes the binary step activation function.
double tanh(double z)
    Computes the hyperbolic tangent (tanh) activation function.
double tanh_derivative(double z)
    Computes the derivative of the hyperbolic tangent (tanh) activation function.
double smht(double z)
    Computes the Soboleva modified hyperbolic tangent (smht) activation function.
double smht_derivative(double z)
    Computes the derivative of the Soboleva modified hyperbolic tangent (smht) activation function.
double gelu(double z)
    Computes the Gaussian Error Linear Unit (GELU) activation function.
double gelu_derivative(double z)
    Computes the derivative of the Gaussian Error Linear Unit (GELU) activation function.
double softplus(double z)
    Computes the softplus activation function.
double elu(double z, double alpha = 1.0)
    Computes the Exponential Linear Unit (ELU) activation function.
double elu_derivative(double z, double alpha = 1.0)
    Computes the derivative of the Exponential Linear Unit (ELU) activation function.
double selu(double z, double alpha = 1.67326, double scale = 1.0507)
    Computes the Scaled Exponential Linear Unit (SELU) activation function.
double selu_derivative(double z, double alpha = 1.67326, double scale = 1.0507)
    Computes the derivative of the Scaled Exponential Linear Unit (SELU) activation function.
double leaky_relu(double z, double alpha = 0.01)
    Computes the Leaky Rectified Linear Unit (Leaky ReLU) activation function.
double leaky_relu_derivative(double z, double alpha = 0.01)
    Computes the derivative of the Leaky Rectified Linear Unit (Leaky ReLU) activation function.
double prelu(double z, double alpha)
    Computes the Parametric Rectified Linear Unit (PReLU) activation function.
double prelu_derivative(double z, double alpha)
    Computes the derivative of the Parametric Rectified Linear Unit (PReLU) activation function.
double silu(double z)
    Computes the Sigmoid Linear Unit (SiLU or Swish) activation function.
double silu_derivative(double z)
    Computes the derivative of the Sigmoid Linear Unit (SiLU or Swish) activation function.
Detailed Description

Activation Methods.

Definition at line 47 of file activators.hpp.
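A minimal usage sketch, assuming Activation is default-constructible and the header is reachable via the include shown above; both are assumptions, not confirmed by this page:

    #include <activators.hpp>
    #include <cstdio>

    int main() {
        gpmp::ml::Activation act;
        // scalar activations take a single pre-activation value z
        std::printf("sigmoid(0.5)      = %f\n", act.sigmoid(0.5));
        std::printf("relu(-1.0)        = %f\n", act.relu(-1.0));
        // parametric variants take an explicit alpha
        std::printf("prelu(-1.0, 0.25) = %f\n", act.prelu(-1.0, 0.25));
        return 0;
    }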
double gpmp::ml::Activation::binary_step(double z)

Computes the binary step activation function.

Parameters:
    z    The input value

Definition at line 84 of file activators.cpp.
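For reference, a minimal sketch of the conventional binary step; the threshold convention at z = 0 is an assumption, and binary_step_sketch is a hypothetical name, not the library's:

    // binary step: 1 for z >= 0, 0 otherwise (threshold convention assumed)
    double binary_step_sketch(double z) {
        return z >= 0.0 ? 1.0 : 0.0;
    }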
double gpmp::ml::Activation::elu(double z, double alpha = 1.0)

Computes the Exponential Linear Unit (ELU) activation function.

Parameters:
    z        The input value
    alpha    The ELU parameter (default: 1.0)

Definition at line 126 of file activators.cpp.
double gpmp::ml::Activation::elu_derivative(double z, double alpha = 1.0)

Computes the derivative of the Exponential Linear Unit (ELU) activation function.

Parameters:
    z        The input value
    alpha    The ELU parameter (default: 1.0)

Definition at line 130 of file activators.cpp.
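A sketch of the standard ELU and its derivative, assuming the usual piecewise definition; the library's exact edge handling may differ, and the _sketch names are hypothetical:

    #include <cmath>

    // ELU: identity for positive z, exponential decay toward -alpha otherwise
    double elu_sketch(double z, double alpha = 1.0) {
        return z > 0.0 ? z : alpha * (std::exp(z) - 1.0);
    }

    // derivative: 1 for z > 0, alpha * exp(z) otherwise
    double elu_derivative_sketch(double z, double alpha = 1.0) {
        return z > 0.0 ? 1.0 : alpha * std::exp(z);
    }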
double gpmp::ml::Activation::gelu(double z)

Computes the Gaussian Error Linear Unit (GELU) activation function.

Parameters:
    z    The input value

Definition at line 106 of file activators.cpp.
double gpmp::ml::Activation::gelu_derivative(double z)

Computes the derivative of the Gaussian Error Linear Unit (GELU) activation function.

Parameters:
    z    The input value

Definition at line 111 of file activators.cpp.
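GELU is commonly implemented via the tanh approximation below; whether openGPMP uses this approximation or the exact Gaussian CDF form is an assumption:

    #include <cmath>

    // GELU, tanh approximation:
    // 0.5 * z * (1 + tanh(sqrt(2/pi) * (z + 0.044715 * z^3)))
    double gelu_sketch(double z) {
        const double pi = 3.14159265358979323846;
        const double k  = std::sqrt(2.0 / pi);
        return 0.5 * z * (1.0 + std::tanh(k * (z + 0.044715 * z * z * z)));
    }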
double gpmp::ml::Activation::leaky_relu(double z, double alpha = 0.01)

Computes the Leaky Rectified Linear Unit (Leaky ReLU) activation function.

Parameters:
    z        The input value
    alpha    The Leaky ReLU parameter (default: 0.01)

Definition at line 143 of file activators.cpp.
double gpmp::ml::Activation::leaky_relu_derivative(double z, double alpha = 0.01)

Computes the derivative of the Leaky Rectified Linear Unit (Leaky ReLU) activation function.

Parameters:
    z        The input value
    alpha    The Leaky ReLU parameter (default: 0.01)

Definition at line 147 of file activators.cpp.
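A minimal sketch of the conventional Leaky ReLU and its derivative, assuming the standard piecewise form:

    // Leaky ReLU: identity for positive z, small slope alpha for negative z
    double leaky_relu_sketch(double z, double alpha = 0.01) {
        return z > 0.0 ? z : alpha * z;
    }

    // derivative: 1 for z > 0, alpha otherwise
    double leaky_relu_derivative_sketch(double z, double alpha = 0.01) {
        return z > 0.0 ? 1.0 : alpha;
    }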
double gpmp::ml::Activation::prelu(double z, double alpha)

Computes the Parametric Rectified Linear Unit (PReLU) activation function.

Parameters:
    z        The input value
    alpha    The PReLU parameter (slope)

Definition at line 151 of file activators.cpp.
double gpmp::ml::Activation::prelu_derivative(double z, double alpha)

Computes the derivative of the Parametric Rectified Linear Unit (PReLU) activation function.

Parameters:
    z        The input value
    alpha    The PReLU parameter (slope)

Definition at line 155 of file activators.cpp.
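PReLU has the same piecewise form as Leaky ReLU, except that alpha is supplied (typically learned) rather than fixed; a sketch under that assumption:

    // PReLU: identity for positive z, caller-supplied slope alpha otherwise
    double prelu_sketch(double z, double alpha) {
        return z > 0.0 ? z : alpha * z;
    }

    // derivative: 1 for z > 0, alpha otherwise
    double prelu_derivative_sketch(double z, double alpha) {
        return z > 0.0 ? 1.0 : alpha;
    }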
double gpmp::ml::Activation::relu(double z)

Computes the ReLU (Rectified Linear Unit) activation function.

Parameters:
    z    The input value

Definition at line 45 of file activators.cpp.
double gpmp::ml::Activation::relu_derivative(double z)

Computes the derivative of the ReLU (Rectified Linear Unit) activation function.

Parameters:
    z    The input value

Definition at line 49 of file activators.cpp.
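A sketch of the standard ReLU, max(0, z), and its derivative; the derivative's convention at z = 0 is an assumption:

    #include <algorithm>

    // ReLU: max(0, z)
    double relu_sketch(double z) {
        return std::max(0.0, z);
    }

    // derivative: 1 for z > 0, 0 otherwise (value at z = 0 assumed)
    double relu_derivative_sketch(double z) {
        return z > 0.0 ? 1.0 : 0.0;
    }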
double gpmp::ml::Activation::selu(double z, double alpha = 1.67326, double scale = 1.0507)

Computes the Scaled Exponential Linear Unit (SELU) activation function.

Parameters:
    z        The input value
    alpha    The SELU alpha parameter (default: 1.67326)
    scale    The SELU scale parameter (default: 1.0507)

Definition at line 134 of file activators.cpp.
double gpmp::ml::Activation::selu_derivative(double z, double alpha = 1.67326, double scale = 1.0507)

Computes the derivative of the Scaled Exponential Linear Unit (SELU) activation function.

Parameters:
    z        The input value
    alpha    The SELU alpha parameter (default: 1.67326)
    scale    The SELU scale parameter (default: 1.0507)

Definition at line 139 of file activators.cpp.
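A sketch of the standard SELU, scale * ELU(z, alpha), using the canonical self-normalizing constants alpha ≈ 1.67326 and scale ≈ 1.0507 shown in the defaults above:

    #include <cmath>

    // SELU: scale * z for z > 0, scale * alpha * (exp(z) - 1) otherwise
    double selu_sketch(double z,
                       double alpha = 1.67326,
                       double scale = 1.0507) {
        return scale * (z > 0.0 ? z : alpha * (std::exp(z) - 1.0));
    }

    // derivative: scale for z > 0, scale * alpha * exp(z) otherwise
    double selu_derivative_sketch(double z,
                                  double alpha = 1.67326,
                                  double scale = 1.0507) {
        return scale * (z > 0.0 ? 1.0 : alpha * std::exp(z));
    }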
double gpmp::ml::Activation::sigmoid(double z)

Computes the sigmoid activation function.

Parameters:
    z    The input value

Definition at line 36 of file activators.cpp.
double gpmp::ml::Activation::sigmoid_derivative(double z)

Computes the derivative of the sigmoid activation function.

Parameters:
    z    The input value

Definition at line 40 of file activators.cpp.
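A sketch of the standard sigmoid, 1 / (1 + exp(-z)), whose derivative has the convenient closed form s * (1 - s):

    #include <cmath>

    // sigmoid: 1 / (1 + exp(-z))
    double sigmoid_sketch(double z) {
        return 1.0 / (1.0 + std::exp(-z));
    }

    // derivative: s * (1 - s) where s = sigmoid(z)
    double sigmoid_derivative_sketch(double z) {
        double s = 1.0 / (1.0 + std::exp(-z));
        return s * (1.0 - s);
    }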
double gpmp::ml::Activation::silu(double z)

Computes the Sigmoid Linear Unit (SiLU or Swish) activation function.

Parameters:
    z    The input value

Definition at line 159 of file activators.cpp.
double gpmp::ml::Activation::silu_derivative(double z)

Computes the derivative of the Sigmoid Linear Unit (SiLU or Swish) activation function.

Parameters:
    z    The input value

Definition at line 163 of file activators.cpp.
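A sketch of the standard SiLU/Swish, z * sigmoid(z), and its derivative sigma(z) * (1 + z * (1 - sigma(z))):

    #include <cmath>

    // SiLU (Swish): z * sigmoid(z)
    double silu_sketch(double z) {
        return z / (1.0 + std::exp(-z));
    }

    // derivative: s * (1 + z * (1 - s)) where s = sigmoid(z)
    double silu_derivative_sketch(double z) {
        double s = 1.0 / (1.0 + std::exp(-z));
        return s * (1.0 + z * (1.0 - s));
    }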
double gpmp::ml::Activation::smht(double z)

Computes the Soboleva modified hyperbolic tangent (smht) activation function.

Parameters:
    z    The input value

Definition at line 97 of file activators.cpp.
double gpmp::ml::Activation::smht_derivative(double z)

Computes the derivative of the Soboleva modified hyperbolic tangent (smht) activation function.

Parameters:
    z    The input value

Definition at line 101 of file activators.cpp.
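The Soboleva modified hyperbolic tangent generalizes tanh with four shape parameters; this page does not show the constants openGPMP fixes internally, so the defaults below are placeholders (a = b = c = d = 1 reduces to plain tanh):

    #include <cmath>

    // smht(z) = (exp(a*z) - exp(-b*z)) / (exp(c*z) + exp(-d*z))
    // a, b, c, d are shape parameters; placeholder values, not the library's
    double smht_sketch(double z,
                       double a = 1.0, double b = 1.0,
                       double c = 1.0, double d = 1.0) {
        return (std::exp(a * z) - std::exp(-b * z)) /
               (std::exp(c * z) + std::exp(-d * z));
    }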
std::vector<double> gpmp::ml::Activation::softmax(const std::vector<double> &inputs)

Computes the softmax activation function.

Parameters:
    inputs    The input values

Definition at line 54 of file activators.cpp.
std::vector<std::vector<double>> gpmp::ml::Activation::softmax_derivative(const std::vector<double> &inputs)

Computes the derivative of the softmax activation function.

Parameters:
    inputs    The input values

Definition at line 67 of file activators.cpp.
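A numerically stable softmax sketch (subtracting the max before exponentiating; whether the library applies this stabilization is an assumption), with the derivative returned as a full Jacobian to match the std::vector<std::vector<double>> signature above:

    #include <algorithm>
    #include <cmath>
    #include <cstddef>
    #include <vector>

    // softmax: exp(z_i - max) / sum_j exp(z_j - max)
    std::vector<double> softmax_sketch(const std::vector<double> &inputs) {
        double mx = *std::max_element(inputs.begin(), inputs.end());
        std::vector<double> out(inputs.size());
        double sum = 0.0;
        for (std::size_t i = 0; i < inputs.size(); ++i) {
            out[i] = std::exp(inputs[i] - mx);
            sum += out[i];
        }
        for (double &v : out)
            v /= sum;
        return out;
    }

    // Jacobian: J[i][j] = s_i * (delta_ij - s_j)
    std::vector<std::vector<double>>
    softmax_derivative_sketch(const std::vector<double> &inputs) {
        std::vector<double> s = softmax_sketch(inputs);
        std::vector<std::vector<double>> J(s.size(),
                                           std::vector<double>(s.size()));
        for (std::size_t i = 0; i < s.size(); ++i)
            for (std::size_t j = 0; j < s.size(); ++j)
                J[i][j] = s[i] * ((i == j ? 1.0 : 0.0) - s[j]);
        return J;
    }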
double gpmp::ml::Activation::softplus(double z)

Computes the softplus activation function.

Parameters:
    z    The input value

Definition at line 122 of file activators.cpp.
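A sketch of the standard softplus, ln(1 + exp(z)), written in the overflow-safe split form; whether the library uses this form or the naive one is an assumption:

    #include <cmath>

    // softplus: ln(1 + exp(z)); for z > 0 use z + log1p(exp(-z)) to avoid overflow
    double softplus_sketch(double z) {
        return z > 0.0 ? z + std::log1p(std::exp(-z))
                       : std::log1p(std::exp(z));
    }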
double gpmp::ml::Activation::tanh(double z)

Computes the hyperbolic tangent (tanh) activation function.

Parameters:
    z    The input value

Definition at line 88 of file activators.cpp.
double gpmp::ml::Activation::tanh_derivative(double z)

Computes the derivative of the hyperbolic tangent (tanh) activation function.

Parameters:
    z    The input value

Definition at line 92 of file activators.cpp.
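A sketch of tanh and its derivative, 1 - tanh(z)^2, via the standard library:

    #include <cmath>

    // hyperbolic tangent
    double tanh_sketch(double z) {
        return std::tanh(z);
    }

    // derivative: 1 - tanh(z)^2
    double tanh_derivative_sketch(double z) {
        double t = std::tanh(z);
        return 1.0 - t * t;
    }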