openGPMP
Open Source Mathematics Package
gpmp::ml::Activation Class Reference

Activation Methods. More...

#include <activators.hpp>

Public Member Functions

double sigmoid (double z)
 Computes the sigmoid activation function. More...
 
double sigmoid_derivative (double z)
 Computes the derivative of the sigmoid activation function. More...
 
double relu (double z)
 Computes the ReLU (Rectified Linear Unit) activation function. More...
 
double relu_derivative (double z)
 Computes the derivative of the ReLU (Rectified Linear Unit) activation function. More...
 
std::vector< double > softmax (const std::vector< double > &inputs)
 Computes the softmax activation function. More...
 
std::vector< std::vector< double > > softmax_derivative (const std::vector< double > &inputs)
 Computes the derivative of the softmax activation function. More...
 
double binary_step (double z)
 Computes the binary step activation function. More...
 
double tanh (double z)
 Computes the hyperbolic tangent (tanh) activation function. More...
 
double tanh_derivative (double z)
 Computes the derivative of the hyperbolic tangent (tanh) activation function. More...
 
double smht (double z)
 Computes the Soboleva modified hyperbolic tangent (smht) activation function. More...
 
double smht_derivative (double z)
 Computes the derivative of the Soboleva modified hyperbolic tangent (smht) activation function. More...
 
double gelu (double z)
 Computes the Gaussian Error Linear Unit (GELU) activation function. More...
 
double gelu_derivative (double z)
 Computes the derivative of the Gaussian Error Linear Unit (GELU) activation function. More...
 
double softplus (double z)
 Computes the softplus activation function. More...
 
double elu (double z, double alpha=1.0)
 Computes the Exponential Linear Unit (ELU) activation function. More...
 
double elu_derivative (double z, double alpha=1.0)
 Computes the derivative of the Exponential Linear Unit (ELU) activation function. More...
 
double selu (double z, double alpha=1.67326, double scale=1.0507)
 Computes the Scaled Exponential Linear Unit (SELU) activation function. More...
 
double selu_derivative (double z, double alpha=1.67326, double scale=1.0507)
 Computes the derivative of the Scaled Exponential Linear Unit (SELU) activation function. More...
 
double leaky_relu (double z, double alpha=0.01)
 Computes the Leaky Rectified Linear Unit (Leaky ReLU) activation function. More...
 
double leaky_relu_derivative (double z, double alpha=0.01)
 Computes the derivative of the Leaky Rectified Linear Unit (Leaky ReLU) activation function. More...
 
double prelu (double z, double alpha)
 Computes the Parametric Rectified Linear Unit (PReLU) activation function. More...
 
double prelu_derivative (double z, double alpha)
 Computes the derivative of the Parametric Rectified Linear Unit (PReLU) activation function. More...
 
double silu (double z)
 Computes the Sigmoid Linear Unit (SiLU or Swish) activation function. More...
 
double silu_derivative (double z)
 Computes the derivative of the Sigmoid Linear Unit (SiLU or Swish) activation function. More...
 

Detailed Description

Activation Methods.

Definition at line 47 of file activators.hpp.
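
A minimal usage sketch, assuming the class is default-constructible (no constructor is listed in this reference) and that activators.hpp is on the include path:

#include <activators.hpp>
#include <iostream>
#include <vector>

int main() {
    gpmp::ml::Activation act;                     // assumes a default constructor
    std::cout << act.sigmoid(0.0) << "\n";        // 0.5
    std::cout << act.relu(-3.0) << "\n";          // 0
    for (double p : act.softmax({1.0, 2.0, 3.0}))
        std::cout << p << " ";                    // approx. 0.090 0.245 0.665
    std::cout << "\n";
    return 0;
}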

Member Function Documentation

◆ binary_step()

double gpmp::ml::Activation::binary_step ( double  z)

Computes the binary step activation function.

Parameters
z    The input value
Returns
1 if z >= 0, otherwise 0

Definition at line 84 of file activators.cpp.

84  {
85  return z >= 0 ? 1 : 0;
86 }

◆ elu()

double gpmp::ml::Activation::elu ( double  z,
double  alpha = 1.0 
)

Computes the Exponential Linear Unit (ELU) activation function.

Parameters
z    The input value
alpha    The ELU parameter (default: 1.0)
Returns
The ELU of z

Definition at line 126 of file activators.cpp.

126  {
127  return z >= 0 ? z : alpha * (exp(z) - 1);
128 }
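
As a worked example with the default alpha = 1.0, elu(-1.0) = 1.0 * (exp(-1.0) - 1) ≈ -0.632, while elu(2.0) = 2.0.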

◆ elu_derivative()

double gpmp::ml::Activation::elu_derivative ( double  z,
double  alpha = 1.0 
)

Computes the derivative of the Exponential Linear Unit (ELU) activation function.

Parameters
z    The input value
alpha    The ELU parameter (default: 1.0)
Returns
The derivative of ELU at z

Definition at line 130 of file activators.cpp.

130  {
131  return z >= 0 ? 1 : elu(z, alpha) + alpha;
132 }
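
For z < 0 the derivative of alpha * (exp(z) - 1) is alpha * exp(z), which equals elu(z, alpha) + alpha, so the implementation reuses elu rather than recomputing the exponential.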

◆ gelu()

double gpmp::ml::Activation::gelu ( double  z)

Computes the Gaussian Error Linear Unit (GELU) activation function.

Parameters
z    The input value
Returns
The GELU of z

Definition at line 106 of file activators.cpp.

106  {
107  return 0.5 * z *
108  (1.0 + tanh(sqrt(2.0 / M_PI) * (z + 0.044715 * z * z * z)));
109 }
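
This is the common tanh approximation GELU(z) ≈ 0.5 * z * (1 + tanh(sqrt(2/pi) * (z + 0.044715 * z^3))). For example, gelu(1.0) evaluates to roughly 0.841, close to the exact value z * Phi(z), where Phi is the standard normal CDF.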

◆ gelu_derivative()

double gpmp::ml::Activation::gelu_derivative ( double  z)

Computes the derivative of the Gaussian Error Linear Unit (GELU) activation function.

Parameters
z    The input value
Returns
The derivative of GELU at z

Definition at line 111 of file activators.cpp.

111  {
112  double cdf =
113  0.5 * (1.0 + tanh(sqrt(2.0 / M_PI) * (0.044715 * z * z * z + 3 * z)));
114  double pdf = exp(-0.5 * z * z) / sqrt(2.0 * M_PI);
115  return 0.5 * (1.0 + cdf +
116  z * pdf * (1.0 / M_PI) *
117  (0.5 * (1.0 + tanh(sqrt(2.0 / M_PI) *
118  (0.044715 * z * z * z + 3 * z)))) +
119  (1.0 / M_PI) * (1.0 - cdf * cdf));
120 }

◆ leaky_relu()

double gpmp::ml::Activation::leaky_relu ( double  z,
double  alpha = 0.01 
)

Computes the Leaky Rectified Linear Unit (Leaky ReLU) activation function.

Parameters
z    The input value
alpha    The Leaky ReLU parameter (default: 0.01)
Returns
The Leaky ReLU of z

Definition at line 143 of file activators.cpp.

143  {
144  return z >= 0 ? z : alpha * z;
145 }
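
With the default alpha = 0.01, leaky_relu(-2.0) = -0.02 and leaky_relu(2.0) = 2.0, so negative inputs keep a small non-zero slope instead of being clamped to zero.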

◆ leaky_relu_derivative()

double gpmp::ml::Activation::leaky_relu_derivative ( double  z,
double  alpha = 0.01 
)

Computes the derivative of the Leaky Rectified Linear Unit (Leaky ReLU) activation function.

Parameters
z    The input value
alpha    The Leaky ReLU parameter (default: 0.01)
Returns
The derivative of Leaky ReLU at z

Definition at line 147 of file activators.cpp.

147  {
148  return z >= 0 ? 1 : alpha;
149 }

◆ prelu()

double gpmp::ml::Activation::prelu ( double  z,
double  alpha 
)

Computes the Parametric Rectified Linear Unit (PReLU) activation function.

Parameters
z    The input value
alpha    The PReLU parameter (slope)
Returns
The PReLU of z

Definition at line 151 of file activators.cpp.

151  {
152  return z >= 0 ? z : alpha * z;
153 }

◆ prelu_derivative()

double gpmp::ml::Activation::prelu_derivative ( double  z,
double  alpha 
)

Computes the derivative of the Parametric Rectified Linear Unit (PReLU) activation function.

Parameters
z    The input value
alpha    The PReLU parameter (slope)
Returns
The derivative of PReLU at z

Definition at line 155 of file activators.cpp.

155  {
156  return z >= 0 ? 1 : alpha;
157 }

◆ relu()

double gpmp::ml::Activation::relu ( double  z)

Computes the ReLU (Rectified Linear Unit) activation function.

Parameters
z    The input value
Returns
The ReLU of z

Definition at line 45 of file activators.cpp.

45  {
46  return z > 0 ? z : 0;
47 }

◆ relu_derivative()

double gpmp::ml::Activation::relu_derivative ( double  z)

Computes the derivative of the ReLU (Rectified Linear Unit) activation function.

Parameters
z    The input value
Returns
The derivative of ReLU at z

Definition at line 49 of file activators.cpp.

49  {
50  return z > 0 ? 1 : 0;
51 }
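
For example, relu(2.5) = 2.5 with derivative 1, and relu(-1.0) = 0 with derivative 0. Note that this implementation returns 0 for the derivative at exactly z = 0.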

◆ selu()

double gpmp::ml::Activation::selu ( double  z,
double  alpha = 1.67326,
double  scale = 1.0507 
)

Computes the Scaled Exponential Linear Unit (SELU) activation function.

Parameters
z    The input value
alpha    The SELU alpha parameter (default: 1.67326)
scale    The SELU scale parameter (default: 1.0507)
Returns
The SELU of z

Definition at line 134 of file activators.cpp.

134  {
135  return scale * (z >= 0 ? z : alpha * (exp(z) - 1));
136 }
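
The defaults alpha ≈ 1.67326 and scale ≈ 1.0507 are the standard self-normalizing SELU constants. As a worked example, selu(-1.0) ≈ 1.0507 * 1.67326 * (exp(-1) - 1) ≈ -1.111, while selu(1.0) ≈ 1.0507.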

◆ selu_derivative()

double gpmp::ml::Activation::selu_derivative ( double  z,
double  alpha = 1.67326,
double  scale = 1.0507 
)

Computes the derivative of the Scaled Exponential Linear Unit (SELU) activation function.

Parameters
z    The input value
alpha    The SELU alpha parameter (default: 1.67326)
scale    The SELU scale parameter (default: 1.0507)
Returns
The derivative of SELU at z

Definition at line 139 of file activators.cpp.

139  {
140  return scale * (z >= 0 ? 1 : alpha * exp(z));
141 }

◆ sigmoid()

double gpmp::ml::Activation::sigmoid ( double  z)

Computes the sigmoid activation function.

Parameters
z    The input value
Returns
The sigmoid of z

Definition at line 36 of file activators.cpp.

36  {
37  return 1.0 / (1.0 + exp(-z));
38 }
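
For example, sigmoid(0.0) = 0.5; the output approaches 1 for large positive z and 0 for large negative z.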

◆ sigmoid_derivative()

double gpmp::ml::Activation::sigmoid_derivative ( double  z)

Computes the derivative of the sigmoid activation function.

Parameters
z    The input value
Returns
The derivative of sigmoid at z

Definition at line 40 of file activators.cpp.

40  {
41  double sig = sigmoid(z);
42  return sig * (1.0 - sig);
43 }

◆ silu()

double gpmp::ml::Activation::silu ( double  z)

Computes the Sigmoid Linear Unit (SiLU or Swish) activation function.

Parameters
z    The input value
Returns
The SiLU of z

Definition at line 159 of file activators.cpp.

159  {
160  return z / (1 + exp(-z));
161 }

◆ silu_derivative()

double gpmp::ml::Activation::silu_derivative ( double  z)

Computes the derivative of the Sigmoid Linear Unit (SiLU or Swish) activation function.

Parameters
z    The input value
Returns
The derivative of SiLU at z

Definition at line 163 of file activators.cpp.

163  {
164  double sig_z = sigmoid(z);
165  return sig_z + (1 - sig_z) * z * sig_z;
166 }
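
Equivalently, SiLU(z) = z * sigmoid(z), and the expression above equals sigmoid(z) * (1 + z * (1 - sigmoid(z))). For example, silu(0.0) = 0 and silu_derivative(0.0) = 0.5.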

◆ smht()

double gpmp::ml::Activation::smht ( double  z)

Computes the Soboleva modified hyperbolic tangent (smht) activation function.

Parameters
z    The input value
Returns
The smht of z

Definition at line 97 of file activators.cpp.

97  {
98  return z / (1.0 + exp(-z));
99 }

◆ smht_derivative()

double gpmp::ml::Activation::smht_derivative ( double  z)

Computes the derivative of the Soboleva modified hyperbolic tangent (smht) activation function.

Parameters
z    The input value
Returns
The derivative of smht at z

Definition at line 101 of file activators.cpp.

101  {
102  double smht_z = smht(z);
103  return smht_z * (1.0 - smht_z);
104 }

◆ softmax()

std::vector< double > gpmp::ml::Activation::softmax ( const std::vector< double > &  inputs)

Computes the softmax activation function.

Parameters
inputs    The input values
Returns
The softmax of inputs

Definition at line 54 of file activators.cpp.

54  {
55  std::vector<double> result;
56  double sum_exp = 0.0;
57  for (double input : inputs) {
58  sum_exp += exp(input);
59  }
60  for (double input : inputs) {
61  result.push_back(exp(input) / sum_exp);
62  }
63  return result;
64 }
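
For inputs {1.0, 2.0, 3.0} the result is approximately {0.090, 0.245, 0.665}, which sums to 1. Because the implementation exponentiates the raw inputs, very large inputs can overflow; a common remedy is to subtract the maximum input before exponentiating. A minimal standalone sketch of that variant (stable_softmax is a hypothetical helper, not part of gpmp::ml::Activation):

#include <algorithm>
#include <cmath>
#include <vector>

std::vector<double> stable_softmax(const std::vector<double> &inputs) {
    // assumes a non-empty input vector
    double max_in = *std::max_element(inputs.begin(), inputs.end());
    double sum_exp = 0.0;
    for (double x : inputs)
        sum_exp += std::exp(x - max_in);   // shifting by the max avoids overflow
    std::vector<double> result;
    for (double x : inputs)
        result.push_back(std::exp(x - max_in) / sum_exp);
    return result;
}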

◆ softmax_derivative()

std::vector< std::vector< double > > gpmp::ml::Activation::softmax_derivative ( const std::vector< double > &  inputs)

Computes the derivative of the softmax activation function.

Parameters
inputs    The input values
Returns
The derivatives of softmax at inputs

Definition at line 67 of file activators.cpp.

67  {
68  std::vector<std::vector<double>> result(
69  inputs.size(),
70  std::vector<double>(inputs.size(), 0.0));
71  std::vector<double> softmax_values = softmax(inputs);
72  for (size_t i = 0; i < inputs.size(); ++i) {
73  for (size_t j = 0; j < inputs.size(); ++j) {
74  if (i == j) {
75  result[i][j] = softmax_values[i] * (1.0 - softmax_values[i]);
76  } else {
77  result[i][j] = -softmax_values[i] * softmax_values[j];
78  }
79  }
80  }
81  return result;
82 }
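
This builds the full Jacobian with entries J[i][j] = s_i * (delta_ij - s_j), where s = softmax(inputs) and delta_ij is 1 when i equals j and 0 otherwise.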

◆ softplus()

double gpmp::ml::Activation::softplus ( double  z)

Computes the softplus activation function.

Parameters
z    The input value
Returns
The softplus of z

Definition at line 122 of file activators.cpp.

122  {
123  return log(1.0 + exp(z));
124 }
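
For example, softplus(0.0) = log(2) ≈ 0.693. The derivative of softplus is the sigmoid function, so softplus is a smooth approximation of ReLU.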

◆ tanh()

double gpmp::ml::Activation::tanh ( double  z)

Computes the hyperbolic tangent (tanh) activation function.

Parameters
z    The input value
Returns
The tanh of z

Definition at line 88 of file activators.cpp.

88  {
89  return std::tanh(z);
90 }

◆ tanh_derivative()

double gpmp::ml::Activation::tanh_derivative ( double  z)

Computes the derivative of the hyperbolic tangent (tanh) activation function.

Parameters
z    The input value
Returns
The derivative of tanh at z

Definition at line 92 of file activators.cpp.

92  {
93  double tanh_z = std::tanh(z);
94  return 1.0 - tanh_z * tanh_z;
95 }
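
For example, tanh(0.0) = 0 with derivative 1; in general tanh_derivative(z) = 1 - tanh(z)^2, which lies in (0, 1].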

The documentation for this class was generated from the following files:
activators.hpp
activators.cpp