openGPMP
Open Source Mathematics Package
activators.hpp
Go to the documentation of this file.
1 /*************************************************************************
2  *
3  * Project
4  * _____ _____ __ __ _____
5  * / ____| __ \| \/ | __ \
6  * ___ _ __ ___ _ __ | | __| |__) | \ / | |__) |
7  * / _ \| '_ \ / _ \ '_ \| | |_ | ___/| |\/| | ___/
8  *| (_) | |_) | __/ | | | |__| | | | | | | |
9  * \___/| .__/ \___|_| |_|\_____|_| |_| |_|_|
10  * | |
11  * |_|
12  *
13  * Copyright (C) Akiel Aries, <akiel@akiel.org>, et al.
14  *
15  * This software is licensed as described in the file LICENSE, which
16  * you should have received as part of this distribution. The terms
17  * among other details are referenced in the official documentation
18  * seen here : https://akielaries.github.io/openGPMP/ along with
19  * important files seen in this project.
20  *
21  * You may opt to use, copy, modify, merge, publish, distribute
22  * and/or sell copies of the Software, and permit persons to whom
23  * the Software is furnished to do so, under the terms of the
24  * LICENSE file. As this is an Open Source effort, all implementations
25  * must be of the same methodology.
26  *
27  *
28  *
29  * This software is distributed on an AS IS basis, WITHOUT
30  * WARRANTY OF ANY KIND, either express or implied.
31  *
32  ************************************************************************/
33 
34 #ifndef ACTIVATION_HPP
35 #define ACTIVATION_HPP
36 
37 #include <cmath>
38 #include <vector>
39 
40 namespace gpmp {
41 
42 namespace ml {
43 
/**
 * @class Activation
 * @brief Collection of common neural-network activation functions and
 * their derivatives. Declarations only; definitions live in
 * activators.cpp.
 */
class Activation {
  public:
    /**
     * @brief Computes the sigmoid activation function
     * @param z Pre-activation input
     * @return sigmoid(z), in the open interval (0, 1)
     */
    double sigmoid(double z);

    /** @brief Derivative of the sigmoid function evaluated at z */
    double sigmoid_derivative(double z);

    /** @brief ReLU (Rectified Linear Unit): max(0, z) */
    double relu(double z);

    /** @brief Derivative of ReLU */
    double relu_derivative(double z);

    /**
     * @brief Computes the softmax activation function
     * @param inputs Raw scores (logits)
     * @return Probability vector of the same size as inputs
     */
    std::vector<double> softmax(const std::vector<double> &inputs);

    /**
     * @brief Derivative (Jacobian) of the softmax function
     * @param inputs Raw scores (logits)
     * @return n x n matrix of partial derivatives
     */
    std::vector<std::vector<double>>
    softmax_derivative(const std::vector<double> &inputs);

    /** @brief Binary step activation function */
    double binary_step(double z);

    /** @brief Hyperbolic tangent (tanh) activation function */
    double tanh(double z);

    /** @brief Derivative of the tanh function */
    double tanh_derivative(double z);

    /** @brief Soboleva modified hyperbolic tangent (smht) */
    double smht(double z);

    /** @brief Derivative of the smht function */
    double smht_derivative(double z);

    /** @brief Gaussian Error Linear Unit (GELU) */
    double gelu(double z);

    /** @brief Derivative of the GELU function */
    double gelu_derivative(double z);

    /** @brief Softplus activation function */
    double softplus(double z);

    /**
     * @brief Exponential Linear Unit (ELU)
     * @param z Pre-activation input
     * @param alpha Saturation coefficient for negative inputs
     * (canonical default 1.0)
     */
    double elu(double z, double alpha = 1.0);

    /** @brief Derivative of the ELU function */
    double elu_derivative(double z, double alpha = 1.0);

    /**
     * @brief Scaled Exponential Linear Unit (SELU)
     * @param z Pre-activation input
     * @param alpha Self-normalizing constant (canonical value 1.67326)
     * @param scale Self-normalizing scale (canonical value 1.0507)
     */
    double selu(double z, double alpha = 1.67326, double scale = 1.0507);

    /** @brief Derivative of the SELU function */
    double
    selu_derivative(double z, double alpha = 1.67326, double scale = 1.0507);

    /**
     * @brief Leaky Rectified Linear Unit (Leaky ReLU)
     * @param z Pre-activation input
     * @param alpha Slope for negative inputs (conventional default 0.01)
     */
    double leaky_relu(double z, double alpha = 0.01);

    /** @brief Derivative of the Leaky ReLU function */
    double leaky_relu_derivative(double z, double alpha = 0.01);

    /**
     * @brief Parametric ReLU (PReLU); alpha is a learned parameter,
     * so no default is provided
     */
    double prelu(double z, double alpha);

    /** @brief Derivative of the PReLU function */
    double prelu_derivative(double z, double alpha);

    /** @brief Sigmoid Linear Unit (SiLU / Swish) */
    double silu(double z);

    /** @brief Derivative of the SiLU function */
    double silu_derivative(double z);
};
243 
244 } // namespace ml
245 
246 } // namespace gpmp
247 
248 #endif
Activation Methods.
Definition: activators.hpp:47
std::vector< std::vector< double > > softmax_derivative(const std::vector< double > &inputs)
Computes the derivative of the softmax activation function.
Definition: activators.cpp:67
double gelu(double z)
Computes the Gaussian Error Linear Unit (GELU) activation function.
Definition: activators.cpp:106
double gelu_derivative(double z)
Computes the derivative of the Gaussian Error Linear Unit (GELU) activation function.
Definition: activators.cpp:111
double binary_step(double z)
Computes the binary step activation function.
Definition: activators.cpp:84
double sigmoid_derivative(double z)
Computes the derivative of the sigmoid activation function.
Definition: activators.cpp:40
double silu_derivative(double z)
Computes the derivative of the Sigmoid Linear Unit (SiLU or Swish) activation function.
Definition: activators.cpp:163
double smht_derivative(double z)
Computes the derivative of the Soboleva modified hyperbolic tangent (smht) activation function.
Definition: activators.cpp:101
double relu_derivative(double z)
Computes the derivative of the ReLU (Rectified Linear Unit) activation function.
Definition: activators.cpp:49
double tanh_derivative(double z)
Computes the derivative of the hyperbolic tangent (tanh) activation function.
Definition: activators.cpp:92
double silu(double z)
Computes the Sigmoid Linear Unit (SiLU or Swish) activation function.
Definition: activators.cpp:159
double softplus(double z)
Computes the softplus activation function.
Definition: activators.cpp:122
double sigmoid(double z)
Computes the sigmoid activation function.
Definition: activators.cpp:36
double leaky_relu(double z, double alpha=0.01)
Computes the Leaky Rectified Linear Unit (Leaky ReLU) activation function.
Definition: activators.cpp:143
double leaky_relu_derivative(double z, double alpha=0.01)
Computes the derivative of the Leaky Rectified Linear Unit (Leaky ReLU) activation function.
Definition: activators.cpp:147
double selu(double z, double alpha=1.67326, double scale=1.0507)
Computes the Scaled Exponential Linear Unit (SELU) activation function.
Definition: activators.cpp:134
double prelu(double z, double alpha)
Computes the Parametric Rectified Linear Unit (PReLU) activation function.
Definition: activators.cpp:151
double elu(double z, double alpha=1.0)
Computes the Exponential Linear Unit (ELU) activation function.
Definition: activators.cpp:126
double selu_derivative(double z, double alpha=1.67326, double scale=1.0507)
Computes the derivative of the Scaled Exponential Linear Unit (SELU) activation function.
Definition: activators.cpp:139
double smht(double z)
Computes the Soboleva modified hyperbolic tangent (smht) activation function.
Definition: activators.cpp:97
double tanh(double z)
Computes the hyperbolic tangent (tanh) activation function.
Definition: activators.cpp:88
std::vector< double > softmax(const std::vector< double > &inputs)
Computes the softmax activation function.
Definition: activators.cpp:54
double relu(double z)
Computes the ReLU (Rectified Linear Unit) activation function.
Definition: activators.cpp:45
double elu_derivative(double z, double alpha=1.0)
Computes the derivative of the Exponential Linear Unit (ELU) activation function.
Definition: activators.cpp:130
double prelu_derivative(double z, double alpha)
Computes the derivative of the Parametric Rectified Linear Unit (PReLU) activation function.
Definition: activators.cpp:155
The source C++ openGPMP namespace.