openGPMP
Open Source Mathematics Package
Public Member Functions | Public Attributes | Private Member Functions | Private Attributes | List of all members
gpmp::ml::PrimaryMLP Class Reference

Primary Multi-Layer Perceptron Class. More...

#include <mlp_net.hpp>

Public Member Functions

 PrimaryMLP (int64_t nl, int64_t npl[])
 
 ~PrimaryMLP ()
 
int64_t train (const char *fnames)
 
int64_t test (const char *fname)
 
int64_t evaluate ()
 
void run (const char *fname, const int64_t &max_iters)
 

Public Attributes

long double _Eta
 
long double _Alpha
 
long double _Gain
 
long double _AvgTestError
 

Private Member Functions

void rand_init ()
 
int64_t rand_int (int64_t low, int64_t hi)
 
long double rand_real (long double low, long double hi)
 
void set_signal_in (long double *input)
 
void get_signal_out (long double *output)
 
void weights_save ()
 
void weights_rand ()
 
void weights_restore ()
 
void weights_adjust ()
 
void prop_signal ()
 
void output_err (long double *target)
 
void back_prop_err ()
 
void simulate (long double *input, long double *output, long double *target, bool training)
 

Private Attributes

int64_t num_layers
 
layer * layer_ptr
 
long double _MSE
 Mean Squared Error. More...
 
long double _MAE
 Mean Absolute Error. More...
 

Detailed Description

Primary Multi-Layer Perceptron Class.

Definition at line 87 of file mlp_net.hpp.

Constructor & Destructor Documentation

◆ PrimaryMLP()

gpmp::ml::PrimaryMLP::PrimaryMLP ( int64_t  nl,
int64_t  npl[] 
)

Definition at line 64 of file mlp_network.cpp.

65  : num_layers(0), layer_ptr(0), _MSE(0.0), _MAE(0.0), _Eta(0.25),
66  _Alpha(0.9), _Gain(1.0), _AvgTestError(0.0) {
67  int64_t _LAYER, _NEURON;
68 
69  // create network layers
70  num_layers = nl;
71  layer_ptr = new layer[nl];
72 
73  // initialize the data of the created network layers
74  for (_LAYER = 0; _LAYER < nl; _LAYER++) {
75  // initialize values to neuron struct information
76  layer_ptr[_LAYER].num_neurons = npl[_LAYER];
77  layer_ptr[_LAYER].neuron_ptr = new neuron[npl[_LAYER]];
78 
79  // initialize data of the neurons of the created network layers
80  for (_NEURON = 0; _NEURON < npl[_LAYER]; _NEURON++) {
81  // initialize exit value
82  layer_ptr[_LAYER].neuron_ptr[_NEURON].sortir = 1.0;
83  // save the error
84  layer_ptr[_LAYER].neuron_ptr[_NEURON].err = 0.0;
85 
86  // check if there is at least 1 layer
87  if (_LAYER > 0) {
88  /* initialize weight, last weight, and saved weight
89  * values to _LAYER - 1
90  */
91  layer_ptr[_LAYER].neuron_ptr[_NEURON].wt =
92  new long double[npl[_LAYER - 1]];
93 
94  layer_ptr[_LAYER].neuron_ptr[_NEURON].wt_last =
95  new long double[npl[_LAYER - 1]];
96 
97  layer_ptr[_LAYER].neuron_ptr[_NEURON].wt_saved =
98  new long double[npl[_LAYER - 1]];
99  }
100  // otherwise
101  else {
102  /*
103  * initialize weight, last weight, and saved weight
104  * to NULL
105  */
106  layer_ptr[_LAYER].neuron_ptr[_NEURON].wt = NULL;
107  layer_ptr[_LAYER].neuron_ptr[_NEURON].wt_last = NULL;
108  layer_ptr[_LAYER].neuron_ptr[_NEURON].wt_saved = NULL;
109  }
110  }
111  }
112 }
long double _MSE
Mean Squared Error.
Definition: mlp_net.hpp:100
long double _Gain
Definition: mlp_net.hpp:130
long double _AvgTestError
Definition: mlp_net.hpp:131
long double _Eta
Definition: mlp_net.hpp:128
long double _Alpha
Definition: mlp_net.hpp:129
long double _MAE
Mean Absolute Error.
Definition: mlp_net.hpp:104
int64_t num_neurons
Definition: mlp_net.hpp:80
neuron * neuron_ptr
Definition: mlp_net.hpp:81
long double err
Definition: mlp_net.hpp:70
long double * wt_saved
Definition: mlp_net.hpp:76
long double * wt
Definition: mlp_net.hpp:72
long double sortir
Definition: mlp_net.hpp:68
long double * wt_last
Definition: mlp_net.hpp:74

References gpmp::ml::neuron::err, layer_ptr, gpmp::ml::layer::neuron_ptr, num_layers, gpmp::ml::layer::num_neurons, gpmp::ml::neuron::sortir, gpmp::ml::neuron::wt, gpmp::ml::neuron::wt_last, and gpmp::ml::neuron::wt_saved.

◆ ~PrimaryMLP()

gpmp::ml::PrimaryMLP::~PrimaryMLP ( )

Definition at line 115 of file mlp_network.cpp.

115  {
116  int64_t _LAYER, _NEURON;
117 
118  // TODO : thread the loops dealing with rudimentary computations
119 
120  for (_LAYER = 0; _LAYER < num_layers; _LAYER++) {
121  if (layer_ptr[_LAYER].neuron_ptr) {
122  for (_NEURON = 0; _NEURON < layer_ptr[_LAYER].num_neurons;
123  _NEURON++) {
124  if (layer_ptr[_LAYER].neuron_ptr[_NEURON].wt) {
125  delete[] layer_ptr[_LAYER].neuron_ptr[_NEURON].wt;
126  }
127 
128  if (layer_ptr[_LAYER].neuron_ptr[_NEURON].wt_last) {
129  delete[] layer_ptr[_LAYER].neuron_ptr[_NEURON].wt_last;
130  }
131 
132  if (layer_ptr[_LAYER].neuron_ptr[_NEURON].wt_saved) {
133  delete[] layer_ptr[_LAYER].neuron_ptr[_NEURON].wt_saved;
134  }
135  }
136  }
137  delete[] layer_ptr[_LAYER].neuron_ptr;
138  }
139  delete[] layer_ptr;
140 }

Member Function Documentation

◆ back_prop_err()

void gpmp::ml::PrimaryMLP::back_prop_err ( )
private

◆ evaluate()

int64_t gpmp::ml::PrimaryMLP::evaluate ( )

◆ get_signal_out()

void gpmp::ml::PrimaryMLP::get_signal_out ( long double *  output)
private

◆ output_err()

void gpmp::ml::PrimaryMLP::output_err ( long double *  target)
private

◆ prop_signal()

void gpmp::ml::PrimaryMLP::prop_signal ( )
private

◆ rand_init()

void gpmp::ml::PrimaryMLP::rand_init ( )
private

Definition at line 48 of file mlp_network.cpp.

48  {
49  srand(4711);
50  // srand((uint64_t)time(NULL));
51 }

◆ rand_int()

int64_t gpmp::ml::PrimaryMLP::rand_int ( int64_t  low,
int64_t  hi 
)
private

Definition at line 54 of file mlp_network.cpp.

54  {
55  return rand() % (hi - low + 1) + low;
56 }

◆ rand_real()

long double gpmp::ml::PrimaryMLP::rand_real ( long double  low,
long double  hi 
)
private

Definition at line 59 of file mlp_network.cpp.

59  {
60  return ((long double)rand() / RAND_MAX) * (hi - low) + low;
61 }

◆ run()

void gpmp::ml::PrimaryMLP::run ( const char *  fname,
const int64_t &  max_iters 
)

◆ set_signal_in()

void gpmp::ml::PrimaryMLP::set_signal_in ( long double *  input)
private

◆ simulate()

void gpmp::ml::PrimaryMLP::simulate ( long double *  input,
long double *  output,
long double *  target,
bool  training 
)
private

◆ test()

int64_t gpmp::ml::PrimaryMLP::test ( const char *  fname)

◆ train()

int64_t gpmp::ml::PrimaryMLP::train ( const char *  fnames)

◆ weights_adjust()

void gpmp::ml::PrimaryMLP::weights_adjust ( )
private

◆ weights_rand()

void gpmp::ml::PrimaryMLP::weights_rand ( )
private

◆ weights_restore()

void gpmp::ml::PrimaryMLP::weights_restore ( )
private

◆ weights_save()

void gpmp::ml::PrimaryMLP::weights_save ( )
private

Member Data Documentation

◆ _Alpha

long double gpmp::ml::PrimaryMLP::_Alpha

Definition at line 129 of file mlp_net.hpp.

◆ _AvgTestError

long double gpmp::ml::PrimaryMLP::_AvgTestError

Definition at line 131 of file mlp_net.hpp.

◆ _Eta

long double gpmp::ml::PrimaryMLP::_Eta

Definition at line 128 of file mlp_net.hpp.

◆ _Gain

long double gpmp::ml::PrimaryMLP::_Gain

Definition at line 130 of file mlp_net.hpp.

◆ _MAE

long double gpmp::ml::PrimaryMLP::_MAE
private

Mean Absolute Error.

Definition at line 104 of file mlp_net.hpp.

◆ _MSE

long double gpmp::ml::PrimaryMLP::_MSE
private

Mean Squared Error.

Definition at line 100 of file mlp_net.hpp.

◆ layer_ptr

layer* gpmp::ml::PrimaryMLP::layer_ptr
private

Definition at line 96 of file mlp_net.hpp.

Referenced by PrimaryMLP().

◆ num_layers

int64_t gpmp::ml::PrimaryMLP::num_layers
private

Definition at line 95 of file mlp_net.hpp.

Referenced by PrimaryMLP().


The documentation for this class was generated from the following files: