/*************************************************************************
 *
 * Project
 *                               _____ _____  __  __ _____
 *                              / ____|  __ \|  \/  |  __ \
 *    ___  _ __   ___ _ __     | |  __| |__) | \  / | |__) |
 *   / _ \| '_ \ / _ \ '_ \    | | |_ |  ___/| |\/| |  ___/
 *  | (_) | |_) |  __/ | | |   | |__| | |    | |  | | |
 *   \___/| .__/ \___|_| |_|    \_____|_|    |_|  |_|_|
 *        | |
 *        |_|
 *
 * Copyright (C) Akiel Aries, <akiel@akiel.org>, et al.
 *
 * This software is licensed as described in the file LICENSE, which
 * you should have received as part of this distribution. The terms
 * among other details are referenced in the official documentation
 * seen here : https://akielaries.github.io/openGPMP/ along with
 * important files seen in this project.
 *
 * You may opt to use, copy, modify, merge, publish, distribute
 * and/or sell copies of the Software, and permit persons to whom
 * the Software is furnished to do so, under the terms of the
 * LICENSE file. As this is an Open Source effort, all implementations
 * must be of the same methodology.
 *
 *
 *
 * This software is distributed on an AS IS basis, WITHOUT
 * WARRANTY OF ANY KIND, either express or implied.
31 : * 32 : ************************************************************************/ 33 : 34 : /* 35 : * Implementation of a Multi-Layered Perceptron Neural Network 36 : */ 37 : #include <math.h> 38 : #include <openGPMP/ml/mlp_net.hpp> 39 : #include <stdio.h> 40 : #include <string.h> 41 : #include <time.h> 42 : 43 : using namespace gpmp::ml; 44 : 45 : /* 46 : * Initialize randomly generated values for network's method 47 : */ 48 0 : void gpmp::ml::PrimaryMLP::rand_init() { 49 0 : srand(4711); 50 : // srand((uint64_t)time(NULL)); 51 0 : } 52 : 53 : /* verify the random is an integer */ 54 0 : int64_t gpmp::ml::PrimaryMLP::rand_int(int64_t hi, int64_t low) { 55 0 : return rand() % (hi - low + 1) + low; 56 : } 57 : 58 : /* verify generated random is a real number */ 59 0 : long double gpmp::ml::PrimaryMLP::rand_real(long double low, long double hi) { 60 0 : return ((long double)rand() / RAND_MAX) * (hi - low) + low; 61 : } 62 : 63 : /* PRIMARY MLP CONSTRUCTOR */ 64 0 : gpmp::ml::PrimaryMLP::PrimaryMLP(int64_t nl, int64_t npl[]) 65 0 : : num_layers(0), layer_ptr(0), _MSE(0.0), _MAE(0.0), _Eta(0.25), 66 0 : _Alpha(0.9), _Gain(1.0), _AvgTestError(0.0) { 67 : int64_t _LAYER, _NEURON; 68 : 69 : // create network layers 70 0 : num_layers = nl; 71 0 : layer_ptr = new layer[nl]; 72 : 73 : // intialize the data of the created network layers 74 0 : for (_LAYER = 0; _LAYER < nl; _LAYER++) { 75 : // intialize values to neuron struct information 76 0 : layer_ptr[_LAYER].num_neurons = npl[_LAYER]; 77 0 : layer_ptr[_LAYER].neuron_ptr = new neuron[npl[_LAYER]]; 78 : 79 : // intialize date of the neurons of the created network layers 80 0 : for (_NEURON = 0; _NEURON < npl[_LAYER]; _NEURON++) { 81 : // initialize exit value 82 0 : layer_ptr[_LAYER].neuron_ptr[_NEURON].sortir = 1.0; 83 : // save the error 84 0 : layer_ptr[_LAYER].neuron_ptr[_NEURON].err = 0.0; 85 : 86 : // check if there is at least 1 layer 87 0 : if (_LAYER > 0) { 88 : /* initialize weight, last weight, and saved 
weight 89 : * values to _LAYER - 1 90 : */ 91 0 : layer_ptr[_LAYER].neuron_ptr[_NEURON].wt = 92 0 : new long double[npl[_LAYER - 1]]; 93 : 94 0 : layer_ptr[_LAYER].neuron_ptr[_NEURON].wt_last = 95 0 : new long double[npl[_LAYER - 1]]; 96 : 97 0 : layer_ptr[_LAYER].neuron_ptr[_NEURON].wt_saved = 98 0 : new long double[npl[_LAYER - 1]]; 99 : } 100 : // otherwise 101 : else { 102 : /* 103 : * initialize weight, last weight, and saved weight 104 : * to NULL 105 : */ 106 0 : layer_ptr[_LAYER].neuron_ptr[_NEURON].wt = NULL; 107 0 : layer_ptr[_LAYER].neuron_ptr[_NEURON].wt_last = NULL; 108 0 : layer_ptr[_LAYER].neuron_ptr[_NEURON].wt_saved = NULL; 109 : } 110 : } 111 : } 112 0 : } 113 : 114 : /* PRIMARY MLP DECONSTRUCTOR */ 115 0 : gpmp::ml::PrimaryMLP::~PrimaryMLP() { 116 : int64_t _LAYER, _NEURON; 117 : 118 : // TODO : thread the loops dealing with rudimentary computations 119 : 120 0 : for (_LAYER = 0; _LAYER < num_layers; _LAYER++) { 121 0 : if (layer_ptr[_LAYER].neuron_ptr) { 122 0 : for (_NEURON = 0; _NEURON < layer_ptr[_LAYER].num_neurons; 123 : _NEURON++) { 124 0 : if (layer_ptr[_LAYER].neuron_ptr[_NEURON].wt) { 125 0 : delete[] layer_ptr[_LAYER].neuron_ptr[_NEURON].wt; 126 : } 127 : 128 0 : if (layer_ptr[_LAYER].neuron_ptr[_NEURON].wt_last) { 129 0 : delete[] layer_ptr[_LAYER].neuron_ptr[_NEURON].wt_last; 130 : } 131 : 132 0 : if (layer_ptr[_LAYER].neuron_ptr[_NEURON].wt_saved) { 133 0 : delete[] layer_ptr[_LAYER].neuron_ptr[_NEURON].wt_saved; 134 : } 135 : } 136 : } 137 0 : delete[] layer_ptr[_LAYER].neuron_ptr; 138 : } 139 0 : delete[] layer_ptr; 140 0 : }