Feedforward Closedloop Learning
layer.h
1 #ifndef __Layer_H_
2 #define __Layer_H_
3 
12 #include "globals.h"
13 #include "neuron.h"
14 #include <stdlib.h>
15 #include <string.h>
16 
17 #ifdef __linux__
18 #include <pthread.h>
19 #endif
20 
21 #ifdef _WIN32
22 #include <windows.h>
23 #endif
24 
25 
26 #define NUM_THREADS 12
27 
28 #ifndef DOXYGEN_SHOULD_SKIP_THIS
29 
30 // abstract thread which contains the inner workings of the thread model
31 class LayerThread {
32 
33 protected:
34 
35  Neuron** neurons;
36  int nNeurons = 0;
37  int maxNeurons = 0;
38 
39 #ifdef __linux__
40  pthread_t id = 0;
41 #endif
42 
43 #ifdef _WIN32
44  DWORD id = 0;
45  HANDLE hThread = 0;
46 #endif
47 
48 #ifdef __linux__
49  static void *exec(void *thr) {
50  reinterpret_cast<LayerThread *> (thr)->run();
51  return NULL;
52  }
53 #endif
54 
55 #ifdef _WIN32
56  static DWORD WINAPI exec(LPVOID thr) {
57  reinterpret_cast<LayerThread *> (thr)->run();
58  return 0;
59  }
60 #endif
61 
62 
63 public:
64 
65  LayerThread(int _maxNeurons) {
66  maxNeurons = _maxNeurons;
67  neurons = new Neuron*[maxNeurons];
68  }
69 
70  virtual ~LayerThread() {
71 #ifdef _WIN32
72  CloseHandle(hThread);
73 #endif
74  delete [] neurons;
75  }
76 
77  void addNeuron(Neuron* neuron) {
78  if (nNeurons >= maxNeurons) {
79  fprintf(stderr,"Not enough memory for threads.\n");
80  exit(1);
81  }
82  neurons[nNeurons] = neuron;
83  nNeurons++;
84  }
85 
86  void start() {
87  if (nNeurons == 0) {
88  return;
89  }
90 #ifdef __linux__
91  int ret;
92  if ((ret = pthread_create(&id, NULL, &LayerThread::exec, this)) != 0) {
93  fprintf(stderr,"%s\n",strerror(ret));
94  throw "Error";
95  }
96 #endif
97 #ifdef _WIN32
98  hThread = CreateThread(
99  NULL, // default security attributes
100  0, // use default stack size
101  &LayerThread::exec, // thread function name
102  this, // argument to thread function
103  0, // use default creation flags
104  &id); // returns the thread identifier
105  if (hThread == NULL) {
106  ExitProcess(3);
107  }
108 #endif
109  }
110 
111  void join() {
112  if (nNeurons == 0) {
113  return;
114  }
115 #ifdef __linux__
116  pthread_join(id,NULL);
117 #endif
118 #ifdef _WIN32
119  WaitForSingleObject(hThread, INFINITE);
120 #endif
121  }
122 
 123  // implemented by the subclasses to do the specific task of the thread
124  virtual void run() = 0;
125 
126 };
127 
128 
129 class CalcOutputThread : public LayerThread {
130  using LayerThread::LayerThread;
131  void run() {
132  for (int i=0;i<nNeurons;i++) {
133  neurons[i]->calcOutput();
134  }
135  }
136 };
137 
138 
139 class LearningThread : public LayerThread {
140  using LayerThread::LayerThread;
141  void run() {
142  for (int i=0;i<nNeurons;i++) {
143  neurons[i]->doLearning();
144  }
145  }
146 };
147 
148 
149 class MaxDetThread : public LayerThread {
150  using LayerThread::LayerThread;
151  void run() {
152  for (int i=0;i<nNeurons;i++) {
153  neurons[i]->doMaxDet();
154  }
155  }
156 };
157 
158 
159 #endif /* DOXYGEN_SHOULD_SKIP_THIS */
160 
161 
169 class Layer {
170 
171 public:
179  Layer(int _nNeurons, int _nInputs);
180 
184  ~Layer();
185 
 190  enum WeightNormalisation {
 191  WEIGHT_NORM_NONE = 0,
192  WEIGHT_NORM_LAYER_EUCLEDIAN = 1,
193  WEIGHT_NORM_NEURON_EUCLEDIAN = 2,
194  WEIGHT_NORM_LAYER_MANHATTAN = 3,
195  WEIGHT_NORM_NEURON_MANHATTAN = 4,
196  WEIGHT_NORM_LAYER_INFINITY = 5,
197  WEIGHT_NORM_NEURON_INFINITY = 6
198  };
199 
202  void calcOutputs();
203 
206  void doLearning();
207 
211  void setError( double _error);
212 
217  void setError( int i, double _error);
218 
222  void setErrors( double *_errors);
223 
227  double getError( int i);
228 
232  void setBias( double _bias);
233 
238  void setInput( int inputIndex, double input);
239 
243  void setInputs( double * _inputs);
244 
248  void setLearningRate( double _learningRate);
249 
253  void setActivationFunction(Neuron::ActivationFunction _activationFunction);
254 
258  void setMomentum( double _momentum);
259 
263  void setDecay( double _decay);
264 
270  void initWeights( double _max = 1,
271  int initBiasWeight = 1,
272  Neuron::WeightInitMethod weightInitMethod = Neuron::MAX_OUTPUT_RANDOM);
273 
278  inline double getOutput( int index) {
279  return neurons[index]->getOutput();
280  }
281 
286  inline Neuron* getNeuron( int index) {
287  assert(index < nNeurons);
288  return neurons[index];
289  }
290 
294  inline int getNneurons() { return nNeurons;}
295 
299  inline int getNinputs() { return nInputs;}
300 
305  void setConvolution( int width, int height);
306 
310  void setMaxDetLayer(int _m) { maxDetLayer = _m; };
311 
315  void setNormaliseWeights(WeightNormalisation _normaliseWeights);
316 
320  void setDebugInfo(int layerIndex);
321 
325  void setStep(long int step);
326 
 331  double getWeightDistanceFromInitialWeights();
334  void doNormaliseWeights();
335 
339  void setUseThreads(int _useThreads) {
340  useThreads = _useThreads;
341  if (!useThreads) {
 342  fprintf(stderr,"Thread execution is OFF\n");
343  }
344  };
345 
349  int saveWeightMatrix(char *filename);
350 
351 
352 private:
353 
354  int nNeurons;
355  int nInputs;
356  Neuron** neurons = 0;
357  int maxDetLayer = 0;
358  WeightNormalisation normaliseWeights = WEIGHT_NORM_NONE;
359  int debugOutput = 0;
360  // for debugging output
361  int layerIndex = 0;
362  long int step = 0;
363  int useThreads = 1;
364  CalcOutputThread** calcOutputThread = NULL;
365  LearningThread** learningThread = NULL;
366  MaxDetThread** maxDetThread = NULL;
367 };
368 
369 #endif
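
Before the member index below, a minimal usage sketch of the public interface declared above. It only uses methods and enum values visible in this header and in neuron.h; the layer sizes, learning rate, input and error values are invented for illustration, and in the real library a higher-level network class normally drives these calls:

#include "layer.h"
#include <cstdio>

int main() {
    // Hypothetical sizes: 4 neurons, each with 6 inputs.
    Layer* layer = new Layer(4, 6);

    // One-off setup (all values chosen for illustration only).
    layer->initWeights(1.0, 1, Neuron::MAX_OUTPUT_RANDOM);
    layer->setLearningRate(0.001);
    layer->setActivationFunction(Neuron::TANH); // TANH is one of the values listed in neuron.h

    // One simulation step: feed the inputs, compute the outputs,
    // inject the errors and let the layer adjust its weights.
    double inputs[6] = {0.1, 0.2, 0.0, -0.1, 0.3, 0.5};
    layer->setInputs(inputs);
    layer->calcOutputs();

    double errors[4] = {0.01, -0.02, 0.0, 0.005};
    layer->setErrors(errors);
    layer->doLearning();

    printf("output of neuron 0: %f\n", layer->getOutput(0));

    delete layer;
    return 0;
}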
Layer::WeightNormalisation
WeightNormalisation
Weight normalisation constants. Defines if weights are normalised layer-wide or for every neuron separately.
Definition: layer.h:190
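
The seven constants combine a norm (Euclidean, Manhattan or infinity norm) with a scope: the LAYER variants normalise over all weights of the layer, the NEURON variants normalise every neuron separately. A short, hypothetical selection call; when doNormaliseWeights() is actually invoked is decided in layer.cpp:

#include "layer.h"

void configureNormalisation(Layer& layer) {
    // One Euclidean norm over all weights of the whole layer;
    // WEIGHT_NORM_NEURON_EUCLEDIAN would instead normalise each neuron on its own.
    layer.setNormaliseWeights(Layer::WEIGHT_NORM_LAYER_EUCLEDIAN);
}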
Layer::setActivationFunction
void setActivationFunction(Neuron::ActivationFunction _activationFunction)
Set the activation function.
Definition: layer.cpp:212
Layer::getOutput
double getOutput(int index)
Gets the output of one neuron.
Definition: layer.h:278
Layer::setMomentum
void setMomentum(double _momentum)
Set the momentum of all neurons in this layer.
Definition: layer.cpp:206
Layer::setUseThreads
void setUseThreads(int _useThreads)
Sets if threads should be used.
Definition: layer.h:339
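
Threading is enabled by default and the size of the worker pool is fixed by NUM_THREADS (12) at the top of layer.h. A small, hypothetical helper for switching it off, for example on a single-core target or while debugging:

#include "layer.h"

void runSingleThreaded(Layer& layer) {
    // Disables the CalcOutputThread / LearningThread / MaxDetThread pool;
    // the layer then reports "Thread execution is OFF" on stderr.
    layer.setUseThreads(0);
}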
Layer::setBias
void setBias(double _bias)
Sets the global bias for all neurons.
Definition: layer.cpp:194
Layer::setNormaliseWeights
void setNormaliseWeights(WeightNormalisation _normaliseWeights)
Normalise the weights.
Definition: layer.cpp:170
Layer::getError
double getError(int i)
Retrieves the error.
Definition: layer.cpp:235
Layer::setDebugInfo
void setDebugInfo(int layerIndex)
Sets the layer index within the whole network.
Definition: layer.cpp:248
Layer::doLearning
void doLearning()
Adjusts the weights.
Definition: layer.cpp:133
Layer::setInput
void setInput(int inputIndex, double input)
Sets the value of one input.
Definition: layer.cpp:241
Layer::setLearningRate
void setLearningRate(double _learningRate)
Sets the learning rate of all neurons.
Definition: layer.cpp:200
Layer
Layer which contains the neurons of one layer.
Definition: layer.h:169
Neuron::WeightInitMethod
WeightInitMethod
Constants how to init the weights in the neuron.
Definition: neuron.h:71
Layer::calcOutputs
void calcOutputs()
Calculates the output values in all neurons.
Definition: layer.cpp:64
Layer::saveWeightMatrix
int saveWeightMatrix(char *filename)
Save weight matrix for documentation and debugging.
Definition: layer.cpp:311
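
A small, hypothetical call for dumping the weights; the filename is invented and the file format is whatever layer.cpp:311 writes. Note the non-const char* parameter, which rules out passing a string literal directly in modern C++:

#include "layer.h"

void dumpWeights(Layer& layer) {
    char filename[] = "layer0_weights.dat";  // hypothetical name
    layer.saveWeightMatrix(filename);
}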
Neuron::getOutput
double getOutput()
Gets the output of the neuron.
Definition: neuron.h:118
Layer::getNeuron
Neuron * getNeuron(int index)
Gets a pointer to one neuron.
Definition: layer.h:286
Layer::setDecay
void setDecay(double _decay)
Sets the weight decay scaled by the learning rate.
Definition: layer.cpp:218
Neuron::doLearning
void doLearning()
Performs the learning. ICO learning in the neuron: pre * error.
Definition: neuron.cpp:150
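
The "pre * error" above is the heterosynaptic ICO rule: every weight changes by the learning rate times its presynaptic input times the error assigned to the neuron. A conceptual sketch of just that rule, leaving out the filters, momentum and decay handled in neuron.cpp:

// Conceptual sketch only, not the library's implementation.
// w: weights, x: presynaptic inputs, error: error signal of this neuron,
// eta: learning rate.
void icoLearningStep(double* w, const double* x, int nInputs,
                     double error, double eta) {
    for (int i = 0; i < nInputs; i++) {
        w[i] += eta * x[i] * error;   // delta w_i = eta * pre_i * error
    }
}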
Layer::~Layer
~Layer()
Destructor. Frees all memory.
Definition: layer.cpp:46
Neuron::doMaxDet
void doMaxDet()
Detects the max of an input. Switches the highest weight to 1 and the others to 0.
Definition: neuron.cpp:274
Neuron
Neuron which calculates the output and performs learning.
Definition: neuron.h:19
Layer::setMaxDetLayer
void setMaxDetLayer(int _m)
Maximum detection layer.
Definition: layer.h:310
Layer::doNormaliseWeights
void doNormaliseWeights()
Performs the weight normalisation.
Definition: layer.cpp:81
Layer::getNinputs
int getNinputs()
Number of inputs.
Definition: layer.h:299
Layer::setError
void setError(double _error)
Sets the global error for all neurons.
Definition: layer.cpp:179
Neuron::calcOutput
void calcOutput()
Calculates the output of the neuron. This runs the filters and the activation function and sums it all up.
Definition: neuron.cpp:47
Layer::initWeights
void initWeights(double _max=1, int initBiasWeight=1, Neuron::WeightInitMethod weightInitMethod=Neuron::MAX_OUTPUT_RANDOM)
Inits the weights.
Definition: layer.cpp:224
Neuron::ActivationFunction
ActivationFunction
Activation functions on offer. LINEAR: linear unit, TANH: hyperbolic tangent, RELU: linear rectifier...
Definition: neuron.h:86
Layer::getNneurons
int getNneurons()
Gets the number of neurons.
Definition: layer.h:294
Layer::setConvolution
void setConvolution(int width, int height)
Defines a 2D geometry for the input layer of width x height.
Definition: layer.cpp:288
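
A hypothetical call declaring a 16 x 16 geometry for the inputs; the sizes are invented, and what the geometry is used for afterwards is defined in layer.cpp:288:

#include "layer.h"

void setupConvolutionLayer(Layer& layer) {
    // Declare a 16 x 16 grid geometry for this layer's inputs.
    layer.setConvolution(16, 16);
}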
Layer::Layer
Layer(int _nNeurons, int _nInputs)
Constructor.
Definition: layer.cpp:16
Layer::getWeightDistanceFromInitialWeights
double getWeightDistanceFromInitialWeights()
Get weight distance from the start of the simulation.
Definition: layer.cpp:264
Layer::setInputs
void setInputs(double *_inputs)
Sets all inputs from an input array.
Definition: layer.cpp:273
Layer::setErrors
void setErrors(double *_errors)
Sets all errors from an input array.
Definition: layer.cpp:185
Layer::setStep
void setStep(long int step)
Sets the simulation step in the layer for debug purposes.
Definition: layer.cpp:257