Feedforward Closedloop Learning
fcl.h
1 #ifndef __FEEDFORWARD_CLOSEDLOOP_LEARNING_H_
2 #define __FEEDFORWARD_CLOSEDLOOP_LEARNING_H_
3 
4 #include "fcl/globals.h"
5 #include "fcl/layer.h"
6 #include "fcl/neuron.h"
7 #include "fcl/bandpass.h"
8 
9 #include <stdio.h>
10 #include <stdlib.h>
11 #include <math.h>
12 #include <assert.h>
13 
14 
31 
32 public:
33 
40  int num_of_inputs,
41  int* num_of_neurons_per_layer_array,
42  int _num_layers
43  );
44 
49 
54  void doStep(double* input, double* error);
55 
58  void doStep(double* input, int n1, double* error, int n2);
59 
64  double getOutput(int index) {
65  return layers[num_layers-1]->getOutput(index);
66  }
67 
71  void setLearningRate(double learningRate);
72 
76  void setLearningRateDiscountFactor(double _learningRateDiscountFactor) {
77  learningRateDiscountFactor = _learningRateDiscountFactor;
78  }
79 
83  void setDecay(double decay);
84 
88  void setMomentum(double momentum);
89 
93  void setActivationFunction(Neuron::ActivationFunction _activationFunction);
94 
100  void initWeights(double max = 0.001,
101  int initBias = 1,
102  Neuron::WeightInitMethod weightInitMethod = Neuron::MAX_OUTPUT_RANDOM);
103 
107  void seedRandom(int s) { srand(s); };
108 
112  void setBias(double _bias);
113 
117  int getNumLayers() {return num_layers;};
118 
123  Layer* getLayer(int i) {assert (i<=num_layers); return layers[i];};
124 
128  Layer* getOutputLayer() {return layers[num_layers-1];};
129 
133  int getNumInputs() {return ni;}
134 
138  Layer** getLayers() {return layers;};
139 
143  bool saveModel(const char* name);
144 
148  bool loadModel(const char* name);
149 
150 
151 
152 private:
153  int ni;
154  int* n;
155  int num_layers;
156 
157  double learningRateDiscountFactor = 1;
158 
159  long int step = 0;
160 
161  Layer** layers;
162 
163  // should be called to relay layer index to the layer
164  void setDebugInfo();
165 
166  void doLearning();
167  void setStep();
168 
169 };
170 #endif
FeedforwardClosedloopLearning::setDecay
void setDecay(double decay)
Sets a typical weight decay scaled with the learning rate.
Layer::getOutput
double getOutput(int index)
Gets the output of one neuron.
Definition: layer.h:278
FeedforwardClosedloopLearning::getLayers
Layer ** getLayers()
Returns all Layers.
Definition: fcl.h:138
FeedforwardClosedloopLearning::setMomentum
void setMomentum(double momentum)
Sets the global momentum for all layers.
FeedforwardClosedloopLearning::getNumInputs
int getNumInputs()
Gets the number of inputs.
Definition: fcl.h:133
FeedforwardClosedloopLearning::setBias
void setBias(double _bias)
Sets globally the bias.
Layer
Layer which contains the neurons of one layer.
Definition: layer.h:169
Neuron::WeightInitMethod
WeightInitMethod
Constants how to init the weights in the neuron.
Definition: neuron.h:71
FeedforwardClosedloopLearning::doStep
void doStep(double *input, double *error)
Performs the simulation step.
FeedforwardClosedloopLearning::getLayer
Layer * getLayer(int i)
Gets a pointer to a layer.
Definition: fcl.h:123
FeedforwardClosedloopLearning::getOutput
double getOutput(int index)
Gets the output from one of the output neurons.
Definition: fcl.h:64
FeedforwardClosedloopLearning::loadModel
bool loadModel(const char *name)
Loads the whole network.
FeedforwardClosedloopLearning
Main class of Feedforward Closed Loop Learning.
Definition: fcl.h:30
FeedforwardClosedloopLearning::seedRandom
void seedRandom(int s)
Seeds the random number generator.
Definition: fcl.h:107
FeedforwardClosedloopLearning::saveModel
bool saveModel(const char *name)
Saves the whole network.
FeedforwardClosedloopLearning::FeedforwardClosedloopLearning
FeedforwardClosedloopLearning(int num_of_inputs, int *num_of_neurons_per_layer_array, int _num_layers)
Constructor: FCL without any filters.
FeedforwardClosedloopLearning::setLearningRate
void setLearningRate(double learningRate)
Sets globally the learning rate.
FeedforwardClosedloopLearning::initWeights
void initWeights(double max=0.001, int initBias=1, Neuron::WeightInitMethod weightInitMethod=Neuron::MAX_OUTPUT_RANDOM)
Inits the weights in all layers.
Neuron::ActivationFunction
ActivationFunction
Activation functions on offer: LINEAR: linear unit, TANH: tangens hyperbolicus, RELU: linear rectifier...
Definition: neuron.h:86
FeedforwardClosedloopLearning::~FeedforwardClosedloopLearning
~FeedforwardClosedloopLearning()
Destructor. De-allocates any memory.
FeedforwardClosedloopLearning::getOutputLayer
Layer * getOutputLayer()
Gets the output layer.
Definition: fcl.h:128
FeedforwardClosedloopLearning::setActivationFunction
void setActivationFunction(Neuron::ActivationFunction _activationFunction)
Sets the activation function of the Neuron.
FeedforwardClosedloopLearning::getNumLayers
int getNumLayers()
Gets the total number of layers.
Definition: fcl.h:117
FeedforwardClosedloopLearning::setLearningRateDiscountFactor
void setLearningRateDiscountFactor(double _learningRateDiscountFactor)
Sets how the learning rate increases or decreases from layer to layer.
Definition: fcl.h:76