Feedforward Closedloop Learning
layer.h
// Excerpt from layer.h (includes of neuron.h, stdio/stdlib/assert and
// pthread.h or windows.h omitted).

#define NUM_THREADS 12

#ifndef DOXYGEN_SHOULD_SKIP_THIS
// Worker thread which runs a subset of the neurons of one layer.
class LayerThread {
protected:
	Neuron** neurons = NULL;
	int maxNeurons = 0;
	int nNeurons = 0;
#ifdef __linux__
	pthread_t id = 0;
	static void *exec(void *thr) {
		reinterpret_cast<LayerThread *> (thr)->run();
		return NULL;
	}
#else
	HANDLE hThread = 0;
	static DWORD WINAPI exec(LPVOID thr) {
		reinterpret_cast<LayerThread *> (thr)->run();
		return 0;
	}
#endif
public:
	LayerThread(int _maxNeurons) {
		maxNeurons = _maxNeurons;
		neurons = new Neuron*[maxNeurons];
	}
	virtual ~LayerThread() {
		delete[] neurons;
	}
	// Registers a neuron with this worker thread.
	void addNeuron(Neuron* neuron) {
		if (nNeurons >= maxNeurons) {
			fprintf(stderr, "Not enough memory for threads.\n");
			exit(EXIT_FAILURE);
		}
		neurons[nNeurons] = neuron;
		nNeurons++;
	}
	// Starts the thread.
	void start() {
#ifdef __linux__
		int ret;
		if ((ret = pthread_create(&id, NULL, &LayerThread::exec, this)) != 0) {
			fprintf(stderr, "%s\n", strerror(ret));
			exit(EXIT_FAILURE);
		}
#else
		hThread = CreateThread(NULL, 0, exec, this, 0, NULL);
		if (hThread == NULL) {
			exit(EXIT_FAILURE);
		}
#endif
	}
	// Waits for the thread to finish.
	void join() {
#ifdef __linux__
		pthread_join(id, NULL);
#else
		WaitForSingleObject(hThread, INFINITE);
#endif
	}
	// Implemented by the subclasses below.
	virtual void run() = 0;
};

// Calculates the outputs of its neurons.
class CalcOutputThread : public LayerThread {
	using LayerThread::LayerThread;
	void run() {
		for (int i = 0; i < nNeurons; i++) neurons[i]->calcOutput();
	}
};

// Performs the learning in its neurons.
class LearningThread : public LayerThread {
	using LayerThread::LayerThread;
	void run() {
		for (int i = 0; i < nNeurons; i++) neurons[i]->doLearning();
	}
};

// Performs maximum detection in its neurons.
class MaxDetThread : public LayerThread {
	using LayerThread::LayerThread;
	void run() {
		for (int i = 0; i < nNeurons; i++) neurons[i]->doMaxDet();
	}
};
#endif // DOXYGEN_SHOULD_SKIP_THIS

// Layer which contains the neurons of one layer (public interface, excerpt).
class Layer {
public:
	// Constructor.
	Layer(int _nNeurons, int _nInputs);
	// Weight normalisation constants: define if the weights are
	// normalised layer-wide or for every neuron separately.
	enum WeightNormalisation {
		WEIGHT_NORM_NONE = 0,
		WEIGHT_NORM_LAYER_EUCLEDIAN = 1,
		WEIGHT_NORM_NEURON_EUCLEDIAN = 2,
		WEIGHT_NORM_LAYER_MANHATTAN = 3,
		WEIGHT_NORM_NEURON_MANHATTAN = 4,
		WEIGHT_NORM_LAYER_INFINITY = 5,
		WEIGHT_NORM_NEURON_INFINITY = 6
	};
	// Sets the error of neuron i.
	void setError(int i, double _error);
	// Sets the value of one input.
	void setInput(int inputIndex, double input);
	// Inits the weights.
	void initWeights(double _max = 1, int initBiasWeight = 1,
	                 Neuron::WeightInitMethod weightInitMethod = Neuron::MAX_OUTPUT_RANDOM);
	// Gets a pointer to one neuron.
	Neuron* getNeuron(int index) {
		assert(index < nNeurons);
		return neurons[index];
	}
	// Sets if threads should be used.
	void setUseThreads(int _useThreads) {
		useThreads = _useThreads;
		if (!useThreads) fprintf(stderr, "Thread execution is OFF\n");
	}
private:
	CalcOutputThread** calcOutputThread = NULL;
	LearningThread** learningThread = NULL;
	MaxDetThread** maxDetThread = NULL;
	// ... further private members (neurons, nNeurons, useThreads, ...) omitted.
};
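Read together with the cross references below, the class gives a per-step API: set the inputs, calculate the outputs, feed the error back in and let the neurons learn. The following minimal sketch shows one such step for a single, stand-alone layer; it only uses calls documented on this page, but the include path, the layer sizes, the learning rate and the hand-set error array are illustrative assumptions rather than code from the library's own examples. With setUseThreads(1) the layer would distribute its neurons over the NUM_THREADS worker threads shown above instead of iterating over them in the calling thread.

#include "layer.h"   // assumed include path; adjust to the actual install location

int main() {
	const int nNeurons = 10;
	const int nInputs = 4;
	Layer* layer = new Layer(nNeurons, nInputs);            // 10 neurons, 4 inputs each
	layer->initWeights(1.0, 1, Neuron::MAX_OUTPUT_RANDOM);  // random init, with bias weight
	layer->setLearningRate(0.001);                          // illustrative value
	layer->setUseThreads(0);                                // single-threaded for this sketch

	double inputs[nInputs] = {0.1, 0.2, 0.3, 0.4};
	double errors[nNeurons] = {0};                          // would normally come from the closed loop

	layer->setInputs(inputs);                               // feed the inputs in
	layer->calcOutputs();                                   // forward pass of this layer
	layer->setErrors(errors);                               // push the error into every neuron
	layer->doLearning();                                    // adjust the weights (pre * error)

	double out0 = layer->getOutput(0);                      // read one neuron's output
	(void)out0;
	delete layer;
	return 0;
}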
WeightNormalisation
Weight normalisation constants. Defines if the weights are normalised layer-wide or for every neuron separately (see the configuration sketch after this list).
Definition: layer.h:190
void setActivationFunction(Neuron::ActivationFunction _activationFunction)
Set the activation function.
Definition: layer.cpp:212
double getOutput(int index)
Gets the output of one neuron.
Definition: layer.h:278
void setMomentum(double _momentum)
Set the momentum of all neurons in this layer.
Definition: layer.cpp:206
void setUseThreads(int _useThreads)
Sets if threads should be used.
Definition: layer.h:339
void setBias(double _bias)
Sets the global bias for all neurons.
Definition: layer.cpp:194
void setNormaliseWeights(WeightNormalisation _normaliseWeights)
Normalise the weights.
Definition: layer.cpp:170
double getError(int i)
Retrieves the error.
Definition: layer.cpp:235
void setDebugInfo(int layerIndex)
Sets the layer index within the whole network.
Definition: layer.cpp:248
void doLearning()
Adjusts the weights.
Definition: layer.cpp:133
void setInput(int inputIndex, double input)
Set the input value of one input.
Definition: layer.cpp:241
void setLearningRate(double _learningRate)
Sets the learning rate of all neurons.
Definition: layer.cpp:200
Layer which contains the neurons of one layer.
Definition: layer.h:169
WeightInitMethod
Constants defining how to init the weights in the neuron.
Definition: neuron.h:71
void calcOutputs()
Calculates the output values in all neurons.
Definition: layer.cpp:64
int saveWeightMatrix(char *filename)
Save weight matrix for documentation and debugging.
Definition: layer.cpp:311
double getOutput()
Gets the output of the neuron.
Definition: neuron.h:118
Neuron * getNeuron(int index)
Gets a pointer to one neuron.
Definition: layer.h:286
void setDecay(double _decay)
Sets the weight decay scaled by the learning rate.
Definition: layer.cpp:218
void doLearning()
Performs the learning. Performs ICO learning in the neuron: pre * error.
Definition: neuron.cpp:150
~Layer()
Destructor. Frees all memory.
Definition: layer.cpp:46
void doMaxDet()
Detects the max of an input. Switches the highest weight to 1 and the others to 0.
Definition: neuron.cpp:274
Neuron which calculates the output and performs learning.
Definition: neuron.h:19
void setMaxDetLayer(int _m)
Maximum detection layer.
Definition: layer.h:310
void doNormaliseWeights()
Performs the weight normalisation.
Definition: layer.cpp:81
int getNinputs()
Gets the number of inputs.
Definition: layer.h:299
void setError(double _error)
Sets the global error for all neurons.
Definition: layer.cpp:179
void calcOutput()
Calculates the output of the neuron. Runs the filters and the activation function and sums it all up.
Definition: neuron.cpp:47
void initWeights(double _max=1, int initBiasWeight=1, Neuron::WeightInitMethod weightInitMethod=Neuron::MAX_OUTPUT_RANDOM)
Inits the weights.
Definition: layer.cpp:224
ActivationFunction
Activation functions on offer. LINEAR: linear unit, TANH: tangens hyperbolicus, RELU: linear rectifier (see neuron.h for the full list).
Definition: neuron.h:86
int getNneurons()
Gets the number of neurons.
Definition: layer.h:294
void setConvolution(int width, int height)
Defines a 2D geometry for the input layer of width x height.
Definition: layer.cpp:288
Layer(int _nNeurons, int _nInputs)
Constructor.
Definition: layer.cpp:16
double getWeightDistanceFromInitialWeights()
Gets the weight distance from the start of the simulation.
Definition: layer.cpp:264
void setInputs(double *_inputs)
Sets all inputs from an input array.
Definition: layer.cpp:273
void setErrors(double *_errors)
Sets all errors from an input array.
Definition: layer.cpp:185
void setStep(long int step)
Sets the simulation step in the layer for debug purposes.
Definition: layer.cpp:257
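Most of the remaining members above are configuration setters. As a rough, hypothetical sketch of how they could be combined before the simulation starts (all values are illustrative, and the exact spelling of the activation constants such as Neuron::TANH has to be checked in neuron.h):

Layer* layer = new Layer(16, 64);                        // 16 neurons, 64 inputs each
layer->setConvolution(8, 8);                             // interpret the 64 inputs as an 8x8 patch
layer->setActivationFunction(Neuron::TANH);              // assumed constant name (LINEAR / TANH / RELU)
layer->setNormaliseWeights(Layer::WEIGHT_NORM_NEURON_EUCLEDIAN);
layer->initWeights(1.0, 1, Neuron::MAX_OUTPUT_RANDOM);
layer->setLearningRate(0.0001);
layer->setMomentum(0.9);
layer->setDecay(0.001);                                  // weight decay, scaled by the learning rate
layer->setBias(1.0);
layer->setDebugInfo(0);                                  // this is layer 0 of the network
layer->setStep(0);                                       // simulation step counter for debugging
layer->saveWeightMatrix((char*)"layer0_weights.dat");    // dump the weights for inspection

Whether normalisation is applied per layer or per neuron, and with which norm, is selected by the WeightNormalisation constants listed at the top of this page.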