Qualia 0.2
NeuralNetwork.h
Go to the documentation of this file.
1 /*
2  * NeuralNetwork.h
3  *
4  * A simple feedforward neural network with one hidden layer.
5  *
6  * This file is part of Qualia https://github.com/sofian/qualia
7  *
8  * (c) 2011 Sofian Audry -- info(@)sofianaudry(.)com
9  * Inspired by code by Karsten Kutza
10  * http://www.ip-atlas.com/pub/nap/nn-src/bpn.txt
11  *
12  * This program is free software: you can redistribute it and/or modify
13  * it under the terms of the GNU General Public License as published by
14  * the Free Software Foundation, either version 3 of the License, or
15  * (at your option) any later version.
16  *
17  * This program is distributed in the hope that it will be useful,
18  * but WITHOUT ANY WARRANTY; without even the implied warranty of
19  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20  * GNU General Public License for more details.
21  *
22  * You should have received a copy of the GNU General Public License
23  * along with this program. If not, see <http://www.gnu.org/licenses/>.
24  */
25 #ifndef NEURAL_NETWORK_INC
26 #define NEURAL_NETWORK_INC
27 
28 #include <stdlib.h>
29 #include <math.h>
30 #include <string.h>
31 
33 #include <qualia/core/common.h>
34 #include <qualia/util/random.h>
35 
36 // TODO: IMPORTANT: the output layer should be linear rather than sigmoid... NOTE(review): the constructor's linearOutput flag (default false) appears to address this -- confirm and drop this TODO if so.
37 
39 
40 public:
41  // Configurable parameters /////
42 
53  float learningRate;
54 
59 
64  float weightDecay;
65 
66  // Internal use ////////////////
67 
72 
74  struct Layer {
75  unsigned int n; // number of units in this layer
76  real *output; // output of ith unit
77  real *error; // error term (delta) of ith unit, filled during backpropagation
78  real *weight; // connection weights into ith unit -- NOTE(review): exact layout (per-unit stride, bias slot) is set in _allocateLayer, not visible here; confirm before indexing
79  real *dWeight; // weight derivatives, same layout as weight -- presumably consumed by update(); verify in the .cpp
80  bool linear; // whether the output of the layer is linear (otherwise it's sigmoid)
81  };
82 
84  unsigned int _nParams;
85 
88 
89 public:
90  // Interface ///////////////////
91 
92  // Constructor/destructor.
93  NeuralNetwork(unsigned int nInputs,
94  unsigned int nHiddens,
95  unsigned int nOutputs,
96  float learningRate = 0.01,
97  float decreaseConstant = 0,
98  float weightDecay = 0,
99  bool linearOutput = false);
100  virtual ~NeuralNetwork();
101 
102  // Public methods.
103 
105  virtual void init();
106 
108  virtual unsigned int nInputs() const { return inputLayer.n; } // Returns the number of units in the input layer.
109 
111  virtual unsigned int nHidden() const { return hiddenLayer.n; } // Returns the number of units in the hidden layer.
112 
114  virtual unsigned int nOutputs() const { return outputLayer.n; } // Returns the number of units in the output layer.
115 
117  virtual unsigned int nParams() const { return _nParams; } // Returns the total number of trainable parameters (weights), cached in _nParams.
118 
120  virtual float getCurrentLearningRate() const;
121 
122  virtual void setInput(int i, real x);
123  virtual void setInputs(const real *inputs);
124 
125  virtual real getOutput(int i) const;
126  virtual void getOutputs(real *outputs) const;
127 
128  virtual void backpropagate(real *outputError);
129 
130  virtual void propagate();
131 
132  virtual void update();
133 
134  virtual void save(XFile* file);
135  virtual void load(XFile* file);
136 
137 #ifdef DEBUG
138 
139  void printLayer(Layer* layer, Layer* lower);
140 
141  void debug();
142 #endif
143 
144  // Internal ("private") methods.
145  void _allocateLayer(Layer& layer, unsigned int nInputs, unsigned int nOutputs, unsigned int& k, bool isLinear=false);
146  void _deallocateLayer(Layer& layer);
147 
148  void _propagateLayer(Layer& lower, Layer& upper);
149  void _backpropagateLayer(Layer& upper, Layer& lower);
150  void _deallocate();
151 };
152 
153 #endif
154 
155