Hi, I've been coding a small Neural Network lib, and it worked upto the point where I coded backprop 'training'. The neural net is a standard 3 layer feed forward network. (Input Layer->Hidden Layer->Output Layer.) All easy to grasp as I wasn't going for extreme effiency, just trying to get it running atm. Here's the code,
using namespace std;
double rndn( double from,double fin)
{
double rndr =(double) rand()/RAND_MAX;
return from+(fin-from)*rndr;
};
double rndn()
{
return (double)rand()/RAND_MAX;
};
double sigmoid(double netinput, double response=1)
{
return ( 1.0 / ( 1.0 + exp(-netinput / response)));
}
class Neuron
{
public:
~Neuron()
{}
Neuron(){
ic=0;
oc=0;
output=0;
echoOut=false;
};
void modWeight( int connection,double val){
w[connection]+=val;
};
void connectNeuron( Neuron *to){
out.push_back( to );
oc++;
to->addInput( (Neuron *)this);
// cout<<"Connected Neuron"<<endl;
// cout<<"Oc:"+oc<<endl;
// cout<<"ic:"+to->ic;
};
void init(){
oc=0;
ic=0;
for(int j=0;j<255;j++)
{
w[j]=-1+(rndn()*2);
};
output =0;
};
double getWeight( int connection =0){
return w[connection];
};
void sumInputs()
{
double weight =0;
for(int i=0;i<ic;i++)
{
weight += w * in->getOutput() ;
};
// weight+= w[ic]*-1;
// if(output>w[ic]*-1){
output =sigmoid(weight,-1);
// }else{
// output=0;
//};
};
void fire(){
};
void setOutput( double noutput)
{
output=noutput;
};
double getOutput()
{
if(echoOut==true){
cout<<"Echo Neuron called."<<endl;
cout<<"Output is:"<<output<<endl;
};
return output;
};
void incInput(){
ic++;
};
void addInput( Neuron *inN){
in.push_back( inN );
ic++;
cout <<"Added input>"<<ic<<endl;
};
void echoOutput( bool enable=true){
echoOut=enable;
};
double getErr(){
return err;
};
void setErr( double val){
err=val;
};
double getDelta(){
return delta;
};
void setDelta(double ndelta){
delta = ndelta;
};
private:
vector<Neuron *>in;
vector<Neuron *>out;
double w[255];
double output;
int ic,oc;
bool echoOut;
double err;
double delta;
};
// TODO (Antony#1#): Try increasing network by adding random neurons in areas of high activitity.
class NInput
{
public:
void addWatch(){
dat->echoOutput(true);
};
void value(double input)
{
dat->setOutput( input );
}
Neuron *getNeuron(){
return dat;
};
void setNeuron(Neuron *an){
dat = an;
};
protected:
Neuron *dat;
};
class NOutput
{
public:
double value()
{
return dat->getOutput();
}
Neuron *getNeuron(){
return dat;
};
void setNeuron(Neuron *an){
dat = an;
};
void target( double val){
desired = val;
};
double getTarget(){
return desired;
};
protected:
Neuron *dat;
const char *name;
double desired;
};
class NeuralNet
{
public:
NeuralNet(){
ic=oc=hc=0;
};
~NeuralNet(){
};
void addHiddenLayer( int neurons=8)
{
for(int j=0;j<neurons;j++)
{
hidden.push_back( new Neuron);
//hidden[j]->init();
};
hc=neurons;
};
NInput* addInput()
{
NInput *out = new NInput;
out->setNeuron( new Neuron);
out->getNeuron()->init();
inputs.push_back( out );
ic++;
return out;
};
NOutput *addOutput()
{
NOutput *out = new NOutput;
out->setNeuron( new Neuron);
out->getNeuron()->init();
outputs.push_back( out );
oc++;
return out;
};
void connectNetwork()
{
for(int j=0;j<ic;j++)
{
for(int k=0;k<hc;k++)
{
Neuron *neuron;
Neuron *hneuron = hidden[k];
NInput *tempi = inputs[j];
neuron = tempi->getNeuron();
neuron->connectNeuron( hneuron );
};
};
for(int j=0;j<hc;j++)
{
for(int k=0;k<oc;k++)
{
Neuron *neuron = hidden[j];
Neuron *oneuron;
NOutput *tempo=outputs[k];
oneuron = tempo->getNeuron();
neuron->connectNeuron( oneuron );
};
};
};
void cycle(){
//Feed network.
for(int j=0;j<hc;j++){
hidden[j]->sumInputs();
// hidden[j]->modWeight( 0,5 );
};
for(int j=0;j<oc;j++){
outputs[j]->getNeuron()->sumInputs();
};
//Calculate output neurons error.
for(int j=0;j<oc;j++){
double error;
double target=outputs[j]->getTarget();
double actual=outputs[j]->value();
error=(target - actual) * actual * (1 - actual);
//error = actual * ( 1-actual)*(target-actual);
outputs[j]->getNeuron()->setDelta( error );
};
double delta_sum=0;
for(int j=0;j<hc;++j){
delta_sum=0;
for (int k=0;k<oc;++k)
{
delta_sum+=outputs[k]->getNeuron()->getDelta()*outputs[k]->getNeuron()->getWeight( j );
// outputs[k]->getNeuron()->modWeight( j, outputs[k]->getNeuron()->getDelta()*outputs[k]->getNeuron()->getOutput() );
}
//outputs[k
// hidden_weight[middlenode][outputnode]+=delta_output[outputnode]*out_hidden[middlenode];
cout<<"Delta Sum was:"<<delta_sum<<endl;
// delta_hidden[middlenode]=delta_sum*out_hidden[middlenode]*(1-out_hidden[middlenode]);
hidden[j]->setDelta( delta_sum*hidden[j]->getOutput()*(1-hidden[j]->getOutput()) );
};
for(int j=0;j<ic;++j){
for(int k=0;k<hc;++k){
//hidden[k]->modWeight( j,2 );
hidden[k]->modWeight( j,hidden[k]->getDelta()*inputs[j]->getNeuron()->getOutput() );
};
};
//Calculate hidden neurons error.
};
protected:
vector<NInput *> inputs;
int ic;
vector<Neuron *> hidden;
int hc;
vector<NOutput*> outputs;
int oc;
private:
};
#endif // NEURALNET_H
And here's the bit that specifically updates the net and the backprop code. (Cut and pasted from above, not in addition)
void cycle(){
//Feed network.
for(int j=0;j<hc;j++){
hidden[j]->sumInputs();
// hidden[j]->modWeight( 0,5 );
};
for(int j=0;j<oc;j++){
outputs[j]->getNeuron()->sumInputs();
};
//Calculate output neurons error.
for(int j=0;j<oc;j++){
double error;
double target=outputs[j]->getTarget();
double actual=outputs[j]->value();
error=(target - actual) * actual * (1 - actual);
//error = actual * ( 1-actual)*(target-actual);
outputs[j]->getNeuron()->setDelta( error );
};
double delta_sum=0;
for(int j=0;j<hc;++j){
delta_sum=0;
for (int k=0;k<oc;++k)
{
delta_sum+=outputs[k]->getNeuron()->getDelta()*outputs[k]->getNeuron()->getWeight( j );
// outputs[k]->getNeuron()->modWeight( j, outputs[k]->getNeuron()->getDelta()*outputs[k]->getNeuron()->getOutput() );
}
//outputs[k
// hidden_weight[middlenode][outputnode]+=delta_output[outputnode]*out_hidden[middlenode];
cout<<"Delta Sum was:"<<delta_sum<<endl;
// delta_hidden[middlenode]=delta_sum*out_hidden[middlenode]*(1-out_hidden[middlenode]);
hidden[j]->setDelta( delta_sum*hidden[j]->getOutput()*(1-hidden[j]->getOutput()) );
};
for(int j=0;j<ic;++j){
for(int k=0;k<hc;++k){
//hidden[k]->modWeight( j,2 );
hidden[k]->modWeight( j,hidden[k]->getDelta()*inputs[j]->getNeuron()->getOutput() );
};
};
};
Am I doing something wrong? As is the outputs NEVER change, no matter how much I fiddle with the hidden layer's weights.