Backpropagation doesn't work properly

I have a problem with learning in neural networks again.
I tried using backpropagation and created the following two classes:

class Neuron {
  float lastinputs[];
  int id;
  float[] weights;
  Neuron(int id_, int lengthweights) {
    id=id_;
    weights=new float[lengthweights];
    try {
      // try to load previously saved weights from weight<id>.txt
      String load[]=loadStrings("weight"+id+".txt");
      for (int i=0; i<weights.length; i++) weights[i]=Float.parseFloat(load[i]);
      String format_controll=load[weights.length-1];
    }
    catch(Exception e) {
      // no saved file yet: initialize the weights randomly and save them once
      for (int i=0; i<weights.length; i++) weights[i]=random(-1, 1);
      String[] save=new String[weights.length];
      for (int i=0; i<weights.length; i++) save[i]=weights[i]+"";
      saveStrings("weight"+id+".txt", save);
    }
  }
  
  void saveNeuron() {
    String[] save=new String[weights.length];
    for (int i=0; i<weights.length; i++) save[i]=weights[i]+"";
    saveStrings("weight"+id+".txt", save);
  }
  float calculate(float inputs[]) {
    // weighted sum of the inputs; note that no activation function is applied
    float sum=0;
    lastinputs=inputs;
    for (int i=0; i<weights.length; i++) sum+=inputs[i]*weights[i];
    return sum;
  }
}
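
Side note: calculate() above only returns the raw weighted sum, so there is no activation function anywhere. As far as I know, backpropagation normally assumes a differentiable activation such as the sigmoid; a minimal sketch of that (not part of my classes, just for reference) would be:

float sigmoid(float x) {
  // logistic activation, squashes any weighted sum into the range (0, 1)
  return 1.0 / (1.0 + exp(-x));
}

float sigmoidDerivative(float y) {
  // derivative of the sigmoid, written in terms of its output y = sigmoid(x)
  return y * (1.0 - y);
}
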
class Network {

  int structure[];
  int structure2[];
  float calculationstructure[][];
  Neuron n[];
  Network(int struc[]) {
    structure=new int[struc.length];
    structure[0]=0;
    for (int i=1; i<struc.length; i++) structure[i]+=struc[i]+structure[i-1];
    println(structure);
    structure2=struc;
    structure[0]=0;

    n=new Neuron[structure[structure.length-1]];
    for (int i=0; i<structure.length-1; i++) {
      for (int j=structure[i]; j<structure[i+1]; j++) {
        n[j]=new Neuron(j, struc[i]);
      }
    }
  }
  float calculate(float inputs[]) {
    // feed the inputs forward layer by layer and keep every layer's output
    int max=inputs.length;
    for (int i=0; i<structure.length; i++) if (max<structure2[i]) max=structure2[i];
    float[][] layeroutput=new float[structure.length][max];
    layeroutput[0]=inputs;
    for (int i=0; i<structure.length-1; i++) {
      for (int j=structure[i]; j<structure[i+1]; j++) {
        //n[j]=new Neuron(j, struc[i]);
        layeroutput[i+1][j-structure[i]]=n[j].calculate(layeroutput[i]);
      }
    }
    calculationstructure=layeroutput;
    return layeroutput[structure.length-1][0];
  }

  void restart() {
    for (int i=0; i<n.length; i++) for (int j=0; j<n[i].weights.length; j++) n[i].weights[j]=random(-1, 1);
  }
  void backPropagation(float learning_rate, float inp[], float correct_answer) {
    float error=abs(correct_answer-calculate(inp))/2;
    for (int i=structure.length-2; i>1; i--) {
      for (int j=structure[i]; j<structure[i+1]; j++) {
        for (int l=0; l<n[j].weights.length; l++) {
          n[j].weights[l]-=learning_rate*error*calculationstructure[i-1][j-structure[i]];
          // clamp the weight to a finite range (lower bound first, then upper bound)
          n[j].weights[l]=constrain(n[j].weights[l], -1E30, 1E30);
        }
      }
    }
    for (int i=0; i<n.length; i++) n[i].saveNeuron();
  }
}
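
For comparison, the textbook gradient-descent update for a single linear neuron with squared error E = 0.5*(target - output)^2 is w[l] += learning_rate * (target - output) * input[l], so the sign of the error matters (my backPropagation() uses abs() and throws the sign away). A minimal single-neuron sketch of that rule, reusing my Neuron class but with a helper name (trainStep) that is not in my code:

void trainStep(Neuron neu, float[] inputs, float target, float learning_rate) {
  // delta rule for one linear neuron:
  // E = 0.5 * (target - output)^2  =>  dE/dw[l] = -(target - output) * inputs[l]
  float output = neu.calculate(inputs);
  float delta = target - output;  // signed error, not abs()
  for (int l = 0; l < neu.weights.length; l++) {
    neu.weights[l] += learning_rate * delta * inputs[l];
  }
}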

If I use the code


Network n;
void setup() {

  n=new Network(new int[]{2, 3, 3, 1});
  n.restart();
  frameRate(100);
  //n.supervisedLearningAuto(new float[][]{{1,1},{1,0},{0,1}},new float[]{0,1,1},0.001);
}
void draw() {
  println();
  n.backPropagation(0.2, new float[]{1, 0}, 1);
  n.backPropagation(0.2, new float[]{1, 1}, 0);
  println(n.calculate(new float[]{1,0}));
  println(n.calculate(new float[]{1,1}));
}

or any other input for learning, the output only cycles through my range, and if the learning rate is small enough it just sits at one point. How can I fix this?
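
To make the cycling easier to see, a small helper could just print the total squared error over the training patterns every frame (the name printError and the pattern arrays below are only for illustration, they are not part of my sketch):

float[][] patterns = { {1, 0}, {1, 1} };
float[]   targets  = { 1, 0 };

void printError(Network net) {
  float total = 0;
  for (int p = 0; p < patterns.length; p++) {
    // squared error of the network output against the target for this pattern
    float diff = targets[p] - net.calculate(patterns[p]);
    total += 0.5 * diff * diff;
  }
  println("total error: " + total);
}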

This is the source I used for how to make this!

Can you recheck your posted code?
When I try it here I get this error:

The function supervisedLearningCustom(float, float[], int) does not exist.

Also, can you link to where your

backPropagation

comes from?


I fixed the problem with the function.
But what do you mean by
“link to where your backPropagation comes from”?
