BackpropMomentum.java
public class BackpropMomentum extends Backpropagation implements LearnAlg {

    // Momentum factor applied to the error signal of the previous step.
    private double lambda = 0;

    public BackpropMomentum(double learnrateinp, Network network, int maximalCycle, String filename, double momentum) {
        super(learnrateinp, network, maximalCycle, filename);
        lambda = momentum;
    }

    // Default momentum of 0.3 when none is supplied.
    public BackpropMomentum(double learnrateinp, Network network, int maximalCycle, String filename) {
        super(learnrateinp, network, maximalCycle, filename);
        lambda = 0.3;
    }

    public void learn(int num) {
        success = false;
        cycle = 0;
        double[] TrainOut = net.GetTrainOut(num);
        while (!success) {
            ErrSum = 0;
            cycle++;
            net.SetInput(num);
            net.propagate();
            Output = net.GetOutput();
            double ErrSigDelayed = 0;
            // Output layer: compute the error signal, accumulate the squared error,
            // and update the bias with the momentum term (previous error signal).
            for (int i = 0; i < Output.length; i++) {
                ErrSigDelayed = ErrSig[net.GetLayerCount() - 1][i];
                Err[i] = TrainOut[i] - Output[i];
                ErrSig[net.GetLayerCount() - 1][i] = Output[i] * (1 - Output[i]) * Err[i];
                ErrSum += Math.pow(Err[i], 2);
                net.GetLayer(net.GetLayerCount() - 1).SetBias(i,
                        net.GetLayer(net.GetLayerCount() - 1).GetBias(i)
                        + learnrate * ErrSig[net.GetLayerCount() - 1][i]
                        + lambda * ErrSigDelayed);
            }
            // Hidden layers: back-propagate the error signals, then update weights and biases.
            for (int i = net.GetLayerCount() - 2; i >= 0; i--) {
                for (int k = 0; k < net.GetLayer(i).nNeurons(); k++) {
                    ErrWeightSum = 0;
                    weightval = 0;
                    for (int m = 0; m < net.GetLayer(i + 1).nNeurons(); m++) {
                        ErrSigDelayed = ErrSig[i + 1][m];
                        ErrWeightSum += net.GetLayer(i).GetWeight(k, m) * ErrSig[i + 1][m] + lambda * ErrSigDelayed;
                    }
                    ErrSig[i][k] = net.GetLayer(i).GetNeuron(k) * (1 - net.GetLayer(i).GetNeuron(k)) * ErrWeightSum;
                    for (int m = 0; m < net.GetLayer(i + 1).nNeurons(); m++) {
                        weightval = net.GetLayer(i).GetWeight(k, m) + learnrate * net.GetLayer(i).GetNeuron(k) * ErrSig[i + 1][m];
                        net.GetLayer(i).SetWeight(k, m, weightval);
                    }
                    net.GetLayer(i).SetBias(k,
                            net.GetLayer(i).GetBias(k) + learnrate * ErrSig[i][k] + lambda * ErrSigDelayed);
                }
            }
            // ErrSum = Math.pow(ErrSum, 0.5);
            // Stop once the cycle limit is reached or the result check succeeds.
            if (cycle == maxCycle) {
                success = true;
            }
            if (CheckResult()) {
                success = true;
            }
        }
    }
}
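
A minimal usage sketch (not part of the original file): the Network construction below is a placeholder, since that class's constructor is not shown here; only the BackpropMomentum constructor and learn(int) come from the class above, and the numeric values are illustrative.

public class TrainExample {
    public static void main(String[] args) {
        // Placeholder: build the network and load the training patterns
        // according to whatever API the Network class actually exposes.
        Network net = new Network(/* topology and training data */);

        // Learn rate 0.25, at most 10000 cycles, log file name, momentum 0.3.
        BackpropMomentum alg = new BackpropMomentum(0.25, net, 10000, "training.log", 0.3);

        // Train on pattern 0 until CheckResult() succeeds or maxCycle is reached.
        alg.learn(0);
    }
}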