Need some help debugging this Java

Date: 2018-12-11 07:44:58

Tags: java eclipse machine-learning neural-network

I've been working on an assignment for a machine learning course. I'm new to Java and have been using Eclipse. The logic and the learning algorithm are not what I'm asking for help with.

Specifically, I have a while loop in main() that should print a variable called totError. totError should be different on every pass through the loop (it is computed from parameters that change), but I can't find where my code goes wrong: it keeps printing the same value. Am I using static variables and methods incorrectly?

The .java file and the .txt file are pasted below (unfortunately the .txt is too large, so I've only included a small portion, but the array dimensions are correct). It's a fair amount of material, and I'd appreciate it if someone could point me in the right direction.

Thanks!

package nn;

import java.io.File;
import java.io.FileReader;
import java.io.BufferedReader;
//import java.io.PrintStream;
import java.io.IOException;
import java.lang.Math; 

public class learningBP {
	
	// Declare variables
	static int NUM_INPUTS = 10;		// Including 1 bias
	static int NUM_HIDDENS = 10;	// Including 1 bias
	static int NUM_OUTPUTS = 1;
	static double LEARNING_RATE = 0.1;
	static double MOMENTUM = 0.1;
	static double TOT_ERROR_THRESHOLD = 0.05;
	static double SIGMOID_UB = 1;
	static double SIGMOID_LB = -1;
	
	static double [][] wgtIH = new double[NUM_INPUTS][NUM_HIDDENS];
	static double [][] dltWgtIH = new double[NUM_INPUTS][NUM_HIDDENS];
	static double [][] wgtHO = new double[NUM_HIDDENS][NUM_OUTPUTS];
	static double [][] dltWgtHO = new double[NUM_HIDDENS][NUM_OUTPUTS];
	
	static int NUM_STATES_ACTIONS; 
	
	static String [][] strLUT = new String[4*4*4*3*4*4][2];
	static double [][] arrayLUT = new double[strLUT.length][2];
	static double [][] arrayNormLUT = new double[strLUT.length][2];
	static double [] arrayErrors = new double[strLUT.length];
	static double [] arrayOutputs = new double[strLUT.length];
	static double [] arrayNormOutputs = new double[strLUT.length];
	static double [][] valueInput = new double[strLUT.length][NUM_INPUTS];
	static double [][] valueHidden = new double[strLUT.length][NUM_HIDDENS];
	static double [] dltOutputs = new double[strLUT.length];
	static double [][] dltHiddens = new double[strLUT.length][NUM_HIDDENS];
	
	static double totError = 1;
	static int numEpochs = 0;

	public static void main(String[] args) {
		
		// Load LUT
		String fileName = "/Users/XXXXX/Desktop/LUT.txt";
		try {
			load(fileName);
		}
		catch (IOException e) {
			e.printStackTrace();
		}
		
		// Initialize NN Weights
		initializeWeights();
		
		while (totError > TOT_ERROR_THRESHOLD) {
			
			// Feed Forward
			fwdFeed();
			
			// Back Propagation
			bckPropagation();
			
			// Calculate Total Error
			totError = calcTotError(arrayErrors);
			numEpochs += 1;
		
			System.out.println("Number of Epochs: "+numEpochs);
			System.out.println(totError);
			
		}

	}


	public double outputFor(double[] X) {
		// TODO Auto-generated method stub
		return 0;
	}

	public double train(double[] X, double argValue) {
		// TODO Auto-generated method stub
		return 0;
	}

	public void save(File argFile) {
		// TODO Auto-generated method stub
		
	}

	public static void load(String argFileName) throws IOException {
		
		// Load LUT training set from Part2
        BufferedReader r = new BufferedReader(new FileReader(new File(argFileName)));
        String l = r.readLine();
        try {
	        int a = 0;
	        while (l != null) {
	            String spt[] = l.split("	");
	            strLUT[a][0] = spt[0]; 
	            strLUT[a][1] = spt[1];
	            arrayLUT[a][0] = Double.parseDouble(strLUT[a][0]);
	            arrayLUT[a][1] = Double.parseDouble(strLUT[a][1]);
	            a += 1;
	            l = r.readLine();
	        }
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
        	r.close();
        }
		
        // Normalize LUT to bipolar
        for (int b = 0; b < arrayLUT.length; b++) {
        	arrayNormLUT[b][0] = arrayLUT[b][0];
        	arrayNormLUT[b][1] = sigmoid(arrayLUT[b][1]);
        }
        
	}

	public static double sigmoid(double x) {
		
		// Bipolar sigmoid
		return (SIGMOID_UB - SIGMOID_LB) / (1 + Math.pow(Math.E, -x)) + SIGMOID_LB;
		
	}

	public static void initializeWeights() {
		
		// Initialize weights from input layer to hidden layer
		for (int i = 0; i < NUM_INPUTS; i++) {
			for (int j = 0; j < NUM_HIDDENS; j++) {
				wgtIH[i][j] = Math.random() - 0.5;
				dltWgtIH[i][j] = 0;
			}
		}
		
		// Initialize weights from hidden layer to output layer
		for (int j = 0; j < NUM_HIDDENS; j++) {
			for (int k = 0; k < NUM_OUTPUTS; k++) {
				wgtHO[j][k] = Math.random() - 0.5;
				dltWgtHO[j][k] = 0;
			}
		}
		
	}

	public void zeroWeights() {

		// TODO Auto-generated method stub
		
	}

	public static void fwdFeed() {
		
		for(int z = 0; z < arrayLUT.length; z++) { 
			
			// Normalize between [-1, 1]
			valueInput[z][0] = (Character.getNumericValue(strLUT[z][0].charAt(0)) - 2.5)/1.5; // myX
			valueInput[z][1] = (Character.getNumericValue(strLUT[z][0].charAt(1)) - 2.5)/1.5; // myY
			valueInput[z][2] = (Character.getNumericValue(strLUT[z][0].charAt(2)) - 2.5)/1.5; // myHead
			valueInput[z][3] = Character.getNumericValue(strLUT[z][0].charAt(3)) - 2; // enProx
			valueInput[z][4] = (Character.getNumericValue(strLUT[z][0].charAt(4)) - 2.5)/1.5; // enAngle
			
			// Vectorization of the four possible actions into binaries
			valueInput[z][5] = 0;
			valueInput[z][6] = 0;
			valueInput[z][7] = 0;
			valueInput[z][8] = 0;
		
			int action = Character.getNumericValue(strLUT[z][0].charAt(5)); // action
			valueInput[z][action-1] = 1;
			
			// Apply bias input
			valueInput[z][9] = 1;
			
			// Calculate value for hidden neuron j
			for(int j = 0; j < NUM_HIDDENS-1; j++) {
				valueHidden[z][j] = 0;
				for(int i = 0; i < NUM_INPUTS; i++) {
					valueHidden[z][j] += valueInput[z][i]*wgtIH[i][j];
				}
				valueHidden[z][j] = sigmoid(valueHidden[z][j]);
			}
			
			// Apply bias hidden neuron
			valueHidden[z][9] = 1;
			
			// Calculate value for output neuron
			arrayOutputs[z] = 0;
			for(int j = 0; j < NUM_HIDDENS; j++) {
				arrayOutputs[z] += valueHidden[z][j]*wgtHO[j][0];
			}
			
			arrayNormOutputs[z] = sigmoid(arrayOutputs[z]);		
			arrayErrors[z] = arrayNormOutputs[z] - arrayNormLUT[z][1];
		}
		
	}
	
	public static void bckPropagation() {
		
		for(int z = 0; z < arrayLUT.length; z++) { 
			
			// Delta rule for bipolar sigmoids
			dltOutputs[z] = arrayErrors[z] * (1/2) * (1 + arrayNormLUT[z][1]) * (1 - arrayNormLUT[z][1]);
			
			// Calculate update weights between hidden & output layers
			for(int j = 0; j < NUM_HIDDENS; j++) {
				
				dltWgtHO[j][0] = (LEARNING_RATE * dltOutputs[z] * valueHidden[z][j]) + (MOMENTUM * dltWgtHO[j][0]);
				wgtHO[j][0] += dltWgtHO[j][0];
				
			}	
			
			// Delta rule for bipolar sigmoids
			for(int j = 0; j < NUM_HIDDENS-1; j++) {
				
				dltHiddens[z][j] = (dltOutputs[z] * wgtHO[j][0]) * (1/2) * (1 + valueHidden[z][j]) * (1 - valueHidden[z][j]);
			
				// calculate update weights between input & hidden layers
				for(int i = 0; i < NUM_INPUTS; i++){
					
					dltWgtIH[i][j] = (LEARNING_RATE * dltHiddens[z][j] * valueInput[z][i]) + (MOMENTUM * dltWgtIH[i][j]);
					wgtIH[i][j] += dltWgtIH[i][j];
					
				}
				
			}
			
		}
	}
	
	public static double calcTotError(double [] Ar) {
		
		// Get total error
		double outputTotError = 0;
		for(int z = 0; z < Ar.length; z++) {
			
			outputTotError += Math.pow(Ar[z], 2);
			
		}
		return outputTotError /= 2;
		
	}
	
}

LUT.txt

111111  10.941118079589064
111112  -0.1
111113  0.5562004990848579
111114  1.98907128902595
111121  11.862151157526291
111122  0
111123  -0.38423559443128236
111124  0.2924429882372822
111131  0
111132  0
111133  0
111134  0.12275095886294243
111141  -0.0545618032237386
111142  1.111149754536815
111143  -0.6483940696098076
111144  -0.30397004441336395
111211  8.104946515845224
111212  3.4679914863334447
111213  3.662003985119952
111214  6.277685676457839
111221  12.552710546022281
111222  -0.09099267948190845
111223  -0.29566545023952967
111224  3.1487890500927063
111231  0
111232  0
111233  0
111234  8.934912143040652E-4
111241  3.895126725032672
111242  -0.2010016212971984
111243  0.837429543536912
111244  -0.27663053491694656
111311  11.653951513990371
111312  -0.2946973145089208
111313  -0.2978184448888472
111314  0.8279393778791164
111321  0
111322  0
111323  0
111324  2.2641633761201114
111331  0
111332  0
111333  0
111334  0
111341  0
111342  0
111343  1.1732725059583249
111344  -0.1
112111  5.5359038859179535
112112  0
112113  0
112114  0.0
112121  0.08659995226070327
112122  0.2798072139553114
112123  5.49078110134232
112124  -0.3108745952024568
112131  -0.05965237074923033
112132  0.09253924707369854
112133  -0.4
112134  3.161972099002618
112141  -0.5260766570034812
112142  -0.48090118837156254
112143  -0.7310822755532788
112144  3.486617439631581
112211  0
112212  0
112213  0
112214  0.6522588119326032
112221  0
112222  0
112223  0
112224  0.7460303984847749
112231  0.23736484529821295
112232  0.4052788544857546
112233  0
112234  2.951631100344372
112241  0.5653655679375406
112242  0.4971810465334616
112243  7.402004693866543
112244  -0.30000000000000004
112311  0
112312  0
112313  0
112314  0
112321  0
112322  0
112323  0
112324  0
112331  0
112332  0
112333  0.4151878259768604
112334  1.7724104736042405
112341  0
112342  0
112343  4.069896885464749
112344  -0.4
113111  0
113112  0.022566986598282823
113113  0.08724320758208144
113114  10.05432214824662
113121  1.0564414035161591
113122  -0.29029602924153364
113123  -0.5541038225131591
113124  8.672324872378988
113131  -0.3654234566003739
113132  -0.4
113133  0.5004192669349199
113134  2.078082532119674
113141  0
113142  0
113143  0
113144  1.2525107221533354
113211  0
113212  0.29495695502888564
113213  -0.07529481401595756
113214  -0.2404514421514272
113221  -0.30000000000000004
113222  0.7445615195514395
113223  -0.3658317755666047
113224  8.553656940756902
113231  -0.30000000000000004
113232  4.6010557496650915
113233  -0.3879385840465742
113234  -0.2
113241  0.4326819938548774
113242  0
113243  0
113244  1.1942595427121407
113311  0
113312  0
113313  0.0
113314  -0.30000000000000004
113321  -0.30000000000000004
113322  0
113323  0
113324  0.12628436039474933
113331  0
113332  0
113333  0
113334  1.1990757358685409
113341  0
113342  0
113343  0
113344  0
114111  -0.2620854057619084
114112  4.125854638322618
114113  -0.6357408602214762
114114  -0.3833440478188098
114121  4.151592198100268
114122  0.07881020285589568
114123  0.2470962266586317
114124  -0.614351130314123
114131  0
114132  0
114133  0.137166408235687
114134  -0.0736602283383406
114141  0
114142  1.79455706235276
114143  -0.10778180504389967
114144  -0.1095
114211  4.093099235361004
114212  0.43773368515345285
114213  -0.22722143170688813
114214  -0.47254408375084955
114221  0.9666070656021031
114222  5.3257648197212175
114223  0.8550257571983391
114224  1.7294133618581196
114231  0
114232  0
114233  0.21693098965929433
114234  -0.20056649258727272
114241  0
114242  0
114243  -0.00420789076454664
114244  -0.03980396617148699
114311  -0.14894661319071242
114312  2.8318004984996086
114313  0.09972003835421428
114314  -0.30000000000000004
114321  -0.22014771207852618
114322  3.6613263848490236
114323  -0.961642132911289
114324  -0.37587629822526014
114331  0
114332  0
114333  0
114334  0
114341  0
114342  0
114343  0
114344  0.01029174912920401
121111  8.749283150544025
121112  5.160303436301445
121113  5.492968882659686
121114  5.1300005456187545
121121  9.080296371003485
121122  5.48452094178394
121123  8.364785563964707
121124  8.988905334385453
121131  0
121132  0
121133  3.9657653202217764
121134  -0.1
121141  4.299714795485242
121142  -0.20100940661896582
121143  -0.14475899994010905
121144  0.7735726092109716
121211  8.925285927651668
121212  7.242378809714628
121213  5.825241551756816
121214  7.113455264749147
121221  10.957172410507585
121222  7.914499954045615
121223  8.43670507913828
121224  9.483271725903045
121231  0
121232  0
121233  0
121234  1.0618679323154605
121241  3.916743510589585
121242  -0.30816983215504323
121243  0.18644548962100688
121244  -0.05704324546134821
121311  9.89354840660501
121312  4.1887499584046495
121313  8.597262669988885
121314  4.6709783035857715
121321  0.8690772369609352
121322  0
121323  0.0770114005696081
121324  9.316545509588495
121331  0
121332  0
121333  0
121334  0
121341  0.0017093447236721923
121342  0.303857609787908
121343  -0.09889618686732593
121344  -0.1
122111  12.317344944762887
122112  4.056756806038644
122113  4.301889697884755
122114  1.6336292603910316
122121  12.805920072720827
122122  -0.1
122123  3.139275934028691
122124  2.8599851112573824
122131  0.4897618677858343
122132  0.6379457883752231
122133  0
122134  6.037946488270734
122141  5.226252351525389
122142  1.0456108876298758
122143  1.8110144287556706
122144  2.8484043039272486
122211  0
122212  5.9365660910572355
122213  -0.356430880812722
122214  0.3883930656736807
122221  0
122222  0
122223  0
122224  2.5601609549768254
122231  2.742030684928853
122232  0.7052947819136844
122233  0.9153964145328813
122234  -0.19891214689276654
122241  2.4104530244102085
122242  0.7192765109793244
122243  8.021153776446441
122244  3.0103404208692996
122311  0
122312  0
122313  1.0948198700427376
122314  -0.30000000000000004
122321  0
122322  0
122323  0
122324  0
122331  0.3533903217826514
122332  0
122333  0
122334  0.0
122341  0
122342  0
122343  6.341695880063564
122344  -0.30000000000000004
123111  8.052433181772965
123112  0.9792619096037194
123113  3.516672440488999
123114  5.103076270114251
123121  1.8546248328655348
123122  0.8584792487692742
123123  0.6183544390439952
123124  11.243464701495014
123131  -0.30000000000000004
123132  0.3787696031522099
123133  6.426117479337204
123134  -0.009165556952222887
123141  -0.255325121875
123142  0.27495315025119793
123143  2.0877902664802046
123144  15.052930702691679
123211  0.05705852349544345
123212  0
123213  0
123214  5.37875222439207
123221  0.3908474118126858
123222  3.758510773636247
123223  0.32129323045019553
123224  1.1666325723423803
123231  0.16608740430387875
123232  1.092582878355829
123233  6.1709971842734435
123234  0.4079783880523599
123241  0
123242  0
123243  2.4909735393799455
123244  7.426735638088524
123311  0
123312  0
123313  0
123314  0
123321  0
123322  0
123323  0.0
123324  -0.10940480881250002
123331  -0.2894026739838433
123332  -0.30000000000000004
123333  1.59153165697499
123334  -0.13050043419103746
123341  0
123342  0
123343  0
123344  0.03503747387899257
124111  0.01536784373538258
124112  2.0319110516204235
124113  -0.5351007238413845
124114  5.423726149526063
124121  5.279268970679494
124122  1.2887747592527807
124123  1.476338808270816
124124  0.8760634549082689
124131  8.749427293590882
124132  1.8028833109798756
124133  4.46751446615956
124134  0
124141  0
124142  0
124143  5.207895969023795
124144  -0.006693996387822744
124211  4.896919081267351
124212  1.6589666365699665
124213  1.2924051374223933
124214  2.834166036133037
124221  3.3219101326017206
124222  6.378877489077842
124223  3.0055837770041665
124224  4.333259440220513
124231  0
124232  10.029145559150423
124233  -0.15322286428578097
124234  1.389588741458979
124241  0
124242  0
124243  0.8581674783961017
124244  -0.014356665271187358
124311  9.52845663086399
124312  0.1830253181731127
124313  -0.38641783217239833
124314  -0.007934158255211453
124321  1.099897885667945
124322  0.3860742508792718
124323  -0.19223933164621543
124324  4.627561114895328
124331  0
124332  0
124333  0
124334  3.4671490083453262
124341  0
124342  0
124343  0
124344  0
131111  8.024045705739566
131112  7.395450885011249
131113  7.34118551111899
131114  7.463998590177262
131121  9.790468868675749
131122  8.470483880581282
131123  8.559334084219465
131124  8.262665145631342
131131  0.8393023561186844
131132  13.856552429607193
131133  -0.1
131134  -0.2
131141  7.146732355121131
131142  0.34995120030623894
131143  0.714203905202733
131144  -0.011585014038882235
131211  8.249500713722194
131212  8.767804646121553
131213  7.4337276444851845
131214  7.863781130447144
131221  11.376634822466341
131222  9.762504768266828
131223  9.045877477702753
131224  9.101866405278303
131231  0
131232  0
131233  0
131234  4.339000531841493
131241  -0.046717703296808556
131242  0.3795134364823679
131243  6.709654432425106
131244  0.30231549762710314
131311  6.295576700530438
131312  4.959411072704535
131313  7.929806083201738
131314  8.27006166340405
131321  9.137480438896842
131322  10.941755768675854
131323  2.0323003680608407
131324  9.367550766664452
131331  0
131332  0
131333  0
131334  1.0903289049329359
131341  0
131342  0
131343  -0.4
131344  -0.30000000000000004
132111  9.211870190073093
132112  9.009145337737923
132113  13.662402663548175
132114  6.08052345303469
132121  14.090706571185123
132122  1.4442538633817947
132123  5.682703466108649
132124  4.8938515263733855
132131  1.4877735210912775
132132  9.30522865122774
132133  0.0318343841656952
132134  1.8480189381690852
132141  7.189335770973256
132142  1.8391897007293547
132143  1.6185574252944825
132144  0.8032365755284001
132211  2.9593175512497805
132212  4.20604074878555
132213  10.956774013115046
132214  4.6765698627147
132221  0.6988667426437374
132222  0
132223  0
132224  13.456522948558092
132231  0.015779467282452043
132232  0.9558694444048597
132233  0.37106484047615385
132234  6.914228700084009
132241  4.971992528222719
132242  3.1312714167352507
132243  8.624854885007707
132244  1.9690299149897512
132311  0.9322106143642612
132312  0
132313  6.048165247943566
132314  -0.1
132321  0
132322  0
132323  0
132324  0
132331  0
132332  0
132333  1.7812757310121436
132334  -0.2
132341  6.9800782631704825
132342  0
132343  1.1405076368653055
132344  0.0
133111  2.7446434363740746
133112  0.20417876292279885
133113  12.352028198701907
133114  2.7584880349494005
133121  3.6546886422448086
133122  1.4213014104348514
133123  2.0020456737506995
133124  8.856345983327792
133131  0.6794179510815772
133132  0.219079416550598
133133  0
133134  7.754243885196628
133141  4.633501397780967
133142  3.3022778075041295
133143  10.126314309212686
133144  10.670462523862575
133211  0
133212  0
133213  0
133214  5.401263664154471E-4
133221  0
133222  5.917541600023379
133223  -0.09523094160963619
133224  -0.0633133852715455
133231  5.741055254987753
133232  1.992582411513877
133233  7.324611559468452
133234  -0.1
133241  8.135968999779156
133242  3.34903378355902
133243  2.9436918343623195
133244  0.9657874603480346
133311  0
133312  0
133313  0
133314  0
133321  0
133322  0
133323  0.0
133324  -0.08008210792982286
133331  4.23485697965928
133332  0.3634856886870832
133333  0.4337244606279447
133334  0.37220233439540457
133341  0.0
133342  2.0698651827806454
133343  0
133344  0.20782837601618498
134111  4.862755320979474
134112  1.3392607897203783
134113  1.003310218292519
134114  2.727835278711853
134121  3.1798247364903314
134122  3.710423080785619
134123  3.3932862839128184
134124  5.5698948817863805
134131  2.89261297607867
134132  10.185839908552962
134133  0
134134  2.201704835148149
134141  0
134142  0.4849521717263965
134143  10.351931187155113
134144  -0.004942530989460344
134211  4.88773911022146
134212  6.444319172269958
134213  2.7802208073327925
134214  3.0237956197083213
134221  5.850299167989212
134222  7.036812529384287
134223  4.942573696152268
134224  5.761153646214285
134231  2.6544942622894556
134232  11.605858048542924
134233  5.6064170248746965
134234  8.616208477293307
134241  0.5537808537973408
134242  0
134243  0
134244  0
134311  0
134312  5.276610692651878
134313  0
134314  5.274909137322581
134321  1.6600651250107477
134322  0.5785558338058275
134323  1.1548813326330682
134324  5.408164629390569
134331  0.48976275692068205
134332  0.71518263753347
134333  0
134334  8.081570353851838
134341  0
134342  0
134343  0
134344  0
141111  5.948277601471267
141112  5.580433897660083
141113  4.335962359955721
141114  7.378389360258941
141121  6.8378840369220715
141122  11.252245511122066
141123  6.3394642008195845
141124  6.627698306655345
141131  5.552899354859627
141132  0.3829412757396376
141133  -0.2440080423747796
141134  1.055781986224518
141141  0
141142  0
141143  0
141144  6.353183616160064
141211  5.2327892874047235
141212  7.854843130852267
141213  5.884106560048812
141214  6.800473428331975
141221  9.071145157127262
141222  11.574766197233663
141223  6.290481676662974
141224  9.299059401358825
141231  0
141232  9.80189282485124
141233  -0.007605341238756391
141234  -0.4
141241  0
141242  0
141243  0.9237661773381399
141244  0
141311  1.4586780837889592
141312  7.140776456094764
141313  1.9850070229529555
141314  1.2037337389541318
141321  1.4126807572611515
141322  9.727712475035863
141323  -0.5715699148990985
141324  0.9990128149500621
141331  0
141332  0
141333  0
141334  2.0928080889476925
141341  0
141342  0
141343  0
141344  0
142111  5.176112734824237
142112  9.310898135770518
142113  6.272944130850734
142114  4.85652932212051
142121  18.9861856358027
142122  0.48535379580971055
142123  1.3410418088405798
142124  0.7012724204789096
142131  0
142132  0.46602663211087675
142133  0
142134  7.763132942907696
142141  7.1451637374517745
142142  1.1813916287940769
142143  0.5080411275073015
142144  0.2099857144047121
142211  14.38398167054746
142212  5.865852999474187
142213  2.7981186847043444
142214  4.326028222423406
142221  2.9259450322368754
142222  1.3955420673773145
142223  1.2479303376142
142224  12.145353003593861
142231  0
142232  0
142233  4.11860558585335
142234  -0.281730499166373
142241  7.2980459091909
142242  2.2546632889784823
142243  2.6928001665382633
142244  1.747656855892581
142311  0.33344431480349357
142312  -0.30000000000000004
142313  3.997272987649369
142314  -0.34434248274115264
142321  0
142322  0
142323  0
142324  0
142331  0
142332  0
142333  0
142334  0
142341  5.080853787127418
142342  -0.0522396963685936
142343  -0.16966278003909963
142344  0.11581097004622792
143111  0.8060995308878643
143112  3.4807096194086338
143113  0.15996138970136284
143114  -0.3700000000000001
143121  5.252611065308498
143122  0.33520650345235325
143123  1.810255547380127
143124  -0.02128572790438872
143131  5.523849271889849
143132  1.9779339774326394
143133  2.3413393734869095
143134  1.9244744869609307
143141  1.5354633370895308
143142  1.431729411137162
143143  12.520456886246334
143144  1.3704881386676198
143211  0
143212  0
143213  0
143214  0.2717994588008334
143221  0
143222  0
143223  0
143224  0.5013810976691088
143231  3.705095033518279
143232  2.9690709686931953
143233  3.8534369259006267
143234  6.493995243377973
143241  8.755735366770711
143242  2.5287558198755558
143243  1.9005228674898758
143244  1.8360092622628281
143311  0
143312  0
143313  0
143314  0
143321  0
143322  0
143323  0
143324  0
143331  0.03535922176756343
143332  1.042466465501525
143333  5.296100495848561
143334  0.0
143341  -0.36460422697606937
143342  -0.5544459314705894
143343  1.6161031040853104
143344  -0.56715
144111  4.9863752124461715
144112  0.23677132165254763
144113  1.3645437953108817
144114  0.2345035589216279
144121  3.49646595337643
144122  3.3387132946304985
144123  3.3151415498661585
144124  5.980891028602716
144131  3.745019443172936
144132  3.0175640432822015
144133  1.3877368403254533
144134  10.132639568845056
144141  4.24513216193431
144142  0
144143  0
144144  5.307882826693992
144211  0
144212  3.3917096260892348
144213  1.4411098495664865
144214  5.613736025061051
144221  5.09037993149875
144222  5.345726724782964
144223  4.719656046182518
144224  7.314854164438077
144231  2.4673898391086078
144232  2.542342553449884
144233  2.9719213080146414
144234  9.784906071698774
144241  0
144242  0
144243  0
144244  5.029759437397144
144311  0
144312  0
144313  0
144314  1.7790871807684945
144321  0.19771733139514697
144322  7.544140628315534
144323  0.22228235002623958
144324  -0.08751707198429494
144331  -0.424804318976436
144332  -0.21848713700026579
144333  -0.1
144334  3.489670146770858
144341  0
144342  0
144343  0
144344  0
211111  6.172071608975783
211112  0.635976130543352
211113  0.3416758120645408
211114  0.693740736429633
211121  0
211122  5.3303604871812125
211123  0.35271945800462623
211124  -0.30103705678860593
211131  0.07396936449046458
211132  6.15340818360405
211133  0
211134  0.010273907221470832
211141  3.0803140326598055
211142  0.2275299634398884
211143  0.4618967137892429
211144  0.23198798655821407
211211  8.291816299515194
211212  0.5368887805221179
211213  2.155664711179823
211214  2.489889613952935
211221  8.64516786017038

1 Answer:

Answer 0 (score: 0):

I found the following technical and algorithmic/mathematical flaws:

Technical issue:

Replace (1/2) with 0.5, because (1/2) evaluates to 0: in Java, the dividend or the divisor (or both) must be a double for the result to be a double; otherwise the division is integer division and the result is an int. There are two occurrences in bckPropagation().
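For illustration, a minimal self-contained check (the class name is just an example, not part of the original code) showing why (1/2) silently becomes 0:

public class DivisionCheck {
    public static void main(String[] args) {
        System.out.println(1 / 2);    // 0   -- both operands are ints, so integer division
        System.out.println(1 / 2.0);  // 0.5 -- one operand is a double, so floating-point division
        System.out.println(0.5);      // 0.5 -- the literal to use instead of (1/2)
    }
}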

Mathematical issue 1:

Considering the delta rule (e.g. http://users.pja.edu.pl/~msyd/wyk-nai/multiLayerNN-en.pdf) and the delta rule with momentum (e.g. http://ecee.colorado.edu/~ecen4831/lectures/deltasum.html), there seems to be a sign error in dltOutputs[z]. In bckPropagation(), replace

dltOutputs[z] = arrayErrors[z] * (1/2) * (1 + arrayNormLUT[z][1]) * (1 - arrayNormLUT[z][1]);

with

dltOutputs[z] = -arrayErrors[z] * 0.5 * (1 + arrayNormLUT[z][1]) * (1 - arrayNormLUT[z][1]);
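The second (1/2) occurrence in bckPropagation() is the hidden-layer delta, which would likewise use 0.5. A sketch of that line with only the integer-division fix applied (the sign correction above already propagates through dltOutputs[z]):

dltHiddens[z][j] = (dltOutputs[z] * wgtHO[j][0]) * 0.5 * (1 + valueHidden[z][j]) * (1 - valueHidden[z][j]);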

Mathematical issue 2 (I'm not sure, but I suspect this is a mistake): The weights for a test case z should presumably depend only on the data of test case z, from the current epoch and all previous epochs (due to the while loop). Currently, in bckPropagation(), the weights for test case z also carry the weight contributions of all previous test cases z' of the current epoch and of all previous epochs (again due to the while loop). Introducing z as a third dimension, i.e. wgtIH[z][i][j] and wgtHO[z][j][0], isolates the weight contributions of the test cases from one another. For this, the following modifications are needed:

1) Definitions:

static double [][][] wgtIH = new double[strLUT.length][NUM_INPUTS][NUM_HIDDENS];
static double [][][] wgtHO = new double[strLUT.length][NUM_HIDDENS][NUM_OUTPUTS];

2) Initialization:

public static void initializeWeights() {
    for(int z = 0; z < arrayLUT.length; z++) { 
        // Initialize weights from input layer to hidden layer
        double rndWgtIH = Math.random() - 0.5;
        for (int i = 0; i < NUM_INPUTS; i++) {
            for (int j = 0; j < NUM_HIDDENS; j++) {
                wgtIH[z][i][j] = rndWgtIH;    
                dltWgtIH[i][j] = 0;
            }
        }
        // Initialize weights from hidden layer to output layer
        double rndWgtHO = Math.random() - 0.5;
        for (int j = 0; j < NUM_HIDDENS; j++) {
            for (int k = 0; k < NUM_OUTPUTS; k++) {
                wgtHO[z][j][k] = rndWgtHO;
                dltWgtHO[j][k] = 0;
            }
        }
    }
}

3) fwdFeed() and bckPropagation() methods:

In both methods, wgtIH[i][j] and wgtHO[j][k] must be replaced by wgtIH[z][i][j] and wgtHO[z][j][k], respectively, as sketched below for fwdFeed().
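A rough sketch of the affected loops in fwdFeed() under the three-dimensional weight arrays (only the index change is shown; everything else in the method stays as in the question, and z is the loop variable over the test cases):

// Hidden-layer values for test case z
for (int j = 0; j < NUM_HIDDENS - 1; j++) {
    valueHidden[z][j] = 0;
    for (int i = 0; i < NUM_INPUTS; i++) {
        valueHidden[z][j] += valueInput[z][i] * wgtIH[z][i][j];   // was wgtIH[i][j]
    }
    valueHidden[z][j] = sigmoid(valueHidden[z][j]);
}

// Output value for test case z
arrayOutputs[z] = 0;
for (int j = 0; j < NUM_HIDDENS; j++) {
    arrayOutputs[z] += valueHidden[z][j] * wgtHO[z][j][0];        // was wgtHO[j][0]
}

The same substitution applies to the weight updates in bckPropagation().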

Example: development of the total error as a function of the number of epochs, using

 LEARNING_RATE = 0.4, MOMENTUM = 0.4, TOT_ERROR_THRESHOLD = 1;

 Number of Epochs: 1
 178.54336668545102
 Number of Epochs: 10000
 15.159692746944888
 Number of Epochs: 20000
 10.653887138186896
 Number of Epochs: 30000
 8.669183516487523
 Number of Epochs: 40000
 7.504963842773336
 Number of Epochs: 50000
 6.723327476195474
 Number of Epochs: 60000
 6.153237046947662
 Number of Epochs: 70000
 5.7133602902880325
 Number of Epochs: 80000
 5.360053126719502
 Number of Epochs: 90000
 5.06774284345891
 Number of Epochs: 100000
 4.820373442353342
 Number of Epochs: 200000
 3.4647965464740746
 Number of Epochs: 300000
 2.8350276017589153
 Number of Epochs: 400000
 2.4398876881673557
 Number of Epochs: 500000
 2.158533606426507
 Number of Epochs: 600000
 1.9432229058177424
 Number of Epochs: 700000
 1.770444540122524
 Number of Epochs: 800000
 1.627115257304848
 Number of Epochs: 900000
 1.5053344819279666
 Number of Epochs: 1000000
 1.4000233082047084
 Number of Epochs: 1100000
 1.3077427523972092
 Number of Epochs: 1200000
 1.2260577251537967
 Number of Epochs: 1300000
 1.153175740062673
 Number of Epochs: 1400000
 1.0877325511159377
 Number of Epochs: 1500000
 1.0286600703077815
 Duration: 822.8203290160001s -> approx. 14min

As expected, the total error decreases from epoch to epoch as the neural network makes learning progress.