⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 neuralnet.java

📁 All the tools to build a network able to recognize any shapes. Very complete, a good base for anything.
💻 JAVA
📖 第 1 页 / 共 2 页
字号:
		FeedForward(inputs);
		double[] tempoutputs = new double[layers[nolayers-1].neurons.length];
		for(int i = 0; i < layers[nolayers-1].neurons.length; i++) {
			tempoutputs[i] = layers[nolayers-1].neurons[i].output;
		}
		return tempoutputs;
	}
	
	// calculates a std error for this net using given cross validation patterns 
	// Computes a normalized error score for this net over the cross-validation
	// patterns: the mean absolute error of each output unit, divided by that
	// unit's recorded deviation, averaged over all output units.
	public double CrossValErrorRatio (PatternSet patternset) {
		int nout = layers[nolayers-1].neurons.length;
		double[] meanabserr = new double[nout]; // Java zero-initializes arrays
		// accumulate per-unit absolute errors over every cross-validation pattern
		for (Pattern p : patternset.crossvalpatterns) {
			double[] netout = Output(p.input);
			for (int u = 0; u < nout; u++) {
				meanabserr[u] += Math.abs(netout[u] - p.target[u]);
			}
		}
		// average per unit, normalize by the unit's deviation, then average units
		double ratio = 0;
		for (int u = 0; u < nout; u++) {
			meanabserr[u] /= patternset.crossvalpatterns.length;
			ratio += meanabserr[u] / patternset.crossvaldeviations[u];
		}
		return ratio / nout;
	}

	// calculates a std error for this net using given test patterns
	// Computes a normalized error score for this net over the test patterns:
	// the mean absolute error of each output unit, divided by that unit's
	// recorded deviation, averaged over all output units.
	public double TestErrorRatio (PatternSet patternset) {
		int nout = layers[nolayers-1].neurons.length;
		double[] meanabserr = new double[nout]; // Java zero-initializes arrays
		// accumulate per-unit absolute errors over every test pattern
		for (Pattern p : patternset.testpatterns) {
			double[] netout = Output(p.input);
			for (int u = 0; u < nout; u++) {
				meanabserr[u] += Math.abs(netout[u] - p.target[u]);
			}
		}
		// average per unit, normalize by the unit's deviation, then average units
		double ratio = 0;
		for (int u = 0; u < nout; u++) {
			meanabserr[u] /= patternset.testpatterns.length;
			ratio += meanabserr[u] / patternset.testdeviations[u];
		}
		return ratio / nout;
	}

// Training methods ------------------------------------------------------

	// takes all patterns one by one (with random order) and trains the net
	// using each one.
	public void IncrementalTrainPatterns(Pattern[] patterns, double rate) {
		int patternsnottrained = patterns.length; // no of patterns used
		int patterntotrain;
		int indexofpatterntotrain = -1;
		int counter;
		// turn all "selected" flags off
		for (int i = 0; i < patterns.length; i++) {
			patterns[i].selected = false;
		}
		for (int i = 0; i < patterns.length; i++) {
			patterntotrain = randomizer.random.nextInt(patternsnottrained);
			// find the index of the pattern to train
			counter = 0;
			for (int j = 0; j < patterns.length; j++) {
				if (!patterns[j].selected) {
					if (counter != patterntotrain) {
						counter++;
					}
					else if (counter == patterntotrain) {
						indexofpatterntotrain = j;
						break;
					}
				}
			}
			// train the net using the selected pattern
			IncrementalTrain(rate, patterns[indexofpatterntotrain]);
			patterns[indexofpatterntotrain].selected = true;
			patternsnottrained--;
		}
		
		// turn all "selected" flags off again
		for (int i = 0; i < patterns.length; i++) {
			patterns[i].selected = false;
		}
	}

	// trains the net incrementally.
	// Trains the net incrementally on one pattern: feeds the input forward,
	// then trains the output layer (which needs the targets) and finally the
	// hidden layers from last to first. Layer 0 is never trained here.
	public void IncrementalTrain(double rate, Pattern pattern) {
		FeedForward(pattern.input);
		// output layer first
		Neuron[] outputlayer = layers[nolayers-1].neurons;
		for (int n = 0; n < outputlayer.length; n++) {
			outputlayer[n].OutputIncrementalTrain(rate, pattern.target[n]);
		}
		// hidden layers, walking backwards; layer 0 is skipped
		for (int l = nolayers-2; l >= 1; l--) {
			Neuron[] hiddenlayer = layers[l].neurons;
			for (int n = 0; n < hiddenlayer.length; n++) {
				hiddenlayer[n].HiddenIncrementalTrain(rate);
			}
		}
	}
	
	// selects patterns (quantity: nopatterns) randomly and trains the net using those patterns.
	// repeats this until all patterns in the pattern array have been used for training
	// Trains the net in mini-batches: repeatedly draws up to `nopatterns`
	// patterns at random without replacement and batch-trains on each
	// mini-batch, until every pattern in the array has been used once.
	public void MinibatchTrainPatterns(Pattern[] patterns, double rate, int nopatterns) {
		int remaining = patterns.length; // patterns not yet drawn
		// clamp the batch size into [1, patterns.length]
		if (nopatterns > patterns.length) { nopatterns = patterns.length; }
		if (nopatterns < 1) { nopatterns = 1; }
		int[] batchindexes = new int[nopatterns];

		// make sure no pattern starts out marked as drawn
		for (Pattern p : patterns) { p.selected = false; }

		while (remaining > 0) {
			// draw up to `nopatterns` distinct patterns for this mini-batch
			int drawn = 0;
			while (drawn < nopatterns && remaining > 0) {
				int draw = randomizer.random.nextInt(remaining);
				remaining--;
				// locate the draw-th (0-based) entry among the not-yet-drawn patterns
				int index = -1;
				int seen = 0;
				for (int i = 0; i < patterns.length; i++) {
					if (patterns[i].selected) { continue; }
					if (seen == draw) { index = i; break; }
					seen++;
				}
				batchindexes[drawn++] = index;
				patterns[index].selected = true;
			}
			// copy the drawn patterns into a batch array and train on it
			Pattern[] batch = new Pattern[drawn];
			for (int i = 0; i < drawn; i++) {
				batch[i] = patterns[batchindexes[i]];
			}
			BatchTrainPatterns(batch, rate);
		}
		// leave all "selected" flags cleared for the next caller
		for (Pattern p : patterns) { p.selected = false; }
	}

	// trains the net using batch training
	// takes a number of patterns
	// Batch-trains the net on an array of patterns: accumulates the deltas
	// for every pattern, then applies them in one weight update averaged
	// over the batch size.
	public void BatchTrainPatterns(Pattern[] patterns, double rate) {
		// accumulation pass over the whole batch
		for (Pattern p : patterns) {
			BatchTrain(rate, p);
		}
		// single update pass using the accumulated values
		for (Neuron n : neurons) {
			n.BatchUpdateWeights(patterns.length);
		}
	}

	// trains the net using batch training
	// takes only one pattern
	// Accumulates batch-training deltas for one pattern: feeds the input
	// forward, then visits the output layer (which needs the targets) and the
	// hidden layers from last to first. No weights are updated here; that
	// happens later in BatchUpdateWeights. Layer 0 is never trained here.
	public void BatchTrain(double rate, Pattern pattern) {
		FeedForward(pattern.input);
		// output layer first
		Neuron[] outputlayer = layers[nolayers-1].neurons;
		for (int n = 0; n < outputlayer.length; n++) {
			outputlayer[n].OutputBatchTrain(rate, pattern.target[n]);
		}
		// hidden layers, walking backwards; layer 0 is skipped
		for (int l = nolayers-2; l >= 1; l--) {
			Neuron[] hiddenlayer = layers[l].neurons;
			for (int n = 0; n < hiddenlayer.length; n++) {
				hiddenlayer[n].HiddenBatchTrain(rate);
			}
		}
	}
	
// -----------------------------------------------------------------------

	// represents an array of neurons belonging to the same layer.
	class Layer {
		Neuron[] neurons;
		// constructs a layer object
		public Layer(int layerno) {
			int counter = 0;
			// see how many neurons there are in this layer
			for (int i = 0; i < NeuralNet.this.neurons.length; i++) {
				if (NeuralNet.this.neurons[i].layer == layerno) {counter++;}
			}
			// create an array of neurons
			this.neurons = new Neuron[counter];
			// place neurons
			counter = 0; 
			for (int i = 0; i < NeuralNet.this.neurons.length; i++) {
				if (NeuralNet.this.neurons[i].layer == layerno) {
					this.neurons[counter++] = NeuralNet.this.neurons[i];
				}
			}
		}
	}

}

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -