⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 neuron.java

📁 Single-layer neural networks can be trained using various learning algorithms. The best-known algorithm is the perceptron learning rule.
💻 JAVA
字号:
import java.util.*;class Neuron{    public static double momentum = 0.0;    public static double learningRate = 0.05;    double output; // range from 0.0 to 1.0    double sum;    double delta;    Vector inlinks;    Vector outlinks;    String label;    public Neuron(String l)    {		output   = 0.0;		delta    = 0.0;		sum      = 0.0;		inlinks  = new Vector();		outlinks = new Vector();		label    = new String(l);    }	    public double getOutput()    {		return output;  // Sigmoid(sum)    }	    public double getThresholdedOutput()    {		//if (Perceptron.OPTIMAL == Perceptron.algorithm) {		//	if (output >= 0) return 1.0;		//	else return 0.0;		//}		//else {			if (output >= 0.5) return 1.0;			else return 0.0;			//}    }	    public double getLinearOutput()    {		return sum;	    // weighted sum    }	    public double getDelta()    {		return delta;    }	    public void computeOutput(Sample sample_my)    {		Enumeration e = inlinks.elements();		Synapse s;		sum = 0.0;		if (Perceptron.SVM == Perceptron.algorithm) {			Enumeration samples = InputSpaceCanvas.points.elements();			while (samples.hasMoreElements()) {				Sample sample_j = (Sample) samples.nextElement();				sum += sample_j.alpha * sample_j.o_out * Layer.K(sample_my.in, sample_j.in);			}			Enumeration tns = Perceptron.threshold_neuron.outlinks.elements();			Synapse tnsyn = (Synapse)tns.nextElement();			sum -= tnsyn.weight;			output = sum;		}		else {			while(e.hasMoreElements())		    {				s = (Synapse)e.nextElement();				double from = s.from.getOutput();				double add = from * s.getWeight(); 				//System.out.println("add:" + add + " from:" + from);				sum += add;		    }		}		//System.out.println();		//This is the pocket algorithm		if (Perceptron.OPTIMAL == Perceptron.algorithm) {			output = sum;			//System.out.println("output:" + output);		}		else if(Perceptron.SVM == Perceptron.algorithm) {			output = sum;		}		else if (Perceptron.algorithm == Perceptron.POCKET) {		    // We use the threshold fonction		    if (sum >= 0.5) {			
	output = 1.0;		    }		    else {				output = 0.0;		    }		}		else {			output = 1.0/(1.0 + Math.exp(-sum)); // sigmoid function		}    }	    public void computeBackpropDelta(double d) // for an output neuron    {		delta = (d - output) * output * (1.0 - output);    }	    public void computeBackpropDelta() // for a hidden neuron    {		double errorSum = 0.0;		Synapse synapse;		Enumeration e = outlinks.elements();		while(e.hasMoreElements())	    {			synapse = (Synapse)e.nextElement();			errorSum += synapse.to.delta * synapse.getWeight();	    }		delta = output * (1.0 - output) * errorSum;    }	    public void computePerceptronDelta(double d)    {		delta = (d - output);  // delta in [0,1]    }	    public void computeOptimalDelta(double d)    {		delta = (d - output);  // delta in [0,1]    }	    public void computeAdalineDelta(double d) // for an output neuron    {		delta =  d - 0.5 * sum - 0.5; //2.0 *d - 1.0 - sum; //    }	    public void computeWeight()    {		Synapse synapse;		Enumeration e = inlinks.elements();		while(e.hasMoreElements())		{			synapse = (Synapse)e.nextElement();			synapse.data = learningRate*delta*synapse.from.getOutput()			    + momentum*synapse.data;			synapse.weight += synapse.data;		}    }	    public void print()    {		System.out.print(label+"="+output+": ");		Synapse synapse;		Enumeration e = outlinks.elements();		while(e.hasMoreElements())		{			synapse = (Synapse)e.nextElement();			System.out.print(synapse.to.label+"("+synapse.weight+") ");		}		System.out.println("");    }}

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -