📄 neuralnetwork.java

📁 JavaANPR is an automatic number plate recognition program
💻 JAVA
📖 Page 1 / 3
            }
            return Math.sqrt(totalX);
        }
        
        // Euclidean length of a vector: square root of the sum of squared components
        private double vectorAbs(Vector<Double> vector) {
            double totalX = 0;
            for (Double x : vector) totalX += Math.pow(x,2);
            return Math.sqrt(totalX);
        }
        
    }
    
    private double random() {
        return randomGenerator.nextDouble();
    }
    
    private void computeGradient(Gradients gradients, Vector<Double> inputs, Vector<Double> requiredOutputs) {
       //Gradients gradients = new Gradients(this);
       activities(inputs);
        for (int il=this.numberOfLayers()-1; il>=1; il--) { // backpropagation through all layers except the lowest (input) one
            NeuralLayer currentLayer = this.getLayer(il);
           
            if (currentLayer.isLayerTop()) { // if this is the topmost (output) layer
                // add the threshold gradients of this layer into the corresponding vector; the gradient is computed per neuron:
                //gradients.thresholds.add(il, new Vector<Double>()); 
                for (int in=0; in<currentLayer.numberOfNeurons(); in++) { // for each neuron in the layer
                    Neuron currentNeuron = currentLayer.getNeuron(in);
                    gradients.setThreshold(il, in, 
                            currentNeuron.output * (1 - currentNeuron.output) * (currentNeuron.output - requiredOutputs.elementAt(in))
                            );
                } // end for each neuron

                for (int in=0; in<currentLayer.numberOfNeurons(); in++) { // for each neuron
                    Neuron currentNeuron = currentLayer.getNeuron(in);
                    for (int ii=0; ii<currentNeuron.numberOfInputs(); ii++) { // for each neuron's input
                        NeuralInput currentInput = currentNeuron.getInput(ii);
                        gradients.setWeight(il,in,ii,
                                gradients.getThreshold(il,in) * currentLayer.lowerLayer().getNeuron(ii).output    
                                );
                    } // end for each input
                } // end for each neuron
                
            } else { // lower (hidden) layers (the lowest layer is skipped; we only go down to layer 1)
                // compute the threshold gradients:
                //gradients.thresholds.add(il, new Vector<Double>()); 
                for (int in=0; in<currentLayer.numberOfNeurons();in++) { // for each neuron
                    double aux = 0;
                    // iterate over all axons of the neuron (i.e. the synapses of the neurons in the layer above)
                    for (int ia=0; ia<currentLayer.upperLayer().numberOfNeurons(); ia++) { 
                        aux += gradients.getThreshold(il+1,ia) * 
                               currentLayer.upperLayer().getNeuron(ia).getInput(in).weight;
                    }
                    gradients.setThreshold(il,in,
                            currentLayer.getNeuron(in).output * (1 - currentLayer.getNeuron(in).output) * aux
                            );
                } //end for each neuron
                
                // compute the weight gradients:
                for (int in=0; in<currentLayer.numberOfNeurons(); in++) { // for each neuron
                    Neuron currentNeuron = currentLayer.getNeuron(in);
                    for (int ii=0; ii<currentNeuron.numberOfInputs(); ii++) { // for each neuron's input
                        NeuralInput currentInput = currentNeuron.getInput(ii);
                        gradients.setWeight(il, in, ii,
                                gradients.getThreshold(il,in) * currentLayer.lowerLayer().getNeuron(ii).output
                        );    
                    } // end for each input
                } // end for each neuron
               
            } // end layer IF
            
        } // end backpropagation for each layer
        //return gradients;
    }
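    /*
     * Summary of the gradient formulas implemented above (standard
     * backpropagation for a sigmoid network). Writing o_i for a neuron's
     * output, t_i for the required output and d_i for the value stored via
     * setThreshold():
     *
     *   top (output) layer:  d_i = o_i * (1 - o_i) * (o_i - t_i)
     *   hidden layers:       d_i = o_i * (1 - o_i) * sum_j( d_j * w_ji ),
     *                        where j runs over the neurons of the layer above
     *   weight gradients:    dE/dw_ij = d_i * o_j,
     *                        where o_j is the output of neuron j in the layer below
     *
     * The o * (1 - o) factor is the derivative of the sigmoid gainFunction()
     * defined further down in this file.
     */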
   
    private void computeTotalGradient(Gradients totalGradients, Gradients partialGradients, SetOfIOPairs trainingSet) {
        // at the beginning, the total gradients are reset
        totalGradients.resetGradients();
        //partialGradients.resetGradients();
        //Gradients totalGradients = new Gradients(this);
        //Gradients partialGradients = new Gradients(this); /***/
        
        for (SetOfIOPairs.IOPair pair : trainingSet.pairs) { // for each pair in the training set
            //partialGradients = computeGradient(pair.inputs, pair.outputs);
            computeGradient (partialGradients, pair.inputs, pair.outputs);
            for (int il = this.numberOfLayers()-1; il >= 1; il--) { // for all layers except the lowest one
                NeuralLayer currentLayer = this.getLayer(il);
                for (int in=0; in<currentLayer.numberOfNeurons(); in++) { // for all neurons of currentLayer
                    // update the threshold gradient:
                    totalGradients.incrementThreshold(il,in,partialGradients.getThreshold(il,in));
                    for (int ii=0; ii<currentLayer.lowerLayer().numberOfNeurons(); ii++) { // for all inputs
                        totalGradients.incrementWeight(il,in,ii,partialGradients.getWeight(il,in,ii));
                    }
                }
            
            } // end for layer
        } // end foreach
        //return totalGradients;
    } // end method
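    /*
     * The total gradient is simply the sum of the per-pair gradients over the
     * whole training set, i.e. full-batch gradient descent:
     *
     *   dE_total/dw = sum over all IO pairs p of dE_p/dw
     *
     * and likewise for the threshold gradients.
     */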

    private void adaptation(SetOfIOPairs trainingSet, int maxK, double eps, double lambda, double micro) {
//         trainingSet  : training set
//         maxK         : maximum number of iterations
//         eps          : required accuracy of the normalized gradient length
//         lambda       : learning rate (0.1)
//         micro        : momentum term
        double delta;
        Gradients deltaGradients = new Gradients(this);
        Gradients totalGradients = new Gradients(this);
        Gradients partialGradients = new Gradients(this);
        
        System.out.println("setting up random weights and thresholds ...");
        
        // set the network's thresholds and weights to random values; the delta gradients are zeroed (already done during their initialization)
        for (int il = this.numberOfLayers()-1; il >= 1; il--) { // iterate down through all layers except the lowest one
            NeuralLayer currentLayer = this.getLayer(il);
            for (int in=0; in<currentLayer.numberOfNeurons(); in++) { // for each neuron in the layer
                Neuron currentNeuron = currentLayer.getNeuron(in);
                currentNeuron.threshold = 2*this.random()-1;
                //deltaGradients.setThreshold(il,in,0.0);
                for (int ii = 0; ii < currentNeuron.numberOfInputs(); ii++) {
                    currentNeuron.getInput(ii).weight = 2*this.random()-1;
                    //deltaGradients.setWeight(il,in,ii,0.0);
                } // end ii
            } // end in
        } // end il
         
        int currK = 0; // iteration counter
        double currE = Double.POSITIVE_INFINITY; // the initial error is infinite (expected to decrease)
        
        System.out.println("entering adaptation loop ... (maxK = "+maxK+")");
        
        while ( currK < maxK && currE > eps ) {
            computeTotalGradient(totalGradients,partialGradients,trainingSet);
            for (int il = this.numberOfLayers()-1; il >= 1; il--) { // iterate down through all layers except the lowest one
                NeuralLayer currentLayer = this.getLayer(il);            
                
                for (int in=0; in<currentLayer.numberOfNeurons(); in++) { // for each neuron in the layer
                    Neuron currentNeuron = currentLayer.getNeuron(in);
                    delta = -lambda * totalGradients.getThreshold(il,in) + micro * deltaGradients.getThreshold(il,in);
                    currentNeuron.threshold += delta;
                    deltaGradients.setThreshold(il,in,delta);
                } // end for in (threshold update)
                
                for (int in=0; in<currentLayer.numberOfNeurons(); in++) { // for each neuron in the layer
                    Neuron currentNeuron = currentLayer.getNeuron(in);
                    for (int ii = 0; ii < currentNeuron.numberOfInputs(); ii++) { // and for each input of the neuron
                        delta = -lambda * totalGradients.getWeight(il,in,ii) + micro * deltaGradients.getWeight(il,in,ii);
                        currentNeuron.getInput(ii).weight += delta;
                        deltaGradients.setWeight(il,in,ii,delta);
                    } // end for ii
                } // end for in 2
            } // end for il
            
            currE = totalGradients.getGradientAbs(); 
            currK++;
            if (currK%25==0) System.out.println("currK="+currK+"   currE="+currE);
        } // end while
    }
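    /*
     * The adaptation loop above is gradient descent with a momentum term.
     * For every weight (and analogously for every threshold):
     *
     *   delta(k) = -lambda * dE/dw + micro * delta(k-1)
     *   w        = w + delta(k)
     *
     * The loop terminates after maxK iterations or once the norm of the total
     * gradient (getGradientAbs) drops to eps or below.
     */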
        
    private Vector<Double> activities(Vector<Double> inputs) {
        for (int il=0; il<this.numberOfLayers();il++) { // for each layer
            for (int in=0; in<this.getLayer(il).numberOfNeurons();in++) { // for each neuron in the layer
                double sum = this.getLayer(il).getNeuron(in).threshold; // sum <- threshold
                for (int ii=0; ii<this.getLayer(il).getNeuron(in).numberOfInputs(); ii++) { // inputs
                    // multiply the weight by the corresponding input
                    if (il==0) { // on the lowest layer, multiply the weights by the network inputs
                        sum+=
                        this.getLayer(il).getNeuron(in).getInput(ii).weight *
                        inputs.elementAt(in).doubleValue();
                    } else { // on upper layers, multiply the weights by the outputs of the layer below
                        sum+=
                        this.getLayer(il).getNeuron(in).getInput(ii).weight *
                        this.getLayer(il-1).getNeuron(ii).output;
                    }
                }
                
                // !!! HERE I DISTINGUISH WHETHER THIS IS THE FIRST LAYER:
    //            if (il == 0)
    //                  this.getLayer(il).getNeuron(in).output = sum; // neuron output
    //            else 
                      this.getLayer(il).getNeuron(in).output = this.gainFunction(sum);
                
                //this.getLayer(il).getNeuron(in).output = this.gainFunction(sum); // neuron output
            }
        }
        // finally, write the outputs of the top-layer neurons into a vector:
        Vector<Double> output = new Vector<Double>();
        
        for (int i=0; i<this.getLayer(this.numberOfLayers()-1).numberOfNeurons();i++) 
            output.add(this.getLayer(this.numberOfLayers()-1).getNeuron(i).output);
        
        return output;
    }
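    /*
     * Forward pass: each neuron computes sum = threshold + weighted inputs and
     * outputs gainFunction(sum). On layer 0 a neuron's weights are multiplied
     * by the network input with the same index as the neuron
     * (inputs.elementAt(in)); on every higher layer the ii-th weight is
     * multiplied by the output of the ii-th neuron of the layer below. Note
     * that the threshold is added with a positive sign, so it acts as a bias.
     */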
        
    private double gainFunction (double x) {
        return 1/(1+Math.exp(-x)); 
    }
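    /*
     * gainFunction is the logistic sigmoid s(x) = 1 / (1 + e^(-x)). Its
     * derivative is s'(x) = s(x) * (1 - s(x)), which is exactly the
     * output * (1 - output) factor used in computeGradient().
     */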
    
    private NeuralLayer getLayer(int index) {
        return this.listLayers.elementAt(index);
    }
    
    

/*    public void printNeuralNetwork() {
        for (int il=0; il<this.numberOfLayers();il++) {
            System.out.println("Layer "+il);
            for (int in=0; in<this.getLayer(il).numberOfNeurons();in++) {
                System.out.print("      Neuron "+in+ " (threshold="+this.getLayer(il).getNeuron(in).threshold+") : ");
                for (int ii=0; ii<this.getLayer(il).getNeuron(in).numberOfInputs(); ii++) {
                    System.out.print(this.getLayer(il).getNeuron(in).getInput(ii).weight+" ");
                }
                System.out.println();
            }
        }
      }
 */
    
} // end class NeuralNetwork
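
A minimal usage sketch, not part of neuralnetwork.java. It assumes a public constructor NeuralNetwork(Vector<Integer> dimensions) and public learn(...) / classify(...) wrappers around adaptation() and activities(), which are presumably declared in the part of this file not shown on this page; the layer sizes, training parameters, and input values below are made up for illustration.

// Hypothetical example; constructor and method names are assumptions, not confirmed by this page.
import java.util.Vector;

public class NeuralNetworkDemo {
    public static void main(String[] args) {
        // Network topology: 2 inputs, 3 hidden neurons, 1 output (hypothetical sizes)
        Vector<Integer> dimensions = new Vector<Integer>();
        dimensions.add(2);
        dimensions.add(3);
        dimensions.add(1);
        NeuralNetwork network = new NeuralNetwork(dimensions); // assumed constructor

        SetOfIOPairs trainingSet = new SetOfIOPairs();         // assumed public class
        // ... fill trainingSet with input/output vector pairs here ...

        // Assumed public wrapper around adaptation(); the parameters have the
        // same meaning: maxK, eps, lambda (learning rate), micro (momentum)
        network.learn(trainingSet, 8000, 0.001, 0.05, 0.5);

        // Assumed public wrapper around activities()
        Vector<Double> input = new Vector<Double>();
        input.add(0.3);
        input.add(0.7);
        Vector<Double> output = network.classify(input);
        System.out.println("network output: " + output);
    }
}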
