/*
* NNet.java
*
 * Created on 2007-11-5, 10:04
*
*/
package neuralNetwork;
import java.util.*;
import myUtil.*;
import java.io.*;
import java.util.regex.*;
/**
 * An instance of a neural network.
 * @author yuhui_bear
 */
public class NeuralNetwork {
public Neuron[][] neuralNet_A;
// public ArrayList<ArrayList> neuralNet;
private InputNeuron[] inputLay;
private int[] netTop;
private int branches=0;
    /** Creates a new instance of NeuralNetwork.
     * @param nettop how many neurons each layer has
     * @param branch how many dendrites each neuron has, gate port excepted
     */
public NeuralNetwork (int[] nettop , int branch) {
netTop = nettop;
branches =branch;
neuralNet_A = initNode(netTop,branches);
setupLink(netTop ,branches , neuralNet_A);
solidifyNet();
}
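    // For example (illustrative figures): nettop = {4, 3, 2} with branch = 3 builds
    // three layers holding 4, 3 and 2 neurons. initNode hands every neuron branch + 1
    // weights, the extra slot presumably being the gate port mentioned above, and
    // setupLink wires dendrites 1..branch while leaving index 0 alone.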
    /**
     * Restore a net from a file.
     * @param netfile the file the net was stored in.
     */
public NeuralNetwork (File netfile) throws IOException{
        ArrayList<ArrayList<ArrayList<NumberBox>>> neuralWeightList = new ArrayList<ArrayList<ArrayList<NumberBox>>>();
Pattern neuron_mark = Pattern.compile("<Neuron>.*</Neuron>");
Pattern weight_seperator = Pattern.compile("\\s?,\\s?");
Pattern lay_mark = Pattern.compile("<lay>\\s?\\d+");
Pattern randomWight_mark = Pattern.compile("<weight>RANDOM WEIGHT</weight>");
        RandomAccessFile inNml = new RandomAccessFile(netfile,"r");
        try{
            //setup neuron weight list.
            String str = inNml.readLine();
            while (str != null){
                if(lay_mark.matcher(str).matches()){
                    // a <lay> line opens a new layer; collect the <Neuron> lines that follow
                    ArrayList<ArrayList<NumberBox>> layn = new ArrayList<ArrayList<NumberBox>>();
                    neuralWeightList.add(layn);
                    str = inNml.readLine();
                    while(str != null && neuron_mark.matcher(str).matches()){
                        // strip the markup, keeping only the comma-separated weights
                        str = str.replaceAll("<[a-zA-Z]+>\\d+<[a-zA-Z]+>","");
                        str = str.replaceAll("</?[a-zA-Z]+>","");
                        Scanner scanWeight = new Scanner(str).useDelimiter(weight_seperator);
                        ArrayList<NumberBox> wal = new ArrayList<NumberBox>();
                        while (scanWeight.hasNextDouble()){
                            wal.add(new NumberBox(scanWeight.nextDouble()));
                        }
                        layn.add(wal);
                        str = inNml.readLine();
                    }
                    continue;
                }
                str = inNml.readLine();
            }
        }finally{
            inNml.close();
        }
neuralNet_A = initNode(neuralWeightList);
setupLink(netTop ,branches , neuralNet_A);
solidifyNet();
}
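    // Rough sketch of the net-file layout the constructor above parses, inferred
    // only from the regular expressions; the exact tag names and ordering are an
    // assumption, not taken from the original format:
    //
    //     <lay> 0
    //     <Neuron>0<weight>0.42 , -0.17 , 0.93</weight></Neuron>
    //     <Neuron>1<weight>0.08 , 0.55 , -0.61</weight></Neuron>
    //     <lay> 1
    //     ...
    //
    // Each <lay> line opens a layer; each following <Neuron> line carries one
    // neuron's comma-separated dendrite weights.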
    /**
     * Get information about the network.
     * @return a NetInformation describing this net.
     */
public NetInformation getNetInformation(){
return new NetInformation(netTop,branches);
}
    /**
     * Feed one input vector into the input layer; the call is silently
     * ignored unless inData.length equals the branch count.
     * @param inData the input values.
     */
    public void inputAdapte(double[] inData){
        if(inData.length == branches){
            for ( int i = 0 ; i< inData.length ; i++){
                inputLay[i].input(inData[i]);
            }
        }
// }else if(inData.length < branches){
// int k=0;
// for ( int i = 0 ; i< branches ; i++){
// inputLay[i].input(inData[k++]);
// if(k >=inputLay.length){
// k=0;
// }
// }
// }
}
    /**
     * Produce the neurons of the net.
     * @param lay int[], the net topology (neurons per layer).
     * @param dendrites the number of dendrites per neuron.
     * @return the structure holding the neural network.
     */
    // too slow!
// private ArrayList<ArrayList> initNode(int[] lay,int dendrites){
// ArrayList<ArrayList> NNet = new ArrayList();
// RandWeight wrand = new RandWeight(dendrites+1);
// ArrayList<Neuron> layer = new ArrayList();
// int ctno=0;
// for (int n: lay){
// layer = new ArrayList<Neuron>();
// NNet.add(layer);
// for (int i =0 ; i< n; i++) {
// layer.add(new Neuron(wrand.nextWeight(),ctno++));
// }
// }
// // iniate input adapter
// inputLay = new InputNeuron[netTop[0]];
// for ( int i=0; i< netTop[0];i++){
// inputLay[i] = new InputNeuron();
// }
//
// return NNet;
// }
private Neuron[][] initNode(int[] lay,int dendrites){
Neuron[][] NNet = new Neuron[lay.length][];
int totaln=0;
for (int i :lay){
totaln += i;
}
// RandWeight wrand = new RandWeight(totaln,dendrites+1);
Neuron[] layer=null;
int ctno=0 , cntlay =0;
for (int n: lay){
layer = new Neuron[n];
for (int i =0 ; i< n; i++) {
layer[i] =new Neuron(RandWeight.nextWeight(dendrites+1),ctno++);
}
NNet[cntlay++]=layer;
}
        // initiate input adapters
inputLay = new InputNeuron[netTop[0]];
for ( int i=0; i< netTop[0];i++){
inputLay[i] = new InputNeuron();
}
return NNet;
}
private Neuron[][] initNode(ArrayList<ArrayList<ArrayList<NumberBox>>> nwl){
int[] lay = new int[nwl.size()];
for (int i = 0 ; i< lay.length; i++){
lay[i] = nwl.get(i).size();
}
int dendrity = nwl.get(0).get(0).size();
netTop = lay;
branches =dendrity-1;
Neuron[][] NNet = new Neuron[lay.length][];
Neuron[] layer=null;
int ctno=0 , cntlay =0;
for (int n: lay){
layer = new Neuron[n];
for (int i =0 ; i< n; i++) {
double[] temp = new double [dendrity];
for (int wi =0 ; wi< temp.length;wi++){
temp[wi] = nwl.get(cntlay).get(i).get(wi).doubleData;
}
layer[i] =new Neuron(temp,ctno++);
}
NNet[cntlay++]=layer;
}
        // initiate input adapters (note: the random-weight constructor sizes this array with netTop[0])
        inputLay = new InputNeuron[branches];
for ( int i=0; i< branches;i++){
inputLay[i] = new InputNeuron();
}
return NNet;
}
    /**
     * Set up the links between successive layers.
     * @param lay int[], the number of neurons in every layer.
     * @param bra the number of dendrites per neuron.
     * @param NNet the container holding the net.
     */
private void setupLink(int[] lay ,int bra ,ArrayList<ArrayList> NNet ){
int[] index ;
Fetcher cyclef ;
Neuron curSubNp =null , curNp =null;
//setup input link:
cyclef = new Fetcher(netTop[0]);
for ( int link =0; link < netTop[0]; link++){
index = cyclef.nextGroup(bra);
for ( int b = 1 ; b <= bra; b++){
((Neuron)(NNet.get(0).get(link))).dendrites[b].in=inputLay[index[b-1]];
}
}
for ( int curlay=1;curlay < lay.length ;curlay++){
cyclef = new Fetcher(lay[curlay-1]);
for ( int i =0; i< lay[curlay];i ++){
index = cyclef.nextGroup(bra);
for (int k = 1; k <= bra ;k ++){
((Neuron)NNet.get(curlay).get(i)).dendrites[k].in = ((Neuron)NNet.get(curlay-1).get(index[k-1]));
((Neuron)NNet.get(curlay-1).get(index[k-1])).sub.add(((Neuron)NNet.get(curlay).get(i)).dendrites[k]);
}
}
}
}
    private void setupLink(int[] lay ,int bra ,Neuron[][] NNet ){
        int[] index ;
        Fetcher cyclef ;
        //setup input link: each first-layer neuron cycles through its own group of input neurons
        cyclef = new Fetcher(lay[0]);
        for ( int link =0; link < netTop[0]; link++){
            index = cyclef.nextGroup(bra);
            for ( int b = 1 ; b <= bra; b++){
                NNet[0][link].dendrites[b].in=inputLay[index[b-1]];
            }
        }
        //link every later layer back to the previous one, one index group per neuron
        for ( int curlay=1;curlay < lay.length ;curlay++){
            cyclef = new Fetcher(lay[curlay-1]);
            for ( int i =0; i< lay[curlay];i ++){
                index = cyclef.nextGroup(bra);
                for (int k = 1; k <= bra ;k ++){
                    NNet[curlay][i].dendrites[k].in = NNet[curlay-1][index[k-1]];
                    NNet[curlay-1][index[k-1]].sub.add(NNet[curlay][i].dendrites[k]);
                }
            }
        }
    }
    /** Convert each neuron's sub list into an array once all links are in place. */
    private void solidifyNet(){
for (int i = 0 ;i < neuralNet_A.length;i++){
for (int k =0; k < neuralNet_A[i].length;k++){
neuralNet_A[i][k].convertToSubArray();
}
}
}
    /**
     * Print every neuron of the net to the given stream.
     * @param outstream the destination writer.
     */
    public void showNet ( PrintWriter outstream){
        Neuron np = null;
        for ( int i=0; i<neuralNet_A.length;i++) {
            for(int k =0; k<neuralNet_A[i].length;k++){
                np = neuralNet_A[i][k];
                outstream.println(np);
            }
        }
    }
}
class Weight {
public double data =0;
public Weight(double d){
data =d;
}
}
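// A minimal usage sketch: the topology, branch count and input values below are
// illustrative assumptions, and it presumes that Neuron, InputNeuron, Fetcher and
// RandWeight from the neuralNetwork/myUtil packages compile as referenced above.
class NeuralNetworkDemo {
    public static void main(String[] args) {
        int[] topology = {4, 3, 2};                           // neurons per layer
        NeuralNetwork net = new NeuralNetwork(topology, 4);   // 4 dendrites per neuron, gate port excepted
        net.inputAdapte(new double[]{0.1, 0.5, -0.3, 0.8});   // one value per branch, matching the check in inputAdapte
        PrintWriter out = new PrintWriter(System.out, true);  // autoflush so the dump appears immediately
        net.showNet(out);                                     // print every neuron
    }
}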