// nrpop.java
package apv.nrlibj;
import java.io.*;
import java.lang.*;
import java.lang.reflect.*;
import java.util.*;
/************************************************************************/
/* */
/* CLASS NrPop */
/* */
/* (vector of NNet) */
/* */
/************************************************************************/
/**
 * This class defines and holds a population of neural network objects.
 * It holds the neural networks in a vector, but the first network has number 1
 * (there is no network number 0). <BR>
 * You can create a population of simple 3-layer networks (with or without Elman
 * memory context) using a simple constructor, or you can create a population of
 * very complicated neural networks using a description language.<BR>
 * Each type of constructor calls the corresponding constructor for a single neural
 * network and assigns these neural network objects to the vector elements.<BR>
 * In addition this class contains, as static members, some default values and some
 * utility methods such as a random generator and a file-to-string-array utility.
 * @author D. Denaro
 * @version 5.0 , 2/2001
 */
public class NrPop
{
// Default values used by the simple NNet constructors and by the EBP
// (error back-propagation) training procedure.  All of them can be changed
// through the static setDef...() methods below.
static boolean fbias=false; // train bias values during EBP?
static float eps=(float)0.3; // learning factor
static float alfa=(float)0.3; // momentum factor
static float ra=(float)-0.6; // lower bound of the random weight range
static float rb=(float)+0.6; // upper bound of the random weight range
public static Rand rnd= new Rand(); // shared random generator (see setSeed/riab/rfab/rgauss)
/**
 * Sets the default range used when random weights are chosen (default -0.6 .. +0.6).
 */
public static void setDefRandRange(float ra,float rb)
{
 NrPop.ra=ra;
 NrPop.rb=rb;
}
/**
 * Sets the default learning factor used by the EBP procedure (default 0.3).
 */
public static void setDefLearnFactor(float e)
{
 NrPop.eps=e;
}
/**
 * Sets the default momentum factor used by the EBP procedure (default 0.3).
 */
public static void setDefMomentFactor(float a)
{
 NrPop.alfa=a;
}
/**
 * Sets the default flag that enables bias training in the EBP procedure (default false).
 */
public static void setDefFlagBiasLearn(boolean fbias)
{
 NrPop.fbias=fbias;
}
/**
 * Returns the current default learning factor of the EBP procedure.
 */
public static float readDefLearnFactor()
{
 return NrPop.eps;
}
/**
 * Returns the current default momentum factor of the EBP procedure.
 */
public static float readDefMomentFactor()
{
 return NrPop.alfa;
}
/**
 * Returns the current default flag for bias training in the EBP procedure.
 */
public static boolean readDefFlagBiasLearn()
{
 return NrPop.fbias;
}
/**
 * Sets the seed of the shared random generator (the same seed reproduces the same sequence).
 */
public static void setSeed(int s)
{
 rnd.setSeed((long)s);
}
/**
 * Re-seeds the shared random generator from the current time (this is the default).
 */
public static void setSeed()
{
 rnd.setSeed(System.currentTimeMillis());
}
/**
 * Returns a random integer between a and b (both included).
 */
public static int riab(int a,int b)
{
 return rnd.iab(a,b);
}
/**
 * Returns a random float between a and b (both included).
 */
public static float rfab(float a, float b)
{
 return rnd.fab(a,b);
}
/**
 * Returns a random float drawn from a gaussian distribution with
 * <TT>"dev"</TT> as its standard deviation.
 */
public static float rgauss(float dev)
{
 return rnd.gauss(dev);
}
Vector Pnet; // the population: element i-1 holds net number i (nets are numbered from 1)
Fitness fitness; // not referenced in this chunk -- presumably used by evolutionary code elsewhere; verify
/**
 * This constructor creates an empty population sized for <TT>"peoplenum"</TT> networks.
 */
public NrPop(int peoplenum)
{
 Pnet= new Vector(peoplenum);
}
/**
 * This constructor creates a population of <TT>"peoplenum"</TT> networks, each of them
 * with three layers plus, optionally, a buffer memory (layers 0,1,2,3 instead of 0,1,2).<BR>
 * When <TT>"mem"</TT> = true, a buffer of the hidden layer is created and this
 * memory layer is linked with the hidden layer (Elman context memory).<BR>
 * The weights of each net are randomly chosen in the range <TT>"ra"</TT> - <TT>"rb"</TT>.<BR>
 * <TT>"nodeinp"</TT> is the number of first-layer nodes, <TT>"nodehid"</TT> of the hidden
 * layer and <TT>"nodeout"</TT> of the last layer.<BR>
 * The first-layer nodes (layer 0) are linear nodes (NodeLin); the hidden-layer and
 * last-layer nodes have a sigmoid activation function (NodeSigm).
 */
public NrPop(int peoplenum,int nodeinp,int nodehid,int nodeout,boolean mem,float ra,float rb)
{
 Pnet= new Vector(peoplenum);
 for (int k=0;k<peoplenum;k++)
 {
  Pnet.add(new NNet(nodeinp,nodehid,nodeout,mem,ra,rb));
 }
}
/**
 * This constructor creates a population of <TT>"peoplenum"</TT> three-layer networks,
 * with or without a buffer memory, exactly like the seven-argument constructor,
 * except that the weights of each net are randomly chosen in the current default
 * range (see setDefRandRange).<BR>
 * When <TT>"mem"</TT> = true, a buffer of the hidden layer is created and this
 * memory layer is linked with the hidden layer (Elman context memory).<BR>
 * <TT>"nodeinp"</TT> is the number of first-layer nodes, <TT>"nodehid"</TT> of the hidden
 * layer and <TT>"nodeout"</TT> of the last layer.<BR>
 * The first-layer nodes (layer 0) are linear nodes (NodeLin); the hidden-layer and
 * last-layer nodes have a sigmoid activation function (NodeSigm).
 */
public NrPop(int peoplenum,int nodeinp,int nodehid,int nodeout,boolean mem)
{
 this(peoplenum,nodeinp,nodehid,nodeout,mem,NrPop.ra,NrPop.rb);
}
/**
 * This constructor creates a population of networks in the most flexible way.
 * The string array <TT>"descrrec"</TT> contains the records that describe the network
 * population in a simple predefined language:
 * <PRE>
 * 4 record types
 * <B>
 * net=n[,m] </B>
 *    means: the next definition is for net number "n", or for all nets from "n" to "m"
 * <B>
 * layer=n [tnode=n,[m] nname=xxxx... copytoml=n] </B>
 *    means: definition of layer number "n" with "n" total nodes (or n x m nodes
 *    if it is a bidimensional layer) and with a buffer realised by layer "n".
 *    The node type is defined by the name (Ex. NodeLin, NodeSigm)
 * <B>
 * linktype=[xxx...] fromlayer=n[(h[,k])] tolayer=n[(h[,k])] [ value=na,[nb]] </B>
 *    means: link "all" to all (default) or "one" to one from layer number "n" to
 *    layer number "n". By default the link is realised between all the nodes of
 *    the layers, but it is possible to describe a link for a sub-set of nodes,
 *    from node "h" to node "k". By default the weights are randomly chosen in
 *    the default range, but it is possible to define a different range between
 *    "na" and "nb". If only "na" is present, it means all weights = "na"
 * <B>
 * biasval=na[,nb] oflayer=n[(h[,k])] </B>
 *    means: the same kind of record as the previous one, but for bias values
 *
 * example:
 *
 * net=5,10
 * layer=0 tnode=2 nname=NodeLin
 * layer=1 tnode=2 nname=NodeSigm
 * layer=2 tnode=1 nname=NodeSigm
 * linktype=all fromlayer=0 tolayer=1
 * linktype=all fromlayer=1 tolayer=2
 *
 * </PRE>
 * This defines 6 networks with 3 layers and 2,2,1 nodes (for the XOR problem, for instance).
 * NodeLin is a kind of node that reproduces its input value on its output;
 * NodeSigm is a sigmoid node.
 */
public NrPop(String descrrec[])
{
 // Scans "descrrec" for "net=n[,m]" records; each one makes the NNet(String[],int)
 // constructor parse the records that follow it, and the resulting nets are stored
 // in positions n..m of the population.  (Unused local "c" removed.)
 int i,n,fromnet=1,tonet=1;
 String t,val;
 String a,b;
 StringTokenizer tok;
 Pnet= new Vector();
 for (i=0;i<descrrec.length;i++)
 {
  tok=new StringTokenizer(descrrec[i]);
  if (tok.countTokens()<1) continue;   // skip blank records
  t=tok.nextToken();
  if (t.startsWith("net="))
  {
   // val keeps the '=' prefix: values() (defined elsewhere in this class)
   // apparently expects it -- TODO confirm against values().
   val=t.substring(t.indexOf('='));a=values(val,1);b=values(val,2);
   try {fromnet=Integer.parseInt(a);} catch (NumberFormatException e){NNError.err("Net Number missed at rec "+i);}
   try {tonet=Integer.parseInt(b);} catch (NumberFormatException e){tonet=fromnet;}   // no ",m" part: a single net
   if (tonet>this.PopSize()) this.setSize(tonet);
   // NNet parses forward from the record after "net=" (presumably up to the
   // next "net=" record -- NNet is not visible here).
   for (n=fromnet;n<=tonet;n++){NNet nnet= new NNet(descrrec,i+1);this.setNNet(n,nnet);}
  }
 }
}
/**
 * This constructor opens a file named <TT>"descrfile"</TT> and reads the networks
 * description from it.  Similar to the NrPop(String descrrec[]) constructor, but
 * useful for an external definition of the networks.<BR>
 * If the file cannot be opened, NNError.err is invoked and the population is left empty.
 */
public NrPop(String descrfile)
{
 int n,fromnet=1,tonet=1;
 boolean endfile=false;
 String t="",val,line;
 String a,b;
 Vector rec=new Vector(100);
 String descrnet[];
 StringTokenizer tok;
 FileReader dfile;
 LineNumberReader dline;
 // BUG FIX: Pnet was never initialized in this constructor, so the
 // this.PopSize() call below threw a NullPointerException.
 Pnet= new Vector();
 try
 {
  dfile= new FileReader(descrfile);
  dline= new LineNumberReader(dfile);
  // Skip forward to the first "net=" record.  (The original loop condition was
  // inverted -- it looped WHILE the token started with "net=" -- and the null
  // that readLine() returns at end-of-file was not handled: readLine() does
  // not throw at EOF.)
  try
  {
   do
   {
    line=dline.readLine();
    if (line==null) {endfile=true;break;}
    tok=new StringTokenizer(line);
    if (!tok.hasMoreTokens()) continue;   // ignore blank lines
    t=tok.nextToken();
   }
   while (!t.startsWith("net="));
  }
  catch (IOException e){endfile=true;}
  while (!endfile)
  {
   // t holds a "net=n[,m]" token: extract the net number range.
   // val keeps the '=' prefix: values() (defined elsewhere in this class)
   // apparently expects it -- TODO confirm against values().
   val=t.substring(t.indexOf('='));a=values(val,1);b=values(val,2);
   try {fromnet=Integer.parseInt(a);} catch (NumberFormatException e){NNError.err("Net Number missed at rec "+dline.getLineNumber());}
   try {tonet=Integer.parseInt(b);} catch (NumberFormatException e){tonet=fromnet;}   // no ",m" part: a single net
   if (tonet>this.PopSize()) this.setSize(tonet);
   // Collect the records of this group, up to the next "net=" record or EOF.
   while (true)
   {
    try
    {
     line=dline.readLine();
     if (line==null) {endfile=true;break;}
     tok=new StringTokenizer(line);
     if (!tok.hasMoreTokens()) continue;   // ignore blank lines
     t=tok.nextToken();
     if (t.startsWith("net=")) break; else rec.addElement(line);
    }
    catch (IOException e) {endfile=true;break;}
   }
   // Build the nets of this group.  (The original code discarded the group read
   // just before end-of-file, never emptied "rec" between groups, and used the
   // no-argument toArray(), whose Object[] result cannot be cast to String[].)
   descrnet=new String[rec.size()];
   rec.toArray(descrnet);
   for (n=fromnet;n<=tonet;n++){NNet nnet= new NNet(descrnet,0);this.setNNet(n,nnet);}
   rec.removeAllElements();
  }
  try{dline.close();dfile.close();} catch (IOException e){}
 }
 catch (FileNotFoundException e){NNError.err("File not found "+descrfile);}
}
/**
 * Returns the net stored at position <TT>"index"</TT> of the population vector.
 * In a population NrPop the nets are numbered starting from 1.
 */
public NNet getNNet(int index)
{
 return (NNet)Pnet.get(index-1);
}
/**
 * Appends the given net, growing the vector size by 1.
 * In a population NrPop the nets are numbered starting from 1.
 */
public void addNNet(NNet nnet)
{
 Pnet.add(nnet);
}
/**
 * Stores the given net at position <TT>"index"</TT> of the population vector.
 * In a population NrPop the nets are numbered starting from 1.
 */
public void setNNet(int index,NNet nnet)
{
 Pnet.setElementAt(nnet,index-1);
}
/**
 * Returns the population size (the vector dimension).
 * In a population NrPop the nets are numbered starting from 1.
 */
public int PopSize()
{
 return Pnet.size();
}
/**
* Sets the size o population vector (increase o decrease with null values).
* In a population NrPop the nets are numbered starting from 1
 */