neuralnetwork.java
// The previous layer must provide a parent node for every output node:
if ( prevLayer.getNumberOfNodes() < outputLayer.getNumberOfNodes() )
throw new MiningException("can't connect output layer");
// Connect each output node to the node with the same index in the previous layer
// (the weight vectors of the output nodes are not extended here):
for (int i = 0; i < outputLayer.getNumberOfNodes(); i++)
this.connectNodes( prevLayer.getNeuralNodes()[i], outputLayer.getNeuralNodes()[i], false );
}
/**
* Removes all connections between neurons.
*/
public void removeAllConnections() {
Enumeration nr = neurons.elements();
while ( nr.hasMoreElements() ) {
NeuralNode NN = (NeuralNode) nr.nextElement();
NN.removeAllParents();
NN.removeAllChildren();
}
}
/**
* Creates a connection between two neural nodes. Notice that it
* is also possible (though rarely used) to connect two nodes
* which do not belong to neighboring layers, e.g. to connect
* a parent node from layer 2 with a child node from layer 5.
*
* @param parent parent node, i.e. node from the input side
* @param child child node, i.e. node from the output side
* @param addWeights if true, extend the weight vector of the child neural node; leave it unchanged if false
* @return true if the two nodes could be connected, false otherwise
*
* @exception MiningException error during connection process
*/
public boolean connectNodes(NeuralNode parent, NeuralNode child, boolean addWeights)
throws MiningException {
// Empty node:
if (parent == null || child == null)
return false;
// Parent node doesn't exist in network:
String parId = parent.getId();
if ( parId == null || neurons.get(parId) == null)
return false;
// Child node doesn't exist in network:
String childId = child.getId();
if ( childId == null || neurons.get(childId) == null)
return false;
// Node connected with itself:
if ( parId.equals(childId) )
return false;
// Parent node doesn't belong to a neural layer:
NeuralLayer parlay = parent.getNeuralLayer();
if (parlay == null) return false;
// Child node doesn't belong to a neural layer:
NeuralLayer childlay = child.getNeuralLayer();
if (childlay == null) return false;
// Nodes cannot belong to the same neural layer:
if (parlay == childlay)
return false;
// Input node cannot be a child:
if (childlay.getLayerType() == NeuralLayer.NEURAL_INPUT )
return false;
// Output node cannot be a parent:
if (parlay.getLayerType() == NeuralLayer.NEURAL_OUTPUT )
return false;
// Connect nodes:
parent.addChildNode(child);
child.addParentNode(parent);
// Extend the weight vector of the child if it is a Neuron:
if ( child instanceof Neuron && addWeights )
((Neuron) child).addWeight();
return true;
}
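// Usage sketch, not part of the original source: connectNodes reports invalid
// requests (null nodes, unknown ids, self-connections, nodes in the same layer,
// children in the input layer, parents in the output layer) only through its
// boolean result, so callers should check it. The helper name below is an
// illustrative assumption.
private void connectOrFail(NeuralNode parent, NeuralNode child) throws MiningException {
// addWeights = true extends the weight vector of a child Neuron to match its new parent.
if ( !connectNodes(parent, child, true) )
throw new MiningException("could not connect the two nodes");
}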
/**
* Disconnects two neural nodes.
*
* @param parent parent node, i.e. node from the input side
* @param child child node, i.e. node from the output side
* @return true if the two nodes could be disconnected, false otherwise
*
* @exception MiningException error during disconnection process
*/
public boolean disconnectNodes(NeuralNode parent, NeuralNode child)
throws MiningException {
// Empty node:
if (parent == null || child == null)
return false;
// Parent node doesn't exist in network:
String parId = parent.getId();
if ( parId == null || neurons.get(parId) == null)
return false;
// Child node doesn't exist in network:
String childId = child.getId();
if ( childId == null || neurons.get(childId) == null)
return false;
// Disconnect:
parent.removeChild(child);
child.removeParent(parent);
return true;
}
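// Note (based only on this listing): disconnectNodes removes the parent/child
// links but does not itself shrink the weight vector of the child; whether
// NeuralNode.removeParent adjusts the weights is not visible here.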
/**
* Creates a complete neural network from the given numbers of nodes per layer.
*
* @param layers array with the number of nodes of each layer
* @exception MiningException if the network could not be created
*/
public void createNetworkAutomatically(int[] layers) throws MiningException {
// There must be at least three layers:
if (layers == null || layers.length < 3)
throw new MiningException("invalid layer specification");
// Init:
int id = 0;
int nLayer = layers.length;
NeuralLayer[] neuLayer = new NeuralLayer[nLayer];
// 1. CREATE NEURAL LAYERS
// Input layer:
int nnode = layers[0];
if (nnode <= 0)
throw new MiningException("input layer must contain at least one neural node");
neuLayer[0] = new NeuralLayer(nnode, NeuralLayer.NEURAL_INPUT);
for (int i = 0; i < nnode; i++) {
neuLayer[0].getNeuralNodes()[i].setId( String.valueOf(id) );
id = id + 1;
}
// Hidden layers:
for (int i = 1; i < nLayer - 1; i++) {
nnode = layers[i];
if (nnode <= 0)
throw new MiningException("layer " + i + " must contain at least one neural node");
neuLayer[i] = new NeuralLayer(nnode, NeuralLayer.NEURON);
for (int j = 0; j < nnode; j++) {
neuLayer[i].getNeuralNodes()[j].setId( String.valueOf(id) );
id = id + 1;
}
}
// Output layer:
nnode = layers[nLayer-1];
if (nnode <= 0)
throw new MiningException("output layer must contain at least one neural node");
neuLayer[nLayer-1] = new NeuralLayer(nnode, NeuralLayer.NEURAL_OUTPUT);
for (int i = 0; i < nnode; i++) {
neuLayer[nLayer-1].getNeuralNodes()[i].setId( String.valueOf(id) );
id = id + 1;
}
// 2. ADD LAYERS TO NETWORK
// Now create Network topology:
setNeuralLayer(neuLayer);
// 3. CONNECT NETWORK
// Connect the nodes of the network:
connectAllLayers();
}
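// Sketch, not part of the original source: a typical call. The 3-4-2 topology
// and the helper name are illustrative assumptions only.
private void buildExampleNetwork() throws MiningException {
// Three input nodes, one hidden layer with four neurons, two output nodes.
// Node ids are assigned consecutively: "0".."2" (input), "3".."6" (hidden), "7".."8" (output).
createNetworkAutomatically(new int[] { 3, 4, 2 });
}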
// -----------------------------------------------------------------------
// Calculation methods
// -----------------------------------------------------------------------
/**
* Resets all internal calculation values of the network by applying the
* same method to the output layer.
*/
public void resetValues() {
NeuralLayer outputLayer = getOutputLayer();
if (outputLayer != null)
outputLayer.resetValues();
}
/**
* Calculates the output values of the network by applying the same
* method to the output layer.
*
* @param newcalc if true, force a new calculation of the output values
* @return output values of the network, missing if not yet calculated, null on error
* @exception MiningException error while calculating output values
*/
public double[] outputValues(boolean newcalc) throws MiningException {
NeuralLayer outputLayer = getOutputLayer();
if (outputLayer != null)
return outputLayer.outputValues(newcalc);
return null;
}
/**
* Calculates the error values of the network by applying the same
* method to the input layer.
*
* @param newcalc if true, force a new calculation of the error values
* @return error values of the network, missing if not yet calculated, null on error
* @exception MiningException error while calculating error values
*/
public double[] errorValues(boolean newcalc) throws MiningException {
NeuralLayer inputLayer = getInputLayer();
if (inputLayer != null)
return inputLayer.errorValues(newcalc);
return null;
}
/**
* Updates the weights of the nodes of the network by applying the same
* method to the output layer.
*
* @param learningRate the learning rate
* @param momentum the momentum
* @exception MiningException cannot update weights
*/
public void updateWeights(double learningRate, double momentum)
throws MiningException {
NeuralLayer outputLayer = getOutputLayer();
if (outputLayer != null)
outputLayer.updateWeights(learningRate, momentum);
}
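// For orientation (assumption; the actual update rule is implemented in
// NeuralLayer / Neuron and is not shown in this listing): a weight update that
// uses these two parameters commonly has the form
//
//   deltaW(t) = learningRate * gradient + momentum * deltaW(t-1)
//
// i.e. the momentum term reuses the previous weight change to smooth learning.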
/**
* Calculates the training error of the current vector. This is the sum
* of the squared error values over the output nodes.
*
* @return network error
* @exception MiningException couldn't calculate training error
*/
public double trainingError() throws MiningException {
NeuralLayer outputLayer = getOutputLayer();
if (outputLayer != null) {
double error = 0.0;
for (int i = 0; i < outputLayer.getNumberOfNodes(); i++) {
double errn = outputLayer.getNeuralNodes()[i].errorValue(false);
error = error + errn*errn;
}
return error;
}
return Category.MISSING_VALUE;
}
// -----------------------------------------------------------------------
// Applies Neural Network to new data
// -----------------------------------------------------------------------
/**
* Applies the Neural Network to a new mining vector.
*
* @param miningVector mining vector to be classified
* @return resulting score value of the NN for the presented vector
* @exception MiningException if the mining vector could not be classified
*/
public double apply(MiningVector miningVector) throws MiningException {
// Bring the mining vector to the same meta data as the model:
MetaDataOperations metaOp = metaData.getMetaDataOp();
metaOp.setUsageType( MetaDataOperations.USE_ATT_NAMES_AND_TYPES );
miningVector = metaOp.transform(miningVector);
// Assign values to input neurons:
NeuralLayer inputLayer = getInputLayer();
if (inputLayer == null)
throw new MiningException("neural network does not contain input layer");
int classIndex = metaData.getAttributeIndex( metaData.getMiningAttribute(className) );
for (int i = 0; i < metaData.getAttributesNumber(); i++) {
if (i == classIndex) continue;
NeuralInput ni = (NeuralInput) inputLayer.getNeuralNodes()[i];
ni.setInput( miningVector.getValue(i) );
}
// Apply neural network:
resetValues();
double[] result = outputValues(true);
if (result == null)
throw new MiningException("error while applying network to new data");
// Get node of maximum value:
int ind = 0;
double max = result[0];
int nout = result.length;
for (int i = 1; i < nout; i++) {
if (result[i] > max) {
max = result[i];
ind = i;
}
}
// Regression:
double retVal = max;
// Target attribute is categorical => classification:
MiningAttribute target = metaData.getMiningAttribute( className );
if (target instanceof CategoricalAttribute) {
retVal = ind;
}
return retVal;
}
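// Usage sketch, not part of the original source: for a categorical target the
// returned score is the index of the winning output node, for a numeric target
// it is the raw maximum output value. The helper name is an illustrative assumption.
private int applyAsClassIndex(MiningVector miningVector) throws MiningException {
// Only meaningful when the target attribute is categorical.
return (int) apply(miningVector);
}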
//<<17/03/2005, Frank J. Xu
//Do not add any changes for the current version except implementing the interface.
// -----------------------------------------------------------------------
// Applies Neural Network to new data
// -----------------------------------------------------------------------
/**
* Applies the Neural Network to a new mining vector.
*
* @param miningVector mining vector to be classified
* @param a_wekaInstances not used by this implementation; present only to satisfy the interface
* @return resulting score value of the NN for the presented vector
* @exception MiningException if the mining vector could not be classified
*/
public double apply(MiningVector miningVector, Object a_wekaInstances) throws MiningException {
// Bring the mining vector to the same meta data as the model:
MetaDataOperations metaOp = metaData.getMetaDataOp();
metaOp.setUsageType( MetaDataOperations.USE_ATT_NAMES_AND_TYPES );
miningVector = metaOp.transform(miningVector);
// Assign values to input neurons:
NeuralLayer inputLayer = getInputLayer();
if (inputLayer == null)
throw new MiningException("neural network does not contain input layer");
int classIndex = metaData.getAttributeIndex( metaData.getMiningAttribute(className) );
for (int i = 0; i < metaData.getAttributesNumber(); i++) {
if (i == classIndex) continue;
NeuralInput ni = (NeuralInput) inputLayer.getNeuralNodes()[i];
ni.setInput( miningVector.getValue(i) );
}
// Apply neural network:
resetValues();
double[] result = outputValues(true);
if (result == null)
throw new MiningException("error while applying network to new data");
// Get node of maximum value:
int ind = 0;
double max = result[0];
int nout = result.length;
for (int i = 1; i < nout; i++) {
if (result[i] > max) {
max = result[i];
ind = i;
}
}
// Regression:
double retVal = max;
// Target attribute is categorical => classification:
MiningAttribute target = metaData.getMiningAttribute( className );
if (target instanceof CategoricalAttribute) {
retVal = ind;
}
return retVal;
}
//17/03/2005, Frank J. Xu>>
// -----------------------------------------------------------------------
// Further methods
// -----------------------------------------------------------------------
/**
* Returns a string representation of the neural network.
*
* @return string representation of the neural network
*/
public String toString() {
String s = "Neural Network, ";
s = s + "number of layers: " + getNumberOfLayers() + "\n";
for (int i = 0; i < getNumberOfLayers(); i++)
s = s + "layer " + i + ": " + neuralLayer[i].toString() + "\n";
return s;
}
}