Layer.java
package org.joone.engine;

import org.joone.log.*;
import org.joone.engine.learning.*;
import org.joone.exception.*;
import org.joone.inspection.*;
import org.joone.inspection.implementations.*;
import org.joone.io.*;
import org.joone.net.*;
import org.joone.util.*;

import java.io.*;
import java.util.*;

/**
 * The Layer object is the basic element forming the neural net.
 * Primarily it consists of a number of neurons that apply a transfer
 * function to the sum of a number of input patterns and convey the result
 * to the output pattern. The input patterns are received from connected
 * input listeners and the transformed results are passed to connected output
 * listeners. The component also handles learning by accepting patterns of error
 * gradients from output listeners, applying a reverse (inverse) transfer function
 * and passing the result to the input listeners. Layers execute their own
 * Threads to perform the pattern conveyance, so that a network of Layers
 * can operate in a multi-threaded manner. The execution and termination
 * of the Thread is controlled by a Monitor object.
 */
public abstract class Layer implements NeuralLayer, Runnable, Serializable, Inspectable, LearnableLayer {

    /** Stop flag. If the step has this value, the execution thread terminates. */
    public static final int STOP_FLAG = -1;

    /** Serial version ID for this class */
    private static final long serialVersionUID = -1572591602454639355L;

    /** The name of the layer */
    private String LayerName;

    /** The number of neurons in the layer */
    private int rows = 0;

    /** Holds the bias of the neurons of the layer */
    protected Matrix bias;

    /**
     * The monitor of the layer.
     * Contains all the parameters needed by the learning phase.
     */
    protected Monitor monitor;

    /** Not used, but maintained for backward serialization compatibility. */
    protected int m_batch;

    /** The net's phase: false == recall; true == learning */
    protected boolean learning;

    /**
     * Contains true if the Layer must use a Learner
     * instead of a built-in learning algorithm.
     * Set it in the constructor of any inherited class.
     * Used by the getLearner method.
     * @see getLearner
     */
    protected boolean learnable = false;

    /** Contains the list of connected input listeners (InputPatternListener) */
    protected Vector inputPatternListeners = null;

    /** Contains the list of connected output listeners (OutputPatternListener) */
    protected Vector outputPatternListeners = null;

    /** The execution Thread for this layer. */
    private transient Thread myThread = null;

    /** The monitor used to control read/write access to myThread */
    private transient volatile Object myThreadMonitor;

    /**
     * Set of output values passed from this layer
     * to connected OutputListeners during the recall phase.
     */
    protected transient double[] outs;

    /**
     * Set of input values passed to this layer
     * from connected InputListeners during the recall phase.
     */
    protected transient double[] inps;

    /**
     * Set of input error gradient values passed to this layer
     * from connected OutputListeners during the learning phase.
     */
    protected transient double[] gradientInps;

    /**
     * Set of output error gradient values passed from this layer
     * to connected InputListeners during the learning phase.
     */
    protected transient double[] gradientOuts;

    /** The step number of the network run. */
    protected transient int step = 0;

    /** Whether the layer is running */
    protected transient volatile boolean running = false;

    /** The Learner for this layer. */
    protected transient Learner myLearner = null;

    /** Logger for this class */
    private static final ILogger log = LoggerFactory.getLogger(Layer.class);

    /** The empty constructor */
    public Layer() {
    }

    /**
     * Creates a named layer
     * @param ElemName The name of the layer
     */
    public Layer(String ElemName) {
        this.setLayerName(ElemName);
    }
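    /*
     * A minimal wiring sketch (illustrative only, not part of the original
     * source; it assumes Joone's SigmoidLayer and FullSynapse classes and
     * the addInputSynapse/addOutputSynapse methods declared by the
     * NeuralLayer interface):
     *
     *   Layer input  = new SigmoidLayer("input");
     *   Layer hidden = new SigmoidLayer("hidden");
     *   input.setRows(2);
     *   hidden.setRows(3);
     *   FullSynapse conn = new FullSynapse();
     *   input.addOutputSynapse(conn);  // conn listens to input's output patterns
     *   hidden.addInputSynapse(conn);  // and feeds them to hidden
     */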
    /**
     * Adds a noise component to the biases of the layer
     * and to all the connected input synapses.
     * @param amplitude the noise's amplitude in terms of distance from zero;
     * e.g. a value equal to 0.3 means a noise range from -0.3 to 0.3
     */
    public void addNoise(double amplitude) {
        InputPatternListener elem;
        bias.addNoise(amplitude);
        if (inputPatternListeners == null) {
            return;
        }
        int currentSize = inputPatternListeners.size();
        for (int index = 0; index < currentSize; index++) {
            elem = (InputPatternListener) inputPatternListeners.elementAt(index);
            if (elem != null) {
                if (elem instanceof Synapse)
                    ((Synapse) elem).addNoise(amplitude);
            }
        }
    }

    /**
     * Initializes the weights of the biases and of all the connected synapses
     * @param amplitude the amplitude of the applied noise
     */
    public void randomize(double amplitude) {
        InputPatternListener elem;
        // bias.randomize(-1.0 * amplitude, amplitude);
        bias.initialize();
        if (inputPatternListeners == null) {
            return;
        }
        int currentSize = inputPatternListeners.size();
        for (int index = 0; index < currentSize; index++) {
            elem = (InputPatternListener) inputPatternListeners.elementAt(index);
            if (elem != null) {
                if (elem instanceof Synapse)
                    ((Synapse) elem).randomize(amplitude);
            }
        }
    }

    /**
     * Reverse transfer function of the component.
     * @param pattern input pattern on which to apply the transfer function
     * @throws JooneRuntimeException
     */
    protected abstract void backward(double[] pattern) throws JooneRuntimeException;

    /**
     * Copies one layer into another, to obtain a type-transformation
     * from one kind of Layer to another.
     * The old Layer is disconnected from the net, and the new Layer
     * takes its place.
     * @param newLayer the new layer with which to replace this one
     * @return The new layer
     */
    public NeuralLayer copyInto(NeuralLayer newLayer) {
        newLayer.setMonitor(getMonitor());
        newLayer.setRows(getRows());
        newLayer.setBias(getBias());
        newLayer.setLayerName(getLayerName());
        newLayer.setAllInputs((Vector) getAllInputs().clone());
        newLayer.setAllOutputs((Vector) getAllOutputs().clone());
        removeAllInputs();
        removeAllOutputs();
        return newLayer;
    }
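    /*
     * A copyInto usage sketch (illustrative only, not part of the original
     * source; SigmoidLayer and LinearLayer are assumed concrete subclasses):
     *
     *   Layer old = new LinearLayer("hidden");
     *   // ... old is wired into a net ...
     *   NeuralLayer replacement = old.copyInto(new SigmoidLayer());
     *   // replacement now holds old's name, rows, bias and connections,
     *   // while old has been detached from the net.
     */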
    /**
     * Calls all the fwdGet methods on the input synapses to get the input patterns
     */
    protected void fireFwdGet() {
        double[] patt;
        Pattern tPatt;
        InputPatternListener tempListener = null;
        int currentSize = inputPatternListeners.size();
        step = 0;
        for (int index = 0; (index < currentSize) && running; index++) {
            tempListener = (InputPatternListener) inputPatternListeners.elementAt(index);
            if (tempListener != null) {
                tPatt = tempListener.fwdGet();
                if (tPatt != null) {
                    patt = tPatt.getArray();
                    if (patt.length != inps.length) {
                        adjustSizeToFwdPattern(patt);
                    }
                    // Sum the received pattern into inps.
                    sumInput(patt);
                    if (step != STOP_FLAG)
                        /* In case of a recurrent network, the layer could receive
                         * patterns with different sequence numbers.
                         * The stored sequence number is the highest one.
                         */
                        if ((step < tPatt.getCount()) || (tPatt.getCount() == STOP_FLAG)) // The stop is guaranteed
                            step = tPatt.getCount();
                }
            }
        }
    }

    /**
     * Calls all the fwdPut methods on the output synapses to pass
     * them the calculated patterns
     * @param pattern the Pattern to pass to the output synapses
     */
    protected void fireFwdPut(Pattern pattern) {
        if (outputPatternListeners == null) {
            return;
        }
        int currentSize = outputPatternListeners.size();
        OutputPatternListener tempListener = null;
        for (int index = 0; (index < currentSize) && running; index++) {
            tempListener = (OutputPatternListener) outputPatternListeners.elementAt(index);
            if (tempListener != null) {
                boolean loop = false;
                if (tempListener instanceof Synapse)
                    loop = ((Synapse) tempListener).isLoopBack();
                // During a learning cycle, a single non-loopback listener can
                // receive the pattern as-is; otherwise each listener gets its own clone.
                if ((currentSize == 1) && getMonitor().isLearningCicle(pattern.getCount()) && !loop)
                    tempListener.fwdPut(pattern);
                else
                    tempListener.fwdPut((Pattern) pattern.clone());
            }
        }
    }

    /**
     * Calls all the revGet methods on the output synapses to get the error gradients
     */
    protected void fireRevGet() {
        if (outputPatternListeners == null) {
            return;
        }
        double[] patt;
        Pattern tPatt;
        int currentSize = outputPatternListeners.size();
        OutputPatternListener tempListener = null;
        for (int index = 0; (index < currentSize) && running; index++) {
            tempListener = (OutputPatternListener) outputPatternListeners.elementAt(index);
            if (tempListener != null) {
                tPatt = tempListener.revGet();
                if (tPatt != null) {
                    patt = tPatt.getArray();
                    if (patt.length != gradientInps.length) {
                        adjustSizeToRevPattern(patt);
                    }
                    // Sum the received error gradient pattern into gradientInps.
                    sumBackInput(patt);
                }
            }
        }
    }

    /**
     * Calls all the revPut methods on the input synapses to pass
     * them the resulting calculated gradients
     * @param pattern the Pattern to pass to the input listeners
     */
    protected void fireRevPut(Pattern pattern) {
        if (inputPatternListeners == null) {
            return;
        }
        int currentSize = inputPatternListeners.size();
        InputPatternListener tempListener = null;
        for (int index = 0; (index < currentSize) && running; index++) {
            tempListener = (InputPatternListener) inputPatternListeners.elementAt(index);
            if (tempListener != null) {
                boolean loop = false;
                if (tempListener instanceof Synapse)
                    loop = ((Synapse) tempListener).isLoopBack();
                if ((currentSize == 1) && !loop)
                    tempListener.revPut(pattern);
                else
                    tempListener.revPut((Pattern) pattern.clone());
            }
        }
    }
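    /*
     * Taken together, the four fire* methods implement one step of the
     * layer's pipeline. A rough sketch of how the layer's thread is expected
     * to drive them (illustrative only; the actual run() method is not shown
     * in this listing, and sumInput/sumBackInput are assumed to accumulate
     * the received arrays elementwise into inps/gradientInps):
     *
     *   while (running) {
     *       fireFwdGet();                              // gather and sum the inputs
     *       forward(inps);                             // apply the transfer function
     *       fireFwdPut(new Pattern(outs));             // push the results downstream
     *       if (learning) {                            // the net's phase (see the 'learning' field)
     *           fireRevGet();                          // gather the error gradients
     *           backward(gradientInps);                // apply the inverse transfer function
     *           fireRevPut(new Pattern(gradientOuts)); // push the gradients upstream
     *       }
     *   }
     */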
    /**
     * Adjusts the size of a layer if the size of the forward pattern differs.
     *
     * @param aPattern the pattern holding a different size than the layer
     * (dimension of neurons is not in accordance with the dimension of the
     * pattern that is being forwarded).
     */
    protected void adjustSizeToFwdPattern(double[] aPattern) {
        // This method is included to give layers (e.g. RBF layers) a chance
        // to take different actions (by overriding it) in case the pattern
        // has a different size than the layer.
        int myOldSize = getRows();
        setRows(aPattern.length);
        log.warn("Pattern size mismatches #neurons. #neurons in layer '" + getLayerName()
                + "' adjusted [fwd pass, " + myOldSize + " -> " + getRows() + "].");
    }

    /**
     * Adjusts the size of a layer if the size of the reverse pattern differs.
     *
     * @param aPattern the pattern holding a different size than the layer
     * (dimension of neurons is not in accordance with the dimension of the
     * pattern that is being reversed).
     */
    protected void adjustSizeToRevPattern(double[] aPattern) {
        // This method is included to give layers (e.g. RBF layers) a chance
        // to take different actions (by overriding it) in case the pattern
        // has a different size than the layer.
        int myOldSize = getRows();
        setRows(aPattern.length);
        log.warn("Pattern size mismatches #neurons. #neurons in layer '" + getLayerName()
                + "' adjusted [rev pass, " + myOldSize + " -> " + getRows() + "].");
    }

    /**
     * Transfer function to recall a result on a trained net
     * @param pattern input pattern to which to apply the transfer function
     * @throws JooneRuntimeException
     */
    // TO DO: Transform the JooneRuntimeException to JoonePropagationException
    protected abstract void forward(double[] pattern) throws JooneRuntimeException;

    /**
     * Returns the vector of the input listeners
     * @return the connected input pattern listeners
     */
    public Vector getAllInputs() {
        return inputPatternListeners;
    }

    /**
     * Returns the vector of the output listeners
     * @return the connected output pattern listeners
     */
    public Vector getAllOutputs() {
        return outputPatternListeners;
    }

    /**
     * Returns the bias matrix
     * @return the layer biases
     */
    public Matrix getBias() {
        return bias;
    }

    /**
     * Returns the number of neurons contained in the layer
     * @return the number of neurons in the layer
     */
    public int getDimension() {
        return getRows();
    }

    /**
     * Returns the name of the layer
     * @return the name of the layer
     */
    public String getLayerName() {
        return LayerName;
    }

    /**
     * Returns the monitor object
     * @return the layer's Monitor object
     */
    public Monitor getMonitor() {
        return monitor;
    }

    /**
     * Returns the dimension (# of neurons) of the Layer
     * @return the number of neurons in the layer
     */
    public int getRows() {
        return rows;
    }

    /**
     * Removes all the input listeners of the layer
     */
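    /*
     * The adjustSizeTo*Pattern hooks let subclasses react differently to a
     * size mismatch. A hypothetical override sketch (illustrative only;
     * FixedSizeLayer is not part of Joone):
     *
     *   public class FixedSizeLayer extends SigmoidLayer {
     *       protected void adjustSizeToFwdPattern(double[] aPattern) {
     *           // Keep the configured size instead of adopting the pattern's.
     *           throw new JooneRuntimeException("Pattern size " + aPattern.length
     *                   + " does not match layer '" + getLayerName() + "'");
     *       }
     *   }
     */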