📄 layer.java
字号:
/**
 * Removes all the input listeners of the layer.
 * Iterates over a clone because removeInputSynapse() mutates the underlying Vector.
 */
public void removeAllInputs() {
    if (inputPatternListeners != null) {
        Vector tempVect = (Vector) inputPatternListeners.clone();
        for (int i = 0; i < tempVect.size(); ++i)
            this.removeInputSynapse((InputPatternListener) tempVect.elementAt(i));
        inputPatternListeners = null;
    }
}

/**
 * Removes all the output listeners of the layer.
 * Iterates over a clone because removeOutputSynapse() mutates the underlying Vector.
 */
public void removeAllOutputs() {
    if (outputPatternListeners != null) {
        Vector tempVect = (Vector) outputPatternListeners.clone();
        for (int i = 0; i < tempVect.size(); ++i)
            this.removeOutputSynapse((OutputPatternListener) tempVect.elementAt(i));
        outputPatternListeners = null;
    }
}

/**
 * Removes an input listener from the layer, marking it as no longer attached
 * and detaching it from the Monitor when it is also a NeuralNetListener.
 * The listener Vector is set to null once it becomes empty.
 * NOTE(review): not synchronized, while addInputSynapse() is — confirm callers
 * never race this against a running layer.
 *
 * @param newListener the input listener to remove
 */
public void removeInputSynapse(InputPatternListener newListener) {
    if (inputPatternListeners != null) {
        inputPatternListeners.removeElement(newListener);
        newListener.setInputFull(false);
        if (newListener instanceof NeuralNetListener) {
            removeListener((NeuralNetListener) newListener);
        }
        // Collapse back to null so "no inputs" is represented uniformly
        if (inputPatternListeners.size() == 0)
            inputPatternListeners = null;
    }
}

/**
 * Removes an output listener from the layer, marking it as no longer attached
 * and detaching it from the Monitor when it is also a NeuralNetListener.
 * The listener Vector is set to null once it becomes empty.
 *
 * @param newListener the output listener to remove
 */
public void removeOutputSynapse(OutputPatternListener newListener) {
    if (outputPatternListeners != null) {
        outputPatternListeners.removeElement(newListener);
        newListener.setOutputFull(false);
        if (newListener instanceof NeuralNetListener) {
            removeListener((NeuralNetListener) newListener);
        }
        // Collapse back to null so "no outputs" is represented uniformly
        if (outputPatternListeners.size() == 0)
            outputPatternListeners = null;
    }
}

/**
 * Unregisters a NeuralNetListener from the Monitor, if a Monitor is set.
 *
 * @param listener the listener to unregister
 */
protected void removeListener(NeuralNetListener listener) {
    if (getMonitor() != null)
        getMonitor().removeNeuralNetListener(listener);
}

/**
 * Gets the values lastly outputed by the neurons of this layer.
 * Returns a defensive copy so callers cannot mutate the internal buffer.
 *
 * @return a clone of the last output array
 */
public double[] getLastOutputs() {
    return (double[]) outs.clone();
}

/**
 * The core running engine of the layer, called from {@code start()}.
 * Loops while {@code running}: each iteration performs a recall (forward)
 * phase and, when the Monitor reports a learning cycle, a learning
 * (backward) phase. Any JooneRuntimeException stops the loop and is
 * reported through a NetErrorManager.
 *
 * @throws JooneRuntimeException declared, but in practice caught internally
 */
public void run() throws JooneRuntimeException {
    Pattern patt = new Pattern();
    while (running) {
        // Recall phase: fresh input accumulator each cycle (summed by sumInput)
        inps = new double[getRows()];
        try {
            fireFwdGet();
            // fireFwdGet() may have cleared 'running' (e.g. on stop pattern)
            if (running) {
                forward(inps);
                patt.setArray(outs);
                patt.setCount(step);
                fireFwdPut(patt);
            }
            if (step != STOP_FLAG)
                if (monitor != null) {
                    // Gets if the next step is a learning step
                    learning = monitor.isLearningCicle(step);
                } else
                    learning = false;
            else
                // Stops the layer
                running = false;
        } catch (JooneRuntimeException jre) {
            String msg = "JooneException thrown in run() method." + jre.getMessage();
            log.error(msg);
            running = false;
            // NOTE(review): constructed only for its side effect of notifying the net
            new NetErrorManager(getMonitor(), msg);
        }
        // Learning phase: propagate error gradients backward
        if (learning && running) {
            gradientInps = new double[getDimension()];
            try {
                fireRevGet();
                backward(gradientInps);
                patt.setArray(gradientOuts);
                // Added for some unsupervised learning algorithm (See org.joone.engine.Pattern)
                patt.setOutArray(outs);
                patt.setCount(step);
                fireRevPut(patt);
            } catch (JooneRuntimeException jre) {
                String msg = "In run() JooneException thrown." + jre.getMessage();
                log.error(msg);
                running = false;
                new NetErrorManager(getMonitor(), msg);
            }
        }
    } // END while (running = false)
    // NOTE(review): resetInputListeners() dereferences inputPatternListeners
    // without a null check — confirm inputs cannot be removed while running
    resetInputListeners();
    synchronized(getThreadMonitor()) { myThread = null;}
}

/**
 * Sets the Vector that contains all the input listeners.
 * Can be useful to set the input synapses taken from another Layer.
 * Resizes each listener to this layer's row count, then wakes any
 * thread blocked on this layer's lock.
 *
 * @param newInputPatternListeners the vector containing the list of input synapses
 */
public synchronized void setAllInputs(Vector newInputPatternListeners) {
    inputPatternListeners = newInputPatternListeners;
    if (inputPatternListeners != null)
        for (int i = 0; i < inputPatternListeners.size(); ++i)
            this.setInputDimension((InputPatternListener) inputPatternListeners.elementAt(i));
    notifyAll();
}

/**
 * Sets the Vector that contains all the input listeners.
 * ArrayList-typed convenience overload of setAllInputs(); added for Spring.
 *
 * @param newInputPatternListeners the list containing the input synapses
 */
public void setInputSynapses(ArrayList newInputPatternListeners) {
    this.setAllInputs(new Vector(newInputPatternListeners));
}

/**
 * Sets the Vector that contains all the output listeners.
 * Can be useful to set the output synapses taken from another Layer.
 * Resizes each listener to this layer's row count.
 *
 * @param newOutputPatternListeners the vector containing the list of output synapses
 */
public void setAllOutputs(Vector newOutputPatternListeners) {
    outputPatternListeners = newOutputPatternListeners;
    if (outputPatternListeners != null)
        for (int i = 0; i < outputPatternListeners.size(); ++i)
            this.setOutputDimension((OutputPatternListener) outputPatternListeners.elementAt(i));
}

/**
 * Sets the Vector that contains all the output listeners.
 * ArrayList-typed convenience overload of setAllOutputs(); added for Spring.
 *
 * @param newOutputPatternListeners the list containing the output synapses
 */
public void setOutputSynapses(ArrayList newOutputPatternListeners) {
    this.setAllOutputs(new Vector(newOutputPatternListeners));
}

/**
 * Sets the matrix of biases.
 *
 * @param newBias the Matrix object containing the biases
 */
public void setBias(Matrix newBias) {
    bias = newBias;
}

/**
 * Sets the dimension of the layer.
 * Override to define how the internal buffers must be sized.
 */
protected abstract void setDimensions();

/**
 * Sets the dimension of the listener passed as parameter.
 * Called after a new input listener is added, so the synapse's output
 * width matches this layer's row count.
 *
 * @param syn the listener to be affected
 */
protected void setInputDimension(InputPatternListener syn) {
    if (syn.getOutputDimension() != getRows())
        syn.setOutputDimension(getRows());
}

/**
 * Adds a new input synapse to the layer.
 * Rejected when already present or when the synapse is already attached
 * elsewhere (isInputFull). On success the synapse inherits this layer's
 * Monitor (if it has none) and is resized to this layer's rows.
 * Wakes any thread blocked on this layer's lock.
 *
 * @param newListener the new input synapse to add
 * @return whether the listener was added
 */
public synchronized boolean addInputSynapse(InputPatternListener newListener) {
    if (inputPatternListeners == null) {
        inputPatternListeners = new Vector();
    }
    boolean retValue = false;
    if (!inputPatternListeners.contains(newListener))
        if (!newListener.isInputFull()) {
            inputPatternListeners.addElement(newListener);
            if (newListener.getMonitor() == null)
                newListener.setMonitor(getMonitor());
            newListener.setInputFull(true);
            this.setInputDimension(newListener);
            retValue = true;
        }
    notifyAll();
    return retValue;
}

/**
 * Sets the name of the layer.
 *
 * @param newLayerName the name
 */
public void setLayerName(String newLayerName) {
    LayerName = newLayerName;
}

/**
 * Sets the monitor object, propagating it to all input and output synapses.
 *
 * @param mon the Monitor
 */
public void setMonitor(Monitor mon) {
    monitor = mon;
    // Sets the Monitor object of all input and output synapses
    setVectMonitor(inputPatternListeners, mon);
    setVectMonitor(outputPatternListeners, mon);
}

/**
 * Sets the monitor object for all pattern listeners in a Vector.
 *
 * @param vect the Vector of pattern listeners (may be null)
 * @param mon  the Monitor to be set
 */
private void setVectMonitor(Vector vect, Monitor mon) {
    if (vect != null) {
        int currentSize = vect.size();
        Object tempListener = null;
        for (int index = 0; index < currentSize; index++) {
            tempListener = vect.elementAt(index);
            if (tempListener != null)
                ((NeuralElement) tempListener).setMonitor(mon);
        }
    }
}

/**
 * Sets the dimension of the listener passed as parameter.
 * Called after a new output listener is added, so the synapse's input
 * width matches this layer's row count.
 *
 * @param syn the OutputPatternListener to affect
 */
protected void setOutputDimension(OutputPatternListener syn) {
    if (syn.getInputDimension() != getRows())
        syn.setInputDimension(getRows());
}

/**
 * Adds a new output synapse to the layer.
 * Rejected when already present or already attached elsewhere (isOutputFull).
 * NOTE(review): unlike addInputSynapse() this is not synchronized and always
 * overwrites the synapse's Monitor — confirm whether intentional.
 *
 * @param newListener the new output synapse
 * @return whether the listener was added
 */
public boolean addOutputSynapse(OutputPatternListener newListener) {
    if (outputPatternListeners == null)
        outputPatternListeners = new Vector();
    boolean retValue = false;
    if (!outputPatternListeners.contains(newListener))
        if (!newListener.isOutputFull()) {
            outputPatternListeners.addElement(newListener);
            newListener.setMonitor(getMonitor());
            newListener.setOutputFull(true);
            this.setOutputDimension(newListener);
            retValue = true;
        }
    return retValue;
}

/**
 * Sets the dimension (# of neurons) of the Layer.
 * On change, resizes the internal buffers, the attached synapses and the
 * bias matrix (rows x 1), discarding any previous bias values.
 *
 * @param newRows the number of the neurons contained in the Layer
 */
public void setRows(int newRows) {
    if (rows != newRows) {
        rows = newRows;
        setDimensions();
        setConnDimensions();
        bias = new Matrix(getRows(), 1);
    }
}

/**
 * Starts the Layer on its own Thread (named after the layer when a name is
 * set). Throws a JooneRuntimeException if no input synapse is connected, or
 * if none of the connected ones is enabled. No-op when already started
 * (myThread != null).
 */
public void start() {
    synchronized(getThreadMonitor()) {
        if (myThread == null) {
            // Check if some input synapse is connected
            if (inputPatternListeners != null) {
                if (checkInputEnabled()) {
                    // If all the input synapses are disabled, the layer doesn't start
                    running = true;
                    if (getLayerName() != null)
                        myThread = new Thread(this, getLayerName());
                    else
                        myThread = new Thread(this);
                    this.init();
                    myThread.start();
                } else {
                    String msg = "Can't start: '" + getLayerName() + "' has not input synapses connected and/or enabled";
                    log.error(msg);
                    throw new JooneRuntimeException(msg);
                }
            } else {
                String msg = "Can't start: '" + getLayerName() + "' has not input synapses connected";
                log.error(msg);
                throw new JooneRuntimeException(msg);
            }
        }
    }
}

/**
 * Initializes the layer's learner and every output synapse that is a
 * NeuralElement. Iterates over a clone of the listener Vector to guard
 * against concurrent modification.
 */
public void init() {
    this.initLearner();
    // initialize all the output synapses
    if (outputPatternListeners != null) {
        Vector tempVect = (Vector) outputPatternListeners.clone();
        for (int i = 0; i < tempVect.size(); ++i) {
            if (tempVect.elementAt(i) instanceof NeuralElement)
                ((NeuralElement) tempVect.elementAt(i)).init();
        }
    }
}

/**
 * Checks if at least one input synapse is enabled.
 * Precondition (enforced by start()): inputPatternListeners is not null.
 *
 * @return false if all the input synapses are disabled
 */
protected boolean checkInputEnabled() {
    for (int i = 0; i < inputPatternListeners.size(); ++i) {
        InputPatternListener iPatt = (InputPatternListener) inputPatternListeners.elementAt(i);
        if (iPatt.isEnabled())
            return true;
    }
    return false;
}

/**
 * Stops the Layer by clearing the running flag and interrupting its thread
 * (to break out of any blocking fireFwdGet/fireRevGet wait).
 */
public void stop() {
    synchronized(getThreadMonitor()) {
        if (myThread != null) {
            running = false;
            myThread.interrupt();
        }
    }
}

/**
 * Resets all the input listeners. Called at the end of run().
 * NOTE(review): assumes inputPatternListeners is non-null — guaranteed by
 * start(), provided inputs are not removed while the layer runs.
 */
protected void resetInputListeners() {
    int currentSize = inputPatternListeners.size();
    for (int index = 0; index < currentSize; index++) {
        InputPatternListener tempListener = (InputPatternListener) inputPatternListeners.elementAt(index);
        if (tempListener != null)
            tempListener.reset();
    }
}

/**
 * Calculates the net input of the error gradients during the learning
 * phase, accumulating the pattern element-wise into gradientInps.
 * A size mismatch is logged as a warning instead of aborting the cycle.
 *
 * @param pattern array of input values
 */
protected void sumBackInput(double[] pattern) {
    int x = 0;
    try {
        for (; x < gradientInps.length; ++x)
            gradientInps[x] += pattern[x];
    } catch (IndexOutOfBoundsException iobe) {
        // pattern shorter than gradientInps: report sizes and the failing index
        log.warn(getLayerName() + " gradInps.size:" + gradientInps.length + " pattern.size:" + pattern.length + " x:" + x);
    }
}

/**
 * Calculates the net input of the values in the recall phase,
 * accumulating the pattern element-wise into inps.
 * Assumes pattern.length >= inps.length — TODO confirm against callers.
 *
 * @param pattern array of input values
 */
protected void sumInput(double[] pattern) {
    for (int x = 0; x < inps.length; ++x) {
        inps[x] += pattern[x];
    }
}

/**
 * Read in a serialised version of this layer
 * @param in the serialised stream
 * @throws IOException
 * @throws ClassNotFoundException
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -