📄 layer.java
字号:
*/ private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { if (in.getClass().getName().indexOf("xstream") != -1) { in.defaultReadObject(); } else { LayerName = (String) in.readObject(); rows = in.readInt(); bias = (Matrix) in.readObject(); monitor = (Monitor) in.readObject(); m_batch = in.readInt(); learning = in.readBoolean(); inputPatternListeners = readVector(in); outputPatternListeners = readVector(in); } setDimensions(); } /** * Write a serialized version of this layer * @param out the output stream to write this layer to * @throws IOException */ private void writeObject(ObjectOutputStream out) throws IOException { if (out.getClass().getName().indexOf("xstream") != -1) { out.defaultWriteObject(); } else { out.writeObject(LayerName); out.writeInt(rows); out.writeObject(bias); out.writeObject(monitor); out.writeInt(m_batch); out.writeBoolean(learning); writeVector(out, inputPatternListeners); writeVector(out, outputPatternListeners); } } /** * This method is useful to serialize only the vector's * elements that don't implement the Serialize interface, * only when the Monitor.isExporting returns the value TRUE. 
* @param out the output stream to write to * @param vect the Vector to serialize * @throws IOException */ private void writeVector(ObjectOutputStream out, Vector vect) throws IOException { if (vect != null) { boolean exporting = false; if ((monitor != null) && (monitor.isExporting())) exporting = true; for (int i = 0; i < vect.size(); ++i) { Object obj = vect.elementAt(i); if (!(obj instanceof NotSerialize) || !(exporting)) out.writeObject(obj); } } out.writeObject(null); } /** * Create a Vector from a serialized version * @param in the input stream serialized version * @return the deserialized Vector * @throws IOException * @throws ClassNotFoundException */ private Vector readVector(ObjectInputStream in) throws IOException, ClassNotFoundException { Vector vect = new Vector(); Object obj = in.readObject(); while (obj != null) { vect.addElement(obj); obj = in.readObject(); } return vect; } /** * Sets the input and output synapses' dimensions */ protected void setConnDimensions() { if (inputPatternListeners != null) { int currentSize = inputPatternListeners.size(); InputPatternListener tempListener = null; for (int index = 0; index < currentSize; index++) { tempListener = (InputPatternListener) inputPatternListeners.elementAt(index); if (tempListener != null) { setInputDimension(tempListener); } } } if (outputPatternListeners != null) { int currentSize = outputPatternListeners.size(); OutputPatternListener tempListener = null; for (int index = 0; index < currentSize; index++) { tempListener = (OutputPatternListener) outputPatternListeners.elementAt(index); if (tempListener != null) { setOutputDimension(tempListener); } } } } /** * Determine whether the execution thread is running * @return whether it is running */ public boolean isRunning() { synchronized(getThreadMonitor()) { if (myThread != null && myThread.isAlive()) { return true; } return false; } } /** * Get check messages from listeners. * Subclasses should call this method from thier own check method. 
* * @see NeuralLayer * @return validation errors. */ public TreeSet check() { // Prepare an empty set for check messages; TreeSet checks = new TreeSet(); // All layers must have at least one input patern listener. // The absense of an output patern listener is acceptable. if ((inputPatternListeners == null) || (inputPatternListeners.size() == 0)) { checks.add(new NetCheck(NetCheck.FATAL, "Layer has no input synapses attached.", this)); } // Get the input patern listener check messages; if (inputPatternListeners != null) { for (int i = 0; i < inputPatternListeners.size(); i++) { InputPatternListener listener = (InputPatternListener) inputPatternListeners.elementAt(i); checks.addAll(listener.check()); if (listener instanceof StreamInputSynapse) { StreamInputSynapse sis = (StreamInputSynapse) listener; int cols = sis.numColumns(); if (cols != rows) { checks.add(new NetCheck(NetCheck.FATAL, "Rows parameter does not match the number of columns for the attached input stream .", this)); } } } } // Get the input patern listener check messages; if (outputPatternListeners != null) { for (int i = 0; i < outputPatternListeners.size(); i++) { OutputPatternListener listener = (OutputPatternListener) outputPatternListeners.elementAt(i); checks.addAll(listener.check()); } } // Return check messages return checks; } /** * Produce a String representation of this layer * @see Object#toString() * @return string representation of the layer */ public String toString() { return getLayerName();// StringBuffer buf = new StringBuffer();// buf.append("Name : ")// .append(LayerName)// .append(", rows : ")// .append(rows)// .append(", Bias : ")// .append(bias)// .append(", Monitor : ")// .append(monitor);//// return buf.toString(); } /** * Method to help remove disused references quickly * when the layer goes out of scope. 
* @see Object#finalize() * @throws Throwable */ public void finalize() throws Throwable { super.finalize(); LayerName = null; bias = null; monitor = null; if(inputPatternListeners != null) { inputPatternListeners.clear(); inputPatternListeners = null; } if(outputPatternListeners != null) { outputPatternListeners.clear(); outputPatternListeners = null; } } /** * Method to get a collection of bias inspections for this layer * @return */ public Collection Inspections() { Collection col = new ArrayList(); col.add(new BiasInspection(bias)); return col; } /** * Get the title for the inspectable interface * @return */ public String InspectableTitle() { return getLayerName(); } /** * Determine whether this layer has an input synapse attached * that is a step counter. * @return whether it is a step counter. */ public boolean hasStepCounter() { Vector inps = getAllInputs(); if (inps == null) return false; for (int x = 0; x < inps.size(); ++x) { if (inps.elementAt(x) instanceof InputSynapse) { InputSynapse inp = (InputSynapse) inps.elementAt(x); if (inp.isStepCounter()) return true; } } return false; } /** * Determine whether this is an input layer. * @return whether this is an input layer */ public boolean isInputLayer() { Vector inputListeners = getAllInputs(); return checkInputs(inputListeners); } /** * Determine whether ther are any stream input synapses attached. * @param inputListeners Vector to check. * @return whether there are any attached StreamInputSynapses */ protected boolean checkInputs(Vector inputListeners) { if (inputListeners == null || inputListeners.size() == 0) { return true; } for (int x = 0; x < inputListeners.size(); ++x) { if (inputListeners.elementAt(x) instanceof StreamInputSynapse) { return true; } } return false; } /** * Determine whether this is an output layer. 
 * @return whether this is an output layer
 */
public boolean isOutputLayer() {
    Vector outputVectors = getAllOutputs();
    return checkOutputs(outputVectors);
}

/**
 * Determine whether there are any stream output or teach synapses attached.
 * Also checks the attached listeners of OutputSwitchSynapses.
 * Also checks for loopback condition.
 * All connected synapses must be of this type.
 * @param outputListeners Vector to check.
 * @return whether there are any attached StreamOutputSynapses
 */
protected boolean checkOutputs(Vector outputListeners) {
    boolean lastListener = false;
    // No listeners at all counts as an output layer.
    if (outputListeners == null || outputListeners.size() == 0) {
        return true;
    }
    for (int x = 0; x < outputListeners.size(); ++x) {
        // Stream/teaching listeners mark this as a terminal (output) layer.
        if ((outputListeners.elementAt(x) instanceof StreamOutputSynapse)
                || (outputListeners.elementAt(x) instanceof TeachingSynapse)
                || (outputListeners.elementAt(x) instanceof TeacherSynapse))
            lastListener = true;
        else if (outputListeners.elementAt(x) instanceof OutputSwitchSynapse) {
            // A switch qualifies only if everything attached behind it qualifies;
            // a single non-qualifying branch rejects the whole layer.
            OutputSwitchSynapse os = (OutputSwitchSynapse) outputListeners.elementAt(x);
            if (checkOutputs(os.getAllOutputs()))
                lastListener = true;
            else
                return false;
        } else if (outputListeners.elementAt(x) instanceof Synapse) {
            // A plain synapse is acceptable only when it loops back to this net;
            // a forward connection means data flows on, so not an output layer.
            Synapse syn = (Synapse) outputListeners.elementAt(x);
            if (syn.isLoopBack())
                lastListener = true;
            else
                return false;
        }
        // NOTE(review): listeners matching none of the above are silently ignored;
        // if ALL listeners are of unknown types, lastListener stays false.
    }
    return lastListener;
}

/** Returns the appropriate Learner object for this class
 * depending on the Monitor.learningMode property value
 * @return the Learner object if applicable, otherwise null
 * @see org.joone.engine.Learnable#getLearner()
 */
public Learner getLearner() {
    if (!learnable) {
        return null;
    }
    return getMonitor().getLearner();
}

/**
 * Initialize the Learner object of this layer
 * @see org.joone.engine.Learnable#initLearner()
 */
public void initLearner() {
    myLearner = getLearner();
    if (myLearner != null) {
        // The learner needs a back-reference to adjust this layer's state.
        myLearner.registerLearnable(this);
    }
}

/**
 * Getter for property myThreadMonitor.
 * @return Value of property myThreadMonitor.
*/
protected Object getThreadMonitor() {
    // Lazily created lock object guarding the execution-thread state.
    if (myThreadMonitor == null)
        myThreadMonitor = new Object();
    return myThreadMonitor;
}

/** Waits for the current layer's thread to stop. */
public void join() {
    try {
        if (myThread != null)
            myThread.join();
    } catch (InterruptedException e) {
        // Restore the interrupt status so callers can still observe it.
        Thread.currentThread().interrupt();
    } catch (NullPointerException doNothing) {
        /* As we cannot synchronize this method, we could get
         * a NullPointerException on calling myThread.join() */
    }
}

/*********************************************************
 * Implementation code for the single-thread version of Joone
 *********************************************************/

/**
 * This method serves to a single forward step
 * when the Layer is called from an external thread.
 * @param pattIn the input pattern, or null to pull one via fireFwdGet()
 */
public void fwdRun(Pattern pattIn) {
    Pattern patt = new Pattern();
    inps = new double[getRows()];
    running = true;
    if (pattIn == null) {
        // Pull the next pattern from the attached input synapses.
        fireFwdGet();
    } else {
        inps = pattIn.getArray();
    }
    if (running) {
        forward(inps);
        patt.setArray(outs);
        // A count of -1 marks the stop pattern; propagate it unchanged.
        if ((pattIn == null) || (pattIn.getCount() != -1)) {
            patt.setCount(step);
        } else {
            patt.setCount(-1);
        }
        fireFwdPut(patt);
    }
    running = false;
}

/**
 * This method serves to a single backward step
 * when the Layer is called from an external thread.
 * @param pattIn the gradient pattern, or null to pull one via fireRevGet()
 */
public void revRun(Pattern pattIn) {
    Pattern patt = new Pattern();
    gradientInps = new double[getDimension()];
    running = true;
    if (pattIn == null) {
        fireRevGet();
    } else {
        gradientInps = pattIn.getArray();
    }
    if (running) {
        backward(gradientInps);
        patt.setArray(gradientOuts);
        patt.setOutArray(outs);
        patt.setCount(step);
        fireRevPut(patt);
    }
    running = false;
}
}
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -