📄 joonetools.java
        nnet.addLayer(layers[0], NeuralNet.INPUT_LAYER);

        // Output layer
        switch (outputType) {
            case WTA:
                layers[1] = new WTALayer();
                ((WTALayer) layers[1]).setLayerWidth(nodes[1]);
                ((WTALayer) layers[1]).setLayerHeight(nodes[2]);
                break;
            case GAUSSIAN:
                layers[1] = new GaussianLayer();
                ((GaussianLayer) layers[1]).setLayerWidth(nodes[1]);
                ((GaussianLayer) layers[1]).setLayerHeight(nodes[2]);
                break;
            default:
                throw new IllegalArgumentException("create_unsupervised: output type not supported");
        }
        layers[1].setLayerName("output");
        nnet.addLayer(layers[1], NeuralNet.OUTPUT_LAYER);

        // Synapse
        connect(layers[0], new KohonenSynapse(), layers[1]);

        Monitor mon = nnet.getMonitor();
        mon.setLearningRate(0.7);
        return nnet;
    }

    /**
     * Interrogates a neural network with an array of doubles and returns the output
     * of the neural network.
     * @param nnet The neural network to interrogate
     * @param input The input pattern (must have size = # of input nodes)
     * @return An array of double having size = # of output nodes
     */
    public static double[] interrogate(NeuralNet nnet, double[] input) {
        nnet.removeAllInputs();
        nnet.removeAllOutputs();

        DirectSynapse inputSynapse = new DirectSynapse();
        DirectSynapse outputSynapse = new DirectSynapse();
        nnet.addInputSynapse(inputSynapse);
        nnet.addOutputSynapse(outputSynapse);

        Pattern inputPattern = new Pattern(input);
        inputPattern.setCount(1);
        nnet.getMonitor().setLearning(false);

        // Start the network
        // TODO: Adapt to the single-thread mode
        nnet.start();

        // Interrogate the network
        inputSynapse.fwdPut(inputPattern);
        Pattern outputPattern = outputSynapse.fwdGet();

        // Stop the network
        inputSynapse.fwdPut(stopPattern(input.length));
        outputSynapse.fwdGet();
        nnet.join();

        return outputPattern.getArray();
    }
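    // Usage sketch (illustrative only, not part of the original JooneTools API):
    // a minimal example of calling interrogate() on an already trained network.
    // The method name and the 2-element pattern are assumptions; the pattern
    // length must match the network's number of input nodes.
    public static void interrogateExample(NeuralNet trainedNet) {
        double[] pattern = { 0.3, 0.7 };
        double[] output = interrogate(trainedNet, pattern);
        System.out.println("Output: " + java.util.Arrays.toString(output));
    }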
    /**
     * Trains a neural network using the input/desired pairs contained in 2D arrays of double.
     * If Monitor.trainingPatterns = 0, all the input array's rows will be used for training.
     * @param nnet The neural network to train
     * @param input 2D array of double containing the training data. The # of columns must be equal to the # of input nodes
     * @param desired 2D array of double containing the target data. The # of columns must be equal to the # of output nodes
     * @param epochs Number of max training epochs
     * @param stopRMSE The desired min error at which the training must stop. If zero, the training continues until the last epoch is reached.
     * @param epochs_btw_reports Number of epochs between the notifications on the stdOut
     * @param stdOut The object representing the output. It can be either a PrintStream or a NeuralNetListener instance. If null, no notifications will be made.
     * @param async If true, the method returns after having started the network, without waiting for completion. In this case, the value returned is zero.
     * @return The final training RMSE (or MSE)
     */
    public static double train(NeuralNet nnet, double[][] input, double[][] desired,
            int epochs, double stopRMSE, int epochs_btw_reports, Object stdOut, boolean async) {
        MemoryInputSynapse memInput = new MemoryInputSynapse();
        memInput.setInputArray(input);
        memInput.setAdvancedColumnSelector("1-" + input[0].length);

        MemoryInputSynapse memTarget = null;
        if (desired != null) {
            memTarget = new MemoryInputSynapse();
            memTarget.setInputArray(desired);
            memTarget.setAdvancedColumnSelector("1-" + desired[0].length);
        }

        Monitor mon = nnet.getMonitor();
        if (mon.isValidation()) {
            if (mon.getValidationPatterns() == 0)
                mon.setValidationPatterns(input.length);
        } else {
            if (mon.getTrainingPatterns() == 0)
                mon.setTrainingPatterns(input.length);
        }
        return train_on_stream(nnet, memInput, memTarget, epochs, stopRMSE, epochs_btw_reports, stdOut, async);
    }

    /**
     * Trains a neural network in unsupervised mode (SOM and PCA networks)
     * using the input contained in a 2D array of double.
     * @param nnet The neural network to train
     * @param input 2D array of double containing the training data. The # of columns must be equal to the # of input nodes
     * @param epochs Number of max training epochs
     * @param epochs_btw_reports Number of epochs between the notifications on the stdOut
     * @param stdOut The object representing the output. It can be either System.out or a NeuralNetListener instance.
     * @param async If true, the method returns after having started the network, without waiting for completion.
     */
    public static void train_unsupervised(NeuralNet nnet, double[][] input,
            int epochs, int epochs_btw_reports, Object stdOut, boolean async) {
        nnet.getMonitor().setSupervised(false);
        train(nnet, input, null, epochs, 0, epochs_btw_reports, stdOut, async);
    }

    /**
     * Trains a neural network using StreamInputSynapses as the input/desired data sources.
     * The Monitor.trainingPatterns must be set before calling this method.
     * @param nnet The neural network to train
     * @param input The StreamInputSynapse containing the training data. The advColumnSelector must be set according to the # of input nodes
     * @param desired The StreamInputSynapse containing the target data. The advColumnSelector must be set according to the # of output nodes
     * @param epochs Number of max training epochs
     * @param stopRMSE The desired min error at which the training must stop. If zero, the training continues until the last epoch is reached.
     * @param epochs_btw_reports Number of epochs between the notifications on the stdOut
     * @param stdOut The object representing the output. It can be either a PrintStream or a NeuralNetListener instance. If null, no notifications will be made.
     * @param async If true, the method returns after having started the network, without waiting for completion. In this case, the value returned is zero.
     * @return The final training RMSE (or MSE)
     */
    public static double train_on_stream(NeuralNet nnet, StreamInputSynapse input, StreamInputSynapse desired,
            int epochs, double stopRMSE, int epochs_btw_reports, Object stdOut, boolean async) {
        nnet.removeAllInputs();
        nnet.removeAllOutputs();
        nnet.addInputSynapse(input);

        if (desired != null) {
            TeachingSynapse teacher = new TeachingSynapse();
            teacher.setDesired(desired);
            nnet.addOutputSynapse(teacher);
            nnet.setTeacher(teacher);
        }
        return train_complete(nnet, epochs, stopRMSE, epochs_btw_reports, stdOut, async);
    }
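    // Usage sketch (illustrative only, not part of the original JooneTools API):
    // supervised training on in-memory arrays via train(). The XOR-style data,
    // epoch count and RMSE threshold are arbitrary assumptions; the network is
    // assumed to have been built elsewhere with 2 input and 1 output node.
    public static double trainExample(NeuralNet nnet) {
        double[][] inputs  = { { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 } };
        double[][] desired = { { 0 }, { 1 }, { 1 }, { 0 } };
        // Max 5000 epochs, stop below RMSE 0.01, report every 500 epochs on
        // System.out (a PrintStream), and wait for completion (async = false).
        return train(nnet, inputs, desired, 5000, 0.01, 500, System.out, false);
    }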
    /**
     * Trains a complete neural network, i.e. a network having
     * all the parameters and the I/O components already set.
     * @param nnet The neural network to train
     * @param epochs Number of max training epochs
     * @param stopRMSE The desired min error at which the training must stop. If zero, the training continues until the last epoch is reached.
     * @param epochs_btw_reports Number of epochs between the notifications on the stdOut
     * @param stdOut The object representing the output. It can be either a PrintStream or a NeuralNetListener instance. If null, no notifications will be made.
     * @param async If true, the method returns after having started the network, without waiting for completion. In this case, the value returned is zero.
     * @return The final training RMSE (or MSE)
     */
    public static double train_complete(NeuralNet nnet, int epochs, double stopRMSE,
            int epochs_btw_reports, Object stdOut, boolean async) {
        nnet.removeAllListeners();
        Monitor mon = nnet.getMonitor();
        if (stdOut != null) {
            mon.addNeuralNetListener(createListener(nnet, stdOut, epochs_btw_reports));
        }

        ErrorBasedTerminator term = null;
        if (stopRMSE > 0) {
            term = new ErrorBasedTerminator(stopRMSE);
            term.setNeuralNet(nnet);
            mon.addNeuralNetListener(term);
        }

        mon.setTotCicles(epochs);
        mon.setLearning(!mon.isValidation());

        TreeSet tree = nnet.check();
        if (tree.isEmpty()) {
            nnet.go(!async); // Returns immediately if async = true
            if (async)
                return 0.0d;

            NeuralNetAttributes attrib = nnet.getDescriptor();
            if (term != null) {
                if (term.isStopRequestPerformed()) {
                    attrib.setLastEpoch(term.getStoppedCycle());
                } else {
                    attrib.setLastEpoch(mon.getTotCicles());
                }
            }
            if (mon.isValidation()) {
                attrib.setValidationError(mon.getGlobalError());
            } else {
                attrib.setTrainingError(mon.getGlobalError());
            }
            return mon.getGlobalError();
        } else {
            throw new IllegalArgumentException("Cannot start, errors found: " + tree.toString());
        }
    }

    /**
     * Tests a neural network using the input/desired pairs contained in 2D arrays of double.
     * This method doesn't change the weights, but only calculates the RMSE.
     * If Monitor.validationPatterns = 0, all the input array's rows will be used for testing.
     * @param nnet The neural network to test
     * @param input 2D array of double containing the test data. The # of columns must be equal to the # of input nodes
     * @param desired 2D array of double containing the target data. The # of columns must be equal to the # of output nodes
     * @return The test RMSE (or MSE)
     */
    public static double test(NeuralNet nnet, double[][] input, double[][] desired) {
        nnet.getMonitor().setValidation(true);
        return train(nnet, input, desired, 1, 0, 0, null, false);
    }

    /**
     * Tests a neural network using StreamInputSynapses as the input/desired data sources.
     * This method doesn't change the weights, but only calculates the RMSE.
     * The Monitor.validationPatterns must be set before calling this method.
     * @param nnet The neural network to test
     * @param input The StreamInputSynapse containing the test data. The advColumnSelector must be set according to the # of input nodes
     * @param desired The StreamInputSynapse containing the target data. The advColumnSelector must be set according to the # of output nodes
     * @return The test RMSE (or MSE)
     */
    public static double test_on_stream(NeuralNet nnet, StreamInputSynapse input, StreamInputSynapse desired) {
        nnet.getMonitor().setValidation(true);
        return train_on_stream(nnet, input, desired, 1, 0, 0, null, false);
    }
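    // Usage sketch (illustrative only, not part of the original JooneTools API):
    // measuring the error on a held-out set with test(); no weights are changed.
    // The parameter names are assumptions; the array column counts must match
    // the network's input/output node counts.
    public static double testExample(NeuralNet trainedNet, double[][] testInputs, double[][] testTargets) {
        double rmse = test(trainedNet, testInputs, testTargets);
        System.out.println("Validation RMSE: " + rmse);
        return rmse;
    }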
    /**
     * Compares the output and target data of a trained neural network using 2D arrays
     * of double as the input/desired data sources.
     * If Monitor.validationPatterns = 0, all the input array's rows will be used for testing.
     * @param nnet The neural network to test
     * @param input 2D array of double containing the test data. The # of columns must be equal to the # of input nodes
     * @param desired 2D array of double containing the target data. The # of columns must be equal to the # of output nodes
     * @return A 2D array of double containing the output+desired data for each pattern
     */
    public static double[][] compare(NeuralNet nnet, double[][] input, double[][] desired) {
        MemoryInputSynapse memInput = new MemoryInputSynapse();
        memInput.setInputArray(input);
        memInput.setAdvancedColumnSelector("1-" + input[0].length);