ConverterUtils.java
      return result;
    }

    /**
     * returns the next element and sets the specified dataset, null if
     * none available.
     *
     * @param dataset the dataset to set for the instance
     * @return the next Instance
     */
    public Instance nextElement(Instances dataset) {
      Instance result;

      result = null;

      if (isIncremental()) {
        // is there still an instance in the buffer?
        if (m_IncrementalBuffer != null) {
          result = m_IncrementalBuffer;
          m_IncrementalBuffer = null;
        }
        else {
          try {
            result = m_Loader.getNextInstance(dataset);
          }
          catch (Exception e) {
            e.printStackTrace();
            result = null;
          }
        }
      }
      else {
        if (m_BatchCounter < m_BatchBuffer.numInstances()) {
          result = m_BatchBuffer.instance(m_BatchCounter);
          m_BatchCounter++;
        }
      }

      // only set the dataset if an instance was actually retrieved, otherwise
      // this would throw a NullPointerException instead of returning null as
      // documented
      if (result != null)
        result.setDataset(dataset);

      return result;
    }

    /**
     * convenience method for loading a dataset in batch mode.
     *
     * @param location the dataset to load
     * @return the dataset
     * @throws Exception if loading fails
     * @see #DataSource(String)
     */
    public static Instances read(String location) throws Exception {
      DataSource source;
      Instances result;

      source = new DataSource(location);
      result = source.getDataSet();

      return result;
    }

    /**
     * convenience method for loading a dataset in batch mode from a stream.
     *
     * @param stream the stream to load the dataset from
     * @return the dataset
     * @throws Exception if loading fails
     * @see #DataSource(InputStream)
     */
    public static Instances read(InputStream stream) throws Exception {
      DataSource source;
      Instances result;

      source = new DataSource(stream);
      result = source.getDataSet();

      return result;
    }

    /**
     * convenience method for loading a dataset in batch mode.
     *
     * @param loader the loader to get the dataset from
     * @return the dataset
     * @throws Exception if loading fails
     * @see #DataSource(Loader)
     */
    public static Instances read(Loader loader) throws Exception {
      DataSource source;
      Instances result;

      source = new DataSource(loader);
      result = source.getDataSet();

      return result;
    }

    /**
     * for testing only - takes a data file as input.
     *
     * @param args the commandline arguments
     * @throws Exception if something goes wrong
     */
    public static void main(String[] args) throws Exception {
      if (args.length != 1) {
        System.out.println("\nUsage: " + DataSource.class.getName() + " <file>\n");
        System.exit(1);
      }

      DataSource loader = new DataSource(args[0]);

      System.out.println("Incremental? " + loader.isIncremental());
      System.out.println("Loader: " + loader.getLoader().getClass().getName());
      System.out.println("Data:\n");
      Instances structure = loader.getStructure();
      System.out.println(structure);
      while (loader.hasMoreElements(structure))
        System.out.println(loader.nextElement(structure));

      // load the full dataset and iterate over a DataSource wrapping the
      // in-memory copy
      Instances inst = loader.getDataSet();
      loader = new DataSource(inst);
      System.out.println("\n\nProxy-Data:\n");
      System.out.println(loader.getStructure());
      while (loader.hasMoreElements(structure))
        System.out.println(loader.nextElement(inst));
    }
  }
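  /*
   * Usage sketch (illustration only, not part of the original file): the two
   * typical ways of consuming a DataSource. The file name "iris.arff" is a
   * hypothetical placeholder; incremental reading assumes the underlying
   * loader supports it, otherwise the same loop is served from the internal
   * batch buffer, as nextElement(Instances) above shows.
   *
   *   // batch mode: one call, whole dataset in memory
   *   Instances data = DataSource.read("iris.arff");
   *
   *   // incremental mode: pull one Instance at a time
   *   DataSource source = new DataSource("iris.arff");
   *   Instances structure = source.getStructure();
   *   while (source.hasMoreElements(structure)) {
   *     Instance inst = source.nextElement(structure);
   *     // ... process inst ...
   *   }
   */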
  /**
   * Helper class for saving data to files. Via the ConverterUtils
   * class it determines which converter to use for saving the data.
   * It is the logical counterpart to <code>DataSource</code>.
   *
   * @author FracPete (fracpete at waikato dot ac dot nz)
   * @version $Revision: 1.14 $
   * @see DataSource
   */
  public static class DataSink implements Serializable {

    /** for serialization. */
    private static final long serialVersionUID = -1504966891136411204L;

    /** the saver to use for storing the data. */
    protected Saver m_Saver = null;

    /** the stream to store the data in (always in ARFF format). */
    protected OutputStream m_Stream = null;

    /**
     * initializes the sink to save the data to the given file.
     *
     * @param filename the file to save data to
     * @throws Exception if setting up the saver fails
     */
    public DataSink(String filename) throws Exception {
      m_Stream = null;

      if (DataSource.isArff(filename))
        m_Saver = new ArffSaver();
      else
        m_Saver = getSaverForFile(filename);

      ((AbstractFileSaver) m_Saver).setFile(new File(filename));
    }

    /**
     * initializes the sink to save the data to the given Saver (expected to be
     * fully configured).
     *
     * @param saver the saver to use for saving the data
     */
    public DataSink(Saver saver) {
      m_Saver = saver;
      m_Stream = null;
    }

    /**
     * initializes the sink to save the data in the stream (always in ARFF
     * format).
     *
     * @param stream the output stream to use for storing the data in ARFF
     * format
     */
    public DataSink(OutputStream stream) {
      m_Saver = null;
      m_Stream = stream;
    }

    /**
     * writes the given data either via the saver or to the defined
     * output stream (depending on the constructor). In the case of a stream,
     * the stream is only flushed, but not closed.
     *
     * @param data the data to save
     * @throws Exception if saving fails
     */
    public void write(Instances data) throws Exception {
      if (m_Saver != null) {
        m_Saver.setInstances(data);
        m_Saver.writeBatch();
      }
      else {
        m_Stream.write(data.toString().getBytes());
        m_Stream.flush();
      }
    }

    /**
     * writes the data to the given file.
     *
     * @param filename the file to write the data to
     * @param data the data to store
     * @throws Exception if writing fails
     */
    public static void write(String filename, Instances data) throws Exception {
      DataSink sink;

      sink = new DataSink(filename);
      sink.write(data);
    }

    /**
     * writes the data via the given saver.
     *
     * @param saver the saver to use for writing the data
     * @param data the data to store
     * @throws Exception if writing fails
     */
    public static void write(Saver saver, Instances data) throws Exception {
      DataSink sink;

      sink = new DataSink(saver);
      sink.write(data);
    }

    /**
     * writes the data to the given stream (always in ARFF format).
     *
     * @param stream the stream to write the data to (ARFF format)
     * @param data the data to store
     * @throws Exception if writing fails
     */
    public static void write(OutputStream stream, Instances data) throws Exception {
      DataSink sink;

      sink = new DataSink(stream);
      sink.write(data);
    }

    /**
     * for testing only - takes a data file as input and a data file for the
     * output.
     *
     * @param args the commandline arguments
     * @throws Exception if something goes wrong
     */
    public static void main(String[] args) throws Exception {
      if (args.length != 2) {
        System.out.println(
            "\nUsage: " + DataSink.class.getName() + " <input-file> <output-file>\n");
        System.exit(1);
      }

      // load data
      Instances data = DataSource.read(args[0]);

      // save data
      DataSink.write(args[1], data);
    }
  }
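  /*
   * Usage sketch (illustration only, not part of the original file): writing a
   * dataset with the DataSink helpers. The file names are hypothetical; the
   * output format is chosen from the file extension (here presumably CSVSaver
   * for ".csv"), while a raw OutputStream always receives ARFF.
   *
   *   Instances data = DataSource.read("iris.arff");
   *
   *   // let the extension pick the saver
   *   DataSink.write("iris.csv", data);
   *
   *   // or write ARFF to an arbitrary stream (flushed, not closed)
   *   DataSink.write(System.out, data);
   */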
  /** the core loaders - hardcoded list necessary for RMI/Remote Experiments
   * (comma-separated list). */
  public final static String CORE_FILE_LOADERS =
      weka.core.converters.ArffLoader.class.getName() + ","
    + weka.core.converters.C45Loader.class.getName() + ","
    + weka.core.converters.CSVLoader.class.getName() + ","
    + weka.core.converters.DatabaseConverter.class.getName() + ","
    + weka.core.converters.LibSVMLoader.class.getName() + ","
    + weka.core.converters.SerializedInstancesLoader.class.getName() + ","
    + weka.core.converters.TextDirectoryLoader.class.getName() + ","
    + weka.core.converters.XRFFLoader.class.getName();

  /** the core savers - hardcoded list necessary for RMI/Remote Experiments
   * (comma-separated list). */
  public final static String CORE_FILE_SAVERS =
      weka.core.converters.ArffSaver.class.getName() + ","
    + weka.core.converters.C45Saver.class.getName() + ","
    + weka.core.converters.CSVSaver.class.getName() + ","
    + weka.core.converters.DatabaseConverter.class.getName() + ","
    + weka.core.converters.LibSVMSaver.class.getName() + ","
    + weka.core.converters.SerializedInstancesSaver.class.getName() + ","
    + weka.core.converters.XRFFSaver.class.getName();

  /** all available loaders (extension <-> classname). */
  protected static Hashtable<String,String> m_FileLoaders;

  /** all available URL loaders (extension <-> classname). */
  protected static Hashtable<String,String> m_URLFileLoaders;

  /** all available savers (extension <-> classname). */
  protected static Hashtable<String,String> m_FileSavers;

  // determine all loaders/savers
  static {
    Vector classnames;

    try {
      // generate properties
      // Note: does NOT work with RMI, hence the hardcoded CORE_FILE_LOADERS/
      // CORE_FILE_SAVERS fallbacks
      GenericPropertiesCreator creator = new GenericPropertiesCreator();
      creator.execute(false);
      Properties props = creator.getOutputProperties();

      // init
      m_FileLoaders    = new Hashtable<String,String>();
      m_URLFileLoaders = new Hashtable<String,String>();
      m_FileSavers     = new Hashtable<String,String>();

      // loaders
      m_FileLoaders = getFileConverters(
          props.getProperty(Loader.class.getName(), CORE_FILE_LOADERS),
          new String[]{FileSourcedConverter.class.getName()});

      // URL loaders
      m_URLFileLoaders = getFileConverters(
          props.getProperty(Loader.class.getName(), CORE_FILE_LOADERS),
          new String[]{
            FileSourcedConverter.class.getName(),
            URLSourcedLoader.class.getName()});

      // savers
      m_FileSavers = getFileConverters(
          props.getProperty(Saver.class.getName(), CORE_FILE_SAVERS),
          new String[]{FileSourcedConverter.class.getName()});
    }
    catch (Exception e) {
      // ignore
    }
    finally {
      // loaders
      if (m_FileLoaders.size() == 0) {
        classnames = GenericObjectEditor.getClassnames(AbstractFileLoader.class.getName());
        if (classnames.size() > 0)
          m_FileLoaders = getFileConverters(
              classnames,
              new String[]{FileSourcedConverter.class.getName()});
        else
          m_FileLoaders = getFileConverters(
              CORE_FILE_LOADERS,
              new String[]{FileSourcedConverter.class.getName()});
      }

      // URL loaders
      if (m_URLFileLoaders.size() == 0) {
        classnames = GenericObjectEditor.getClassnames(AbstractFileLoader.class.getName());
        if (classnames.size() > 0)
          m_URLFileLoaders = getFileConverters(
              classnames,
              new String[]{
                FileSourcedConverter.class.getName(),
                URLSourcedLoader.class.getName()});
        else
          m_URLFileLoaders = getFileConverters(
              CORE_FILE_LOADERS,
              new String[]{
                FileSourcedConverter.class.getName(),
                URLSourcedLoader.class.getName()});
      }

      // savers
      if (m_FileSavers.size() == 0) {
        classnames = GenericObjectEditor.getClassnames(AbstractFileSaver.class.getName());
        if (classnames.size() > 0)
          m_FileSavers = getFileConverters(
              classnames,
              new String[]{FileSourcedConverter.class.getName()});
        else
          m_FileSavers = getFileConverters(
              CORE_FILE_SAVERS,
              new String[]{FileSourcedConverter.class.getName()});
      }
    }
  }

  // (the remainder of the class, e.g. the getFileConverters and
  // getSaverForFile helpers referenced above, is truncated in this excerpt)
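  /*
   * Usage sketch (illustration only, not part of the original file): the
   * extension tables built above back the lookup helpers used elsewhere in
   * this class, e.g. the getSaverForFile(String) call in DataSink(String).
   * Assuming the usual WEKA signature returning an AbstractFileSaver (null if
   * the extension is unknown), a converter can be resolved from a file name
   * alone:
   *
   *   AbstractFileSaver saver = ConverterUtils.getSaverForFile("out.xrff");
   *   if (saver != null) {
   *     saver.setFile(new File("out.xrff"));
   *     saver.setInstances(data);
   *     saver.writeBatch();
   *   }
   */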