
SequenceFileOutputFormat.java

hadoop: Nutch cluster platform
Java
/**
 * Copyright 2005 The Apache Software Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.mapred;

import java.io.IOException;
import java.io.File;                              // deprecated
import java.util.Arrays;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.*;

/** An {@link OutputFormat} that writes {@link SequenceFile}s. */
public class SequenceFileOutputFormat extends OutputFormatBase {

  public RecordWriter getRecordWriter(FileSystem fs, JobConf job,
                                      String name, Progressable progress)
                                      throws IOException {

    Path file = new Path(job.getOutputPath(), name);

    CompressionCodec codec = null;
    CompressionType compressionType = CompressionType.NONE;
    if (getCompressOutput(job)) {
      // find the kind of compression to do
      compressionType = SequenceFile.getCompressionType(job);

      // find the right codec
      Class codecClass = getOutputCompressorClass(job, DefaultCodec.class);
      codec = (CompressionCodec)
                  ReflectionUtils.newInstance(codecClass, job);
    }

    final SequenceFile.Writer out =
      SequenceFile.createWriter(fs, job, file,
                                job.getOutputKeyClass(),
                                job.getOutputValueClass(),
                                compressionType,
                                codec,
                                progress);

    return new RecordWriter() {

        public void write(WritableComparable key, Writable value)
          throws IOException {

          out.append(key, value);
        }

        public void close(Reporter reporter) throws IOException { out.close();}
      };
  }

  /** @deprecated Call {@link #getReaders(Configuration, Path)} instead. */
  public static SequenceFile.Reader[] getReaders(Configuration conf, File dir)
    throws IOException {
    return getReaders(conf, new Path(dir.toString()));
  }

  /** Open the output generated by this format. */
  public static SequenceFile.Reader[] getReaders(Configuration conf, Path dir)
    throws IOException {
    FileSystem fs = FileSystem.get(conf);
    Path[] names = fs.listPaths(dir);

    // sort names, so that hash partitioning works
    Arrays.sort(names);

    SequenceFile.Reader[] parts = new SequenceFile.Reader[names.length];
    for (int i = 0; i < names.length; i++) {
      parts[i] = new SequenceFile.Reader(fs, names[i], conf);
    }
    return parts;
  }
}
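
For context, below is a minimal driver sketch showing how a job from this generation of the org.apache.hadoop.mapred API could be wired up to use SequenceFileOutputFormat. The driver class name, the "in"/"out" paths, and the omission of mapper/reducer setup are illustrative assumptions, not part of the original file.

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;

// Hypothetical driver sketch; names and paths are placeholders.
public class SequenceFileOutputExample {
  public static void main(String[] args) throws Exception {
    JobConf job = new JobConf(SequenceFileOutputExample.class);
    job.setJobName("sequence-file-output-example");

    // Key/value classes: these are what getRecordWriter() hands to
    // SequenceFile.createWriter() via getOutputKeyClass()/getOutputValueClass().
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);

    // Emit job output as SequenceFiles under the output path, which
    // getRecordWriter() reads back through job.getOutputPath().
    job.setOutputFormat(SequenceFileOutputFormat.class);
    job.setInputPath(new Path("in"));     // old-API path setters; paths assumed
    job.setOutputPath(new Path("out"));

    // A real job would also set mapper/reducer classes and, if desired,
    // enable output compression in the configuration consulted by
    // getCompressOutput() and SequenceFile.getCompressionType().
    JobClient.runJob(job);
  }
}

Output written this way can later be reopened with getReaders(conf, new Path("out")); that is why the listing above sorts the part-file names before constructing the readers, so their order matches the hash partitioning of the keys.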
