DFSShell.java
/**
 * Copyright 2005 The Apache Software Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.dfs;

import java.io.*;

import org.apache.hadoop.conf.*;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.util.ToolBase;

/**************************************************
 * This class provides some DFS administrative access.
 *
 * @author Mike Cafarella
 **************************************************/
public class DFSShell extends ToolBase {

    FileSystem fs;

    /** */
    public DFSShell() {
    }

    public void init() throws IOException {
        this.fs = FileSystem.get(conf);
    }

    /**
     * Add a local file to the indicated name in DFS. src is kept.
     */
    void copyFromLocal(Path src, String dstf) throws IOException {
        fs.copyFromLocalFile(src, new Path(dstf));
    }

    /**
     * Add a local file to the indicated name in DFS. src is removed.
     */
    void moveFromLocal(Path src, String dstf) throws IOException {
        fs.moveFromLocalFile(src, new Path(dstf));
    }

    /**
     * Obtain the indicated DFS files that match the file pattern <i>srcf</i>
     * and copy them to the local name. srcf is kept.
     * When copying multiple files, the destination must be a directory;
     * otherwise an IOException is thrown.
     * @param srcf a file pattern specifying source files
     * @param dstf a destination local file/directory
     * @exception IOException
     * @see org.apache.hadoop.fs.FileSystem#globPaths
     */
    void copyToLocal(String srcf, String dstf) throws IOException {
        Path[] srcs = fs.globPaths(new Path(srcf));
        if (srcs.length > 1 && !new File(dstf).isDirectory()) {
            throw new IOException("When copying multiple files, "
                                  + "destination should be a directory.");
        }
        Path dst = new Path(dstf);
        for (int i = 0; i < srcs.length; i++) {
            fs.copyToLocalFile(srcs[i], dst);
        }
    }

    /**
     * Get all the files in the directories that match the source file
     * pattern and merge and sort them into a single file on the local fs.
     * srcf is kept.
     * @param srcf a file pattern specifying source files
     * @param dst a destination local file/directory
     * @exception IOException
     * @see org.apache.hadoop.fs.FileSystem#globPaths
     */
    void copyMergeToLocal(String srcf, Path dst) throws IOException {
        copyMergeToLocal(srcf, dst, false);
    }
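    // --- Illustrative sketch, not part of the original class: one way the
    // copy helpers above might be driven programmatically. Assumes the
    // inherited ToolBase conf has already been set up by the usual tool
    // entry point; the paths below are hypothetical examples.
    static void copyExample(DFSShell shell) throws IOException {
        shell.init();                                       // binds fs to the FileSystem named in conf
        shell.copyFromLocal(new Path("/tmp/part-00000"),    // local source, kept after the copy
                            "/user/demo/part-00000");       // destination name in DFS
        shell.copyToLocal("/user/demo/part-*", "/tmp/out"); // glob; dst must be a dir for >1 match
    }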
    /**
     * Get all the files in the directories that match the source file pattern
     * and merge and sort them into a single file on the local fs.
     * srcf is kept.
     *
     * Also adds a string between the files (useful for adding \n
     * to a text file).
     * @param srcf a file pattern specifying source files
     * @param dst a destination local file/directory
     * @param endline whether an end-of-line character is added between files
     * @exception IOException
     * @see org.apache.hadoop.fs.FileSystem#globPaths
     */
    void copyMergeToLocal(String srcf, Path dst, boolean endline)
        throws IOException {
        Path[] srcs = fs.globPaths(new Path(srcf));
        for (int i = 0; i < srcs.length; i++) {
            if (endline) {
                FileUtil.copyMerge(fs, srcs[i],
                                   FileSystem.getNamed("local", conf), dst,
                                   false, conf, "\n");
            } else {
                FileUtil.copyMerge(fs, srcs[i],
                                   FileSystem.getNamed("local", conf), dst,
                                   false, conf, null);
            }
        }
    }

    /**
     * Obtain the indicated DFS file and copy it to the local name.
     * srcf is removed.
     */
    void moveToLocal(String srcf, Path dst) throws IOException {
        System.err.println("Option '-moveToLocal' is not implemented yet.");
    }

    /**
     * Fetch all DFS files that match the file pattern <i>srcf</i> and display
     * their content on stdout.
     * @param srcf a file pattern specifying source files
     * @exception IOException
     * @see org.apache.hadoop.fs.FileSystem#globPaths
     */
    void cat(String srcf) throws IOException {
        Path[] srcs = fs.globPaths(new Path(srcf));
        for (int i = 0; i < srcs.length; i++) {
            cat(srcs[i]);
        }
    }

    /* print the content of src to screen */
    private void cat(Path src) throws IOException {
        FSDataInputStream in = fs.open(src);
        try {
            BufferedReader din = new BufferedReader(new InputStreamReader(in));
            String line;
            while ((line = din.readLine()) != null) {
                System.out.println(line);
            }
        } finally {
            in.close();
        }
    }

    /**
     * Parse the incoming command string.
     * @param cmd the command line arguments
     * @param pos ignore anything before this pos in cmd
     * @throws IOException
     */
    private void setReplication(String[] cmd, int pos) throws IOException {
        if (cmd.length - pos < 2
            || (cmd.length - pos == 2 && cmd[pos].equalsIgnoreCase("-R"))) {
            System.err.println("Usage: [-R] <repvalue> <path>");
            System.exit(-1);
        }
        boolean recursive = false;
        short rep = 3;
        if ("-R".equalsIgnoreCase(cmd[pos])) {
            recursive = true;
            pos++;
        }
        try {
            rep = Short.parseShort(cmd[pos]);
            pos++;
        } catch (NumberFormatException e) {
            System.err.println("Cannot set replication to: " + cmd[pos]);
            System.exit(-1);
        }
        setReplication(rep, cmd[pos], recursive);
    }

    /**
     * Set the replication for files that match the file pattern <i>srcf</i>.
     * If a match is a directory and recursive is true, set the replication
     * for all its subdirs and the files in them too.
     * @param newRep new replication factor
     * @param srcf a file pattern specifying source files
     * @param recursive whether to set the replication factor for files in subdirs
     * @exception IOException
     * @see org.apache.hadoop.fs.FileSystem#globPaths
     */
    public void setReplication(short newRep, String srcf, boolean recursive)
        throws IOException {
        Path[] srcs = fs.globPaths(new Path(srcf));
        for (int i = 0; i < srcs.length; i++) {
            setReplication(newRep, srcs[i], recursive);
        }
    }

    private void setReplication(short newRep, Path src, boolean recursive)
        throws IOException {
        if (!fs.isDirectory(src)) {
            setFileReplication(src, newRep);
            return;
        }
        Path items[] = fs.listPaths(src);
        if (items == null) {
            System.out.println("Could not get listing for " + src);
        } else {
            for (int i = 0; i < items.length; i++) {
                Path cur = items[i];
                if (!fs.isDirectory(cur)) {
                    setFileReplication(cur, newRep);
                } else if (recursive) {
                    setReplication(newRep, cur, recursive);
                }
            }
        }
    }
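    // --- Illustrative sketch, not part of the original class: setting the
    // replication factor over a glob, mirroring the "[-R] <repvalue> <path>"
    // command form parsed above. Pattern and factor are made-up examples.
    static void setrepExample(DFSShell shell) throws IOException {
        // Same effect as the recursive (-R) form with repvalue 2: every file
        // under any directory matching the pattern gets replication 2.
        shell.setReplication((short) 2, "/user/demo/logs", true);
    }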
    /**
     * Actually set the replication for this file.
     * If it fails, either throw an IOException or print an error msg.
     * @param file a dfs file/directory
     * @param newRep new replication factor
     * @throws IOException
     */
    private void setFileReplication(Path file, short newRep) throws IOException {
        if (fs.setReplication(file, newRep)) {
            System.out.println("Replication " + newRep + " set: " + file);
        } else {
            System.err.println("Could not set replication for: " + file);
        }
    }

    /**
     * Get a listing of all files in DFS that match the file pattern <i>srcf</i>.
     * @param srcf a file pattern specifying source files
     * @param recursive whether to list files in subdirs
     * @exception IOException
     * @see org.apache.hadoop.fs.FileSystem#globPaths
     */
    public void ls(String srcf, boolean recursive) throws IOException {
        Path[] srcs = fs.globPaths(new Path(srcf));
        boolean printHeader = (srcs.length == 1);
        for (int i = 0; i < srcs.length; i++) {
            ls(srcs[i], recursive, printHeader);
        }
    }

    /* list all files in dfs under the directory <i>src</i> */
    private void ls(Path src, boolean recursive, boolean printHeader)
        throws IOException {
        Path items[] = fs.listPaths(src);
        if (items == null) {
            System.out.println("Could not get listing for " + src);
        } else {
            if (!recursive && printHeader) {
                System.out.println("Found " + items.length + " items");
            }
            for (int i = 0; i < items.length; i++) {
                Path cur = items[i];
                System.out.println(cur + "\t"
                    + (fs.isDirectory(cur)
                       ? "<dir>"
                       : ("<r " + fs.getReplication(cur) + ">\t"
                          + fs.getLength(cur))));
                if (recursive && fs.isDirectory(cur)) {
                    ls(cur, recursive, printHeader);
                }
            }
        }
    }

    /**
     * Show the size of all files in DFS that match the file pattern <i>srcf</i>.
     * @param srcf a file pattern specifying source files
     * @exception IOException
     * @see org.apache.hadoop.fs.FileSystem#globPaths
     */
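    // --- Illustrative sketch, not part of the original class: the tab-separated
    // listing format that ls above prints, shown for a hypothetical directory:
    //
    //   Found 2 items
    //   /user/demo/logs         <dir>
    //   /user/demo/part-00000   <r 3>   1048576
    //
    // Directories are tagged "<dir>"; files show "<r N>" (the replication
    // factor) followed by the file length in bytes.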