Hadoop 2.2 HDFS Operation Examples

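The following are standalone examples of common HDFS operations using the Hadoop 2.2 FileSystem API. Assuming the compiled classes are packaged into a jar (the jar name and paths below are illustrative, not from the original), each example can be launched with the hadoop command, e.g.:

    hadoop jar hdfs-examples.jar hdfs.Delete /user/test/old-output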
package hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Delete the path given as the first argument.
public class Delete {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        // The second argument enables recursive deletion of directories.
        fs.delete(new Path(args[0]), true);
        fs.close();
    }
}
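Note that FileSystem.delete() returns a boolean indicating success, which this example ignores; the recursive flag must be true to delete a non-empty directory.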
package hdfs;

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

// Print a file's contents twice, using seek() to reposition the stream.
public class DoubleCat {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(args[0]), conf);
        FSDataInputStream in = null;
        try {
            in = fs.open(new Path(args[0]));
            IOUtils.copyBytes(in, System.out, 1024, false);
            // Reposition to byte offset 3 and copy the rest of the file again.
            in.seek(3);
            IOUtils.copyBytes(in, System.out, 1024, false);
        } finally {
            IOUtils.closeStream(in);
        }
    }
}
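This works because FileSystem.open() returns an FSDataInputStream, which implements Seekable; a plain java.io.InputStream cannot be repositioned. The output is the full file followed by everything from byte offset 3 onward.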

package hdfs;

import java.io.InputStream;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

// Print a file's contents to standard output, like "hadoop fs -cat".
public class FileSystemCat {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fileSystem = FileSystem.get(URI.create(args[0]), conf);
        InputStream in = null;
        try {
            in = fileSystem.open(new Path(args[0]));
            IOUtils.copyBytes(in, System.out, 1024, false);
        } finally {
            IOUtils.closeStream(in);
        }
    }
}
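The final false argument to IOUtils.copyBytes() tells it not to close the streams when done, which is why the example closes the input explicitly in the finally block.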

package hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;

// Expand a glob pattern and print all matching paths.
public class GlobStatus {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        FileStatus[] fStatus = fs.globStatus(new Path(args[0]));
        // Convert the FileStatus array into plain Paths for printing.
        Path[] paths = FileUtil.stat2Paths(fStatus);
        for (Path path : paths) {
            System.out.println(path);
        }
    }
}
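HDFS glob patterns support the usual shell-style wildcards such as *, ?, character classes like [a-b], and alternation like {a,b}. When running this, quote the pattern on the command line so the local shell does not expand it first.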

package hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;

// List the contents of a directory, like "hadoop fs -ls".
public class ListStatus {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        FileStatus[] fileStatuses = fs.listStatus(new Path(args[0]));
        Path[] paths = FileUtil.stat2Paths(fileStatuses);
        for (Path path : paths) {
            System.out.println(path);
        }
    }
}
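When the argument is a directory, listStatus() returns one FileStatus per child entry; when it is a file, it returns a single-element array describing that file.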

package hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Create a directory at the given path.
public class Mkdir {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        fs.mkdirs(new Path(args[0]));
        fs.close();
    }
}
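Like mkdir -p, FileSystem.mkdirs() creates any missing parent directories and returns a boolean (ignored here) indicating success.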

package hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;

// List a directory, keeping only entries whose names start with "o".
public class PathFilterExample {

    private static class StartWithPathFilter implements PathFilter {
        @Override
        public boolean accept(Path path) {
            return path.getName().startsWith("o");
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        FileStatus[] fStatus = fs.listStatus(new Path(args[0]), new StartWithPathFilter());
        Path[] paths = FileUtil.stat2Paths(fStatus);
        for (Path path : paths) {
            System.out.println(path);
        }
    }
}
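The filter is applied to each child of the listed directory, so only entries whose final path component begins with "o" are printed. The same PathFilter can also be passed to globStatus() to further restrict a glob's matches.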

package hdfs;

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Progressable;

// Copy a local file (args[0]) to HDFS (args[1]), printing "*" as data is written.
public class PutData {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(args[1]), conf);
        // create() accepts a Progressable callback for reporting write progress.
        OutputStream out = fs.create(new Path(args[1]), new Progressable() {
            @Override
            public void progress() {
                System.out.println("*");
            }
        });
        InputStream in = new BufferedInputStream(new FileInputStream(args[0]));
        try {
            IOUtils.copyBytes(in, out, 1024, false);
        } finally {
            IOUtils.closeStream(in);
            IOUtils.closeStream(out);
        }
    }
}
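The Progressable callback is invoked by the client as data is written out (roughly once per packet flushed to the datanode pipeline), so the printed stars give a crude progress indicator for large uploads.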

package hdfs;

import java.io.InputStream;
import java.net.URL;

import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
import org.apache.hadoop.io.IOUtils;

// Read an hdfs:// URL through the standard java.net.URL mechanism.
public class UrlCat {

    static {
        // Register Hadoop's handler so java.net.URL understands hdfs:// URLs.
        URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
    }

    public static void main(String[] args) throws Exception {
        InputStream in = null;
        try {
            in = new URL(args[0]).openStream();
            IOUtils.copyBytes(in, System.out, 1024, false);
        } finally {
            IOUtils.closeStream(in);
        }
    }
}
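URL.setURLStreamHandlerFactory() can be called at most once per JVM, so this approach fails if another component (a web container, for instance) has already registered a factory. In that case, use the FileSystem API directly, as in FileSystemCat above.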

