Hadoop 2.2 HDFS Operation Examples
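The small programs below demonstrate common HDFS operations through the org.apache.hadoop.fs.FileSystem API in Hadoop 2.2. Each class is a standalone example that takes its target path(s) as command-line arguments and picks up the cluster settings (core-site.xml / hdfs-site.xml) from the Configuration on the classpath.

Delete removes the path given in args[0]; passing true as the second argument to fs.delete() makes the deletion recursive, so directories are removed together with their contents.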
package hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class Delete {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        // second argument true = delete recursively
        fs.delete(new Path(args[0]), true);
        fs.close();
    }
}
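DoubleCat prints the file given in args[0] twice. It relies on FSDataInputStream being seekable: after the first copy the stream is repositioned with seek(3) and the contents from byte offset 3 onward are printed again.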
package hdfs;

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class DoubleCat {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(args[0]), conf);
        FSDataInputStream in = null;
        try {
            in = fs.open(new Path(args[0]));
            IOUtils.copyBytes(in, System.out, 1024, false);
            // FSDataInputStream supports random access: jump back to
            // byte offset 3 and print from there a second time
            in.seek(3);
            IOUtils.copyBytes(in, System.out, 1024, false);
        } finally {
            IOUtils.closeStream(in);
        }
    }
}
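FileSystemCat is the simplest read example: it opens the file in args[0] through the FileSystem API and streams it to standard output with IOUtils.copyBytes().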
package hdfs;

import java.io.InputStream;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class FileSystemCat {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fileSystem = FileSystem.get(URI.create(args[0]), conf);
        InputStream in = null;
        try {
            in = fileSystem.open(new Path(args[0]));
            IOUtils.copyBytes(in, System.out, 1024, false);
        } finally {
            IOUtils.closeStream(in);
        }
    }
}
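GlobStatus expands a glob pattern passed in args[0] (a path that may contain wildcards such as * or ?) with FileSystem.globStatus() and prints every matching path.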
package hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;

public class GlobStatus {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        // args[0] may be a glob pattern such as /user/*/part-*
        FileStatus[] fStatus = fs.globStatus(new Path(args[0]));
        Path[] paths = FileUtil.stat2Paths(fStatus);
        for (Path path : paths) {
            System.out.println(path);
        }
    }
}
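ListStatus lists the direct children of the directory (or the single file) given in args[0] using FileSystem.listStatus() and prints each path.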
package hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;

public class ListStatus {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        FileStatus[] fileStatuses = fs.listStatus(new Path(args[0]));
        Path[] paths = FileUtil.stat2Paths(fileStatuses);
        for (Path path : paths) {
            System.out.println(path);
        }
    }
}
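Mkdir creates the directory given in args[0]; FileSystem.mkdirs() also creates any missing parent directories, similar to mkdir -p in the shell.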
package hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class Mkdir {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        fs.mkdirs(new Path(args[0]));
        fs.close();
    }
}
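PathFilterExample shows how to restrict a listing with a PathFilter: the inner StartWithPathFilter accepts only entries whose names start with the letter "o", so listing args[0] prints just those paths.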
package hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;

public class PathFilterExample {

    // accepts only paths whose file name starts with "o"
    private static class StartWithPathFilter implements PathFilter {
        @Override
        public boolean accept(Path path) {
            return path.getName().startsWith("o");
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        FileStatus[] fStatus = fs.listStatus(new Path(args[0]), new StartWithPathFilter());
        Path[] paths = FileUtil.stat2Paths(fStatus);
        for (Path path : paths) {
            System.out.println(path);
        }
    }
}
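PutData copies a local file (args[0]) into HDFS (args[1]). FileSystem.create() is given a Progressable callback that prints a "*" each time it is invoked, which serves as a rough upload progress indicator.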
package hdfs;

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Progressable;

public class PutData {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(args[1]), conf);
        // create the target file on HDFS; the Progressable callback is
        // invoked periodically while data is being written
        OutputStream out = fs.create(new Path(args[1]), new Progressable() {
            @Override
            public void progress() {
                System.out.println("*");
            }
        });
        InputStream in = new BufferedInputStream(new FileInputStream(args[0]));
        try {
            IOUtils.copyBytes(in, out, 1024, false);
        } finally {
            IOUtils.closeStream(in);
            IOUtils.closeStream(out);
        }
    }
}
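UrlCat reads an HDFS file through the standard java.net.URL mechanism. The static block registers FsUrlStreamHandlerFactory so that hdfs:// URLs are understood; note that URL.setURLStreamHandlerFactory() can only be called once per JVM.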
package hdfs;

import java.io.InputStream;
import java.net.URL;

import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
import org.apache.hadoop.io.IOUtils;

public class UrlCat {

    static {
        // register the HDFS URL handler; this can only be done once per JVM
        URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
    }

    public static void main(String[] args) throws Exception {
        InputStream in = null;
        try {
            in = new URL(args[0]).openStream();
            IOUtils.copyBytes(in, System.out, 1024, false);
        } finally {
            IOUtils.closeStream(in);
        }
    }
}
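All of the examples can be launched with the hadoop script once they are compiled and packaged into a jar. As an illustrative sketch (the jar name and the paths are placeholders, not part of the original post):

    hadoop jar hdfs-examples.jar hdfs.FileSystemCat /user/hadoop/test.txt
    hadoop jar hdfs-examples.jar hdfs.PutData local.txt /user/hadoop/local.txt

The classes that call FileSystem.get(conf) without a URI resolve a plain path against fs.defaultFS, so /user/hadoop/test.txt is enough; UrlCat needs a full URL such as hdfs://namenode:9000/user/hadoop/test.txt because it goes through java.net.URL.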