HDFS API
来源:互联网 发布:erp软件图标素材 编辑:程序博客网 时间:2024/06/11 12:15
public static FileSystem getHDFS() {<span style="white-space:pre"></span>FileSystem fs =null;<span style="white-space:pre"></span><span style="white-space:pre"></span>Configuration conf = new Configuration();<span style="white-space:pre"></span>try {<span style="white-space:pre"></span>fs= FileSystem.get(conf);<span style="white-space:pre"></span>} catch (IOException e) {<span style="white-space:pre"></span>e.printStackTrace();<span style="white-space:pre"></span>}<span style="white-space:pre"></span>return fs;<span style="white-space:pre"></span>}
</pre><pre name="code" class="java">
</pre><pre name="code" class="java">
</pre><pre name="code" class="java">
</pre><pre name="code" class="java">/** * 上传本地文件 */@Testpublic void testUpload() {try {FileSystem hdfs = HDFSUtils.getHDFS();hdfs.copyFromLocalFile(localPath, descPath);FileStatus[] fileStatus = hdfs.listStatus(descPath);for (FileStatus status : fileStatus) {Path path = status.getPath();System.out.println(path.getName());}hdfs.close();} catch (IOException e) {e.printStackTrace();}}/** * 创建新文件 * * @throws Exception */@Testpublic void testCreate() throws Exception {byte[] buff = "hello world!".getBytes();FileSystem hdfs = HDFSUtils.getHDFS();Path dst = new Path(descPath + "/hello4.txt");FSDataOutputStream outputStream = null;try {outputStream = hdfs.create(dst);outputStream.write(buff, 0, buff.length);} catch (Exception e) {e.printStackTrace();} finally {if (outputStream != null) {outputStream.close();}}FileStatus files[] = hdfs.listStatus(dst);for (FileStatus file : files) {System.out.println(file.getPath());}}/** * 遍历HDFS上的文件和目录 */@Testpublic void testList() {FileSystem hdfs = HDFSUtils.getHDFS();Path path = new Path("/");try {FileStatus[] fileStatus = hdfs.listStatus(path);for (FileStatus status : fileStatus) {Path p = status.getPath();String str = status.isDir() ? 
"目录" : "文件";System.out.println(str + ":" + p);}} catch (IOException e) {e.printStackTrace();}}/** * 文件重命名 */@Testpublic void testRename() {Path oldPath = new Path("/home/hell.txt");Path newPath = new Path("/home/new.log");FileSystem hdfs = HDFSUtils.getHDFS();try {boolean falg = hdfs.rename(oldPath, newPath);hdfs.close();System.out.println(falg);} catch (IOException e) {e.printStackTrace();}}/** * 读取文件的内容 */@Testpublic void readFile() {Path path = new Path("/home/big1.pdf");FileSystem hdfs = HDFSUtils.getHDFS();try {FSDataInputStream in = hdfs.open(path);IOUtils.copyBytes(in, System.out, 4096, false); // 复制到标准输出流} catch (IOException e) {e.printStackTrace();}}/** * 删除文件 */@Testpublic void delete() {FileSystem hdfs = HDFSUtils.getHDFS();Path path = new Path("/home/new.log");// 删除目录 不管是否为空目录// boolean falg=hdfs.delete(new Path("/home/test"));// 删除目录 不为空目录// boolean b=hdfs.delete(new Path("/home/test"),false);// 删除目录 不管是否为空目录// boolean falg=hdfs.delete(new Path("/home/aa"),true);try {boolean falg = hdfs.deleteOnExit(path);System.out.println(falg);} catch (IOException e) {e.printStackTrace();}}
Reading an HDFS file through java.net.URL
/**
 * Reads an HDFS file through the {@code hdfs://} URL scheme.
 *
 * <p>The static initializer registers Hadoop's stream-handler factory so that
 * {@link URL} understands {@code hdfs://} URLs. The JVM allows this factory to
 * be set only once per process.
 */
public class HDFSUrl {

    static {
        // Must run before the first hdfs:// URL is opened; settable once per JVM.
        URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
    }

    /** Streams the remote file's contents to standard output. */
    @Test
    public void readFile() {
        InputStream in = null;
        try {
            URL url = new URL("hdfs://192.168.102.130:9000/home/hello.txt");
            in = url.openStream();
            IOUtils.copyBytes(in, System.out, 1024);
        } catch (IOException e) {
            // Narrowed from catch(Exception): only I/O failures are expected here,
            // and MalformedURLException is an IOException, so one handler suffices.
            e.printStackTrace();
        } finally {
            IOUtils.closeStream(in); // was leaked when copyBytes threw
        }
    }
}
0 0
- HDFS API
- hdfs api
- hdfs简介&hdfs-shell&hdfs-API
- HDFS API hello world
- HDFS API 文件操作
- HDFS API详解
- HDFS 常用api
- hdfs-JAVA-API
- HDFS API hello world
- Java API操作HDFS
- hadoop hdfs API操作
- HDFS API编程
- HDFS JAVA API
- HDFS的Java API
- HDFS API基本操作
- HDFS API入门
- hadoop-2 HDFS API
- hdfs的FileSystem API
- UE4在VS2013中各个编译配置代表意义
- 使用WebRTC搭建前端视频聊天室-03——数据通道篇
- EXTJS-1 表单和ajax数据交换
- ContentResolver之短信读取
- session学习记录
- hdfs api
- hdu 4292 Food (最大流)
- 使用WebRTC搭建前端视频聊天室-04——点对点通信篇
- UE4制作插件的插件神器pluginCreator
- 分布式程序防止多用户对同一数据更新问题
- POJ 2253 Frogger (dijkstra)
- c++ primer Exercise 5.0
- 内部类
- 反射invoke方法之我见