hadoop-hdfs-文件工具类(Java)
来源: 互联网 | 发布: php curl exec 不输出 | 编辑: 程序博客网 | 时间: 2024/05/17 12:06
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;

/**
 * Utility class wrapping common HDFS file operations: list ("ls"),
 * print ("cat"), create directory, delete, upload ({@code put}/{@code write})
 * and download ({@code get}/{@code read}).
 *
 * <p>NOTE(review): the connection settings (fs.defaultFS, the HDFS user and
 * hadoop.home.dir) are hard-coded in {@link #getFS()} — consider
 * externalizing them to configuration.
 *
 * <p>Each public method obtains and closes its own {@link FileSystem}
 * instance, so this class holds no state and is safe to share.
 */
public class HdfsUtil {

    /**
     * Lists the direct children of {@code specialPath} and prints their
     * metadata (type, permission, owner, group, size, replication,
     * block size, path) to stdout — an "ls -l" equivalent.
     *
     * @param specialPath HDFS directory (or file) to list
     */
    public void listFiles(String specialPath) {
        FileSystem fileSystem = this.getFS();
        if (fileSystem == null) {
            // getFS() already logged the connection failure
            return;
        }
        try {
            FileStatus[] fstats = fileSystem.listStatus(new Path(specialPath));
            for (FileStatus fstat : fstats) {
                System.out.println(fstat.isDirectory() ? "directory" : "file");
                System.out.println("Permission:" + fstat.getPermission());
                System.out.println("Owner:" + fstat.getOwner());
                System.out.println("Group:" + fstat.getGroup());
                System.out.println("Size:" + fstat.getLen());
                System.out.println("Replication:" + fstat.getReplication());
                System.out.println("Block Size:" + fstat.getBlockSize());
                System.out.println("Name:" + fstat.getPath());
                System.out.println("#############################");
            }
        } catch (IOException e) {
            e.printStackTrace();
            System.err.println("link err");
        } finally {
            // reuse the shared helper instead of the duplicated inline close
            this.closeFS(fileSystem);
        }
    }

    /**
     * Prints the content of an HDFS file to stdout ("cat").
     *
     * @param hdfsFilePath HDFS path of the file to print
     */
    public void cat(String hdfsFilePath) {
        FileSystem fileSystem = this.getFS();
        if (fileSystem == null) {
            return;
        }
        FSDataInputStream fdis = null;
        try {
            fdis = fileSystem.open(new Path(hdfsFilePath));
            // copyBytes(in, out, buffSize) does NOT close the streams
            IOUtils.copyBytes(fdis, System.out, 1024);
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // fix: close the input stream too (original leaked it)
            IOUtils.closeStream(fdis);
            this.closeFS(fileSystem);
        }
    }

    /**
     * Creates a directory (including missing parents) on HDFS.
     *
     * @param hdfsFilePath HDFS path of the directory to create
     */
    public void mkdir(String hdfsFilePath) {
        FileSystem fileSystem = this.getFS();
        if (fileSystem == null) {
            return;
        }
        try {
            boolean success = fileSystem.mkdirs(new Path(hdfsFilePath));
            if (success) {
                System.out.println("Create directory or file successfully");
            }
        } catch (IllegalArgumentException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            this.closeFS(fileSystem);
        }
    }

    /**
     * Deletes a file or directory on HDFS.
     *
     * @param hdfsFilePath HDFS path to delete
     * @param recursive    if {@code true}, delete a non-empty directory
     *                     recursively; must be {@code true} for directories
     */
    public void rm(String hdfsFilePath, boolean recursive) {
        FileSystem fileSystem = this.getFS();
        if (fileSystem == null) {
            return;
        }
        try {
            boolean success = fileSystem.delete(new Path(hdfsFilePath), recursive);
            if (success) {
                System.out.println("delete successfully");
            }
        } catch (IllegalArgumentException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            this.closeFS(fileSystem);
        }
    }

    /**
     * Uploads a local file to HDFS.
     *
     * @param localFilePath path of the local source file
     * @param hdfsFilePath  HDFS destination path (overwritten if it exists)
     */
    public void put(String localFilePath, String hdfsFilePath) {
        FileSystem fileSystem = this.getFS();
        if (fileSystem == null) {
            return;
        }
        FSDataOutputStream fdos = null;
        FileInputStream fis = null;
        try {
            fdos = fileSystem.create(new Path(hdfsFilePath));
            fis = new FileInputStream(new File(localFilePath));
            IOUtils.copyBytes(fis, fdos, 1024);
        } catch (IllegalArgumentException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // fix: original leaked both streams; close them before the FS
            IOUtils.closeStream(fis);
            IOUtils.closeStream(fdos);
            this.closeFS(fileSystem);
        }
    }

    /**
     * Reads an HDFS file and streams its content to stdout.
     * Functionally equivalent to {@link #cat(String)} but propagates
     * connection errors instead of logging them.
     *
     * @param fileName HDFS path of the file to read
     * @throws Exception if the HDFS file cannot be opened
     */
    public void read(String fileName) throws Exception {
        FileSystem fileSystem = this.getFS();
        if (fileSystem == null) {
            return;
        }
        Path readPath = new Path(fileName);
        FSDataInputStream inStream = fileSystem.open(readPath);
        try {
            // close=false: System.out must stay open for the caller
            IOUtils.copyBytes(inStream, System.out, 4096, false);
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            IOUtils.closeStream(inStream);
            this.closeFS(fileSystem);
        }
    }

    /**
     * Downloads an HDFS file to the local file system.
     *
     * @param localFilePath local destination path (overwritten if it exists)
     * @param hdfsFilePath  HDFS source path
     */
    public void get(String localFilePath, String hdfsFilePath) {
        FileSystem fileSystem = this.getFS();
        if (fileSystem == null) {
            return;
        }
        FSDataInputStream fsis = null;
        FileOutputStream fos = null;
        try {
            fsis = fileSystem.open(new Path(hdfsFilePath));
            fos = new FileOutputStream(new File(localFilePath));
            IOUtils.copyBytes(fsis, fos, 1024);
        } catch (IllegalArgumentException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // fix: original leaked both streams; close them before the FS
            IOUtils.closeStream(fsis);
            IOUtils.closeStream(fos);
            this.closeFS(fileSystem);
        }
    }

    /**
     * Uploads a local file to HDFS (same effect as {@link #put(String, String)}
     * but propagates local-file errors to the caller).
     *
     * @param localPath path of the local source file
     * @param hdfspath  HDFS destination path (overwritten if it exists)
     * @throws Exception if the local file cannot be opened
     */
    public void write(String localPath, String hdfspath) throws Exception {
        FileInputStream inStream = new FileInputStream(new File(localPath));
        FileSystem fileSystem = this.getFS();
        if (fileSystem == null) {
            // fix: don't leak the local stream when the connection fails
            IOUtils.closeStream(inStream);
            return;
        }
        FSDataOutputStream outStream = null;
        try {
            Path writePath = new Path(hdfspath);
            outStream = fileSystem.create(writePath);
            IOUtils.copyBytes(inStream, outStream, 4096, false);
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            IOUtils.closeStream(inStream);
            IOUtils.closeStream(outStream);
            this.closeFS(fileSystem);
        }
    }

    /**
     * Builds a {@link FileSystem} handle for the (hard-coded) cluster.
     *
     * @return a connected FileSystem, or {@code null} if the connection
     *         failed (the error is printed); callers must null-check
     */
    private FileSystem getFS() {
        System.setProperty("hadoop.home.dir",
                "D:\\04coding\\projects-bigData\\Hadoop\\hadoop-2.5.0");
        System.setProperty("HADOOP_USER_NAME", "xiaoyuzhou");
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://xyz01.aiso.com:8020/");
        conf.set("mapred.remote.os", "Linux");
        try {
            return FileSystem.get(conf);
        } catch (IOException e) {
            e.printStackTrace();
        }
        return null;
    }

    /**
     * Closes a {@link FileSystem}, ignoring a {@code null} handle and
     * logging (but not propagating) close failures.
     *
     * @param fileSystem the handle to close; may be {@code null}
     */
    private void closeFS(FileSystem fileSystem) {
        if (fileSystem != null) {
            try {
                fileSystem.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}
0 0
- hadoop-hdfs-文件工具类(Java)
- hadoop-hdfs-文件工具类(Scala)
- hadoop hdfs java api 文件操作类
- hadoop hdfs java api 文件操作类
- Hadoop HDFS文件操作 Java实现类
- Hadoop HDFS文件操作 Java实现类
- 利用java操作Hadoop文件 /hdfs
- Hadoop HDFS文件操作的Java代码
- Hadoop-利用java API操作HDFS文件
- Hadoop HDFS文件操作的Java代码
- java hadoop hdfs 上写文件
- Hadoop java实现读取hdfs文件
- Hadoop HDFS文件操作的Java代码
- hadoop HDFS工具类---对hdfs的读、写
- 第一个hadoop程序(java程序访问hadoop的hdfs文件系统中的文件)
- hadoop hdfs 上传下载文件
- Hadoop HDFS文件操作
- hadoop Hdfs文件上传下载
- 自定义ClassLoader
- 初探物联网协议之MQTT
- MVP模式&简单实例
- Android国际化
- 我正在做的毕业设计,半成品,持续更新中!
- hadoop-hdfs-文件工具类(Java)
- Android 几秒钟点击次数 触发事件
- 修该C++项目的名称
- robot framework环境搭建
- html的相对路径和绝对路径
- 随手摘抄
- magent安装:错误:event.h:没有那个文件或目录
- HDU1213 How Many Tables
- SpringMVC4+thymeleaf3的一个简单实例(form表单数据验证)