Hadoop Java API: a collection of HDFS file operations for web development
I have recently been working on a cloud-computing competition that requires building an HDFS storage cloud. Based on the API described in a blog post, I wrote a single Java file that bundles all of the common HDFS file operations, so that later JSP development only needs to import this class.
Thanks to the original author: http://www.cnblogs.com/xuqiang/archive/2011/06/03/2042526.html
The code is as follows:
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * HDFS file access API
 * @author Jet-Muffin
 */
public class HadoopFileUtil {

    public static String hdfsUrl = "hdfs://localhost:9000";

    /**
     * Create an HDFS folder.
     * @param dirPath
     */
    public static void createDir(String dirPath) throws Exception {
        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(URI.create(hdfsUrl), conf);
        Path path = new Path(dirPath);
        hdfs.mkdirs(path);
        hdfs.close();
    }

    /**
     * Delete an HDFS folder.
     * @param dirPath
     */
    public static void deleteDir(String dirPath) throws Exception {
        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(URI.create(hdfsUrl), conf);
        hdfs.delete(new Path(dirPath), true); // recursive delete of the directory
        hdfs.close();
    }

    /**
     * Create a file with the given content.
     * @param filePath
     * @param content
     */
    public static void createFile(String filePath, String content) throws Exception {
        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(URI.create(hdfsUrl), conf);
        Path path = new Path(filePath);
        FSDataOutputStream out = hdfs.create(path);
        out.write(content.getBytes());
        out.close();
        hdfs.close();
    }

    /**
     * Rename a file.
     * @param oldPath
     * @param newPath
     */
    public static void renameFile(String oldPath, String newPath) throws Exception {
        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(URI.create(hdfsUrl), conf);
        Path path = new Path(oldPath);
        Path nPath = new Path(newPath);
        // rename must happen before the FileSystem is closed
        System.out.println(hdfs.rename(path, nPath));
        hdfs.close();
    }

    /**
     * Delete a file.
     * @param hadoopFile
     * @return isDeleted
     */
    public static boolean deleteFile(String hadoopFile) throws Exception {
        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(URI.create(hdfsUrl), conf);
        Path path = new Path(hadoopFile);
        boolean isDeleted = hdfs.delete(path, false); // non-recursive: only a single file
        hdfs.close();
        return isDeleted;
    }

    /**
     * Upload a local file.
     * @param localPath
     * @param hadoopPath
     */
    public static void uploadLocalFile(String localPath, String hadoopPath) throws Exception {
        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(URI.create(hdfsUrl), conf);
        Path src = new Path(localPath);
        Path dst = new Path(hadoopPath);
        hdfs.copyFromLocalFile(src, dst);
        hdfs.close();
    }

    /**
     * Read a file into a byte buffer.
     * @param hadoopFile
     * @return buffer
     */
    public static byte[] readFile(String hadoopFile) throws Exception {
        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(URI.create(hdfsUrl), conf);
        Path path = new Path(hadoopFile);
        if (hdfs.exists(path)) {
            FSDataInputStream in = hdfs.open(path);
            FileStatus stat = hdfs.getFileStatus(path);
            byte[] buffer = new byte[(int) stat.getLen()];
            in.readFully(0, buffer);
            in.close();
            hdfs.close();
            return buffer;
        } else {
            throw new Exception("the file is not found.");
        }
    }

    /**
     * List all files under a folder.
     * @param hadoopPath
     * @return fileString
     */
    public static String listFiles(String hadoopPath) throws Exception {
        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(URI.create(hdfsUrl), conf);
        Path dst = new Path(hadoopPath);
        FileStatus[] files = hdfs.listStatus(dst);
        String fileString = "";
        for (FileStatus file : files) {
            System.out.println(file.getPath().toString());
            fileString += file.getPath().toString() + " ";
        }
        hdfs.close();
        return fileString;
    }

    /**
     * List the block locations of a file.
     * @param hadoopPath
     * @return blockString
     */
    public static String getBlockInfo(String hadoopPath) throws Exception {
        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(URI.create(hdfsUrl), conf);
        Path dst = new Path(hadoopPath);
        FileStatus fileStatus = hdfs.getFileStatus(dst);
        // find the blocks that hold the file
        BlockLocation[] blkloc = hdfs.getFileBlockLocations(fileStatus, 0, fileStatus.getLen());
        String blockString = "";
        for (BlockLocation loc : blkloc) {
            for (int i = 0; i < loc.getHosts().length; i++) {
                System.out.println(loc.getHosts()[i]);
                blockString += loc.getHosts()[i] + " ";
            }
        }
        hdfs.close();
        return blockString;
    }

    public static void main(String[] args) throws Exception {
        //createDir("/test2");
        //deleteDir("/test2");
        //createFile("/test/helloworld.txt", "hello world");
        //renameFile("/test/a.txt", "/test/b.txt");
        //uploadLocalFile("/home/had/in", "/test");
        //listFiles("/test");
        //getBlockInfo("/test/b.txt");
        //deleteFile("/test/hellwo.txt");
        /*
        byte[] buffer = readFile("/test/in");
        String out = new String(buffer);
        System.out.println(out);
        */
    }
}