HDFS java api接口测试demo
来源:互联网 发布:jo2系列电机数据 编辑:程序博客网 时间:2024/06/01 15:37
1. 创建mapreduce工程,设置hadoop home
2. 创建HDFSUtil 类
package Bruce.Hadoop.HDFSManger;
import java.util.Iterator;import java.util.Map.Entry;import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.fs.FSDataInputStream;import org.apache.hadoop.fs.FSDataOutputStream;import org.apache.hadoop.fs.FileStatus;import org.apache.hadoop.fs.FileSystem;import org.apache.hadoop.fs.Path;import org.apache.hadoop.hdfs.DistributedFileSystem;import org.apache.hadoop.hdfs.protocol.DatanodeInfo;public class HDFSUtil { public synchronized static FileSystem getFileSystem(String ip, int port) { FileSystem fs = null; String url = "hdfs://" + ip + ":" + String.valueOf(port); Configuration config = new Configuration(); config.set("fs.default.name", url); try { fs = FileSystem.get(config); } catch (Exception e) { e.printStackTrace(); } return fs; } public synchronized static void listNode(FileSystem fs) { DistributedFileSystem dfs = (DistributedFileSystem) fs; try { DatanodeInfo[] infos = dfs.getDataNodeStats(); for (DatanodeInfo node : infos) { System.out.println("HostName: " + node.getHostName() + "/n" + node.getDatanodeReport()); System.out.println("--------------------------------"); } } catch (Exception e) { e.printStackTrace(); } } /** * 打印系统配置 * * @param fs */ public synchronized static void listConfig(FileSystem fs) { Iterator<Entry<String, String>> entrys = fs.getConf().iterator(); while (entrys.hasNext()) { Entry<String, String> item = entrys.next(); System.out.println(item.getKey() + ": " + item.getValue()); } } /** * 创建目录和父目录 * * @param fs * @param dirName */ public synchronized static void mkdirs(FileSystem fs, String dirName) { // Path home = fs.getHomeDirectory(); Path workDir = fs.getWorkingDirectory(); String dir = workDir + "/" + dirName; Path src = new Path(dir); // FsPermission p = FsPermission.getDefault(); boolean succ; try { succ = fs.mkdirs(src); if (succ) { System.out.println("create directory " + dir + " successed. "); } else { System.out.println("create directory " + dir + " failed. 
"); } } catch (Exception e) { e.printStackTrace(); } } /** * 删除目录和子目录 * * @param fs * @param dirName */ public synchronized static void rmdirs(FileSystem fs, String dirName) { // Path home = fs.getHomeDirectory(); Path workDir = fs.getWorkingDirectory(); String dir = workDir + "/" + dirName; Path src = new Path(dir); boolean succ; try { succ = fs.delete(src, true); if (succ) { System.out.println("remove directory " + dir + " successed. "); } else { System.out.println("remove directory " + dir + " failed. "); } } catch (Exception e) { e.printStackTrace(); } } /** * 上传目录或文件 * * @param fs * @param local * @param remote */ public synchronized static void upload(FileSystem fs, String local, String remote) { // Path home = fs.getHomeDirectory(); Path workDir = fs.getWorkingDirectory(); Path dst = new Path(workDir + "/" + remote); Path src = new Path(local); try { fs.copyFromLocalFile(false, true, src, dst); System.out.println("upload " + local + " to " + remote + " successed. "); } catch (Exception e) { e.printStackTrace(); } } /** * 下载目录或文件 * * @param fs * @param local * @param remote */ public synchronized static void download(FileSystem fs, String local, String remote) { // Path home = fs.getHomeDirectory(); Path workDir = fs.getWorkingDirectory(); Path dst = new Path(workDir + "/" + remote); Path src = new Path(local); try { fs.copyToLocalFile(false, dst, src); System.out.println("download from " + remote + " to " + local + " successed. 
"); } catch (Exception e) { e.printStackTrace(); } } /** * 字节数转换 * * @param size * @return */ public synchronized static String convertSize(long size) { String result = String.valueOf(size); if (size < 1024 * 1024) { result = String.valueOf(size / 1024) + " KB"; } else if (size >= 1024 * 1024 && size < 1024 * 1024 * 1024) { result = String.valueOf(size / 1024 / 1024) + " MB"; } else if (size >= 1024 * 1024 * 1024) { result = String.valueOf(size / 1024 / 1024 / 1024) + " GB"; } else { result = result + " B"; } return result; } /** * 遍历HDFS上的文件和目录 * * @param fs * @param path */ public synchronized static void listFile(FileSystem fs, String path) { Path workDir = fs.getWorkingDirectory(); Path dst; if (null == path || "".equals(path)) { dst = new Path(workDir + "/" + path); } else { dst = new Path(path); } try { String relativePath = ""; FileStatus[] fList = fs.listStatus(dst); for (FileStatus f : fList) { if (null != f) { relativePath = new StringBuffer() .append(f.getPath().getParent()).append("/") .append(f.getPath().getName()).toString(); if (f.isDir()) { listFile(fs, relativePath); } else { System.out.println(convertSize(f.getLen()) + "/t/t" + relativePath); } } } } catch (Exception e) { e.printStackTrace(); } finally { } } public synchronized static void write(FileSystem fs, String path, String data) { // Path home = fs.getHomeDirectory(); Path workDir = fs.getWorkingDirectory(); Path dst = new Path(workDir + "/" + path); try { FSDataOutputStream dos = fs.create(dst); dos.writeUTF(data); dos.close(); System.out.println("write content to " + path + " successed. "); } catch (Exception e) { e.printStackTrace(); } } public synchronized static void append(FileSystem fs, String path, String data) { // Path home = fs.getHomeDirectory(); Path workDir = fs.getWorkingDirectory(); Path dst = new Path(workDir + "/" + path); try { FSDataOutputStream dos = fs.append(dst); dos.writeUTF(data); dos.close(); System.out.println("append content to " + path + " successed. 
"); } catch (Exception e) { e.printStackTrace(); } } public synchronized static String read(FileSystem fs, String path) { String content = null; // Path home = fs.getHomeDirectory(); Path workDir = fs.getWorkingDirectory(); Path dst = new Path(workDir + "/" + path); try { // reading FSDataInputStream dis = fs.open(dst); content = dis.readUTF(); dis.close(); System.out.println("read content from " + path + " successed. "); } catch (Exception e) { e.printStackTrace(); } return content; } }
3. 创建测试用例

package Bruce.Hadoop.HDFSManger; // must be built as a Hadoop project

import java.io.IOException;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * Smoke test for {@link HDFSUtil}: connects to the namenode, lists
 * datanodes, ensures the "input" directory exists, then writes and
 * reads back a small file.
 */
public class HDFSTest {

    /**
     * @param args unused
     */
    public static void main(String[] args) {
        // NOTE(review): namenode address is hard-coded for this demo.
        FileSystem fs = HDFSUtil.getFileSystem("192.168.100.3", 9000);
        HDFSUtil.listNode(fs); // print a report for each datanode

        String dir = "input";
        String fileName = "Name.txt";
        try {
            // Fixed: original checked the literal "input" instead of the
            // variable, so changing the variable would break the check.
            if (!fs.exists(new Path(dir))) {
                HDFSUtil.mkdirs(fs, dir);
                System.out.println("mkdir" + dir);
            } else {
                System.out.println(dir + " exists!");
            }
        } catch (IOException e) {
            e.printStackTrace();
        }

        // Overwrites the file if it already exists.
        HDFSUtil.write(fs, dir + "/" + fileName, "bruce wang");
        // Avoid append on HDFS: support is unreliable on old clusters.
        // HDFSUtil.append(fs, dir + "/" + fileName, "/ntest-测试2");
        System.out.println("write " + dir + "/" + fileName);

        String sFileContend = HDFSUtil.read(fs, dir + "/" + fileName);
        System.out.println(sFileContend);
        System.out.println("read " + dir + "/" + fileName);
    }
}

4. run as hadoop,得到:
12/02/12 01:17:57 WARN conf.Configuration: DEPRECATED: hadoop-site.xml found in the classpath. Usage of hadoop-site.xml is deprecated. Instead use core-site.xml, mapred-site.xml and hdfs-site.xml to override properties of core-default.xml, mapred-default.xml and hdfs-default.xml respectivelyHostName: BruceWangUbuntu/nName: 192.168.100.3:50010Decommission Status : NormalConfigured Capacity: 20608348160 (19.19 GB)DFS Used: 24609 (24.03 KB)Non DFS Used: 4905893855 (4.57 GB)DFS Remaining: 15702429696(14.62 GB)DFS Used%: 0%DFS Remaining%: 76.19%Last contact: Sun Feb 12 01:17:48 CST 2012--------------------------------input exists!write content to input/Name.txt successed. write input/Name.txtread content from input/Name.txt successed. bruce wangread input/Name.txt
- HDFS java api接口测试demo
- HDFS的JAVA接口API操作实例
- HDFS的JAVA接口API操作实例
- HDFS的JAVA接口API操作实例
- HDFS的JAVA接口API操作实例
- HDFS的JAVA接口API操作实例
- HDFS的JAVA接口API操作实例
- HDFS中Java的API使用测试
- hdfs java读写hdfs demo
- 饿了么外卖api接口完整测试demo
- 饿了么外卖api接口完整测试demo
- (转)HDFS的JAVA接口API操作实例
- Hadoop学习笔记(3)-java操作hdfs的API接口
- hadoop学习笔记--5.HDFS的java api接口访问
- Java 实现HDFS API接口 与获取Active NameNode Address
- hadoop之HDFS/MapReduce的java接口简单测试
- 接口测试Demo
- webservice 接口测试demo
- 解决安全模式问题 “hadoop Cannot create directory Name node is in safe mode.”
- 软件恢复默认设置UE等
- 从此刻起干点儿公益的事情_分享!
- 只要6分钟,告诉你少走6年弯路
- WindowsPhone自定义控件详解(一) - 控件类库分析
- HDFS java api接口测试demo
- [转]据说不错的书
- PHP | 魔术方法 | __toString(),__clone(),__call(),__autoload() 详解
- WindowsPhone自定义控件详解(二) - 模板类库分析
- 浏览器缓存机制
- 自动化应用在企业中的应用
- android学习网站
- Cache-control的说明
- HTTP/1.1 Cache-Control的理解