HDFS java api接口测试demo

来源:互联网 发布:jo2系列电机数据 编辑:程序博客网 时间:2024/06/01 15:37

1. 创建mapreduce工程,设置hadoop home


2. 创建HDFSUtil  类

package Bruce.Hadoop.HDFSManger;

import java.util.Iterator;
import java.util.Map.Entry;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;

/**
 * Static helpers around the Hadoop {@link FileSystem} API: connect to a
 * namenode, inspect the cluster, and perform basic directory/file operations
 * resolved against the filesystem's working directory.
 *
 * <p>All methods are {@code synchronized} on the class, serializing every call
 * through this utility; errors are reported via stack trace rather than
 * propagated (kept from the original demo's best-effort style).
 */
public class HDFSUtil {

    /**
     * Connects to the HDFS namenode at {@code ip:port}.
     *
     * @param ip   namenode host or IP
     * @param port namenode RPC port (e.g. 9000)
     * @return a connected FileSystem, or {@code null} if the connection failed
     */
    public synchronized static FileSystem getFileSystem(String ip, int port) {
        FileSystem fs = null;
        String url = "hdfs://" + ip + ":" + port;
        Configuration config = new Configuration();
        // "fs.default.name" is the legacy key; newer Hadoop reads "fs.defaultFS".
        // Set both so the demo works across versions.
        config.set("fs.default.name", url);
        config.set("fs.defaultFS", url);
        try {
            fs = FileSystem.get(config);
        } catch (Exception e) {
            e.printStackTrace();
        }
        return fs;
    }

    /**
     * Prints hostname and status report for every datanode in the cluster.
     * Only meaningful for a real HDFS connection; a non-HDFS FileSystem is
     * reported instead of throwing ClassCastException.
     *
     * @param fs filesystem returned by {@link #getFileSystem}
     */
    public synchronized static void listNode(FileSystem fs) {
        // BUG FIX: guard the cast — a LocalFileSystem would previously crash here.
        if (!(fs instanceof DistributedFileSystem)) {
            System.out.println("not a DistributedFileSystem: "
                    + (fs == null ? "null" : fs.getClass().getName()));
            return;
        }
        DistributedFileSystem dfs = (DistributedFileSystem) fs;
        try {
            DatanodeInfo[] infos = dfs.getDataNodeStats();
            for (DatanodeInfo node : infos) {
                // BUG FIX: was "/n" (literal slash-n), intended newline "\n".
                System.out.println("HostName: " + node.getHostName() + "\n"
                        + node.getDatanodeReport());
                System.out.println("--------------------------------");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Prints every key/value pair of the filesystem's configuration.
     *
     * @param fs filesystem whose configuration is dumped
     */
    public synchronized static void listConfig(FileSystem fs) {
        Iterator<Entry<String, String>> entrys = fs.getConf().iterator();
        while (entrys.hasNext()) {
            Entry<String, String> item = entrys.next();
            System.out.println(item.getKey() + ": " + item.getValue());
        }
    }

    /**
     * Creates {@code dirName} (and any missing parents) under the working
     * directory, printing success or failure.
     *
     * @param fs      target filesystem
     * @param dirName directory path relative to the working directory
     */
    public synchronized static void mkdirs(FileSystem fs, String dirName) {
        Path workDir = fs.getWorkingDirectory();
        String dir = workDir + "/" + dirName;
        Path src = new Path(dir);
        try {
            boolean succ = fs.mkdirs(src);
            if (succ) {
                System.out.println("create directory " + dir + " successed. ");
            } else {
                System.out.println("create directory " + dir + " failed. ");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Recursively deletes {@code dirName} under the working directory,
     * printing success or failure.
     *
     * @param fs      target filesystem
     * @param dirName directory path relative to the working directory
     */
    public synchronized static void rmdirs(FileSystem fs, String dirName) {
        Path workDir = fs.getWorkingDirectory();
        String dir = workDir + "/" + dirName;
        Path src = new Path(dir);
        try {
            boolean succ = fs.delete(src, true); // true => recursive
            if (succ) {
                System.out.println("remove directory " + dir + " successed. ");
            } else {
                System.out.println("remove directory " + dir + " failed. ");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Uploads a local file or directory to {@code remote} (relative to the
     * working directory), overwriting any existing target.
     *
     * @param fs     target filesystem
     * @param local  local source path
     * @param remote destination path relative to the working directory
     */
    public synchronized static void upload(FileSystem fs, String local,
            String remote) {
        Path workDir = fs.getWorkingDirectory();
        Path dst = new Path(workDir + "/" + remote);
        Path src = new Path(local);
        try {
            // copyFromLocalFile(delSrc=false, overwrite=true, src, dst)
            fs.copyFromLocalFile(false, true, src, dst);
            System.out.println("upload " + local + " to  " + remote + " successed. ");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Downloads {@code remote} (relative to the working directory) to the
     * local path {@code local}.
     *
     * @param fs     source filesystem
     * @param local  local destination path
     * @param remote source path relative to the working directory
     */
    public synchronized static void download(FileSystem fs, String local,
            String remote) {
        Path workDir = fs.getWorkingDirectory();
        Path remotePath = new Path(workDir + "/" + remote);
        Path localPath = new Path(local);
        try {
            // copyToLocalFile(delSrc=false, src=HDFS path, dst=local path)
            fs.copyToLocalFile(false, remotePath, localPath);
            System.out.println("download from " + remote + " to  " + local
                    + " successed. ");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Formats a byte count as a human-readable string (B / KB / MB / GB,
     * integer-truncated).
     *
     * <p>BUG FIX: the original reported any size below 1 MB as "KB" — so
     * e.g. 500 bytes printed as "0 KB" — and its " B" branch was unreachable.
     * Thresholds are now checked smallest-first.
     *
     * @param size byte count, assumed non-negative
     * @return formatted size string
     */
    public synchronized static String convertSize(long size) {
        final long K = 1024L;
        final long M = K * K;
        final long G = K * M;
        if (size < K) {
            return size + " B";
        } else if (size < M) {
            return (size / K) + " KB";
        } else if (size < G) {
            return (size / M) + " MB";
        } else {
            return (size / G) + " GB";
        }
    }

    /**
     * Recursively lists files under {@code path} (or under the working
     * directory when {@code path} is null/empty), printing size and path for
     * each regular file.
     *
     * @param fs   filesystem to walk
     * @param path starting path; null or "" means the working directory
     */
    public synchronized static void listFile(FileSystem fs, String path) {
        Path workDir = fs.getWorkingDirectory();
        Path dst;
        if (null == path || "".equals(path)) {
            // BUG FIX: a null path previously produced ".../null"; use the
            // working directory itself as the starting point.
            dst = workDir;
        } else {
            dst = new Path(path);
        }
        try {
            FileStatus[] fList = fs.listStatus(dst);
            for (FileStatus f : fList) {
                if (null != f) {
                    String relativePath = new StringBuffer()
                            .append(f.getPath().getParent()).append("/")
                            .append(f.getPath().getName()).toString();
                    if (f.isDir()) {
                        listFile(fs, relativePath); // recurse into subdirectory
                    } else {
                        // BUG FIX: was "/t/t" (literal slashes), intended tabs.
                        System.out.println(convertSize(f.getLen()) + "\t\t"
                                + relativePath);
                    }
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Creates (or overwrites) {@code path} under the working directory and
     * writes {@code data} in modified-UTF-8 ({@code writeUTF} format — read it
     * back with {@link #read}).
     *
     * @param fs   target filesystem
     * @param path file path relative to the working directory
     * @param data string content to store
     */
    public synchronized static void write(FileSystem fs, String path,
            String data) {
        Path workDir = fs.getWorkingDirectory();
        Path dst = new Path(workDir + "/" + path);
        FSDataOutputStream dos = null;
        try {
            dos = fs.create(dst);
            dos.writeUTF(data);
            System.out.println("write content to " + path + " successed. ");
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // BUG FIX: close in finally so a failed writeUTF does not leak the stream.
            if (dos != null) {
                try {
                    dos.close();
                } catch (Exception ignored) {
                    // best-effort close; nothing useful to do here
                }
            }
        }
    }

    /**
     * Appends {@code data} to an existing file under the working directory.
     * NOTE(review): append support depends on the HDFS version/configuration
     * (dfs.support.append) — may fail on older clusters.
     *
     * @param fs   target filesystem
     * @param path file path relative to the working directory
     * @param data string content to append (writeUTF format)
     */
    public synchronized static void append(FileSystem fs, String path,
            String data) {
        Path workDir = fs.getWorkingDirectory();
        Path dst = new Path(workDir + "/" + path);
        FSDataOutputStream dos = null;
        try {
            dos = fs.append(dst);
            dos.writeUTF(data);
            System.out.println("append content to " + path + " successed. ");
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // BUG FIX: close in finally so a failed writeUTF does not leak the stream.
            if (dos != null) {
                try {
                    dos.close();
                } catch (Exception ignored) {
                    // best-effort close
                }
            }
        }
    }

    /**
     * Reads one {@code writeUTF}-encoded string from {@code path} under the
     * working directory (the counterpart of {@link #write}).
     *
     * @param fs   source filesystem
     * @param path file path relative to the working directory
     * @return the decoded string, or {@code null} if reading failed
     */
    public synchronized static String read(FileSystem fs, String path) {
        String content = null;
        Path workDir = fs.getWorkingDirectory();
        Path dst = new Path(workDir + "/" + path);
        FSDataInputStream dis = null;
        try {
            dis = fs.open(dst);
            content = dis.readUTF();
            System.out.println("read content from " + path + " successed. ");
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // BUG FIX: close in finally so a failed readUTF does not leak the stream.
            if (dis != null) {
                try {
                    dis.close();
                } catch (Exception ignored) {
                    // best-effort close
                }
            }
        }
        return content;
    }
}
3. 创建测试用例

package Bruce.Hadoop.HDFSManger; // must run with the Hadoop libraries on the classpath

import java.io.IOException;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * Manual smoke test for {@link HDFSUtil}: connects to a namenode, lists the
 * datanodes, ensures an "input" directory exists, then writes and reads back
 * a small file. Edit the hard-coded host/port for your cluster.
 */
public class HDFSTest {

    /**
     * @param args unused
     */
    public static void main(String[] args) {
        FileSystem fs = HDFSUtil.getFileSystem("192.168.100.3", 9000);
        // BUG FIX: getFileSystem returns null on connection failure; the
        // original dereferenced it unconditionally and threw NPE.
        if (fs == null) {
            System.err.println("could not connect to HDFS");
            return;
        }

        String dir = "input";
        String fileName = "Name.txt";
        try {
            HDFSUtil.listNode(fs); // print each datanode's report

            // BUG FIX: the original tested the literal "input" instead of the
            // directory variable, so changing the variable silently broke the check.
            if (!fs.exists(new Path(dir))) {
                HDFSUtil.mkdirs(fs, dir);
                System.out.println("mkdir" + dir);
            } else {
                System.out.println(dir + " exists!");
            }

            // write() overwrites any existing content.
            HDFSUtil.write(fs, dir + "/" + fileName, "bruce wang");
            // Avoid append() on HDFS where possible — support varies by version:
            // HDFSUtil.append(fs, dir + "/" + fileName, "\ntest-append");
            System.out.println("write " + dir + "/" + fileName);

            String content = HDFSUtil.read(fs, dir + "/" + fileName);
            System.out.println(content);
            System.out.println("read " + dir + "/" + fileName);
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // BUG FIX: release the client connection; the original leaked it.
            try {
                fs.close();
            } catch (IOException ignored) {
                // best-effort close
            }
        }
    }
}
4. run as hadoop,得到:
12/02/12 01:17:57 WARN conf.Configuration: DEPRECATED: hadoop-site.xml found in the classpath. Usage of hadoop-site.xml is deprecated. Instead use core-site.xml, mapred-site.xml and hdfs-site.xml to override properties of core-default.xml, mapred-default.xml and hdfs-default.xml respectivelyHostName: BruceWangUbuntu/nName: 192.168.100.3:50010Decommission Status : NormalConfigured Capacity: 20608348160 (19.19 GB)DFS Used: 24609 (24.03 KB)Non DFS Used: 4905893855 (4.57 GB)DFS Remaining: 15702429696(14.62 GB)DFS Used%: 0%DFS Remaining%: 76.19%Last contact: Sun Feb 12 01:17:48 CST 2012--------------------------------input exists!write content to input/Name.txt successed. write input/Name.txtread content from input/Name.txt successed. bruce wangread input/Name.txt





原创粉丝点击