Hadoop HDFS operations

package com.guolin.hadoop;

import java.io.IOException;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.io.IOUtils;

/**
 * Common HDFS operations through the Hadoop FileSystem API: reading a file,
 * uploading and downloading files, creating, renaming and deleting files,
 * querying modification time and existence, listing a file's block locations,
 * and listing the datanodes in the cluster.
 */
public class Test {

    public static void main(String[] args) throws Exception {
        URI uri = new URI("hdfs://192.168.213.142:9000/");
//      ReadFile(uri, new Configuration(), "hdfs://192.168.213.141:9000/hello2.txt");
        PutFile(uri, new Configuration(), "C:\\hello.txt", "hdfs://192.168.213.142:9000/hello2.txt");
//      GetFile(uri, new Configuration(), "hdfs://192.168.213.131:9000/hello2.txt", "D:\\hello2.txt");
//      CreateFile(uri, new Configuration(), "hdfs://192.168.213.131:9000/hello.txt");
//      ReNameFile(uri, new Configuration(), "hdfs://192.168.213.131:9000/hello.txt", "hdfs://192.168.213.131:9000/hello1.txt");
//      DelFile(uri, new Configuration(), "hdfs://192.168.213.131:9000/hello1.txt", true);
//      long time = GetFileModTime(uri, new Configuration(), "hdfs://192.168.213.131:9000/hello2.txt");
//      System.out.println(new Date(time));
//      boolean isExist = CheckFileExist(uri, new Configuration(), "hdfs://192.168.213.131:9000/hello2.txt");
//      System.out.println(isExist);
//      List<String[]> res = GetFileBolckHost(uri, new Configuration(), "hdfs://192.168.213.131:9000/hello2.txt");
        String[] allnode = GetAllNodeName(uri, new Configuration());
        for (int i = 0; i < allnode.length; i++) {
            System.out.println(allnode[i]);
        }
    }

    // Read a file from HDFS and print its contents to standard output
    public static void ReadFile(URI uri, Configuration conf, String FileName) throws Exception {
        try {
            FileSystem hdfs = FileSystem.get(uri, conf);
            FSDataInputStream dis = hdfs.open(new Path(FileName));
            IOUtils.copyBytes(dis, System.out, 4096, false);
            dis.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    // Copy a local file to HDFS
    public static void PutFile(URI uri, Configuration conf, String srcFile, String dstFile) {
        try {
            FileSystem hdfs = FileSystem.get(uri, conf);
            Path srcPath = new Path(srcFile);
            Path dstPath = new Path(dstFile);
            hdfs.copyFromLocalFile(srcPath, dstPath);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    // Copy a file from HDFS to the local file system
    public static void GetFile(URI uri, Configuration conf, String srcFile, String dstFile) {
        try {
            FileSystem hdfs = FileSystem.get(uri, conf);
            Path srcPath = new Path(srcFile);
            Path dstPath = new Path(dstFile);
            hdfs.copyToLocalFile(false, srcPath, dstPath);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    // Create a new file in HDFS and return its output stream
    public static FSDataOutputStream CreateFile(URI uri, Configuration conf, String FileName) {
        try {
            FileSystem hdfs = FileSystem.get(uri, conf);
            Path path = new Path(FileName);
            FSDataOutputStream outputStream = hdfs.create(path);
            return outputStream;
        } catch (IOException e) {
            e.printStackTrace();
        }
        return null;
    }

    // Rename a file
    public static boolean ReNameFile(URI uri, Configuration conf, String srcName, String dstName) {
        try {
            FileSystem hdfs = FileSystem.get(uri, conf);
            Path fromPath = new Path(srcName);
            Path toPath = new Path(dstName);
            boolean isRenamed = hdfs.rename(fromPath, toPath);
            return isRenamed;
        } catch (IOException e) {
            e.printStackTrace();
        }
        return false;
    }

    // Delete a file or directory
    // type = true:  delete a directory recursively
    // type = false: delete a single file
    public static boolean DelFile(URI uri, Configuration conf, String FileName, boolean type) {
        try {
            FileSystem hdfs = FileSystem.get(uri, conf);
            Path path = new Path(FileName);
            boolean isDeleted = hdfs.delete(path, type);
            return isDeleted;
        } catch (IOException e) {
            e.printStackTrace();
        }
        return false;
    }

    // Get the last modification time of an HDFS file
    public static long GetFileModTime(URI uri, Configuration conf, String FileName) {
        try {
            FileSystem hdfs = FileSystem.get(uri, conf);
            Path path = new Path(FileName);
            FileStatus fileStatus = hdfs.getFileStatus(path);
            long modificationTime = fileStatus.getModificationTime();
            return modificationTime;
        } catch (IOException e) {
            e.printStackTrace();
        }
        return 0;
    }

    // Check whether a file exists in HDFS
    public static boolean CheckFileExist(URI uri, Configuration conf, String FileName) {
        try {
            FileSystem hdfs = FileSystem.get(uri, conf);
            Path path = new Path(FileName);
            boolean isExists = hdfs.exists(path);
            return isExists;
        } catch (IOException e) {
            e.printStackTrace();
        }
        return false;
    }

    // Get the block locations (datanode host names) of a file in the HDFS cluster
    public static List<String[]> GetFileBolckHost(URI uri, Configuration conf, String FileName) {
        try {
            List<String[]> list = new ArrayList<String[]>();
            FileSystem hdfs = FileSystem.get(uri, conf);
            Path path = new Path(FileName);
            FileStatus fileStatus = hdfs.getFileStatus(path);
            BlockLocation[] blkLocations = hdfs.getFileBlockLocations(fileStatus, 0, fileStatus.getLen());
            int blkCount = blkLocations.length;
            for (int i = 0; i < blkCount; i++) {
                String[] hosts = blkLocations[i].getHosts();
                list.add(hosts);
            }
            return list;
        } catch (IOException e) {
            e.printStackTrace();
        }
        return null;
    }

    // Get the host names of all datanodes in the HDFS cluster
    public static String[] GetAllNodeName(URI uri, Configuration conf) {
        try {
            FileSystem fs = FileSystem.get(uri, conf);
            DistributedFileSystem hdfs = (DistributedFileSystem) fs;
            DatanodeInfo[] dataNodeStats = hdfs.getDataNodeStats();
            String[] names = new String[dataNodeStats.length];
            for (int i = 0; i < dataNodeStats.length; i++) {
                names[i] = dataNodeStats[i].getHostName();
            }
            return names;
        } catch (IOException e) {
            e.printStackTrace();
        }
        return null;
    }
}
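
The CreateFile method above returns an FSDataOutputStream, but nothing in main actually writes through it, and the result of GetFileBolckHost is never printed. The following is a rough usage sketch, not part of the original post: it reuses the NameNode address from the listing, while the TestUsageDemo class name, the /created.txt path, and the sample text are placeholders to adapt to your own cluster.

package com.guolin.hadoop;

import java.net.URI;
import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;

public class TestUsageDemo {
    public static void main(String[] args) throws Exception {
        // Placeholder NameNode address and target path; adjust for your cluster.
        URI uri = new URI("hdfs://192.168.213.142:9000/");
        Configuration conf = new Configuration();
        String file = "hdfs://192.168.213.142:9000/created.txt";

        // Write a few bytes through the stream returned by CreateFile, then close it.
        FSDataOutputStream out = Test.CreateFile(uri, conf, file);
        if (out != null) {
            out.write("hello hdfs".getBytes("UTF-8"));
            out.close();
        }

        // Confirm the file now exists.
        System.out.println("exists: " + Test.CheckFileExist(uri, conf, file));

        // Print the datanode host names holding each block of the file.
        List<String[]> blocks = Test.GetFileBolckHost(uri, conf, file);
        if (blocks != null) {
            for (int i = 0; i < blocks.size(); i++) {
                System.out.println("block " + i + ": " + Arrays.toString(blocks.get(i)));
            }
        }
    }
}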