HDFS Basic Commands and Java Operations


1. Start the Hadoop cluster
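
A minimal sketch of this step, assuming a typical Hadoop 2.x installation with the sbin scripts on the PATH (adjust to your own setup):

  start-dfs.sh    # starts the NameNode, DataNodes and SecondaryNameNode
  jps             # verify that the HDFS daemon processes are running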

2. List files to verify that HDFS is available

  hadoop fs -ls /

3. Commands for operating on HDFS have the form hadoop fs <command>; the most common ones are listed here, followed by an example sequence

     hadoop fs -help <command>   show help for a command
     hadoop fs -ls <path>        list the contents of an HDFS directory
     hadoop fs -lsr <path>       list an HDFS directory recursively
     hadoop fs -mkdir <path>     create a directory
     hadoop fs -put <src> <des>  upload a file from Linux to HDFS
     hadoop fs -get <src> <des>  download a file from HDFS to Linux
     hadoop fs -text <path>      print the content of a file
     hadoop fs -rm <path>        delete a file
     hadoop fs -rmr <path>       delete a directory recursively
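
A typical sequence using these commands might look like the following; the file hello.txt and directory /d3 are placeholders, and on recent Hadoop versions -lsr and -rmr are deprecated in favor of -ls -R and -rm -r:

  hadoop fs -mkdir /d3
  hadoop fs -put hello.txt /d3/hello.txt
  hadoop fs -ls /d3
  hadoop fs -text /d3/hello.txt
  hadoop fs -get /d3/hello.txt ./hello.txt
  hadoop fs -rm /d3/hello.txt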

4. Operating on HDFS from Java

Set up an Eclipse + Hadoop development environment.
Create, delete, upload and download files on HDFS; the corresponding API calls are all straightforward.

package hadoop.hdfs;

import java.io.FileInputStream;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class HdfsCommon {

    private String url = "hdfs://192.168.100.10:9000";
    private FileSystem fileSystem;

    /**
     * @param url NameNode address, e.g. hdfs://192.168.100.10:9000
     */
    public HdfsCommon(String url) throws IOException, URISyntaxException {
        this.url = url;
        this.fileSystem = FileSystem.get(new URI(url), new Configuration());
    }

    public FileSystem getFileSystem() {
        return fileSystem;
    }

    /**
     * Create a directory.
     * @param dirPath directory path, e.g. /dir/dir
     */
    public boolean createDir(String dirPath) {
        try {
            // the FileSystem is already bound to the NameNode URI,
            // so plain paths like /d3 are resolved against it
            return fileSystem.mkdirs(new Path(dirPath));
        } catch (IllegalArgumentException | IOException e) {
            e.printStackTrace();
        }
        return false;
    }

    /**
     * Delete a directory recursively.
     * @param dir directory path, e.g. /dir/dir
     */
    public boolean deleteDir(String dir) {
        try {
            return fileSystem.delete(new Path(dir), true);
        } catch (IllegalArgumentException | IOException e) {
            e.printStackTrace();
        }
        return false;
    }

    /**
     * Upload a local file to HDFS.
     * @param srcFile local source file path
     * @param desFile destination path on HDFS
     */
    public boolean uploadFile(String srcFile, String desFile) {
        try {
            FSDataOutputStream fsout = fileSystem.create(new Path(desFile));
            FileInputStream fis = new FileInputStream(srcFile);
            // the final 'true' closes both streams once the copy finishes
            IOUtils.copyBytes(fis, fsout, 1024, true);
            return true;
        } catch (IllegalArgumentException | IOException e) {
            e.printStackTrace();
        }
        return false;
    }

    /**
     * Download a file from HDFS and print its content to stdout.
     * @param srcFile source file path on HDFS
     */
    public boolean downloadFile(String srcFile) {
        FSDataInputStream in = null;
        try {
            in = fileSystem.open(new Path(srcFile));
            // 'false' leaves System.out open for later output
            IOUtils.copyBytes(in, System.out, 1024, false);
            return true;
        } catch (IllegalArgumentException | IOException e) {
            e.printStackTrace();
        } finally {
            IOUtils.closeStream(in);
        }
        return false;
    }

    /**
     * Delete a file if it exists.
     * @param path file path on HDFS
     */
    public boolean deleteFile(String path) {
        try {
            if (fileSystem.exists(new Path(path))) {
                fileSystem.delete(new Path(path), true);
                return true;
            }
        } catch (IllegalArgumentException | IOException e) {
            e.printStackTrace();
        }
        return false;
    }
}
package hadoop.hdfs;

import java.io.IOException;
import java.net.URISyntaxException;

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HdfsDemo {

    public static void main(String[] args) throws IOException, URISyntaxException {
        String url = "hdfs://192.168.100.10:9000";
        String newDir = "/d3";
        HdfsCommon hdfsCommon = new HdfsCommon(url);
        FileSystem fileSystem = hdfsCommon.getFileSystem();

        // check whether the directory already exists
        boolean isDir = fileSystem.isDirectory(new Path(newDir));
        if (!isDir) {
            // create the directory
            boolean createDir = hdfsCommon.createDir(newDir);
            System.out.println(createDir);
        }
        System.out.println(isDir);

        // upload a file into the directory
        String srcFile = "F://hello.txt";
        String desFile = "d3/test";
        boolean uploadFile = hdfsCommon.uploadFile(srcFile, "/" + desFile);
        System.out.println(uploadFile);

        // download (print the file to stdout)
        // hdfsCommon.downloadFile("/" + desFile);
        // without the leading "/" the path is resolved against the HDFS home
        // directory of the current user, e.g. /user/kobe/d3/test

        // delete the file
        // boolean deleteFile = hdfsCommon.deleteFile("/" + desFile);
        // System.out.println(deleteFile);

        boolean exists = hdfsCommon.getFileSystem().exists(new Path("/" + desFile));
        System.out.println("exists:" + exists);

        // list the files in the directory
        FileStatus[] listfs = fileSystem.listStatus(new Path("/d3"));
        for (int i = 0; i < listfs.length; i++) {
            String res = listfs[i].isDirectory() ? "directory" : "file";
            System.out.println(res);
            String per = listfs[i].getPermission().toString();
            long leng = listfs[i].getLen();
            long size = listfs[i].getBlockSize();
            Path path = listfs[i].getPath();
            System.out.println(per + "," + leng + "," + size + "," + path);
        }
    }
}