HDFS API usage examples: FileSystem-based operations and java.net.URL access

来源:互联网 发布:erp软件图标素材 编辑:程序博客网 时间:2024/06/11 12:15
public static FileSystem getHDFS() {<span style="white-space:pre"></span>FileSystem fs =null;<span style="white-space:pre"></span><span style="white-space:pre"></span>Configuration conf = new Configuration();<span style="white-space:pre"></span>try {<span style="white-space:pre"></span>fs= FileSystem.get(conf);<span style="white-space:pre"></span>} catch (IOException e) {<span style="white-space:pre"></span>e.printStackTrace();<span style="white-space:pre"></span>}<span style="white-space:pre"></span>return fs;<span style="white-space:pre"></span>}
</pre><pre name="code" class="java">/** * 上传本地文件 */@Testpublic void testUpload() {try {FileSystem hdfs = HDFSUtils.getHDFS();hdfs.copyFromLocalFile(localPath, descPath);FileStatus[] fileStatus = hdfs.listStatus(descPath);for (FileStatus status : fileStatus) {Path path = status.getPath();System.out.println(path.getName());}hdfs.close();} catch (IOException e) {e.printStackTrace();}}/** * 创建新文件 *  * @throws Exception */@Testpublic void testCreate() throws Exception {byte[] buff = "hello world!".getBytes();FileSystem hdfs = HDFSUtils.getHDFS();Path dst = new Path(descPath + "/hello4.txt");FSDataOutputStream outputStream = null;try {outputStream = hdfs.create(dst);outputStream.write(buff, 0, buff.length);} catch (Exception e) {e.printStackTrace();} finally {if (outputStream != null) {outputStream.close();}}FileStatus files[] = hdfs.listStatus(dst);for (FileStatus file : files) {System.out.println(file.getPath());}}/** *  遍历HDFS上的文件和目录 */@Testpublic void testList() {FileSystem hdfs = HDFSUtils.getHDFS();Path path = new Path("/");try {FileStatus[] fileStatus = hdfs.listStatus(path);for (FileStatus status : fileStatus) {Path p = status.getPath();String str = status.isDir() ? 
"目录" : "文件";System.out.println(str + ":" + p);}} catch (IOException e) {e.printStackTrace();}}/** * 文件重命名 */@Testpublic void testRename() {Path oldPath = new Path("/home/hell.txt");Path newPath = new Path("/home/new.log");FileSystem hdfs = HDFSUtils.getHDFS();try {boolean falg = hdfs.rename(oldPath, newPath);hdfs.close();System.out.println(falg);} catch (IOException e) {e.printStackTrace();}}/** * 读取文件的内容 */@Testpublic void readFile() {Path path = new Path("/home/big1.pdf");FileSystem hdfs = HDFSUtils.getHDFS();try {FSDataInputStream in = hdfs.open(path);IOUtils.copyBytes(in, System.out, 4096, false); // 复制到标准输出流} catch (IOException e) {e.printStackTrace();}}/** * 删除文件 */@Testpublic void delete() {FileSystem hdfs = HDFSUtils.getHDFS();Path path = new Path("/home/new.log");// 删除目录 不管是否为空目录// boolean falg=hdfs.delete(new Path("/home/test"));// 删除目录 不为空目录// boolean b=hdfs.delete(new Path("/home/test"),false);// 删除目录 不管是否为空目录// boolean falg=hdfs.delete(new Path("/home/aa"),true);try {boolean falg = hdfs.deleteOnExit(path);System.out.println(falg);} catch (IOException e) {e.printStackTrace();}}


Reading an HDFS file through the java.net.URL API


/**
 * Demonstrates reading a file from HDFS through the standard
 * {@link java.net.URL} API instead of the FileSystem API.
 */
public class HDFSUrl {

    static {
        // Register Hadoop's handler so java.net.URL understands hdfs:// URLs.
        // NOTE(review): setURLStreamHandlerFactory may be called at most once
        // per JVM; this fails if another component has already set a factory.
        URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
    }

    /**
     * Streams the file at the hard-coded hdfs:// URL to standard output.
     * Declares the exception instead of swallowing it so JUnit reports
     * failures; try-with-resources closes the stream the original leaked.
     *
     * @throws IOException if the URL cannot be opened or read
     */
    @Test
    public void readFile() throws IOException {
        URL url = new URL("hdfs://192.168.102.130:9000/home/hello.txt");
        try (InputStream in = url.openStream()) {
            IOUtils.copyBytes(in, System.out, 1024);
        }
    }
}



0 0
原创粉丝点击