Hadoop Java API
1. Read
package com.sdnware.start01.hadoop;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Arrays;
import java.util.List;
import java.util.function.Consumer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class HDFSRead {

    public static void main(String[] args) {
        read1();
    }

    // Read a single HDFS file into memory and print it.
    public static void read0() {
        System.setProperty("hadoop.home.dir", "D:/soft/hadoop-2.6.0");
        System.setProperty("HADOOP_USER_NAME", "root"); // specify the user to access HDFS as
        Configuration conf = new Configuration();
        try {
            FileSystem hdfs = FileSystem.get(new URI("hdfs://f1:9000"), conf);
            InputStream in = hdfs.open(new Path("/user/wordcount/My Father0.txt"));
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            IOUtils.copyBytes(in, out, 4096, true); // true: close both streams when the copy finishes
            String str = out.toString();
            System.out.println(str);
        } catch (IOException e) {
            e.printStackTrace();
        } catch (URISyntaxException e) {
            e.printStackTrace();
        }
    }

    // List a directory and print the contents of every regular file in it.
    public static void read1() {
        System.setProperty("hadoop.home.dir", "D:/soft/hadoop-2.6.0");
        System.setProperty("HADOOP_USER_NAME", "root"); // specify the user to access HDFS as
        Configuration conf = new Configuration();
        try {
            final FileSystem hdfs = FileSystem.get(new URI("hdfs://f1:9000"), conf);
            Path path = new Path("/user/wordcount");
            if (hdfs.exists(path)) {
                List<FileStatus> listStatus = Arrays.asList(hdfs.listStatus(path));
                listStatus.forEach(new Consumer<FileStatus>() {
                    public void accept(FileStatus t) {
                        if (t.isFile()) { // skip subdirectories
                            FSDataInputStream in = null;
                            try {
                                Path path2 = t.getPath();
                                in = hdfs.open(path2);
                                System.out.println("Read File:" + path2.getName());
                                ByteArrayOutputStream out = new ByteArrayOutputStream();
                                IOUtils.copyBytes(in, out, 4096, true);
                                String str = out.toString();
                                System.out.println(str);
                            } catch (IOException e) {
                                e.printStackTrace();
                            } finally {
                                IOUtils.closeStream(in);
                            }
                        }
                    }
                });
            }
        } catch (IOException e) {
            e.printStackTrace();
        } catch (URISyntaxException e) {
            e.printStackTrace();
        }
    }
}
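The same directory scan can be written more compactly with try-with-resources and FileSystem.listFiles (both available in Hadoop 2.x), so every stream is closed even when an exception is thrown. This is a minimal sketch, not part of the original example; it assumes the same cluster URI hdfs://f1:9000 and directory /user/wordcount as above:

package com.sdnware.start01.hadoop;

import java.io.ByteArrayOutputStream;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.io.IOUtils;

public class HDFSReadSketch {

    public static void main(String[] args) throws Exception {
        System.setProperty("HADOOP_USER_NAME", "root");
        Configuration conf = new Configuration();
        // try-with-resources closes the FileSystem and each stream, even on error
        try (FileSystem hdfs = FileSystem.get(new URI("hdfs://f1:9000"), conf)) {
            // listFiles(path, false) iterates only the files of this directory
            RemoteIterator<LocatedFileStatus> it =
                    hdfs.listFiles(new Path("/user/wordcount"), false);
            while (it.hasNext()) {
                LocatedFileStatus status = it.next();
                try (FSDataInputStream in = hdfs.open(status.getPath())) {
                    ByteArrayOutputStream out = new ByteArrayOutputStream();
                    IOUtils.copyBytes(in, out, 4096, false); // false: try-with-resources closes `in`
                    System.out.println("Read File:" + status.getPath().getName());
                    System.out.println(out.toString());
                }
            }
        }
    }
}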
2. Write
package com.sdnware.start01.hadoop;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HDFSWrite {

    public static void main(String[] args) {
        HDFSWrite writer = new HDFSWrite();
        writer.writer1();
    }

    // Upload every file in a local directory to HDFS, copying byte buffers by hand.
    public void writer0() {
        FSDataOutputStream out = null;
        FSDataInputStream in = null;
        try {
            System.setProperty("hadoop.home.dir", "D:/soft/hadoop-2.6.0");
            Configuration conf = new Configuration();
            FileSystem hdfs = FileSystem.get(new URI("hdfs://f1:9000"), conf);
            FileSystem local = FileSystem.getLocal(conf);
            Path inputDir = new Path("D:/testfile/");    // local directory; all of its files are uploaded
            Path hdfsFile = new Path("/user/wordcount"); // target directory on HDFS
            hdfs.mkdirs(hdfsFile);                       // create the target directory on HDFS
            FileStatus[] inputFiles = local.listStatus(inputDir); // list the local files to upload
            for (int i = 0; i < inputFiles.length; i++) {
                System.out.println(inputFiles[i].getPath().getName());
                in = local.open(inputFiles[i].getPath()); // input stream for the local file
                out = hdfs.create(new Path("/user/wordcount/" + inputFiles[i].getPath().getName())); // output stream on HDFS
                byte buffer[] = new byte[256];
                int bytesRead = 0;
                while ((bytesRead = in.read(buffer)) > 0) { // read up to buffer.length bytes at a time
                    out.write(buffer, 0, bytesRead);        // write only the bytes actually read
                }
                IOUtils.closeQuietly(in);  // close each file's streams before opening the next,
                IOUtils.closeQuietly(out); // otherwise every iteration leaks the previous pair
            }
        } catch (IOException e) {
            e.printStackTrace();
        } catch (URISyntaxException e) {
            e.printStackTrace();
        } finally {
            IOUtils.closeQuietly(in);
            IOUtils.closeQuietly(out);
        }
    }

    // Same upload, but Hadoop's IOUtils.copyBytes does the buffered copy.
    public void writer1() {
        FSDataOutputStream out = null;
        FSDataInputStream in = null;
        try {
            System.setProperty("hadoop.home.dir", "D:/soft/hadoop-2.6.0");
            System.setProperty("HADOOP_USER_NAME", "root"); // specify the user to access HDFS as
            Configuration conf = new Configuration();
            FileSystem hdfs = FileSystem.get(new URI("hdfs://f1:9000"), conf);
            FileSystem local = FileSystem.getLocal(conf);
            Path inputDir = new Path("D:/testfile/");    // local directory; all of its files are uploaded
            Path hdfsFile = new Path("/user/wordcount"); // target directory on HDFS
            hdfs.mkdirs(hdfsFile);
            FileStatus[] inputFiles = local.listStatus(inputDir);
            for (int i = 0; i < inputFiles.length; i++) {
                System.out.println(inputFiles[i].getPath().getName());
                in = local.open(inputFiles[i].getPath());
                out = hdfs.create(new Path("/user/wordcount/" + inputFiles[i].getPath().getName()));
                org.apache.hadoop.io.IOUtils.copyBytes(in, out, 4096, true); // true: close both streams per file
            }
        } catch (IOException e) {
            e.printStackTrace();
        } catch (URISyntaxException e) {
            e.printStackTrace();
        } finally {
            org.apache.hadoop.io.IOUtils.closeStream(in);
            org.apache.hadoop.io.IOUtils.closeStream(out);
        }
    }
}
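The manual stream copy above can also be replaced by FileSystem.copyFromLocalFile, which does the buffered copy and stream handling itself. A minimal sketch under the same assumptions (local directory D:/testfile, target /user/wordcount, cluster URI hdfs://f1:9000):

package com.sdnware.start01.hadoop;

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HDFSWriteSketch {

    public static void main(String[] args) throws Exception {
        System.setProperty("HADOOP_USER_NAME", "root");
        Configuration conf = new Configuration();
        try (FileSystem hdfs = FileSystem.get(new URI("hdfs://f1:9000"), conf)) {
            FileSystem local = FileSystem.getLocal(conf);
            hdfs.mkdirs(new Path("/user/wordcount"));
            // copyFromLocalFile(delSrc, overwrite, src, dst): keep the local copy, overwrite on HDFS
            for (FileStatus f : local.listStatus(new Path("D:/testfile/"))) {
                hdfs.copyFromLocalFile(false, true,
                        f.getPath(),
                        new Path("/user/wordcount/" + f.getPath().getName()));
            }
        }
    }
}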
3. Delete
package com.sdnware.start01.hadoop;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HDFSDelete {

    public static void main(String[] args) {
        delete1();
    }

    // Delete a single file (recursive = false).
    public static void delete0() {
        System.setProperty("hadoop.home.dir", "D:/soft/hadoop-2.6.0");
        System.setProperty("HADOOP_USER_NAME", "root"); // specify the user to access HDFS as
        Configuration conf = new Configuration();
        try {
            FileSystem hdfs = FileSystem.get(new URI("hdfs://f1:9000"), conf);
            Path hdfsFile = new Path("/user/wordcount/My Father.txt");
            hdfs.delete(hdfsFile, false);
        } catch (IOException e) {
            e.printStackTrace();
        } catch (URISyntaxException e) {
            e.printStackTrace();
        }
    }

    // Delete a directory and everything under it (recursive = true).
    public static void delete1() {
        System.setProperty("hadoop.home.dir", "D:/soft/hadoop-2.6.0");
        System.setProperty("HADOOP_USER_NAME", "root"); // specify the user to access HDFS as
        Configuration conf = new Configuration();
        try {
            FileSystem hdfs = FileSystem.get(new URI("hdfs://f1:9000"), conf);
            Path hdfsFile = new Path("/usr");
            hdfs.delete(hdfsFile, true);
        } catch (IOException e) {
            e.printStackTrace();
        } catch (URISyntaxException e) {
            e.printStackTrace();
        }
    }
}
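Because delete(path, true) removes a directory and everything under it, the call is worth guarding. A minimal sketch that checks existence first and derives the recursive flag from the path type; the path /user/wordcount/tmp here is hypothetical, the URI is the same as above:

package com.sdnware.start01.hadoop;

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HDFSDeleteSketch {

    public static void main(String[] args) throws Exception {
        System.setProperty("HADOOP_USER_NAME", "root");
        Configuration conf = new Configuration();
        try (FileSystem hdfs = FileSystem.get(new URI("hdfs://f1:9000"), conf)) {
            Path target = new Path("/user/wordcount/tmp"); // hypothetical example path
            if (hdfs.exists(target)) {
                // recurse only when the target really is a directory,
                // so a plain file is never deleted recursively by accident
                boolean recursive = hdfs.getFileStatus(target).isDirectory();
                boolean deleted = hdfs.delete(target, recursive);
                System.out.println("deleted: " + deleted);
            }
        }
    }
}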