Uploading and downloading files with Hadoop

Upload a file from Windows to Hadoop:
package test;

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.mapreduce.Job;

public class updata {
    public static void main(String[] args) throws Exception {
        try {
            String INPUT_PATH = args[0];
            String OUT_PATH = args[1];
            // "in" reads from a path on the local (Windows) file system
            InputStream in = new BufferedInputStream(new FileInputStream(INPUT_PATH));
            Configuration conf = new Configuration();
            final Job job = new Job(conf, updata.class.getSimpleName());
            // required when the program is packaged as a jar and run on the cluster
            job.setJarByClass(updata.class);
            // connect to the Hadoop file system and copy the stream
            getfile(OUT_PATH, in, conf);
            System.out.println("success");
        } catch (Exception e) {
            System.out.println(e.toString());
        }
    }

    private static void getfile(String dst, InputStream in, Configuration conf) throws IOException {
        FileSystem fs = FileSystem.get(URI.create(dst), conf);
        // "out" writes to the destination path in the Hadoop file system
        OutputStream out = fs.create(new Path(dst));
        IOUtils.copyBytes(in, out, 4096, true); // 4096 = 4 KB buffer; true closes both streams
    }
}
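
For reference, the same upload can also be done without managing the streams by hand, using FileSystem.copyFromLocalFile. The following is a minimal sketch, not part of the original program; it assumes the HDFS address hdfs://LS-PC:9000 used in the download example below, and the local and HDFS paths are placeholders.

package test;

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class UploadSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // assumption: HDFS is reachable at hdfs://LS-PC:9000, as in the download example
        FileSystem fs = FileSystem.get(URI.create("hdfs://LS-PC:9000"), conf);
        // copy a local (Windows) file into HDFS; both paths are placeholders
        fs.copyFromLocalFile(new Path("C://new/3.jpg"), new Path("/out/3.jpg"));
        fs.close();
        System.out.println("success");
    }
}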

Download a file from Hadoop to Windows:

package test;

import java.io.BufferedOutputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URI;
import java.net.URL;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.mapreduce.Job;

public class download {
    // static block: register the handler for the hdfs:// URL protocol
    static {
        URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
    }

    public static void main(String[] args) throws Exception {
        try {
            // the first path is the URI of a file in the Hadoop file system
            String INPUT_PATH = "hdfs://LS-PC:9000/out/3.jpg";
            String OUT_PATH = "C://new/3.jpg";
            Configuration conf = new Configuration();
            // conf.set("Hadoop.job.ugi", "hadoop-user,hadoop-user");
            // required when the program is packaged as a jar and run on the cluster
            final Job job = new Job(conf, download.class.getSimpleName());
            job.setJarByClass(download.class);
            download(INPUT_PATH, OUT_PATH, conf);
            System.out.println("Download success");
        } catch (Exception e) {
            System.out.println(e.toString());
        }
    }

    private static void download(String INPUT_PATH, String OUT_PATH, Configuration conf)
            throws IOException, FileNotFoundException {
        // FileSystem is the core class for working with HDFS; get the file system behind this URI
        FileSystem fs = FileSystem.get(URI.create(INPUT_PATH), conf);
        // open an FSDataInputStream on the URI and read the file
        FSDataInputStream in = fs.open(new Path(INPUT_PATH));
        // "out" writes to the destination file on the local (Windows) file system
        OutputStream out = new BufferedOutputStream(new FileOutputStream(OUT_PATH));
        IOUtils.copyBytes(in, out, 4096, true); // 4096 = 4 KB buffer; true closes both streams
    }
}
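
As with the upload, the download can also be expressed with FileSystem.copyToLocalFile. This is a minimal sketch under the same assumptions (HDFS at hdfs://LS-PC:9000, placeholder paths); passing useRawLocalFileSystem = true skips writing a local .crc checksum file, which may help on some Windows setups that lack the Hadoop native binaries.

package test;

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class DownloadSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // assumption: HDFS is reachable at hdfs://LS-PC:9000, as in the example above
        FileSystem fs = FileSystem.get(URI.create("hdfs://LS-PC:9000"), conf);
        // copy an HDFS file to the local (Windows) file system; both paths are placeholders
        // arguments: delSrc = false, src, dst, useRawLocalFileSystem = true (no .crc file)
        fs.copyToLocalFile(false, new Path("/out/3.jpg"), new Path("C://new/3.jpg"), true);
        fs.close();
        System.out.println("Download success");
    }
}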

Related reading:

https://www.tuicool.com/articles/aeAVJ3


