使用java接口上传文件到HDFS

来源:互联网 发布:windows10软件兼容性 编辑:程序博客网 时间:2024/05/01 08:34
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;


public class HDFSDemo {
FileSystem fs = null;

@Before
public void init() throws IOException, URISyntaxException, InterruptedException{
fs = FileSystem.get(new URI("hdfs://192.168.1.120:9000"), new Configuration(),"bl");
}

@Test
public void testUpload() throws IllegalArgumentException, IOException{
//读取本地文件系统

InputStream in = new FileInputStream("C://Users//Administrator//Downloads//spark-2.1.0-bin-hadoop2.7.tgz");

//HDFS文件系统路径

OutputStream out = fs.create(new Path("/spark-2.1.0-bin-hadoop2.7.tgz"));
IOUtils.copyBytes(in, out, 4096, true);

}
 
public static void main(String[] args) throws URISyntaxException, IOException {
FileSystem fs = FileSystem.get(new URI("hdfs://192.168.1.121:9000"), new Configuration());


}


}
0 0
原创粉丝点击