Hadoop Learning 1: Operating HDFS from Java

1. Creating a directory
package hdfs.operation;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class MakeDir {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // Point the client at the NameNode (the master).
        conf.set("fs.defaultFS", "hdfs://192.168.86.133:9000");

        FileSystem fs = FileSystem.get(conf);
        Path path = new Path("/user/hadoop/");
        fs.mkdirs(path); // create the directory
        fs.close();
        System.out.println("end");
    }
}
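
Note that mkdirs() behaves like mkdir -p on the command line: it creates any missing parent directories along the way and returns a boolean indicating whether the call succeeded.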

2. Creating a file
fs.create(path); // returns an FSDataOutputStream; write to it, then close it
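
A minimal end-to-end sketch, assuming the same cluster as above; the class name CreateFile is illustrative, and the path and content are chosen so that the file matches what the read example in step 3 expects:

package hdfs.operation;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CreateFile {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://192.168.86.133:9000"); // master

        FileSystem fs = FileSystem.get(conf);
        Path path = new Path("/user/hadoop/write.txt");
        // create() returns an FSDataOutputStream;
        // the second argument overwrites the file if it already exists.
        FSDataOutputStream out = fs.create(path, true);
        out.write("hello hdfs".getBytes("UTF-8")); // illustrative content
        out.close();
        fs.close();
    }
}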

3. Reading a file
package hdfs.operation;

import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class FileReadFromHdfs {

    public static void main(String[] args) {
        try {
            String dsf = "/user/hadoop/write.txt";
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", "hdfs://192.168.86.133:9000"); // master

            FileSystem fs = FileSystem.get(URI.create(dsf), conf);
            FSDataInputStream hdfsInStream = fs.open(new Path(dsf));

            // Copy the stream to stdout in 1 KB chunks until EOF (read returns -1).
            byte[] ioBuffer = new byte[1024];
            int readLen = hdfsInStream.read(ioBuffer);
            while (readLen != -1) {
                System.out.write(ioBuffer, 0, readLen);
                readLen = hdfsInStream.read(ioBuffer);
            }
            hdfsInStream.close();
            fs.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
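
As an aside, the manual copy loop above can also be written more compactly with Hadoop's IOUtils utility class (org.apache.hadoop.io.IOUtils); a sketch of the equivalent read:

// Requires: import org.apache.hadoop.io.IOUtils;
FSDataInputStream in = fs.open(new Path(dsf));
// Copy to System.out with a 1 KB buffer; the final 'true'
// closes the input stream when the copy finishes.
IOUtils.copyBytes(in, System.out, 1024, true);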

And so on.