HDFS --访问(二)

来源:互联网 发布:大众网络报官方网站 编辑:程序博客网 时间:2024/05/29 12:28

HDFS的访问方式有两种,第一:类似linux命令,hadoop shell。第二:java API方式。

来看第二种。第二种和第一种完成的功能是一样的,直接上代码,里面一些注释的,在测试时自行打开测试即可。

package com.wmg.data.join2;


import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;


public class HDFSAcess {

    /**
     * Demonstrates the basic HDFS file/directory operations through the Java
     * API — the same operations available from the hadoop shell: create a
     * directory, create files, rename, read, overwrite, and upload a local
     * file into HDFS.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        byte[] buffer = new byte[4096];
        try {
            // The namenode address/port normally comes from conf/core-site.xml;
            // here it is set explicitly. ("fs.default.name" is the legacy key;
            // newer Hadoop versions prefer "fs.defaultFS" but still accept it.)
            Configuration conf = new Configuration();
            conf.set("fs.default.name", "hdfs://10.20.151.7:9000");

            // Obtain the file system. Equivalent explicit form:
            // DistributedFileSystem fs1 = new DistributedFileSystem();
            // fs1.initialize(FileSystem.getDefaultUri(conf), conf);
            FileSystem fs = FileSystem.get(conf);

            // Create the working directory (no-op if it already exists).
            Path dirPath = new Path("/user/hadoop/wmg");
            fs.mkdirs(dirPath);

            // Create two files and write "aaaa" into the first one.
            // FIX: the original reassigned outputStream to a second
            // fs.create(...) without closing the first stream, leaking it and
            // risking loss of the buffered "aaaa" bytes.
            String dst = "/user/hadoop/wmg/wmg.txt";
            String dst1 = "/user/hadoop/wmg/outputwmg.txt";
            Path dstPath = new Path(dst);
            Path dstPath1 = new Path(dst1);
            byte[] content = "aaaa".getBytes("US-ASCII"); // explicit charset
            FSDataOutputStream outputStream = fs.create(dstPath);
            try {
                outputStream.write(content);
            } finally {
                outputStream.close();
            }
            fs.create(dstPath1).close(); // second file starts out empty
            System.out.println("success, create a new file in HDFS: " + dst);

            // Rename the first file.
            String src = "/user/hadoop/wmg/newwmg.txt";
            Path srcPath = new Path(src);
            fs.rename(dstPath, srcPath);
            System.out.println("ok, file: " + dst + " renamed to: " + src);

            // Delete a file (disabled for this demo; enable to test):
            // fs.delete(srcPath, false);
            // System.out.println("ok, delete file: " + srcPath);

            // Read the renamed file back.
            // FIX: capture the number of bytes actually read instead of
            // ignoring read()'s return value; -1 means the file was empty.
            int bytesRead;
            FSDataInputStream is = fs.open(srcPath);
            try {
                bytesRead = is.read(buffer);
            } finally {
                is.close();
            }
            if (bytesRead < 0) {
                bytesRead = 0;
            }

            // Overwrite the second file with the data just read.
            // FIX: write only the bytes actually read — the original wrote the
            // whole 4096-byte buffer, padding the file with trailing zeros.
            FSDataOutputStream os_w = fs.create(dstPath1, true);
            try {
                os_w.write(buffer, 0, bytesRead);
                os_w.flush();
            } finally {
                os_w.close();
            }

            // Append to an existing HDFS file (requires append support on the
            // cluster; enable to test):
            // FSDataOutputStream os_a = fs.append(new Path("/fs_t02"));
            // os_a.write(buffer, 0, bytesRead);
            // os_a.close();

            // Upload a file from the local file system into the HDFS directory.
            Path localsrcPath = new Path("/home/hadoop/minggang.wumg/localwmg.txt");
            fs.copyFromLocalFile(localsrcPath, dirPath);
            System.out.println("Upload to " + conf.get("fs.default.name"));

            fs.close();

            // FIX: decode only the bytes read, not the zero-padded buffer.
            System.out.println(new String(buffer, 0, bytesRead, "US-ASCII"));
        } catch (IOException e) {
            // Full stack trace is more useful for diagnosis than toString().
            e.printStackTrace();
        }
    }
}
基本的文件、目录的操作都全了~~

原创粉丝点击