Typical MapReduce Code

####### MyHDFS 1

package com.myblue.myhdfs;

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class MyHDFS {

    public static FileSystem getFs() throws IOException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://blue01.mydomain:8020");
        return FileSystem.get(conf);
    }

    public static void main(String[] args) throws IOException {
        FileSystem fs = MyHDFS.getFs();
        FSDataInputStream inStream = fs.open(new Path("/input/a.txt"));

        // Create a directory; any missing parent directories are created as well
        // fs.mkdirs(new Path("/input/xxx/yyy"));
        // Create an empty file; missing parent paths are created automatically
        // fs.create(new Path("/input/aaa/c.txt"));
        // Second argument: whether to delete recursively, contents included
        // fs.delete(new Path("/input/aaa"), true);
        // Copy a local Linux file into HDFS
        // fs.copyFromLocalFile(new Path("/home/tom/a.txt"), new Path("/input/e.txt"));
        // Get information about every file in a directory
        // FileStatus[] status = fs.listStatus(new Path("/input"));
        // System.out.println(status[0].getPath() + " " + status[0].getLen());
        // System.out.println(status[1].getPath() + " " + status[1].getLen());

        try {
            // Last two arguments: buffer size, and whether to close the streams when done
            IOUtils.copyBytes(inStream, System.out, 4096, false);
        } catch (Exception e) {
            System.out.println(e);
        } finally {
            IOUtils.closeStream(inStream);
        }
    }
}
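
The commented-out calls above show the common FileSystem operations one at a time. As a minimal runnable sketch, assuming the same getFs() helper and the hypothetical paths /input/xxx/yyy and /home/tom/a.txt, they can be combined like this, iterating over every listStatus() entry instead of hardcoding indices:

package com.myblue.myhdfs;

import java.io.IOException;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class MyHDFSOps {

    public static void main(String[] args) throws IOException {
        FileSystem fs = MyHDFS.getFs(); // reuse the helper defined above

        // Create a directory tree (missing parents are created automatically)
        fs.mkdirs(new Path("/input/xxx/yyy"));            // hypothetical path

        // Copy a local file into HDFS
        fs.copyFromLocalFile(new Path("/home/tom/a.txt"), // hypothetical local file
                new Path("/input/e.txt"));

        // List every entry under /input instead of indexing fixed positions
        for (FileStatus status : fs.listStatus(new Path("/input"))) {
            System.out.println(status.getPath() + " " + status.getLen());
        }

        // Recursive delete: true removes the directory and everything in it
        fs.delete(new Path("/input/xxx"), true);

        fs.close();
    }
}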

####### MyHDFS 2

package com.myblue.myhdfs;

import java.io.File;
import java.io.FileInputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

/**
 * @author beifeng
 */
public class MyHDFS2 {

    /**
     * Get FileSystem
     *
     * @return the default FileSystem
     * @throws Exception
     */
    public static FileSystem getFileSystem() throws Exception {
        // Loads core-site.xml, core-default.xml, hdfs-site.xml, hdfs-default.xml
        // from the classpath
        Configuration conf = new Configuration();

        // get filesystem
        return FileSystem.get(conf);
    }

    public static void main(String[] args) throws Exception {

        // String fileName = "/user/beifeng/mapreduce/wordcount/input/wc.input";
        // read(fileName);

        // get filesystem
        FileSystem fileSystem = getFileSystem();

        // target path in HDFS
        String putFileName = "/user/beifeng/put-wc.input";
        Path writePath = new Path(putFileName);

        // output stream to HDFS
        FSDataOutputStream outStream = fileSystem.create(writePath);

        // input stream from the local file
        FileInputStream inStream = new FileInputStream(
                new File("/opt/modules/hadoop-2.5.0/wc.input"));

        // stream copy
        try {
            // copy the local file's bytes into HDFS
            IOUtils.copyBytes(inStream, outStream, 4096, false);
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // close streams
            IOUtils.closeStream(inStream);
            IOUtils.closeStream(outStream);
        }
    }
}
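
####### MyWordCount

With wc.input uploaded to HDFS, a typical MapReduce word-count job can consume it. The following is a minimal sketch against the Hadoop 2.x mapreduce API; the class name MyWordCount and the input/output paths are hypothetical, with the input pointing at the file written by MyHDFS2:

package com.myblue.myhdfs;

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class MyWordCount {

    // Mapper: emit (word, 1) for every token in the line
    public static class WordCountMapper
            extends Mapper<LongWritable, Text, Text, IntWritable> {

        private static final IntWritable ONE = new IntWritable(1);
        private Text word = new Text();

        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            for (String token : value.toString().split("\\s+")) {
                if (!token.isEmpty()) {
                    word.set(token);
                    context.write(word, ONE);
                }
            }
        }
    }

    // Reducer: sum the counts for each word
    public static class WordCountReducer
            extends Reducer<Text, IntWritable, Text, IntWritable> {

        private IntWritable result = new IntWritable();

        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable v : values) {
                sum += v.get();
            }
            result.set(sum);
            context.write(key, result);
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "word count");
        job.setJarByClass(MyWordCount.class);

        job.setMapperClass(WordCountMapper.class);
        job.setReducerClass(WordCountReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        // Hypothetical paths: the file uploaded by MyHDFS2, and a fresh output dir
        FileInputFormat.addInputPath(job, new Path("/user/beifeng/put-wc.input"));
        FileOutputFormat.setOutputPath(job, new Path("/user/beifeng/wordcount-output"));

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}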






