自定义排序
来源:互联网 发布:混血 知乎 编辑:程序博客网 时间:2024/05/16 12:32
package com.ccse.hadoop.sort;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Objects;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * Custom-sort MapReduce job.
 *
 * <p>Reads tab-separated lines of two long values ("first\tsecond") from
 * {@link #INPUT_PATH}, sorts them first by the first column and then by the
 * second column via the composite key {@link MyK2}, and writes the ordered
 * (first, second) pairs to {@link #OUTPUT_PATH}.
 *
 * @author woshiccna
 */
public class SortApp {

    public static final String INPUT_PATH = "hdfs://chaoren1:9000/sortinput";
    public static final String OUTPUT_PATH = "hdfs://chaoren1:9000/sortoutput";

    /**
     * Configures and runs the sort job.
     *
     * @param args unused command-line arguments
     * @throws IOException            on HDFS or job-submission failure
     * @throws URISyntaxException     if {@link #OUTPUT_PATH} is malformed
     * @throws ClassNotFoundException if a job class cannot be resolved
     * @throws InterruptedException   if the job is interrupted while waiting
     */
    public static void main(String[] args) throws IOException, URISyntaxException,
            ClassNotFoundException, InterruptedException {
        Configuration conf = new Configuration();

        // Delete any previous output directory; the job would otherwise fail
        // because Hadoop refuses to overwrite an existing output path.
        FileSystem fileSystem = FileSystem.get(new URI(OUTPUT_PATH), conf);
        fileSystem.delete(new Path(OUTPUT_PATH), true);

        Job job = new Job(conf, SortApp.class.getSimpleName());
        job.setJarByClass(SortApp.class);

        FileInputFormat.setInputPaths(job, new Path(INPUT_PATH));
        job.setMapperClass(MyMapper.class);
        job.setMapOutputKeyClass(MyK2.class);
        job.setMapOutputValueClass(LongWritable.class);

        job.setReducerClass(MyReducer.class);
        job.setOutputKeyClass(LongWritable.class);
        job.setOutputValueClass(LongWritable.class);
        FileOutputFormat.setOutputPath(job, new Path(OUTPUT_PATH));

        job.waitForCompletion(true);
    }

    /**
     * Parses each tab-separated "first\tsecond" line into a {@link MyK2}
     * composite key so the shuffle phase performs the two-column sort.
     */
    public static class MyMapper extends Mapper<LongWritable, Text, MyK2, LongWritable> {
        @Override
        protected void map(LongWritable key, Text value,
                Mapper<LongWritable, Text, MyK2, LongWritable>.Context context)
                throws IOException, InterruptedException {
            if (value == null) {
                return;
            }
            String[] splitted = value.toString().split("\t");
            // Skip malformed lines instead of dying with
            // ArrayIndexOutOfBoundsException on inputs with fewer than 2 columns.
            if (splitted.length < 2) {
                return;
            }
            long first = Long.parseLong(splitted[0]);
            long second = Long.parseLong(splitted[1]);
            context.write(new MyK2(first, second), new LongWritable(second));
        }
    }

    /**
     * Emits one (first, second) pair per distinct key; keys arrive already
     * sorted by {@link MyK2#compareTo}.
     */
    public static class MyReducer extends Reducer<MyK2, LongWritable, LongWritable, LongWritable> {
        @Override
        protected void reduce(MyK2 key, Iterable<LongWritable> values,
                Reducer<MyK2, LongWritable, LongWritable, LongWritable>.Context context)
                throws IOException, InterruptedException {
            context.write(new LongWritable(key.first), new LongWritable(key.second));
        }
    }

    /**
     * Composite key of two longs, ordered by {@code first} and then by
     * {@code second}. Implements {@code equals}/{@code hashCode} consistently
     * with {@code compareTo} so HashPartitioner routes equal keys to the same
     * reducer (the original identity hashCode broke multi-reducer runs).
     */
    public static class MyK2 implements WritableComparable<MyK2> {
        private long first;
        private long second;

        /** No-arg constructor required by Hadoop's reflective deserialization. */
        public MyK2() {
        }

        public MyK2(long first, long second) {
            this.first = first;
            this.second = second;
        }

        @Override
        public void write(DataOutput out) throws IOException {
            out.writeLong(this.first);
            out.writeLong(this.second);
        }

        @Override
        public void readFields(DataInput in) throws IOException {
            this.first = in.readLong();
            this.second = in.readLong();
        }

        @Override
        public int compareTo(MyK2 to) {
            if (to == null) {
                // Preserve the original's lenient null handling.
                return 0;
            }
            // Long.compare avoids the original (int)(a - b) truncation/overflow,
            // which could report equal or inverted order for large differences.
            int byFirst = Long.compare(this.first, to.first);   // sort by first column
            if (byFirst != 0) {
                return byFirst;
            }
            return Long.compare(this.second, to.second);        // then by second column
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (!(obj instanceof MyK2)) {
                return false;
            }
            MyK2 other = (MyK2) obj;
            return this.first == other.first && this.second == other.second;
        }

        @Override
        public int hashCode() {
            return Objects.hash(first, second);
        }
    }
}
0 0
- 自定义排序
- 自定义排序
- 自定义排序
- 自定义排序
- 自定义排序
- 自定义排序
- 自定义排序
- 自定义排序
- 自定义排序准则,自定义数据类型
- 自定义-mapreduce编程自定义排序
- Lucene排序以及自定义排序
- Lucene排序以及自定义排序
- 自然排序和自定义排序
- hadoop自定义排序,分组排序
- Hadoop 自定义排序,自定义分区,自定义分组
- 创建自定义排序用户界面
- DataGrid自定义排序
- DisplayTag自定义排序
- Android开发小经验
- SQLite:注意查询条件中空格的影响
- 实验五 树和二叉树
- cocos2dx简单实现描边
- BOOST 1.56 简明配置方法,资源整理
- 自定义排序
- iOS 远程通知
- POJ2486---Apple Tree
- [Bug Fix]Messy Audio语音电话不清晰问题
- 浮点运算是怎么实现的?
- 我的博客开始更新
- C++句柄类
- copy, retain, assign , readonly , readwrite,strong,weak,nonatomic整理
- 手机回声消除原理