MapReduce 例子
来源:互联网 发布:网络推手汇航 编辑:程序博客网 时间:2024/06/05 01:14
目的:
统计手机号在某个时间段的流量
流量分上行流量和下行流量
jar包依赖
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-common</artifactId>
    <version>2.2.0</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-mapreduce-client-core</artifactId>
    <version>2.2.0</version>
</dependency>
代码
public class DataCount{ public static void main(String[] args) throws IOException{ Configuration conf = new Configuration(); //conf.set(); Job job = Job.getInstance(conf); job.setJarByClass(DataCount.class); Job.setMappperClass(DcMapper.class); job.setMapOutputKeyValueClass(Text.class); job.setMapOutputValueClass(DataBean.class); FileInputFormat.setInputPaths(job,new Path(args[0])); job.setReducerClass(DcReducer.class); job.setOutputKeyClass(Text.class); job.setOutputValueClass(DataBean.class); FileOutputFormat.setOutputPath(job,new Path(args[1])); } public static class DcMapper extends Mapper<LongWritable,Text,Text,DataBean>{ protected void map(LongWritable key,Text value,Context context) throws IOException,InterruptedException{ String line = value.toString; Stirng[] fields = line.split("\t"); String telNo=fields[1];//根据文件中的位置确定 long upPayLoad=Long.parseLong(fields[3]); long downPayLoad=Long.parseLong(fields[4]); DataBean bean=new DataBean(telNo,up,down); context.write(new Text(telNo),bean); } } public static class DcRducer extends Reducer<Text,DataBean,Text,DataBean>{ protected void reduce(Text key,Iterable<DataBean> v2s,Context context){ long up_sum=0; long down_sum=0; for(Databean bean:v2s){ up_sum+=bean.getUpPayLoad(); down_sum+=bean.getDownPayLoad(); } DataBean bean = new DataBean("",up_sum,down_sum); context.write(key,bean); } }}public class DataBean implements Writable{ private String telNo; private long upPayLoad; private long downPayLoad; private long totalPayLoad; public DataBean(){ } public DataBean(String telNo,long upPayLoad,long downPayLoad){ this.telNo=telNo; this.upPayLoad=upPayLoad; this.downPayLoad=downPayLoad; this.totalPayLoad=upPayLoad+downPayLoad; } //serialize public void write(DataOutput out) throws IOException{ out.writeUTF(telNo); out.writeLong(upPayLoad); out.writeLong(downPayLoad); out.writeLong(totalPayLoad); } //deserialize public void readFields(DataInput in) throws IOException{ this.telNo=in.readUTF(); 
this.upPayLoad=in.readLong(upPayLoad); this.downPayLoad=in.readLong(downPayLoad); this.totalPayLoad=in.readLong(totalPayLoad); } public String toString(){ return this.upPayLoad+"\t"+this.downPayLoad+"\t"+this.totalPayLoad; } getter\setter}
0 0
- MapReduce 例子
- mapreduce例子
- 【mapreduce】 Hadoop2.6.0 mapreduce 例子
- Hbase MapReduce例子
- mapreduce-wordcount例子
- Hadoop MapReduce 编写例子
- MapReduce例子1--wordcount
- mongodb mapreduce 的例子
- Hadoop2.2.0 mapreduce 例子
- mapreduce join 例子
- Mapreduce 小例子
- Hadoop2.6.0 mapreduce 例子
- hadoop mapreduce 例子
- MapReduce运行的例子
- hadoop MapReduce例子
- MapReduce 例子:WordCount
- mapreduce几个例子
- MapReduce几个典型的例子
- 深刻的理解Fragment生命周期 都在做什么
- C++本质:类的赋值运算符=的重载,以及深拷贝和浅拷贝
- 【华为OJ19】简单错误记录
- BZOJ 1935
- mysql 自定义hash索引
- MapReduce 例子
- Java中HashMap中对value对象进行排序
- iOS推送
- LXH必备技能
- Postfix expressions(Data Structure)
- 用c语言验证哥德巴赫猜想
- webpagetest网站性能分析
- leetCode练习(34)
- info of me