Finding the Maximum of 1,000,000 Numbers with MapReduce
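The trick is to avoid shipping all one million numbers to the reducer. Each mapper scans its input split and keeps only a running local maximum in a field; once all of its map() calls have finished, the framework calls cleanup() exactly once, and the mapper emits that single local maximum. A single reducer then compares the handful of local maxima (one per map task) and writes the global maximum, again from its own cleanup().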

import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

//Find the maximum of 1,000,000 numbers with MapReduce
public class TopKApp {
    static final String INPUT_PATH = "hdfs://chaoren:9000/input";
    static final String OUT_PATH = "hdfs://chaoren:9000/out";

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        //delete the output directory if it already exists so the job can be rerun
        final FileSystem fileSystem = FileSystem.get(new URI(INPUT_PATH), conf);
        final Path outPath = new Path(OUT_PATH);
        if (fileSystem.exists(outPath)) {
            fileSystem.delete(outPath, true);
        }

        final Job job = new Job(conf, TopKApp.class.getSimpleName());
        //ship the jar containing this class to the cluster
        job.setJarByClass(TopKApp.class);
        //1.1 specify where the input files are read from
        FileInputFormat.setInputPaths(job, INPUT_PATH);
        job.setMapperClass(MyMapper.class);

        //2.2 specify the custom reducer class
        job.setReducerClass(MyReducer.class);
        //a single reducer is required so that exactly one global maximum is written
        job.setNumReduceTasks(1);
        //specify the reducer's output types
        job.setOutputKeyClass(LongWritable.class);
        job.setOutputValueClass(NullWritable.class);

        //2.3 specify where the output is written
        FileOutputFormat.setOutputPath(job, outPath);
        //submit the job to the JobTracker and wait for completion
        job.waitForCompletion(true);
    }

    static class MyMapper extends Mapper<LongWritable, Text, LongWritable, NullWritable> {
        long max = Long.MIN_VALUE;

        @Override
        protected void map(LongWritable k1, Text v1, Context context)
                throws IOException, InterruptedException {
            //parse the number on this line and keep a running local maximum
            final long temp = Long.parseLong(v1.toString().trim());
            if (temp > max) {
                max = temp;
            }
        }

        //called once after all map() calls for this task have finished
        @Override
        protected void cleanup(Context context)
                throws IOException, InterruptedException {
            //emit only this mapper's local maximum
            context.write(new LongWritable(max), NullWritable.get());
        }
    }

    static class MyReducer extends Reducer<LongWritable, NullWritable, LongWritable, NullWritable> {
        long max = Long.MIN_VALUE;

        @Override
        protected void reduce(LongWritable k2, Iterable<NullWritable> v2s, Context context)
                throws IOException, InterruptedException {
            //each incoming key is one mapper's local maximum; track the largest
            final long temp = k2.get();
            if (temp > max) {
                max = temp;
            }
        }

        //called once after all reduce() calls have finished
        @Override
        protected void cleanup(Context context)
                throws IOException, InterruptedException {
            //write the global maximum
            context.write(new LongWritable(max), NullWritable.get());
        }
    }
}
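To run the job you need an input file in HDFS with one number per line. The original post does not show how the test data was produced, so the following is only a minimal sketch; the class name GenerateNumbers and the local file name numbers.txt are assumptions for illustration.

import java.io.FileWriter;
import java.io.IOException;
import java.util.Random;

//Hypothetical generator: writes 1,000,000 random non-negative longs,
//one per line, to a local file for uploading to the job's INPUT_PATH.
public class GenerateNumbers {
    public static void main(String[] args) throws IOException {
        final Random random = new Random();
        final FileWriter writer = new FileWriter("numbers.txt"); //assumed local path
        try {
            for (int i = 0; i < 1000000; i++) {
                //mask the sign bit so every value is non-negative
                writer.write(Long.toString(random.nextLong() & Long.MAX_VALUE));
                writer.write('\n');
            }
        } finally {
            writer.close();
        }
    }
}

Upload the file with something like hadoop fs -put numbers.txt hdfs://chaoren:9000/input, run TopKApp, and the global maximum should appear in hdfs://chaoren:9000/out/part-r-00000 (the new-API output file name when there is a single reducer).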
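Despite the name TopKApp, the code above finds only the single maximum. As a hedged sketch of the generalization the name hints at, the mapper below keeps the K largest values in a TreeSet instead of one running max; it is meant as a drop-in replacement for MyMapper nested inside TopKApp (with java.util.TreeSet imported), and K = 100 and the name TopKMapper are assumptions. Note that a TreeSet collapses duplicate values, so repeated numbers count once.

    //Hypothetical variant of MyMapper: keeps the K largest values per split
    static class TopKMapper extends Mapper<LongWritable, Text, LongWritable, NullWritable> {
        static final int K = 100; //assumed
        private final TreeSet<Long> topK = new TreeSet<Long>();

        @Override
        protected void map(LongWritable k1, Text v1, Context context)
                throws IOException, InterruptedException {
            topK.add(Long.parseLong(v1.toString().trim()));
            if (topK.size() > K) {
                topK.pollFirst(); //drop the smallest, keeping the K largest
            }
        }

        @Override
        protected void cleanup(Context context)
                throws IOException, InterruptedException {
            //emit this mapper's K candidates; the reducer would merge them the same way
            for (Long value : topK) {
                context.write(new LongWritable(value), NullWritable.get());
            }
        }
    }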