MapReduce Series (7): Finding Common Friends


1. Overview

A:B,C,D,F,E,O
B:A,C,E,K
C:F,A,D,I
D:A,E,F,L
E:B,C,D,M,L
F:A,B,C,D,E,O,M
G:A,C,D,E,F
H:A,C,D,E,O
I:A,O
J:B,O
K:A,C,D
L:D,E,F
M:E,F,G
O:A,H,I,J

Each input line has the form person:friend1,friend2,... — the person before the colon, and that person's friend list after it. The task: find every pair of people who have common friends, and list who those common friends are.
For example:

A-B : C,E

means A and B have C and E as common friends.

Approach:
Step one: invert the relationship. Emit each friend as the key and the owner of the friend list as the value, producing intermediate records of the form friend -> person, person, person.
Step two: sort each list of persons (so the same pair cannot appear in two different orders), then generate every two-person combination, emitting key-value pairs of the form person-person -> friend. A second MapReduce pass groups these by pair; the grouped value list of each pair is exactly the common friends of those two people. A minimal local sketch of this whole flow follows.
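The sketch below is plain Java with no Hadoop dependency, and runs the same two steps in memory on a tiny made-up input; the class name and the three sample lines are only for illustration, not part of the jobs that follow.

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

public class CommonFriendsSketch {
    public static void main(String[] args) {
        String[] input = {"A:B,C", "B:C", "D:B,C"};

        // step 1: invert person -> friends into friend -> persons
        Map<String, List<String>> friendToPersons = new TreeMap<>();
        for (String line : input) {
            String[] parts = line.split(":");
            for (String friend : parts[1].split(",")) {
                friendToPersons.computeIfAbsent(friend, f -> new ArrayList<>()).add(parts[0]);
            }
        }

        // step 2: sort each person list, then emit every pair -> friend
        Map<String, List<String>> pairToFriends = new TreeMap<>();
        for (Map.Entry<String, List<String>> e : friendToPersons.entrySet()) {
            List<String> persons = e.getValue();
            Collections.sort(persons); // canonical order, so A-B and B-A collapse into one key
            for (int i = 0; i < persons.size() - 1; i++) {
                for (int j = i + 1; j < persons.size(); j++) {
                    String pair = persons.get(i) + "-" + persons.get(j);
                    pairToFriends.computeIfAbsent(pair, p -> new ArrayList<>()).add(e.getKey());
                }
            }
        }

        // prints: A-B [C], A-D [B, C], B-D [C]
        pairToFriends.forEach((pair, friends) -> System.out.println(pair + " " + friends));
    }
}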
Step one code:
SharedFriendsStepOne.java

package friends;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

/**
 * Created by tianjun on 2017/3/20.
 */
public class SharedFriendsStepOne {

    static class SharedFriendsStepOneMapper extends Mapper<LongWritable, Text, Text, Text> {

        Text k = new Text();
        Text v = new Text();

        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            String line = value.toString();
            String[] person_friends = line.split(":");
            String person = person_friends[0];
            String[] friends = person_friends[1].split(",");
            for (String friend : friends) {
                k.set(friend);
                v.set(person);
                // emit <friend, person>
                context.write(k, v);
            }
        }
    }

    static class SharedFriendsStepOneReduce extends Reducer<Text, Text, Text, Text> {

        @Override
        protected void reduce(Text friend, Iterable<Text> persons, Context context) throws IOException, InterruptedException {
            // join everyone who has this friend into one comma-separated list
            StringBuffer sb = new StringBuffer();
            for (Text person : persons) {
                if (sb.length() != 0) {
                    sb.append(",");
                }
                sb.append(person);
            }
            context.write(friend, new Text(sb.toString()));
        }
    }

    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException, URISyntaxException {
        String os = System.getProperty("os.name").toLowerCase();
        if (os.contains("windows")) {
            System.setProperty("HADOOP_USER_NAME", "root");
        }

        Configuration conf = new Configuration();
        conf.set("mapreduce.framework.name", "yarn");
        conf.set("yarn.resourcemanager.hostname", "mini01");
        conf.set("fs.defaultFS", "hdfs://mini01:9000/");
//        // local mode is the default:
//        conf.set("mapreduce.framework.name", "local");
//        conf.set("mapreduce.jobtracker.address", "local");
//        conf.set("fs.defaultFS", "file:///");

        Job wcjob = Job.getInstance(conf);
        // when submitting from the development machine, setJar must point at the built jar;
        // setJarByClass alone cannot ship a jar that only exists locally
        wcjob.setJar("F:/myWorkPlace/java/dubbo/demo/dubbo-demo/mr-demo1/target/mr.demo-1.0-SNAPSHOT.jar");
//        wcjob.setJarByClass(SharedFriendsStepOne.class);

        wcjob.setMapperClass(SharedFriendsStepOneMapper.class);
        wcjob.setReducerClass(SharedFriendsStepOneReduce.class);

        // key/value types of the mapper output
        wcjob.setMapOutputKeyClass(Text.class);
        wcjob.setMapOutputValueClass(Text.class);

        // key/value types of the reducer (final) output
        wcjob.setOutputKeyClass(Text.class);
        wcjob.setOutputValueClass(Text.class);

        // TextInputFormat is used by default if no InputFormat is set
//        wcjob.setInputFormatClass(CombineFileInputFormat.class);
//        CombineFileInputFormat.setMaxInputSplitSize(wcjob, 4194304);
//        CombineFileInputFormat.setMinInputSplitSize(wcjob, 2097152);

        // delete the output directory if it already exists, otherwise the job fails
        FileSystem fs = FileSystem.get(new URI("hdfs://mini01:9000"), new Configuration(), "root");
        Path path = new Path("hdfs://mini01:9000/wc/friends/stepone");
        if (fs.exists(path)) {
            fs.delete(path, true);
        }

        // where the input data lives
        FileInputFormat.setInputPaths(wcjob, new Path("hdfs://mini01:9000/input/friends"));
        // where the results are written
        FileOutputFormat.setOutputPath(wcjob, new Path("hdfs://mini01:9000/wc/friends/stepone"));

        boolean res = wcjob.waitForCompletion(true);
        System.exit(res ? 0 : 1);
    }
}
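Two details of this driver are worth calling out. On Windows, HADOOP_USER_NAME is set to root so HDFS permission checks run against that user rather than the local Windows account. And the output directory is deleted before submission because FileOutputFormat refuses to write into a path that already exists; without the cleanup, rerunning the job would fail immediately. (The F:/... jar path is of course specific to the author's machine.)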

Result of step one:

A       I,K,C,B,G,F,H,O,D
B       A,F,J,E
C       A,E,B,H,F,G,K
D       G,C,K,A,L,F,E,H
E       G,M,L,H,A,F,B,D
F       L,M,D,C,G,A
G       M
H       O
I       O,C
J       O
K       B
L       D,E
M       E,F
O       A,H,I,J,F

To keep the same pair of friends from appearing twice, once as B-C and once as C-B, the next step sorts each person list before generating pairs, so every unordered pair always produces one canonical key and no duplicate pairs appear in the output.
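A quick throwaway illustration of why sorting is enough (not part of either job; the names are made up):

import java.util.Arrays;

public class CanonicalPairDemo {
    public static void main(String[] args) {
        // the same two people, listed in different orders under two different friends
        String[] fromOneRecord = {"C", "B"};
        String[] fromAnotherRecord = {"B", "C"};
        Arrays.sort(fromOneRecord);
        Arrays.sort(fromAnotherRecord);
        // both print "B-C": one key, so the shuffle groups them into a single reduce call
        System.out.println(fromOneRecord[0] + "-" + fromOneRecord[1]);
        System.out.println(fromAnotherRecord[0] + "-" + fromAnotherRecord[1]);
    }
}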

Step two:
SharedFriendsStepTwo.java

package friends;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Arrays;

/**
 * Created by tianjun on 2017/3/20.
 */
public class SharedFriendsStepTwo {

    static class SharedFriendsStepTwoMapper extends Mapper<LongWritable, Text, Text, Text> {

        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            String line = value.toString();
            // step one's TextOutputFormat writes each record as key<TAB>value
            String[] friend_persons = line.split("\t");
            String friend = friend_persons[0];
            String[] persons = friend_persons[1].split(",");
            // sort so every unordered pair forms the same key (B-C, never C-B)
            Arrays.sort(persons);
            // the bounds must reach the last element, or pairs containing it are silently dropped
            for (int i = 0; i < persons.length - 1; i++) {
                for (int j = i + 1; j < persons.length; j++) {
                    // emit <person-person, friend>: the same pair from different friends
                    // meets in one reduce call
                    context.write(new Text(persons[i] + "-" + persons[j]), new Text(friend));
                }
            }
        }
    }

    static class SharedFriendsStepTwoReduce extends Reducer<Text, Text, Text, Text> {

        @Override
        protected void reduce(Text person_person, Iterable<Text> friends, Context context) throws IOException, InterruptedException {
            // collect every common friend of this pair into one space-separated list
            StringBuffer sb = new StringBuffer();
            for (Text friend : friends) {
                sb.append(friend).append(" ");
            }
            context.write(person_person, new Text(sb.toString()));
        }
    }

    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException, URISyntaxException {
        String os = System.getProperty("os.name").toLowerCase();
        if (os.contains("windows")) {
            System.setProperty("HADOOP_USER_NAME", "root");
        }

        Configuration conf = new Configuration();
        conf.set("mapreduce.framework.name", "yarn");
        conf.set("yarn.resourcemanager.hostname", "mini01");
        conf.set("fs.defaultFS", "hdfs://mini01:9000/");
//        // local mode is the default:
//        conf.set("mapreduce.framework.name", "local");
//        conf.set("mapreduce.jobtracker.address", "local");
//        conf.set("fs.defaultFS", "file:///");

        Job wcjob = Job.getInstance(conf);
        // submit the locally built jar, as in step one
        wcjob.setJar("F:/myWorkPlace/java/dubbo/demo/dubbo-demo/mr-demo1/target/mr.demo-1.0-SNAPSHOT.jar");
//        wcjob.setJarByClass(SharedFriendsStepTwo.class);

        wcjob.setMapperClass(SharedFriendsStepTwoMapper.class);
        wcjob.setReducerClass(SharedFriendsStepTwoReduce.class);

        // key/value types of the mapper output
        wcjob.setMapOutputKeyClass(Text.class);
        wcjob.setMapOutputValueClass(Text.class);

        // key/value types of the reducer (final) output
        wcjob.setOutputKeyClass(Text.class);
        wcjob.setOutputValueClass(Text.class);

        // TextInputFormat is used by default if no InputFormat is set
//        wcjob.setInputFormatClass(CombineFileInputFormat.class);
//        CombineFileInputFormat.setMaxInputSplitSize(wcjob, 4194304);
//        CombineFileInputFormat.setMinInputSplitSize(wcjob, 2097152);

        // delete the output directory if it already exists, otherwise the job fails
        FileSystem fs = FileSystem.get(new URI("hdfs://mini01:9000"), new Configuration(), "root");
        Path path = new Path("hdfs://mini01:9000/wc/friends/steptwo");
        if (fs.exists(path)) {
            fs.delete(path, true);
        }

        // step two reads step one's output
        FileInputFormat.setInputPaths(wcjob, new Path("hdfs://mini01:9000/wc/friends/stepone"));
        // where the final results are written
        FileOutputFormat.setOutputPath(wcjob, new Path("hdfs://mini01:9000/wc/friends/steptwo"));

        boolean res = wcjob.waitForCompletion(true);
        System.exit(res ? 0 : 1);
    }
}

The final pairwise common friends:

[root@mini03 ~]# hdfs dfs -cat /wc/friends/steptwo/*
A-B     C E
A-C     F D
A-D     E F
A-E     B C D
A-F     C D B E O
A-G     D E F C
A-H     E O C D
A-I     O
A-K     D
A-L     F E
B-C     A
B-D     E A
B-E     C
B-F     E A C
B-G     C E A
B-H     E C A
B-I     A
B-K     A
B-L     E
C-D     F A
C-E     D
C-F     D A
C-G     F A D
C-H     A D
C-I     A
C-K     D A
C-L     F
D-F     E A
D-G     A E F
D-H     A E
D-I     A
D-K     A
D-L     F E
E-F     C D B
E-G     D C
E-H     D C
E-K     D
F-G     C E D A
F-H     C A D E O
F-I     A O
F-K     D A
F-L     E
G-H     D E C A
G-I     A
G-K     A D
G-L     F E
H-I     A O
H-K     A D
H-L     E
I-K     A