Hadoop MapReduce 在某一列上自连接(self join)——从“子女 父母”表推导(孙辈, 祖辈)关系

来源:互联网 发布:为什么淘宝没有电棒 编辑:程序博客网 时间:2024/05/22 13:35
package mapreduce;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * Self-join of a single "child parent" table to derive (grandchild, grandparent)
 * pairs.
 *
 * <p>Each input line is {@code "child parent"} (single-space separated). The
 * mapper emits every record twice — once keyed by parent (tagged {@code left_})
 * and once keyed by child (tagged {@code right_}) — so that for any person P the
 * reducer receives both P's children and P's parents and can emit their cross
 * product: every child of P is a grandchild of every parent of P.
 */
public class Self_join {

    /** Mapper: re-keys each record on both columns so the reducer can join. */
    public static class Map extends Mapper<Object, Text, Text, Text> {
        @Override
        public void map(Object key, Text value, Context context)
                throws IOException, InterruptedException {
            // Split only on the first space: ss[0] = child, ss[1] = parent.
            String[] ss = value.toString().split(" ", 2);
            // Keyed by parent: "person ss[1] has child ss[0]".
            context.write(new Text(ss[1]), new Text("left_" + ss[0]));
            // Keyed by child: "person ss[0] has parent ss[1]".
            context.write(new Text(ss[0]), new Text("right_" + ss[1]));
        }
    }

    /**
     * Reducer: for each person, pairs every child (left_) with every parent
     * (right_), emitting (grandchild, grandparent).
     */
    public static class Reduce extends Reducer<Text, Text, Text, Text> {
        // Instance (not static) state: one Reduce instance per reduce task, so
        // the header row is written exactly once per output file without the
        // cross-task leakage that static fields risk under JVM reuse.
        private boolean headerWritten = false;

        @Override
        public void reduce(Text key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            if (!headerWritten) {
                context.write(new Text("grandchild"), new Text("grandparent"));
                headerWritten = true;
            }
            // The values Iterable may only be traversed once in Hadoop, so
            // buffer both sides of the join before producing the cross product.
            List<String> children = new ArrayList<String>();
            List<String> grandparents = new ArrayList<String>();
            for (Text v : values) {
                String tagged = v.toString();
                if (tagged.startsWith("left_")) {
                    children.add(tagged.substring("left_".length()));
                } else if (tagged.startsWith("right_")) {
                    grandparents.add(tagged.substring("right_".length()));
                }
            }
            // BUG FIX: the original created the grandparent iterator once,
            // outside the child loop, so it was exhausted after the first
            // child and all remaining children produced no output. Each child
            // must be paired with every grandparent.
            for (String child : children) {
                for (String grandparent : grandparents) {
                    context.write(new Text(child), new Text(grandparent));
                }
            }
        }
    }

    /**
     * Job driver.
     *
     * @param args args[0] = input path, args[1] = output path
     */
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "Selfjoin");
        job.setJarByClass(Self_join.class);
        job.setMapperClass(Map.class);
        job.setReducerClass(Reduce.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}

0 0
原创粉丝点击