Spark + Java 1.8 + lambda WordCount example, with words sorted in descending order of occurrence count


package com.jiangzeyun.spark.core;

import java.util.Arrays;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

import scala.Tuple2;

/**
 * @author jiangzeyun
 * @brief WordCount -- a first Spark program
 */
public class WordCount {
    public static void main(String[] args) {
        // Local mode
        SparkConf conf = new SparkConf().setAppName("WordCount").setMaster("local");
        @SuppressWarnings("resource")
        JavaSparkContext sc = new JavaSparkContext(conf);

        JavaRDD<String> lines = sc.textFile("/usr/local/hadoop/README.txt");
        // Split each line into words (flatMap expects an Iterator in the Spark 2.x Java API)
        JavaRDD<String> words = lines.flatMap(s -> Arrays.asList(s.split(" ")).iterator());
        JavaPairRDD<String, Integer> pairs = words.mapToPair(s -> new Tuple2<String, Integer>(s, 1));
        JavaPairRDD<String, Integer> counts = pairs.reduceByKey((a, b) -> a + b);

        // To rank words by occurrence count from high to low:
        // 1. swap each tuple's key and value, 2. sort by the (now numeric) key in
        //    descending order, 3. swap key and value back.
        JavaPairRDD<Integer, String> tmp = counts
                .mapToPair(s -> new Tuple2<Integer, String>(s._2, s._1))
                .sortByKey(false);
        JavaPairRDD<String, Integer> result = tmp
                .mapToPair(s -> new Tuple2<String, Integer>(s._2, s._1));

        result.foreach(a -> System.out.println(a._1 + "=>" + a._2));
        sc.stop();
    }
}
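As a side note, the swap-sort-swap trick above can also be written with Tuple2's built-in swap() method instead of constructing new tuples by hand. The following is a minimal sketch of that variant (my own addition, not part of the original post); the class name SortByCount and the helper method are hypothetical, and it assumes the same Spark 2.x Java API and a counts RDD like the one produced by reduceByKey above.

import org.apache.spark.api.java.JavaPairRDD;

public class SortByCount {
    /**
     * Sorts (word, count) pairs by count, highest first.
     * Same idea as in main(): swap key and value, sort by the numeric key, swap back.
     */
    public static JavaPairRDD<String, Integer> sortByCountDesc(JavaPairRDD<String, Integer> counts) {
        return counts
                .mapToPair(t -> t.swap())   // (word, count) -> (count, word)
                .sortByKey(false)           // descending order on the count
                .mapToPair(t -> t.swap());  // back to (word, count)
    }
}

Calling sortByCountDesc(counts) in the program above should yield the same result RDD as the two explicit mapToPair calls, just a little more concisely.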

