Spark WordCount in Java

package os.unix;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.api.java.function.VoidFunction;
import scala.Tuple2;

import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

public class WordCount {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setMaster("local").setAppName("WordCount");
        JavaSparkContext jsc = new JavaSparkContext(conf);

        /**
         * Load the file contents into linesRDD (3 partitions).
         */
        JavaRDD<String> linesRDD = jsc.textFile("C:\\Users\\os\\Desktop\\test\\word.txt", 3);

        /**
         * Split each line on spaces.
         */
        JavaRDD<String> wordsRDD = linesRDD.flatMap(new FlatMapFunction<String, String>() {
            @Override
            public Iterator<String> call(String s) throws Exception {
                // The parameter s is one line from linesRDD.
                String[] split = s.split(" ");
                List<String> asList = Arrays.asList(split);
                return asList.iterator();
            }
        });

        /**
         * wordsRDD is not in key-value form; convert it to a key-value RDD.
         * In the Java API this is done with mapToPair.
         */
        JavaPairRDD<String, Integer> pairRDD = wordsRDD.mapToPair(new PairFunction<String, String, Integer>() {
            @Override
            public Tuple2<String, Integer> call(String s) throws Exception {
                return new Tuple2<String, Integer>(s, 1);
            }
        });

        /**
         * Aggregate with reduceByKey.
         * (groupByKey only groups records with the same key; reduceByKey also merges their values.)
         */
        JavaPairRDD<String, Integer> resultRDD = pairRDD.reduceByKey(new Function2<Integer, Integer, Integer>() {
            @Override
            public Integer call(Integer integer, Integer integer2) throws Exception {
                return integer + integer2;
            }
        });

        /**
         * Sort by the number of occurrences: swap (word, count) to (count, word),
         * call sortByKey (ascending by default), then swap back.
         */
        JavaPairRDD<String, Integer> result = resultRDD.mapToPair(new PairFunction<Tuple2<String, Integer>, Integer, String>() {
            @Override
            public Tuple2<Integer, String> call(Tuple2<String, Integer> stringIntegerTuple2) throws Exception {
                return new Tuple2<>(stringIntegerTuple2._2, stringIntegerTuple2._1);
            }
        }).sortByKey().mapToPair(new PairFunction<Tuple2<Integer, String>, String, Integer>() {
            @Override
            public Tuple2<String, Integer> call(Tuple2<Integer, String> integerStringTuple2) throws Exception {
                return new Tuple2<>(integerStringTuple2._2, integerStringTuple2._1);
            }
        });

        /**
         * Print each (word, count) pair.
         */
        result.foreach(new VoidFunction<Tuple2<String, Integer>>() {
            @Override
            public void call(Tuple2<String, Integer> stringIntegerTuple2) throws Exception {
                System.out.println(stringIntegerTuple2);
            }
        });

        // Release Spark resources.
        jsc.stop();
    }
}
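
On Spark 2.x with Java 8, the same pipeline can be written much more compactly with lambdas instead of anonymous inner classes. Below is a minimal sketch of the equivalent job; the class name WordCountLambda is only illustrative, and the local master and input path are assumptions carried over from the example above.

package os.unix;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;
import scala.Tuple2;

import java.util.Arrays;

public class WordCountLambda {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setMaster("local").setAppName("WordCountLambda");
        // JavaSparkContext is Closeable, so try-with-resources stops it automatically.
        try (JavaSparkContext jsc = new JavaSparkContext(conf)) {
            JavaPairRDD<String, Integer> counts = jsc
                    .textFile("C:\\Users\\os\\Desktop\\test\\word.txt", 3)     // same input file as above
                    .flatMap(line -> Arrays.asList(line.split(" ")).iterator()) // one record per word
                    .mapToPair(word -> new Tuple2<>(word, 1))                   // (word, 1)
                    .reduceByKey(Integer::sum);                                 // (word, total)

            // Sort by count: swap to (count, word), sortByKey, then swap back.
            counts.mapToPair(t -> new Tuple2<>(t._2, t._1))
                  .sortByKey()
                  .mapToPair(t -> new Tuple2<>(t._2, t._1))
                  .foreach(t -> System.out.println(t));
        }
    }
}

Note that sortByKey() sorts in ascending order by default; pass sortByKey(false) to list the most frequent words first.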