Spark Streaming --- HDFSWordcount

package com.spark.streaming;

import java.util.Arrays;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;

import scala.Tuple2;

public class HDFSWordcount {

    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("HDFSWordcount");
        // One micro-batch every 5 seconds
        JavaStreamingContext jssc = new JavaStreamingContext(conf, Durations.seconds(5));

        // Monitor an HDFS directory for newly created files
        JavaDStream<String> lines = jssc.textFileStream("hdfs://node12:8020/Spark/Streaming/WordCount");

        // Split each line into words
        JavaDStream<String> words = lines.flatMap(new FlatMapFunction<String, String>() {
            private static final long serialVersionUID = 1L;

            @Override
            public Iterable<String> call(String line) throws Exception {
                return Arrays.asList(line.split(" "));
            }
        });

        // Map each word to a (word, 1) pair
        JavaPairDStream<String, Integer> pairs = words.mapToPair(new PairFunction<String, String, Integer>() {
            private static final long serialVersionUID = 1L;

            @Override
            public Tuple2<String, Integer> call(String word) throws Exception {
                return new Tuple2<String, Integer>(word, 1);
            }
        });

        // Sum the counts for each word within the batch
        JavaPairDStream<String, Integer> wordcounts = pairs.reduceByKey(new Function2<Integer, Integer, Integer>() {
            private static final long serialVersionUID = 1L;

            @Override
            public Integer call(Integer v1, Integer v2) throws Exception {
                return v1 + v2;
            }
        });

        wordcounts.print();

        jssc.start();
        jssc.awaitTermination();
        jssc.close();
    }
}
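A note on semantics: textFileStream monitors the given directory and only processes files that are created in (or atomically moved into) it after the streaming context has started; files already present at startup are ignored. Below is a minimal sketch of the same pipeline written with Java 8 lambdas against the Spark 2.x API, where FlatMapFunction.call returns an Iterator instead of an Iterable. The HDFS path and the 5-second batch interval are taken from the code above; the class name is just an illustrative placeholder, and the job is packaged and submitted with spark-submit in the usual way.

package com.spark.streaming;

import java.util.Arrays;

import org.apache.spark.SparkConf;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;

import scala.Tuple2;

// Hypothetical lambda-style variant of the word count above, for Spark 2.x
public class HDFSWordcountLambda {

    public static void main(String[] args) throws InterruptedException {
        SparkConf conf = new SparkConf().setAppName("HDFSWordcountLambda");
        JavaStreamingContext jssc = new JavaStreamingContext(conf, Durations.seconds(5));

        // Same monitored HDFS directory as in the original example
        JavaDStream<String> lines = jssc.textFileStream("hdfs://node12:8020/Spark/Streaming/WordCount");

        // Spark 2.x: FlatMapFunction returns an Iterator, hence .iterator()
        JavaDStream<String> words = lines.flatMap(line -> Arrays.asList(line.split(" ")).iterator());
        JavaPairDStream<String, Integer> pairs = words.mapToPair(word -> new Tuple2<>(word, 1));
        JavaPairDStream<String, Integer> wordcounts = pairs.reduceByKey((v1, v2) -> v1 + v2);

        // Print the per-batch counts to the driver's stdout
        wordcounts.print();

        jssc.start();
        jssc.awaitTermination();
        jssc.close();
    }
}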