A Java example of connecting to ZK with spark-streaming-kafka-0-8



Recently I have been looking into how Spark Streaming works with ZooKeeper. The 0-8 integration can store its consumer offsets in ZK, but the 0-10 integration apparently cannot use ZK for offset storage (the new consumer API it is built on keeps offsets in Kafka itself).
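For contrast, the usual 0-10 pattern (following the official Spark Kafka integration guide) commits offsets back to Kafka rather than to ZK. A rough sketch only; the broker address 192.168.8.15:9092 and the class name SparkStreaming10 are assumptions made up for illustration:

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.spark.SparkConf;
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.api.java.JavaInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka010.CanCommitOffsets;
import org.apache.spark.streaming.kafka010.ConsumerStrategies;
import org.apache.spark.streaming.kafka010.HasOffsetRanges;
import org.apache.spark.streaming.kafka010.KafkaUtils;
import org.apache.spark.streaming.kafka010.LocationStrategies;
import org.apache.spark.streaming.kafka010.OffsetRange;

public class SparkStreaming10 {
    public static void main(String[] args) throws InterruptedException {
        SparkConf conf = new SparkConf().setAppName("testSparkStreaming010").setMaster("local[4]");
        JavaStreamingContext ssc = new JavaStreamingContext(conf, new Duration(1000));

        // With 0-10 the consumer talks to the brokers directly; ZK is no longer involved.
        Map<String, Object> kafkaParams = new HashMap<>();
        kafkaParams.put("bootstrap.servers", "192.168.8.15:9092"); // assumed broker address
        kafkaParams.put("key.deserializer", StringDeserializer.class);
        kafkaParams.put("value.deserializer", StringDeserializer.class);
        kafkaParams.put("group.id", "test-consumer-group");
        kafkaParams.put("auto.offset.reset", "latest");
        kafkaParams.put("enable.auto.commit", false); // commit manually after each batch

        JavaInputDStream<ConsumerRecord<String, String>> stream =
                KafkaUtils.createDirectStream(
                        ssc,
                        LocationStrategies.PreferConsistent(),
                        ConsumerStrategies.<String, String>Subscribe(
                                Collections.singletonList("shuaige1"), kafkaParams));

        stream.foreachRDD(rdd -> {
            // Offsets go back to Kafka's __consumer_offsets topic, not to ZK.
            OffsetRange[] offsetRanges = ((HasOffsetRanges) rdd.rdd()).offsetRanges();
            ((CanCommitOffsets) stream.inputDStream()).commitAsync(offsetRanges);
        });

        ssc.start();
        ssc.awaitTermination();
    }
}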

Still stepping on pitfalls...

package Spark.SparkStreaming;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.StorageLevels;
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.api.java.JavaPairReceiverInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka.KafkaUtils;

import kafka.serializer.StringDecoder;

public class SparkStreaming08 implements Serializable {

    private static final long serialVersionUID = 1L;

    // ZooKeeper quorum used by the old (0.8) high-level consumer
    static final String ZK_QUORUM = "192.168.8.15:2181,192.168.8.15:2181,192.168.8.15:2181";
    // Kafka consumer group whose offsets are tracked in ZK
    static final String GROUP = "test-consumer-group";

    public static void main(String[] args) throws InterruptedException {
        SparkConf conf = new SparkConf().setAppName("testSparkStreaming").setMaster("local[4]");
        JavaStreamingContext ssc = new JavaStreamingContext(conf, new Duration(1000));

        // Consumer configuration: connect through ZK and auto-commit offsets there
        Map<String, String> kafkaParams = new HashMap<>();
        kafkaParams.put("zookeeper.connect", ZK_QUORUM);
        kafkaParams.put("group.id", GROUP);
        // kafkaParams.put("auto.commit.enable", "true");
        kafkaParams.put("auto.commit.interval.ms", "1000"); // commit offsets to ZK once per second; best kept in line with the batch interval

        // Topic -> number of receiver threads
        Map<String, Integer> topicMap = new HashMap<>();
        topicMap.put("shuaige1", 2);

        JavaPairReceiverInputDStream<String, String> messages = KafkaUtils.createStream(
                ssc,
                String.class,
                String.class,
                StringDecoder.class,
                StringDecoder.class,
                kafkaParams,
                topicMap,
                StorageLevels.MEMORY_AND_DISK_2);

        messages.print();

        ssc.start();
        ssc.awaitTermination();
    }
}
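If the offsets really are landing in ZK, they should appear under the old high-level consumer's path layout, /consumers/<group>/offsets/<topic>/<partition>. A minimal sketch that reads one of them back with the plain ZooKeeper client; the partition number 0 and the class name ZkOffsetCheck are just examples:

import org.apache.zookeeper.ZooKeeper;

public class ZkOffsetCheck {
    public static void main(String[] args) throws Exception {
        // Connect to one of the ZK nodes used above
        ZooKeeper zk = new ZooKeeper("192.168.8.15:2181", 30000, event -> { });
        // Old-consumer offset path: /consumers/<group>/offsets/<topic>/<partition>
        String path = "/consumers/test-consumer-group/offsets/shuaige1/0";
        byte[] data = zk.getData(path, false, null);
        System.out.println(path + " = " + new String(data)); // the offset is stored as a plain string
        zk.close();
    }
}

Running "get /consumers/test-consumer-group/offsets/shuaige1/0" in zkCli.sh should show the same value.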