java版本kafka createDirectStream
来源:互联网 发布:科比3d模型数据 编辑:程序博客网 时间:2024/06/07 11:50
package com.ys.streaming;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import kafka.serializer.StringDecoder;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaPairInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka.KafkaUtils;
import scala.Tuple2;
public class KafkaDirectWordCount {

    /**
     * Streaming word count over a Kafka topic using the Spark Streaming
     * direct (receiver-less) API ({@code KafkaUtils.createDirectStream}).
     *
     * <p>Reads messages from the {@code 20170420} topic in 5-second batches,
     * splits each message value on single spaces, counts the words in each
     * batch, and prints the counts to stdout. Runs until externally stopped.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("wordcount").setMaster("local[2]");
        JavaStreamingContext jssc = new JavaStreamingContext(conf, Durations.seconds(5));
        try {
            // Kafka parameters. The direct stream talks to the brokers itself
            // (no ZooKeeper receiver), so we pass the broker list directly.
            Map<String, String> kafkaParams = new HashMap<String, String>();
            kafkaParams.put("metadata.broker.list",
                    "master:9092,slave1:9092,slave2:9092");
            // Start from the earliest available offset when no offset is stored
            // ("smallest" is the old-consumer spelling of "earliest").
            kafkaParams.put("auto.offset.reset", "smallest");
            // Topics to consume; several topics can be read in parallel.
            Set<String> topics = new HashSet<String>();
            topics.add("20170420");
            JavaPairInputDStream<String, String> lines = KafkaUtils.createDirectStream(
                    jssc,
                    String.class,        // key type
                    String.class,        // value type
                    StringDecoder.class, // key decoder
                    StringDecoder.class, // value decoder
                    kafkaParams,
                    topics);
            // Message value -> individual words.
            JavaDStream<String> words = lines.flatMap(new FlatMapFunction<Tuple2<String, String>, String>() {
                private static final long serialVersionUID = 1L;
                @Override
                public Iterable<String> call(Tuple2<String, String> tuple) throws Exception {
                    return Arrays.asList(tuple._2.split(" "));
                }
            });
            // word -> (word, 1)
            JavaPairDStream<String, Integer> pairs = words.mapToPair(new PairFunction<String, String, Integer>() {
                private static final long serialVersionUID = 1L;
                @Override
                public Tuple2<String, Integer> call(String word) throws Exception {
                    return new Tuple2<String, Integer>(word, 1);
                }
            });
            // Sum the per-word counts within each batch.
            JavaPairDStream<String, Integer> wordcounts = pairs.reduceByKey(new Function2<Integer, Integer, Integer>() {
                private static final long serialVersionUID = 1L;
                @Override
                public Integer call(Integer v1, Integer v2) throws Exception {
                    return v1 + v2;
                }
            });
            wordcounts.print();
            jssc.start();
            jssc.awaitTermination();
        } finally {
            // Release the streaming context even when start()/awaitTermination()
            // throws. The original unconditional close() after the blocking
            // awaitTermination() call was unreachable in normal operation and
            // skipped entirely on failure.
            jssc.close();
        }
    }
}
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import kafka.serializer.StringDecoder;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaPairInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka.KafkaUtils;
import scala.Tuple2;
// NOTE(review): this class is a verbatim duplicate of the KafkaDirectWordCount
// definition earlier in this file (a copy/paste or page-scraping artifact).
// Two top-level classes with the same name cannot coexist in one compilation
// unit; one copy must be removed before this file can compile.
public class KafkaDirectWordCount {
// Entry point: consumes a Kafka topic through the direct (receiver-less) API
// in 5-second batches and prints per-batch word counts until stopped.
public static void main(String[] args) {
SparkConf conf = new SparkConf().setAppName("wordcount").setMaster("local[2]");
JavaStreamingContext jssc = new JavaStreamingContext(conf,Durations.seconds(5));
// First build a map of Kafka parameters.
Map<String, String> kafkaParams = new HashMap<String, String>();
// The direct stream needs no ZooKeeper node, so we supply broker.list here.
kafkaParams.put("metadata.broker.list",
"master:9092,slave1:9092,slave2:9092");
// Start from the earliest available offset when none is stored.
kafkaParams.put("auto.offset.reset", "smallest");
// Then create a set of the topics to read; multiple topics can be consumed in parallel.
Set<String> topics = new HashSet<String>();
topics.add("20170420");
JavaPairInputDStream<String,String> lines = KafkaUtils.createDirectStream(
jssc,
String.class, // key type
String.class, // value type
StringDecoder.class, // key decoder
StringDecoder.class, // value decoder
kafkaParams,
topics);
// Split each message value on single spaces into words.
JavaDStream<String> words = lines.flatMap(new FlatMapFunction<Tuple2<String,String>, String>(){
private static final long serialVersionUID = 1L;
@Override
public Iterable<String> call(Tuple2<String,String> tuple) throws Exception {
return Arrays.asList(tuple._2.split(" "));
}
});
// Pair each word with a count of 1.
JavaPairDStream<String, Integer> pairs = words.mapToPair(new PairFunction<String, String, Integer>(){
private static final long serialVersionUID = 1L;
@Override
public Tuple2<String, Integer> call(String word) throws Exception {
return new Tuple2<String, Integer>(word, 1);
}
});
// Sum the counts per word within each batch.
JavaPairDStream<String, Integer> wordcounts = pairs.reduceByKey(new Function2<Integer, Integer, Integer>(){
private static final long serialVersionUID = 1L;
@Override
public Integer call(Integer v1, Integer v2) throws Exception {
return v1 + v2;
}
});
wordcounts.print();
jssc.start();
// awaitTermination() blocks until the context is stopped, so the close()
// below only runs after an external stop (or never, in normal operation).
jssc.awaitTermination();
jssc.close();
}
}
0 0
- java版本kafka createDirectStream
- scala版本kafka createDirectStream
- spark createDirectStream保存kafka offset(JAVA实现)
- spark createDirectStream保存kafka offset(JAVA实现)
- spark createDirectStream保存kafka offset(JAVA实现)
- spark createDirectStream保存kafka offset(JAVA实现)
- spark createDirectStream保存kafka offset(JAVA实现)
- Spark Streaming createDirectStream保存kafka offset(JAVA实现)
- Kafka + spark stream +redis (createStream + createDirectStream)
- Spark Kafka(createDirectStream)自己管理offset
- java版本kafka createStream
- spark的kafka的低阶API createDirectStream的一些总结。
- spark读取kafka数据 createStream和createDirectStream的区别
- spark streaming 实现kafka的createDirectStream方式!!不坑
- spark-streaming kafka api(KafkaUtils.createDirectStream)使用
- spark读取kafka数据 createStream和createDirectStream的区别
- spark读取kafka数据 createStream和createDirectStream的区别
- kafka文档(2)----kafka API(java版本)
- 微信小程序开发常见问题FAQ之八
- 截屏方法
- 自定义倒计时跳过按钮
- Json对象与Json字符串互转(4种转换方式)
- CTPN: Detecting Text in Natural Image with Connectionist Text Proposal Network
- java版本kafka createDirectStream
- 左右箭头轮播图效果
- Linux DTS(Device Tree Source)设备树详解之一(背景基础知识篇)
- c++实现string类
- Android之通过配置Flavor实现一个项目打包成多个apk
- iOS常用宏定义
- 数学定理及部分代码(长期更新)
- iOS OC内联函数 inline
- PHP加密技术