Kafka入门实例

来源:互联网 发布:通信网络集成资质取消 编辑:程序博客网 时间:2024/06/05 07:17

摘要:本文主要讲了Kafka的一个简单入门实例

源码下载:https://github.com/appleappleapple/BigDataLearning

kafka安装过程看这里:Kafka在Windows安装运行

整个工程目录如下:


1、pom文件

[xml] view plain copy
  1. <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"  
  2.     xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">  
  3.     <modelVersion>4.0.0</modelVersion>  
  4.     <groupId>com.lin</groupId>  
  5.     <artifactId>Kafka-Demo</artifactId>  
  6.     <version>0.0.1-SNAPSHOT</version>  
  7.   
  8.     <dependencies>  
  9.         <dependency>  
  10.             <groupId>org.apache.kafka</groupId>  
  11.             <artifactId>kafka_2.10</artifactId>  
  12.             <version>0.9.0.0</version>  
  13.         </dependency>  
  14.   
  15.         <dependency>  
  16.             <groupId>org.opentsdb</groupId>  
  17.             <artifactId>java-client</artifactId>  
  18.             <version>2.1.0-SNAPSHOT</version>  
  19.             <exclusions>  
  20.                 <exclusion>  
  21.                     <groupId>org.slf4j</groupId>  
  22.                     <artifactId>slf4j-log4j12</artifactId>  
  23.                 </exclusion>  
  24.                 <exclusion>  
  25.                     <groupId>log4j</groupId>  
  26.                     <artifactId>log4j</artifactId>  
  27.                 </exclusion>  
  28.                 <exclusion>  
  29.                     <groupId>org.slf4j</groupId>  
  30.                     <artifactId>jcl-over-slf4j</artifactId>  
  31.                 </exclusion>  
  32.             </exclusions>  
  33.         </dependency>  
  34.   
  35.         <dependency>  
  36.             <groupId>com.alibaba</groupId>  
  37.             <artifactId>fastjson</artifactId>  
  38.             <version>1.2.4</version>  
  39.         </dependency>  
  40.   
  41.   
  42.     </dependencies>  
  43. </project>  

2、生产者

[java] view plain copy
  1. package com.lin.demo.producer;  
  2.   
  3. import java.util.Properties;  
  4.   
  5. import kafka.javaapi.producer.Producer;  
  6. import kafka.producer.KeyedMessage;  
  7. import kafka.producer.ProducerConfig;  
  8.   
  9. public class KafkaProducer {  
  10.     private final Producer<String, String> producer;  
  11.     public final static String TOPIC = "linlin";  
  12.   
  13.     private KafkaProducer() {  
  14.         Properties props = new Properties();  
  15.         // 此处配置的是kafka的端口  
  16.         props.put("metadata.broker.list", "127.0.0.1:9092");  
  17.         props.put("zk.connect", "127.0.0.1:2181");    
  18.   
  19.         // 配置value的序列化类  
  20.         props.put("serializer.class", "kafka.serializer.StringEncoder");  
  21.         // 配置key的序列化类  
  22.         props.put("key.serializer.class", "kafka.serializer.StringEncoder");  
  23.   
  24.         props.put("request.required.acks", "-1");  
  25.   
  26.         producer = new Producer<String, String>(new ProducerConfig(props));  
  27.     }  
  28.   
  29.     void produce() {  
  30.         int messageNo = 1000;  
  31.         final int COUNT = 10000;  
  32.   
  33.         while (messageNo < COUNT) {  
  34.             String key = String.valueOf(messageNo);  
  35.             String data = "hello kafka message " + key;  
  36.             producer.send(new KeyedMessage<String, String>(TOPIC, key, data));  
  37.             System.out.println(data);  
  38.             messageNo++;  
  39.         }  
  40.     }  
  41.   
  42.     public static void main(String[] args) {  
  43.         new KafkaProducer().produce();  
  44.     }  
  45. }  

右键:run as Java application

运行结果:


3、消费者

[java] view plain copy
  1. package com.lin.demo.consumer;  
  2.   
  3. import java.util.HashMap;  
  4. import java.util.List;  
  5. import java.util.Map;  
  6. import java.util.Properties;  
  7.   
  8. import kafka.consumer.ConsumerConfig;  
  9. import kafka.consumer.ConsumerIterator;  
  10. import kafka.consumer.KafkaStream;  
  11. import kafka.javaapi.consumer.ConsumerConnector;  
  12. import kafka.serializer.StringDecoder;  
  13. import kafka.utils.VerifiableProperties;  
  14.   
  15. import com.lin.demo.producer.KafkaProducer;  
  16.   
  17. public class KafkaConsumer {  
  18.   
  19.     private final ConsumerConnector consumer;  
  20.   
  21.     private KafkaConsumer() {  
  22.         Properties props = new Properties();  
  23.         // zookeeper 配置  
  24.         props.put("zookeeper.connect""127.0.0.1:2181");  
  25.   
  26.         // group 代表一个消费组  
  27.         props.put("group.id""lingroup");  
  28.   
  29.         // zk连接超时  
  30.         props.put("zookeeper.session.timeout.ms""4000");  
  31.         props.put("zookeeper.sync.time.ms""200");  
  32.         props.put("rebalance.max.retries""5");  
  33.         props.put("rebalance.backoff.ms""1200");  
  34.           
  35.       
  36.         props.put("auto.commit.interval.ms""1000");  
  37.         props.put("auto.offset.reset""smallest");  
  38.         // 序列化类  
  39.         props.put("serializer.class""kafka.serializer.StringEncoder");  
  40.   
  41.         ConsumerConfig config = new ConsumerConfig(props);  
  42.   
  43.         consumer = kafka.consumer.Consumer.createJavaConsumerConnector(config);  
  44.     }  
  45.   
  46.     void consume() {  
  47.         Map<String, Integer> topicCountMap = new HashMap<String, Integer>();  
  48.         topicCountMap.put(KafkaProducer.TOPIC, new Integer(1));  
  49.   
  50.         StringDecoder keyDecoder = new StringDecoder(new VerifiableProperties());  
  51.         StringDecoder valueDecoder = new StringDecoder(new VerifiableProperties());  
  52.   
  53.         Map<String, List<KafkaStream<String, String>>> consumerMap = consumer.createMessageStreams(topicCountMap, keyDecoder, valueDecoder);  
  54.         KafkaStream<String, String> stream = consumerMap.get(KafkaProducer.TOPIC).get(0);  
  55.         ConsumerIterator<String, String> it = stream.iterator();  
  56.         while (it.hasNext())  
  57.             System.out.println("<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<" + it.next().message() + "<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<");  
  58.     }  
  59.   
  60.     public static void main(String[] args) {  
  61.         new KafkaConsumer().consume();  
  62.     }  
  63. }  


运行结果:


监控页面


源码下载:https://github.com/appleappleapple/BigDataLearning

原创粉丝点击