java--kafka编程实例--producer和consumer
来源:互联网 发布:淘宝手机店铺怎么装修 编辑:程序博客网 时间:2024/05/23 20:21
先修改broker的配置文件:
vim config/server.properties，找到 Socket Server Settings 部分，修改 listeners 的内容即可。
producer代码如下:
import org.apache.kafka.clients.producer.Callback;import org.apache.kafka.clients.producer.KafkaProducer;import org.apache.kafka.clients.producer.ProducerRecord;import org.apache.kafka.clients.producer.RecordMetadata;import java.util.Properties;import java.util.concurrent.ExecutionException;class KafkaProperties { public static final String TOPIC = "chenxun"; public static final String KAFKA_SERVER_URL = "192.168.222.130"; public static final int KAFKA_SERVER_PORT = 9092; public static final int KAFKA_PRODUCER_BUFFER_SIZE = 64 * 1024; public static final int CONNECTION_TIMEOUT = 100000; public static final String TOPIC2 = "topic2"; public static final String TOPIC3 = "topic3"; public static final String CLIENT_ID = "SimpleConsumerDemoClient"; private KafkaProperties() {}}class Producer extends Thread { private final KafkaProducer<Integer, String> producer; private final String topic; private final Boolean isAsync; public Producer(String topic, Boolean isAsync) { Properties props = new Properties(); props.put("bootstrap.servers", KafkaProperties.KAFKA_SERVER_URL + ":" + KafkaProperties.KAFKA_SERVER_PORT); props.put("client.id", "DemoProducer"); props.put("key.serializer", "org.apache.kafka.common.serialization.IntegerSerializer"); props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer"); producer = new KafkaProducer<>(props); this.topic = topic; this.isAsync = isAsync; } public void run() { int messageNo = 1; while (true) { String messageStr = "Message_" + messageNo; long startTime = System.currentTimeMillis(); if (isAsync) { // Send asynchronously producer.send(new ProducerRecord<>(topic, messageNo, messageStr), new DemoCallBack(startTime, messageNo, messageStr)); } else { // Send synchronously try { producer.send(new ProducerRecord<>(topic, messageNo, messageStr)).get(); System.out.println("Sent message: (" + messageNo + ", " + messageStr + ")"); } catch (InterruptedException | ExecutionException e) { e.printStackTrace(); } } 
++messageNo; } }}class DemoCallBack implements Callback { private final long startTime; private final int key; private final String message; public DemoCallBack(long startTime, int key, String message) { this.startTime = startTime; this.key = key; this.message = message; } /** * A callback method the user can implement to provide asynchronous handling of request completion. This method will * be called when the record sent to the server has been acknowledged. Exactly one of the arguments will be * non-null. * * @param metadata The metadata for the record that was sent (i.e. the partition and offset). Null if an error * occurred. * @param exception The exception thrown during processing of this record. Null if no error occurred. */ public void onCompletion(RecordMetadata metadata, Exception exception) { long elapsedTime = System.currentTimeMillis() - startTime; if (metadata != null) { System.out.println( "message(" + key + ", " + message + ") sent to partition(" + metadata.partition() + "), " + "offset(" + metadata.offset() + ") in " + elapsedTime + " ms"); } else { exception.printStackTrace(); } }}public class hello { public static void main(String[] args) { boolean isAsync = args.length == 0 || !args[0].trim().equalsIgnoreCase("sync"); boolean flag = true; Producer producerThread = new Producer(KafkaProperties.TOPIC, flag); producerThread.start(); //Consumer consumerThread = new Consumer(KafkaProperties.TOPIC); // consumerThread.start(); }}
consumer:
import kafka.utils.ShutdownableThread;import org.apache.kafka.clients.consumer.ConsumerConfig;import org.apache.kafka.clients.consumer.ConsumerRecord;import org.apache.kafka.clients.consumer.ConsumerRecords;import org.apache.kafka.clients.consumer.KafkaConsumer;import java.util.Collections;import java.util.Properties;class KafkaProperties { public static final String TOPIC = "chenxun"; public static final String KAFKA_SERVER_URL = "192.168.222.130"; public static final int KAFKA_SERVER_PORT = 9092; public static final int KAFKA_PRODUCER_BUFFER_SIZE = 64 * 1024; public static final int CONNECTION_TIMEOUT = 100000; public static final String TOPIC2 = "topic2"; public static final String TOPIC3 = "topic3"; public static final String CLIENT_ID = "SimpleConsumerDemoClient"; private KafkaProperties() {}}public class Consumer extends ShutdownableThread { private final KafkaConsumer<Integer, String> consumer; private final String topic; public Consumer(String topic) { super("KafkaConsumerExample", false); Properties props = new Properties(); props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, KafkaProperties.KAFKA_SERVER_URL + ":" + KafkaProperties.KAFKA_SERVER_PORT); props.put(ConsumerConfig.GROUP_ID_CONFIG, "DemoConsumer"); props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true"); props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000"); props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "30000"); props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.IntegerDeserializer"); props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer"); consumer = new KafkaConsumer<>(props); this.topic = topic; } @Override public void doWork() { consumer.subscribe(Collections.singletonList(this.topic)); ConsumerRecords<Integer, String> records = consumer.poll(1000); for (ConsumerRecord<Integer, String> record : records) { System.out.println("Received message: (" + record.key() + ", " 
+ record.value() + ") at offset " + record.offset()); } } @Override public String name() { return null; } @Override public boolean isInterruptible() { return false; }}
阅读全文
0 0
- java--kafka编程实例--producer和consumer
- Kafka使用Java进行Producer和Consumer编程
- Kafka(producer和consumer编程实践)
- Kafka的 Consumer和Producer
- Kafka 0.11.0.0 producer&consumer java API实例
- Kafka学习整理七(producer和consumer编程实践)
- Kafka学习整理七(producer和consumer编程实践)
- Kafka的Producer和Consumer的示例(使用java语言)
- Kafka的Producer和Consumer的示例(使用java语言)
- Kafka Java Producer实例
- Kafka的Producer和Consumer源码学习
- Kafka 学习笔记(4) - Consumer和Producer
- spring集成kafka实现producer和consumer
- spring集成kafka实现producer和consumer
- Avro kafka(Producer-Consumer)
- kafka--producer&consumer
- Kafka系列3-python版本producer生产者和consumer消费者实例
- kafka学习笔记 --- Scala实现Kafka producer 和 consumer
- html实现子元素与父元素等高
- QT:知识点总结
- 2017-05-22这一天我突然想写点东西
- 经典dijkstra算法题目及代码
- 2017大学生程序设计邀请赛(华东师范大学)G. 铁路修复计划
- java--kafka编程实例--producer和consumer
- Essential Studio for Windows Forms发布2017 v2,支持office 2016和主题定制
- java生成word和pdf的几种方法的优缺点对比
- 简单介绍maven命令
- Linux_02 简单linux命令+用户管理+用户组管理
- 全选和反选Demo
- 欢迎使用CSDN-markdown编辑器
- ③写给后端工程师的JavaScript教程——变量申明
- android 快速集成ShareSDk分享功能