Message Queue: Kafka


Setting up ZooKeeper and the Kafka 0.8 client is not covered here; please look that up separately.

Kafka dependency, spring-integration-kafka version 1.3.0.RELEASE:

<dependency>
    <groupId>org.springframework.integration</groupId>
    <artifactId>spring-integration-kafka</artifactId>
    <version>1.3.0.RELEASE</version>
    <exclusions>
        <exclusion>
            <groupId>org.springframework</groupId>
            <artifactId>*</artifactId>
        </exclusion>
    </exclusions>
</dependency>
<dependency>
    <groupId>org.springframework</groupId>
    <artifactId>spring-messaging</artifactId>
    <version>${spring.version}</version>
</dependency>
<!-- avro -->
<dependency>
    <groupId>org.apache.avro</groupId>
    <artifactId>avro</artifactId>
    <version>1.7.7</version>
</dependency>
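The ${spring.version} placeholder above is expected to be defined in the POM's properties section; the original post does not show it. A minimal sketch, assuming a Spring 4.x line (the exact version below is an assumption, adjust to whatever your project already uses):

<!-- Hypothetical pom.xml properties block; 4.2.9.RELEASE is an assumed Spring version, not from the original post. -->
<properties>
    <spring.version>4.2.9.RELEASE</spring.version>
</properties>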


Kafka configuration

producer

<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xmlns:int="http://www.springframework.org/schema/integration"
    xmlns:int-kafka="http://www.springframework.org/schema/integration/kafka"
    xmlns:task="http://www.springframework.org/schema/task"
    xsi:schemaLocation="http://www.springframework.org/schema/integration/kafka http://www.springframework.org/schema/integration/kafka/spring-integration-kafka.xsd
        http://www.springframework.org/schema/integration http://www.springframework.org/schema/integration/spring-integration.xsd
        http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
        http://www.springframework.org/schema/task http://www.springframework.org/schema/task/spring-task.xsd">

    <!-- topic test config -->
    <int:channel id="kafkaChannel">
        <int:queue />
    </int:channel>

    <!-- common config -->
    <bean id="stringSerializer" class="org.apache.kafka.common.serialization.StringSerializer" />
    <bean id="kafkaEncoder"
        class="org.springframework.integration.kafka.serializer.avro.AvroReflectDatumBackedKafkaEncoder">
        <constructor-arg value="java.lang.String" />
    </bean>

    <bean id="producerProperties" class="org.springframework.beans.factory.config.PropertiesFactoryBean">
        <property name="properties">
            <props>
                <prop key="topic.metadata.refresh.interval.ms">3600000</prop>
                <prop key="message.send.max.retries">5</prop>
                <prop key="serializer.class">kafka.serializer.StringEncoder</prop>
                <prop key="request.required.acks">1</prop>
            </props>
        </property>
    </bean>

    <task:executor id="taskExecutor" pool-size="5" keep-alive="120" queue-capacity="500" />

    <int-kafka:outbound-channel-adapter id="kafkaOutboundChannelAdapter"
        kafka-producer-context-ref="producerContext" auto-startup="true"
        channel="kafkaChannel" order="3">
        <int:poller fixed-delay="1000" time-unit="MILLISECONDS" receive-timeout="1" task-executor="taskExecutor" />
    </int-kafka:outbound-channel-adapter>

    <int-kafka:producer-context id="producerContext" producer-properties="producerProperties">
        <int-kafka:producer-configurations>
            <!-- one configuration per topic -->
            <int-kafka:producer-configuration
                broker-list="${kafka.broker.list}"
                key-serializer="stringSerializer" value-class-type="java.lang.String"
                value-serializer="stringSerializer" topic="test" />
            <int-kafka:producer-configuration
                broker-list="${kafka.broker.list}"
                key-serializer="stringSerializer" value-class-type="java.lang.String"
                value-serializer="stringSerializer" topic="otherTopic" />
        </int-kafka:producer-configurations>
    </int-kafka:producer-context>
</beans>
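The KKProducer class shown further down receives the message channel through its setter, so it still needs to be declared as a bean and wired to kafkaChannel. That wiring is not shown in the original post; a minimal sketch, assuming it lives in the same producer context file and using a hypothetical bean id:

<!-- Hypothetical wiring (not in the original post): inject kafkaChannel into KKProducer. -->
<bean id="kkProducer" class="com.pay.kafka.KKProducer">
    <property name="channel" ref="kafkaChannel" />
</bean>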

consumer

<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xmlns:int="http://www.springframework.org/schema/integration"
    xmlns:int-kafka="http://www.springframework.org/schema/integration/kafka"
    xmlns:task="http://www.springframework.org/schema/task"
    xsi:schemaLocation="http://www.springframework.org/schema/integration/kafka http://www.springframework.org/schema/integration/kafka/spring-integration-kafka.xsd
        http://www.springframework.org/schema/integration http://www.springframework.org/schema/integration/spring-integration.xsd
        http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
        http://www.springframework.org/schema/task http://www.springframework.org/schema/task/spring-task.xsd">

    <!-- topic test config -->
    <int:channel id="kafkaConsumerChannel">
        <int:dispatcher task-executor="kafkaMessageExecutor" />
    </int:channel>

    <!-- channel adapter; auto-startup="true" is required, otherwise no data is received -->
    <int-kafka:inbound-channel-adapter
        id="kafkaInboundChannelAdapter" kafka-consumer-context-ref="consumerContext"
        auto-startup="true" channel="kafkaConsumerChannel">
        <int:poller fixed-delay="1" time-unit="MILLISECONDS" />
    </int-kafka:inbound-channel-adapter>

    <task:executor id="kafkaMessageExecutor" pool-size="8" keep-alive="120" queue-capacity="500" />

    <bean id="kafkaDecoder" class="org.springframework.integration.kafka.serializer.common.StringDecoder" />

    <bean id="consumerProperties" class="org.springframework.beans.factory.config.PropertiesFactoryBean">
        <property name="properties">
            <props>
                <prop key="auto.offset.reset">smallest</prop>
                <prop key="socket.receive.buffer.bytes">10485760</prop> <!-- 10M -->
                <prop key="fetch.message.max.bytes">5242880</prop>
                <prop key="auto.commit.interval.ms">1000</prop>
            </props>
        </property>
    </bean>

    <!-- bean that receives the messages -->
    <bean id="kkConsumer" class="com.pay.kafka.KKConsumer" />
    <!-- method that handles the received messages -->
    <int:outbound-channel-adapter channel="kafkaConsumerChannel" ref="kkConsumer" method="kkMsgConsumer" />

    <!-- ZooKeeper config; multiple addresses can be configured -->
    <int-kafka:zookeeper-connect id="zookeeperConnect"
        zk-connect="${dubbo.registry.address}" zk-connection-timeout="6000"
        zk-session-timeout="6000" zk-sync-time="2000" />

    <int-kafka:consumer-context id="consumerContext"
        consumer-timeout="1000" zookeeper-connect="zookeeperConnect"
        consumer-properties="consumerProperties">
        <int-kafka:consumer-configurations>
            <int-kafka:consumer-configuration
                group-id="default1" value-decoder="kafkaDecoder" key-decoder="kafkaDecoder"
                max-messages="5000">
                <!-- multiple topics can be configured -->
                <int-kafka:topic id="test" streams="4" />
                <int-kafka:topic id="otherTopic" streams="4" />
            </int-kafka:consumer-configuration>
        </int-kafka:consumer-configurations>
    </int-kafka:consumer-context>
</beans>
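Both files rely on placeholders such as ${kafka.broker.list} and ${dubbo.registry.address}, which are assumed to be resolved from an externalized properties file. The original post does not show that part; a minimal sketch with a hypothetical file name and example addresses (the Spring context namespace must also be declared on the beans element):

<!-- Hypothetical placeholder configuration; file name and addresses below are examples only. -->
<context:property-placeholder location="classpath:kafka.properties" />

# kafka.properties (example values)
kafka.broker.list=192.168.1.10:9092,192.168.1.11:9092
dubbo.registry.address=192.168.1.10:2181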

package com.pay.kafka;

import java.util.Date;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.integration.kafka.support.KafkaHeaders;
import org.springframework.integration.support.MessageBuilder;
import org.springframework.messaging.MessageChannel;

public class KKProducer {

    private static Logger log = LoggerFactory.getLogger(KKProducer.class);

    // Injected by Spring through the setter; stored statically so the static send method can use it.
    private static MessageChannel channel;

    public void setChannel(MessageChannel channel) {
        KKProducer.channel = channel;
    }

    public static void kkMsgProducer(String topic, String strMsg) {
        log.debug(String.format("Kafka producer, topic: %s, message: %s, time: %s", topic, strMsg, new Date()));
        // Build a Spring Integration message and route it to the target topic via the TOPIC header.
        channel.send(MessageBuilder.withPayload(strMsg).setHeader(KafkaHeaders.TOPIC, topic).build());
    }
}
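Once the Spring context is loaded and the kkProducer bean has been initialized (so the channel is injected, as in the hypothetical wiring shown earlier), sending a message is a single static call. A usage sketch, assuming the producer configuration above is on the classpath as producer-kafka.xml (the file name is an assumption):

package com.pay.kafka;

import org.springframework.context.support.ClassPathXmlApplicationContext;

public class KKProducerDemo {

    public static void main(String[] args) {
        // Load the producer configuration; "producer-kafka.xml" is a hypothetical file name.
        ClassPathXmlApplicationContext context =
                new ClassPathXmlApplicationContext("producer-kafka.xml");
        try {
            // Send a message to the "test" topic configured in the producer context.
            KKProducer.kkMsgProducer("test", "hello kafka");
        } finally {
            context.close();
        }
    }
}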

package com.pay.kafka;

import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class KKConsumer {

    private static Logger log = LoggerFactory.getLogger(KKConsumer.class);

    // Payload shape: topic -> (partition -> message).
    public void kkMsgConsumer(Map<String, Map<Integer, String>> msgs) {
        for (Map.Entry<String, Map<Integer, String>> entry : msgs.entrySet()) {
            LinkedHashMap<Integer, String> messages = (LinkedHashMap<Integer, String>) entry.getValue();
            Set<Integer> keys = messages.keySet();
            for (Integer i : keys) {
                log.debug("Kafka consumer, partition: {}", i);
            }
            Collection<String> values = messages.values();
            for (Iterator<String> iterator = values.iterator(); iterator.hasNext();) {
                String msg = iterator.next();
                log.debug("Kafka consumer, topic: {}, message: {}", entry.getKey(), msg);
            }
        }
    }
}
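On the consuming side nothing needs to be called explicitly: loading the consumer context starts the inbound channel adapter, which polls Kafka and dispatches each batch to kkMsgConsumer. A start-up sketch, assuming the consumer configuration above is on the classpath as consumer-kafka.xml (the file name is an assumption):

package com.pay.kafka;

import org.springframework.context.support.ClassPathXmlApplicationContext;

public class KKConsumerDemo {

    public static void main(String[] args) throws InterruptedException {
        // Load the consumer configuration; "consumer-kafka.xml" is a hypothetical file name.
        ClassPathXmlApplicationContext context =
                new ClassPathXmlApplicationContext("consumer-kafka.xml");
        context.registerShutdownHook();
        // Keep the JVM alive so the inbound adapter can keep polling and dispatching to KKConsumer.
        Thread.currentThread().join();
    }
}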




