flume读取日志数据写入kafka
来源:互联网 发布:掌上公交软件下载 编辑:程序博客网 时间:2024/05/16 19:42
一、flume配置
flume要求1.6以上版本
flume-conf.properties文件配置内容,sink的输出作为kafka的producer(生产者)
- a1.sources = r1
- a1.sinks = k1
- a1.channels = c1
- # Describe/configure the source
- a1.sources.r1.type = exec
- a1.sources.r1.command = tail -F /home/airib/work/log.log
- # Describe the sink
- #a1.sinks.k1.type = logger
- a1.sinks.k1.type = org.apache.flume.sink.kafka.KafkaSink
- a1.sinks.k1.topic = test
- a1.sinks.k1.brokerList = localhost:9092
- a1.sinks.k1.requiredAcks = 1
- a1.sinks.k1.batchSize = 20
- # Use a channel which buffers events in memory
- a1.channels.c1.type = memory
- a1.channels.c1.capacity = 1000
- a1.channels.c1.transactionCapacity = 100
- # Bind the source and sink to the channel
- a1.sources.r1.channels = c1
- a1.sinks.k1.channel = c1
flume启动
bin/flume-ng agent --conf conf --conf-file conf/flume-conf.properties --name a1 -Dflume.root.logger=INFO,console
package com.hgp.kafka.kafka;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.serializer.StringDecoder;
import kafka.utils.VerifiableProperties;

/**
 * Console consumer for the Flume-fed {@code test} topic, using the legacy
 * Kafka 0.8.x high-level (ZooKeeper-based) consumer API. Prints every
 * message value to stdout and blocks forever.
 */
public class KafkaConsumer {

    /** Topic written by the Flume Kafka sink (a1.sinks.k1.topic). */
    private static final String TOPIC = "test";

    private final ConsumerConnector consumer;

    private KafkaConsumer() {
        Properties props = new Properties();
        // ZooKeeper quorum; the 0.8 high-level consumer uses ZK for group
        // coordination and offset storage.
        props.put("zookeeper.connect", "localhost:2181");
        // Consumers sharing a group.id split the topic's partitions.
        props.put("group.id", "jd-group");
        // ZooKeeper session / sync timeouts.
        props.put("zookeeper.session.timeout.ms", "4000");
        props.put("zookeeper.sync.time.ms", "200");
        // Commit consumed offsets to ZooKeeper once per second.
        props.put("auto.commit.interval.ms", "1000");
        // When the group has no committed offset, start from the earliest
        // available message ("smallest" is the 0.8.x spelling of "earliest").
        props.put("auto.offset.reset", "smallest");
        // NOTE(review): the original also set "serializer.class", which is a
        // producer-only property that the consumer ignores; it was removed.
        // Decoding is handled by the StringDecoders passed in consume().
        ConsumerConfig config = new ConsumerConfig(props);
        consumer = kafka.consumer.Consumer.createJavaConsumerConnector(config);
    }

    /**
     * Opens a single stream on {@link #TOPIC} and prints each message value.
     * Blocks indefinitely: {@code hasNext()} only returns when a message
     * arrives or the connector is shut down.
     */
    void consume() {
        Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
        // One consumer thread / stream for the topic (avoid the deprecated
        // new Integer(1); autoboxing uses the cached Integer.valueOf path).
        topicCountMap.put(TOPIC, 1);
        StringDecoder keyDecoder = new StringDecoder(new VerifiableProperties());
        StringDecoder valueDecoder = new StringDecoder(new VerifiableProperties());
        Map<String, List<KafkaStream<String, String>>> consumerMap =
                consumer.createMessageStreams(topicCountMap, keyDecoder, valueDecoder);
        KafkaStream<String, String> stream = consumerMap.get(TOPIC).get(0);
        ConsumerIterator<String, String> it = stream.iterator();
        while (it.hasNext()) {
            System.out.println(it.next().message());
        }
    }

    public static void main(String[] args) {
        new KafkaConsumer().consume();
    }
}
kafka启动命令
启动Zookeeper server:
bin/zookeeper-server-start.sh config/zookeeper.properties &
启动Kafka server:
bin/kafka-server-start.sh config/server.properties &
运行producer:
bin/kafka-console-producer.sh --broker-list localhost:9092 --topic test
运行consumer:
bin/kafka-console-consumer.sh --zookeeper localhost:2181 --topic test --from-beginning
0 0
- flume读取日志数据写入kafka
- flume读取日志数据写入kafka
- flume读取日志数据写入kafka 然后kafka+storm整合
- Flume读取日志数据并写入到Kafka,ConsoleConsumer进行实时消费
- flume 读取kafka 数据
- flume+kafka读取日志文件
- Spark Streaming 读取Kafka数据写入Elasticsearch
- flume将log4j日志数据写入到hdfs
- Flume+Kafka+Sparkstreaming日志分析
- flume上报日志到kafka
- flume+kafka+hdfs日志系统
- flume+kafka收集业务日志
- flume学习(三):flume将log4j日志数据写入到hdfs
- flume学习(五):flume将log4j日志数据写入到hdfs
- flume学习(五):flume将log4j日志数据写入到hdfs
- flume学习(二):flume将log4j日志数据写入到hdfs
- flume学习(三):flume将log4j日志数据写入到hdfs
- 5.Flume实时监控读取日志数据,存储hdfs文件系统
- ListView +ObjectDataSource+DataPager
- JAVA算法_快速排序
- ABAP中使用函数Number_Get_Next产生流水号
- Android隐藏虚拟按键(底部导航栏)
- 如何深入理解 StatsD 与 Graphite ?
- flume读取日志数据写入kafka
- 如何使Android应用程序获取系统权限
- 【小知识点总结】保存android日志logcat到文件中
- protobuf-2.5.0安装
- 今天的重要事情
- xx技术收集汇总
- Android实现复制粘贴
- Android 永久化保持客户端和服务器连接cookieStore
- liunx 定时器