# Flume agent "a2": TAILDIR source -> Kafka channel, no sink.
# The Kafka channel itself publishes events to the broker, i.e. it acts as
# the Kafka producer for this pipeline.
a2.sources = s2
a2.channels = c2

# --- TAILDIR source: tails matching files, persisting read offsets so a
# --- restart resumes where it left off instead of re-reading files.
a2.sources.s2.type = TAILDIR
# Deliver events collected by this source to channel c2
a2.sources.s2.channels = c2
# JSON file in which per-file read positions are persisted
a2.sources.s2.positionFile = /home/test/taildir_position.json
# FIX: both groups must be listed here; the original declared only "f1",
# so filegroups.f2 below was defined but never tailed.
a2.sources.s2.filegroups = f1 f2
a2.sources.s2.filegroups.f1 = /home/test/log/.*dat
a2.sources.s2.filegroups.f2 = /home/test/log2/.*dat
# Max events delivered to the channel per batch
a2.sources.s2.maxBatchCount = 1000

# --- Kafka channel (charged with producing records to the broker) ---
a2.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
# Flume >= 1.7 (required by TAILDIR) uses the kafka.* property names;
# the old "brokerList" / "topic" spellings are deprecated aliases, and
# "zookeeperConnect" (was 192.168.40.203:2181/kafka) is ignored by the
# new Kafka client, so it is dropped here.
a2.channels.c2.kafka.bootstrap.servers = 192.168.40.203:9092
a2.channels.c2.kafka.topic = recv_test
# false: payloads are written as plain text; true (the default) wraps each
# record as an Avro-serialized Flume event, which looks garbled to
# plain-text consumers.
a2.channels.c2.parseAsFlumeEvent = false
a2.channels.c2.kafka.producer.acks = 1
# Producer tuning, passed through via the kafka.producer.* prefix.
# NOTE(review): the original used legacy Scala-producer keys
# (producer.type, queue.buffering.max.ms/messages, batch.num.messages)
# which the new Java producer silently ignores; "buffer up to 5000 ms"
# maps to linger.ms. The async/message-count knobs have no direct
# equivalent (batch.size is in bytes) — confirm desired batching against
# the Kafka producer docs before re-adding them.
a2.channels.c2.kafka.producer.linger.ms = 5000