# Name the source, channel, and sink of agent a1
a1.sources = s1
a1.channels = c1
a1.sinks = k1

# Bind the source to the channel it writes into
a1.sources.s1.channels = c1

# Source configuration: consume events from Kafka
a1.sources.s1.type = org.apache.flume.source.kafka.KafkaSource
a1.sources.s1.batchSize = 3000
a1.sources.s1.batchDurationMillis = 100
a1.sources.s1.kafka.bootstrap.servers = 192.168.40.203:9092
a1.sources.s1.kafka.topics = test
a1.sources.s1.kafka.consumer.group.id = lxk_0429

# Channel configuration: memory channel, i.e. all events are buffered in memory
a1.channels.c1.type = memory
a1.channels.c1.capacity = 2000000
a1.channels.c1.transactionCapacity = 30000
a1.channels.c1.byteCapacityBufferPercentage = 40
a1.channels.c1.byteCapacity = 2147483648

# Sink configuration: the stock file_roll sink, which writes events to the local disk
a1.sinks.k1.type = file_roll
a1.sinks.k1.channel = c1
a1.sinks.k1.sink.pathManager = default
a1.sinks.k1.sink.pathManager.extension = dat
a1.sinks.k1.sink.pathManager.prefix = test-
a1.sinks.k1.sink.rollInterval = 30
a1.sinks.k1.sink.directory = /home/test/log2
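
To sanity-check this pipeline end to end, you can push a few records into the test topic and then watch for test-*.dat files appearing under /home/test/log2 within the 30-second roll interval. Below is a minimal producer sketch assuming the kafka-python client is available (pip install kafka-python); the client library and the message payloads are illustrative assumptions, only the broker address and topic name come from the config above.

# test_producer.py -- send a few test records to the topic Flume consumes from.
# Assumes the kafka-python package; payload contents are arbitrary test data.
from kafka import KafkaProducer

producer = KafkaProducer(bootstrap_servers="192.168.40.203:9092")

for i in range(10):
    # Each record becomes one Flume event on the memory channel and,
    # after the roll interval, one line in a rolled test-*.dat file.
    producer.send("test", f"test message {i}".encode("utf-8"))

producer.flush()   # ensure every record has reached the broker
producer.close()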