diff --git a/external-flume/conf/connection/clean_start_flume.sh b/external-flume/conf/connection/clean_start_flume.sh
new file mode 100755
index 0000000..ef811a5
--- /dev/null
+++ b/external-flume/conf/connection/clean_start_flume.sh
@@ -0,0 +1,10 @@
+#!/bin/sh
+
+BASE_DIR=$(cd $(dirname $0); pwd)
+DAE_NAME=`ls ${BASE_DIR} | grep '^dae_' | grep '\.sh$'`
+
+
+# Clean up the logs, checkpoints, and historical data
+rm -rf ${BASE_DIR}/logs/*
+
+nohup ${BASE_DIR}/${DAE_NAME} $1 $2 >/dev/null 2>&1 &
diff --git a/external-flume/conf/connection/connection_k2f.properties b/external-flume/conf/connection/connection_k2f.properties
new file mode 100644
index 0000000..f6ca65d
--- /dev/null
+++ b/external-flume/conf/connection/connection_k2f.properties
@@ -0,0 +1,22 @@
+# Name the source, channel, and sink
+connection.channels = c1
+connection.sinks = k1
+# Use a Kafka channel; omitting the source makes the pipeline more efficient
+connection.channels.c1.type = org.apache.flume.channel.kafka.KafkaChannel
+connection.channels.c1.kafka.bootstrap.servers = 192.168.40.152:9092
+connection.channels.c1.kafka.topic = CONNECTION-RECORD-LOG
+connection.channels.c1.kafka.consumer.group.id = lxk_0514
+connection.channels.c1.kafka.consumer.auto.offset.reset = latest
+connection.channels.c1.kafka.consumer.enable.auto.commit = true
+connection.channels.c1.kafka.consumer.fetch.max.wait.ms = 1000
+connection.channels.c1.kafka.consumer.fetch.min.bytes = 1048576
+connection.channels.c1.parseAsFlumeEvent = false
+
+# Plain file_roll sink that writes the data to the local disk
+connection.sinks.k1.type = file_roll
+connection.sinks.k1.channel = c1
+connection.sinks.k1.sink.pathManager = default
+connection.sinks.k1.sink.pathManager.extension = dat
+connection.sinks.k1.sink.pathManager.prefix = test-
+connection.sinks.k1.sink.rollInterval = 30
+connection.sinks.k1.sink.directory = /home/192.168.60.101/CONNECTION-RECORD-LOG
diff --git a/external-flume/conf/connection/count_flume.sh b/external-flume/conf/connection/count_flume.sh
new file mode 100755
index 0000000..cc8452b
--- /dev/null
+++ b/external-flume/conf/connection/count_flume.sh
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+BASE_DIR=$(cd $(dirname $0); pwd)
+
+JAR_NAME=`ls ${BASE_DIR} | grep 'k2' | grep '\.properties$'`
+
+NUM1=`ps -ef | grep ${JAR_NAME} | grep -v grep | wc -l`
+pids1=$(ps -ef | grep ${JAR_NAME} | grep -v grep | awk '{print $2}')
+echo 'flume '${JAR_NAME}' total process-->'${NUM1}
+if [ "${NUM1}" -ge "1" ];then
+    for pid1 in $pids1
+    do
+        echo 'flume '${JAR_NAME}' process-->'$pid1
+    done
+fi
diff --git a/external-flume/conf/connection/dae_connection.sh b/external-flume/conf/connection/dae_connection.sh
new file mode 100755
index 0000000..b7ba0d9
--- /dev/null
+++ b/external-flume/conf/connection/dae_connection.sh
@@ -0,0 +1,39 @@
+#!/bin/sh
+
+#JAR_NAME=ktk_ip_asn.properties
+PROPERTIES_NAME=connection_k2f
+# Flume agent name
+FLUME_NAME=connection
+# Flume root directory
+BASE_DIR=$(cd $(dirname $0); cd ../../; pwd)
+# Current directory
+CONF_DIR=$(cd $(dirname $0); pwd)
+# Total number of processes
+PROCESS_SUM=1
+
+echo "##############################################################" >> ${CONF_DIR}/restart_log/restart_${PROPERTIES_NAME}.log
+id=0 # serial number written after the restart message; no special purpose
+flag=0 # 0 = initial state; 1 = all processes started successfully; 2 = a process died unexpectedly, so the remaining processes were killed and everything is being restarted
+while true ; do
+    NUM=`ps -ef | grep ${PROPERTIES_NAME} | grep -v grep | grep -v dae | wc -l`
+    pids=$(ps -ef | grep ${PROPERTIES_NAME} | grep properties | awk '{print $2}')
+    time_stamp=$(date +%Y%m%d%H%M%S)
+    # If the count differs from the configured number of processes after a full start, kill them all and restart
+    if [[ "${NUM}" -ne ${PROCESS_SUM} && $flag -eq "1" ]];then
+        for pid in $pids
+        do
+            kill -9 $pid
+        done
+        flag=2
+    # If fewer processes are running than defined, start one
+    elif [ "${NUM}" -lt ${PROCESS_SUM} ];then
+        id=$(( ( ($id) % $PROCESS_SUM ) + 1 ))
+        nohup ${BASE_DIR}/bin/flume-ng agent -n ${FLUME_NAME} -c ${CONF_DIR} -f ${CONF_DIR}/${PROPERTIES_NAME}.properties >/dev/null 2>&1 &
+        echo "${time_stamp} ---> the ${PROPERTIES_NAME}_APP restart ---> $id" >> ${CONF_DIR}/restart_log/restart_${PROPERTIES_NAME}.log
+    # If the process count equals the configured number, set flag to 1 and id to 0
+    elif [ "${NUM}" -eq ${PROCESS_SUM} ];then
+        flag=1
+        id=0
+    fi
+    sleep 10
+done
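Note: dae_connection.sh is the watchdog for this directory. Every 10 seconds it counts running agent processes, launches flume-ng until PROCESS_SUM processes exist, and kills and respawns everything if the count drifts after a clean start. A minimal operating sketch, assuming the scripts are run from conf/connection:

    ./start_flume.sh                                  # launch the dae_* watchdog in the background
    ./count_flume.sh                                  # list the agent processes the watchdog manages
    tail -f restart_log/restart_connection_k2f.log    # follow restart events
    ./stop_flume.sh                                   # kill the watchdog first, then the agent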
diff --git a/external-flume/conf/connection/flume-env.sh b/external-flume/conf/connection/flume-env.sh
new file mode 100755
index 0000000..63979aa
--- /dev/null
+++ b/external-flume/conf/connection/flume-env.sh
@@ -0,0 +1,35 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# If this file is placed at FLUME_CONF_DIR/flume-env.sh, it will be sourced
+# during Flume startup.
+
+# Environment variables can be set here.
+
+export JAVA_HOME=/usr/lib/jvm/jdk1.8.0_73
+
+# Give Flume more memory and pre-allocate, enable remote monitoring via JMX
+#export JAVA_OPTS="-Xms1024m -Xmx3072m -Dcom.sun.management.jmxremote"
+export JAVA_OPTS="-Xms512m -Xmx2048m -Xss256k -Xmn1g -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:-UseGCOverheadLimit -Dcom.sun.management.jmxremote"
+
+# Let Flume write raw event data and configuration information to its log files for debugging
+# purposes. Enabling these flags is not recommended in production,
+# as it may result in logging sensitive user information or encryption secrets.
+# export JAVA_OPTS="$JAVA_OPTS -Dorg.apache.flume.log.rawdata=true -Dorg.apache.flume.log.printconfig=true "
+
+# Note that the Flume conf directory is always included in the classpath.
+#FLUME_CLASSPATH=""
+
diff --git a/external-flume/conf/connection/log4j.properties b/external-flume/conf/connection/log4j.properties
new file mode 100644
index 0000000..c948bff
--- /dev/null
+++ b/external-flume/conf/connection/log4j.properties
@@ -0,0 +1,68 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# Define some default values that can be overridden by system properties.
+#
+# For testing, it may also be convenient to specify
+# -Dflume.root.logger=DEBUG,console when launching flume.
+
+#flume.root.logger=DEBUG,console
+flume.root.logger=INFO,LOGFILE
+flume.log.dir=./logs
+flume.log.file=flume.log
+
+log4j.logger.org.apache.flume.lifecycle = INFO
+log4j.logger.org.jboss = WARN
+log4j.logger.org.mortbay = INFO
+log4j.logger.org.apache.avro.ipc.NettyTransceiver = WARN
+log4j.logger.org.apache.hadoop = INFO
+log4j.logger.org.apache.hadoop.hive = ERROR
+
+# Define the root logger to the system property "flume.root.logger".
+log4j.rootLogger=${flume.root.logger}
+
+
+# Stock log4j rolling file appender
+# Default log rotation configuration
+log4j.appender.LOGFILE=org.apache.log4j.RollingFileAppender
+log4j.appender.LOGFILE.MaxFileSize=100MB
+log4j.appender.LOGFILE.MaxBackupIndex=10
+log4j.appender.LOGFILE.File=${flume.log.dir}/${flume.log.file}
+log4j.appender.LOGFILE.layout=org.apache.log4j.PatternLayout
+log4j.appender.LOGFILE.layout.ConversionPattern=%d{dd MMM yyyy HH:mm:ss,SSS} %-5p [%t] (%C.%M:%L) %x - %m%n
+
+
+# Warning: If you enable the following appender it will fill up your disk if you don't have a cleanup job!
+# This uses the updated rolling file appender from log4j-extras that supports a reliable time-based rolling policy.
+# See http://logging.apache.org/log4j/companions/extras/apidocs/org/apache/log4j/rolling/TimeBasedRollingPolicy.html
+# Add "DAILY" to flume.root.logger above if you want to use this
+log4j.appender.DAILY=org.apache.log4j.rolling.RollingFileAppender
+log4j.appender.DAILY.rollingPolicy=org.apache.log4j.rolling.TimeBasedRollingPolicy
+log4j.appender.DAILY.rollingPolicy.ActiveFileName=${flume.log.dir}/${flume.log.file}
+log4j.appender.DAILY.rollingPolicy.FileNamePattern=${flume.log.dir}/${flume.log.file}.%d{yyyy-MM-dd}
+log4j.appender.DAILY.layout=org.apache.log4j.PatternLayout
+log4j.appender.DAILY.layout.ConversionPattern=%d{dd MMM yyyy HH:mm:ss,SSS} %-5p [%t] (%C.%M:%L) %x - %m%n
+
+
+# console
+# Add "console" to flume.root.logger above if you want to use this
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d (%t) [%p - %l] %m%n
diff --git a/external-flume/conf/connection/pro/2f2k.properties b/external-flume/conf/connection/pro/2f2k.properties
new file mode 100644
index 0000000..5e5c573
--- /dev/null
+++ b/external-flume/conf/connection/pro/2f2k.properties
@@ -0,0 +1,24 @@
+a2.sources = s2
+a2.channels = c2
+# Send the data collected by the source to this channel
+a2.sources.s2.channels = c2
+#taildir source
+a2.sources.s2.type = TAILDIR
+a2.sources.s2.channels = c2
+a2.sources.s2.positionFile = /home/test/2taildir_position.json
+a2.sources.s2.filegroups = f2
+a2.sources.s2.filegroups.f2 = /home/test/log2/.*dat
+a2.sources.s2.maxBatchCount = 1000
+
+# The Kafka channel acts as the producer
+a2.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c2.brokerList = 192.168.40.222:9092
+a2.channels.c2.zookeeperConnect=192.168.40.222:2181/kafka
+a2.channels.c2.topic = recv_test
+# false writes records as plain text; true writes them as Flume events, which come out garbled when read as text (the default is true)
+a2.channels.c2.parseAsFlumeEvent = false
+a2.channels.c2.kafka.producer.acks = 1
+a2.channels.c2.producer.type=async
+a2.channels.c2.queue.buffering.max.ms = 5000
+a2.channels.c2.queue.buffering.max.messages=20000
+a2.channels.c2.batch.num.messages=500
diff --git a/external-flume/conf/connection/pro/4f2k.properties b/external-flume/conf/connection/pro/4f2k.properties
new file mode 100644
index 0000000..bf0408b
--- /dev/null
+++ b/external-flume/conf/connection/pro/4f2k.properties
@@ -0,0 +1,58 @@
+a2.sources = s2
+a2.channels = c2 c3 c4
+# Send the data collected by the source to this channel
+a2.sources.s2.channels = c2
+#taildir source
+a2.sources.s2.type = TAILDIR
+a2.sources.s2.positionFile = /home/test/4taildir_position.json
+a2.sources.s2.filegroups = f3
+a2.sources.s2.filegroups.f3 = /home/test/recv/.*dat
+#a2.sources.s2.headers.f3.headerKey1 = channel2
+#a2.sources.s2.filegroups.f4 = /home/test/log/.*dat
+#a2.sources.s2.headers.f4.headerKey1 = channel3
+a2.sources.s2.maxBatchCount = 500
+#a2.sources.s2.selector.type = multiplexing
+
+#channel selector
+#a2.sources.s2.selector.header = headerKey1
+#a2.sources.s2.selector.mapping.channel2=c2
+#a2.sources.s2.selector.mapping.channel3=c3
+
+# The Kafka channel acts as the producer
+a2.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c2.brokerList = 192.168.40.222:9093
+a2.channels.c2.topic = recv_test
+# false writes records as plain text; true writes them as Flume events, which come out garbled when read as text (the default is true)
+a2.channels.c2.parseAsFlumeEvent = false
+a2.channels.c2.kafka.producer.acks = 1
+a2.channels.c2.producer.type=async
+a2.channels.c2.queue.buffering.max.ms = 5000
+a2.channels.c2.queue.buffering.max.messages=200000
+a2.channels.c2.batch.num.messages=5000
+
+# Second channel
+a2.channels.c3.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c3.brokerList = 192.168.40.222:9093
+a2.channels.c3.topic = recv_test
+# false writes records as plain text; true writes them as Flume events, which come out garbled when read as text (the default is true)
+a2.channels.c3.parseAsFlumeEvent = false
+a2.channels.c3.kafka.producer.acks = 1
+a2.channels.c3.producer.type=async
+a2.channels.c3.queue.buffering.max.ms = 5000
+a2.channels.c3.queue.buffering.max.messages=20000
+a2.channels.c3.batch.num.messages=500
+
+
+
+# Third channel
+a2.channels.c4.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c4.brokerList = 192.168.40.222:9093
+a2.channels.c4.topic = recv_test
+# false writes records as plain text; true writes them as Flume events, which come out garbled when read as text (the default is true)
+a2.channels.c4.parseAsFlumeEvent = false
+a2.channels.c4.kafka.producer.acks = 1
+a2.channels.c4.producer.type=async
+a2.channels.c4.queue.buffering.max.ms = 5000
+a2.channels.c4.queue.buffering.max.messages=20000
+a2.channels.c4.batch.num.messages=500
+
diff --git a/external-flume/conf/connection/pro/f2k.properties b/external-flume/conf/connection/pro/f2k.properties
new file mode 100644
index 0000000..7047e42
--- /dev/null
+++ b/external-flume/conf/connection/pro/f2k.properties
@@ -0,0 +1,24 @@
+a2.sources = s2
+a2.channels = c2
+# Send the data collected by the source to this channel
+a2.sources.s2.channels = c2
+#taildir source
+a2.sources.s2.type = TAILDIR
+a2.sources.s2.channels = c2
+a2.sources.s2.positionFile = /home/test/taildir_position.json
+a2.sources.s2.filegroups = f0
+a2.sources.s2.filegroups.f0 = /home/test/log/.*dat
+a2.sources.s2.maxBatchCount = 1000
+
+# The Kafka channel acts as the producer
+a2.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c2.brokerList = 192.168.40.222:9092
+a2.channels.c2.zookeeperConnect=192.168.40.222:2181/kafka
+a2.channels.c2.topic = recv_test
+# false writes records as plain text; true writes them as Flume events, which come out garbled when read as text (the default is true)
+a2.channels.c2.parseAsFlumeEvent = false
+a2.channels.c2.kafka.producer.acks = 1
+a2.channels.c2.producer.type=async
+a2.channels.c2.queue.buffering.max.ms = 5000
+a2.channels.c2.queue.buffering.max.messages=20000
+a2.channels.c2.batch.num.messages=500
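Note: these pro/ configs are test pipelines that tail *.dat files with a TAILDIR source straight into a KafkaChannel, with no sink stage. Because parseAsFlumeEvent = false, records land in Kafka as plain text, so delivery can be spot-checked with the stock Kafka console consumer. A sketch, assuming the standard Kafka CLI is installed on the broker host and using the broker from f2k.properties:

    bin/kafka-console-consumer.sh --bootstrap-server 192.168.40.222:9092 \
        --topic recv_test --from-beginning | head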
diff --git a/external-flume/conf/connection/pro/f2k1.properties b/external-flume/conf/connection/pro/f2k1.properties
new file mode 100644
index 0000000..2dadced
--- /dev/null
+++ b/external-flume/conf/connection/pro/f2k1.properties
@@ -0,0 +1,23 @@
+a2.sources = s2
+a2.channels = c2
+# Send the data collected by the source to this channel
+a2.sources.s2.channels = c2
+#taildir source
+a2.sources.s2.type = TAILDIR
+a2.sources.s2.channels = c2
+a2.sources.s2.positionFile = /home/test/1taildir_position.json
+a2.sources.s2.filegroups = f1
+a2.sources.s2.filegroups.f1 = /home/test/log/.*[0-2].dat
+a2.sources.s2.maxBatchCount = 1000
+
+# The Kafka channel acts as the producer
+a2.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c2.brokerList = 192.168.40.222:9093
+a2.channels.c2.topic = recv_test
+# false writes records as plain text; true writes them as Flume events, which come out garbled when read as text (the default is true)
+a2.channels.c2.parseAsFlumeEvent = false
+a2.channels.c2.kafka.producer.acks = 1
+a2.channels.c2.producer.type=async
+a2.channels.c2.queue.buffering.max.ms = 5000
+a2.channels.c2.queue.buffering.max.messages=20000
+a2.channels.c2.batch.num.messages=500
diff --git a/external-flume/conf/connection/pro/f2k2.properties b/external-flume/conf/connection/pro/f2k2.properties
new file mode 100644
index 0000000..e17c3e2
--- /dev/null
+++ b/external-flume/conf/connection/pro/f2k2.properties
@@ -0,0 +1,23 @@
+a2.sources = s2
+a2.channels = c2
+# Send the data collected by the source to this channel
+a2.sources.s2.channels = c2
+#taildir source
+a2.sources.s2.type = TAILDIR
+a2.sources.s2.channels = c2
+a2.sources.s2.positionFile = /home/test/2taildir_position.json
+a2.sources.s2.filegroups = f1
+a2.sources.s2.filegroups.f1 = /home/test/log/.*[3-4].dat
+a2.sources.s2.maxBatchCount = 1000
+
+# The Kafka channel acts as the producer
+a2.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c2.brokerList = 192.168.40.222:9093
+a2.channels.c2.topic = recv_test
+# false writes records as plain text; true writes them as Flume events, which come out garbled when read as text (the default is true)
+a2.channels.c2.parseAsFlumeEvent = false
+a2.channels.c2.kafka.producer.acks = 1
+a2.channels.c2.producer.type=async
+a2.channels.c2.queue.buffering.max.ms = 5000
+a2.channels.c2.queue.buffering.max.messages=20000
+a2.channels.c2.batch.num.messages=500
diff --git a/external-flume/conf/connection/pro/f2k3.properties b/external-flume/conf/connection/pro/f2k3.properties
new file mode 100644
index 0000000..2dadced
--- /dev/null
+++ b/external-flume/conf/connection/pro/f2k3.properties
@@ -0,0 +1,23 @@
+a2.sources = s2
+a2.channels = c2
+# Send the data collected by the source to this channel
+a2.sources.s2.channels = c2
+#taildir source
+a2.sources.s2.type = TAILDIR
+a2.sources.s2.channels = c2
+a2.sources.s2.positionFile = /home/test/1taildir_position.json
+a2.sources.s2.filegroups = f1
+a2.sources.s2.filegroups.f1 = /home/test/log/.*[0-2].dat
+a2.sources.s2.maxBatchCount = 1000
+
+# The Kafka channel acts as the producer
+a2.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c2.brokerList = 192.168.40.222:9093
+a2.channels.c2.topic = recv_test
+# false writes records as plain text; true writes them as Flume events, which come out garbled when read as text (the default is true)
+a2.channels.c2.parseAsFlumeEvent = false
+a2.channels.c2.kafka.producer.acks = 1
+a2.channels.c2.producer.type=async
+a2.channels.c2.queue.buffering.max.ms = 5000
+a2.channels.c2.queue.buffering.max.messages=20000
+a2.channels.c2.batch.num.messages=500
diff --git a/external-flume/conf/connection/pro/file.properties b/external-flume/conf/connection/pro/file.properties
new file mode 100644
index 0000000..f6813e7
--- /dev/null
+++ b/external-flume/conf/connection/pro/file.properties
@@ -0,0 +1,29 @@
+# Name the source, channel, and sink
+a1.sources = s1
+a1.channels = c1
+a1.sinks = k1
+# Send the data collected by the source to this channel
+a1.sources.s1.channels = c1
+# Source data-collection strategy
+a1.sources.s1.type = org.apache.flume.source.kafka.KafkaSource
+a1.sources.s1.batchSize = 3000
+a1.sources.s1.batchDurationMillis = 100
+a1.sources.s1.kafka.bootstrap.servers = 192.168.40.203:9092
+a1.sources.s1.kafka.topics = test
+a1.sources.s1.kafka.consumer.group.id = lxk_0429
+
+# Memory channel: all data is buffered in memory
+a1.channels.c1.type = memory
+a1.channels.c1.capacity = 2000000
+a1.channels.c1.transactionCapacity = 30000
+a1.channels.c1.byteCapacityBufferPercentage = 40
+a1.channels.c1.byteCapacity = 2147483648
+
+# Plain file_roll sink that writes the data to the local disk
+a1.sinks.k1.type = file_roll
+a1.sinks.k1.channel = c1
+a1.sinks.k1.sink.pathManager = default
+a1.sinks.k1.sink.pathManager.extension = dat
+a1.sinks.k1.sink.pathManager.prefix = test-
+a1.sinks.k1.sink.rollInterval = 30
+a1.sinks.k1.sink.directory = /home/test/log2
diff --git a/external-flume/conf/connection/pro/file1.properties b/external-flume/conf/connection/pro/file1.properties
new file mode 100644
index 0000000..17dac7f
--- /dev/null
+++ b/external-flume/conf/connection/pro/file1.properties
@@ -0,0 +1,29 @@
+# Name the source, channel, and sink
+a1.sources = s1
+a1.channels = c1
+a1.sinks = k1
+# Send the data collected by the source to this channel
+a1.sources.s1.channels = c1
+# Source data-collection strategy
+a1.sources.s1.type = org.apache.flume.source.kafka.KafkaSource
+a1.sources.s1.batchSize = 3000
+a1.sources.s1.batchDurationMillis = 100
+a1.sources.s1.kafka.bootstrap.servers = 192.168.40.152:9092
+a1.sources.s1.kafka.topics = CONNECTION-RECORD-LOG
+a1.sources.s1.kafka.consumer.group.id = source_0514
+
+# Memory channel: all data is buffered in memory
+a1.channels.c1.type = memory
+a1.channels.c1.capacity = 2000000
+a1.channels.c1.transactionCapacity = 30000
+a1.channels.c1.byteCapacityBufferPercentage = 40
+a1.channels.c1.byteCapacity = 2147483648
+
+# Plain file_roll sink that writes the data to the local disk
+a1.sinks.k1.type = file_roll
+a1.sinks.k1.channel = c1
+a1.sinks.k1.sink.pathManager = default
+a1.sinks.k1.sink.pathManager.extension = dat
+a1.sinks.k1.sink.pathManager.prefix = test-
+a1.sinks.k1.sink.rollInterval = 30
+a1.sinks.k1.sink.directory = /home/192.168.60.102/CONNECTION-RECORD-LOG
diff --git a/external-flume/conf/connection/pro/file2.properties b/external-flume/conf/connection/pro/file2.properties
new file mode 100644
index 0000000..f6813e7
--- /dev/null
+++ b/external-flume/conf/connection/pro/file2.properties
@@ -0,0 +1,29 @@
+# Name the source, channel, and sink
+a1.sources = s1
+a1.channels = c1
+a1.sinks = k1
+# Send the data collected by the source to this channel
+a1.sources.s1.channels = c1
+# Source data-collection strategy
+a1.sources.s1.type = org.apache.flume.source.kafka.KafkaSource
+a1.sources.s1.batchSize = 3000
+a1.sources.s1.batchDurationMillis = 100
+a1.sources.s1.kafka.bootstrap.servers = 192.168.40.203:9092
+a1.sources.s1.kafka.topics = test
+a1.sources.s1.kafka.consumer.group.id = lxk_0429
+
+# Memory channel: all data is buffered in memory
+a1.channels.c1.type = memory
+a1.channels.c1.capacity = 2000000
+a1.channels.c1.transactionCapacity = 30000
+a1.channels.c1.byteCapacityBufferPercentage = 40
+a1.channels.c1.byteCapacity = 2147483648
+
+# Plain file_roll sink that writes the data to the local disk
+a1.sinks.k1.type = file_roll
+a1.sinks.k1.channel = c1
+a1.sinks.k1.sink.pathManager = default
+a1.sinks.k1.sink.pathManager.extension = dat
+a1.sinks.k1.sink.pathManager.prefix = test-
+a1.sinks.k1.sink.rollInterval = 30
+a1.sinks.k1.sink.directory = /home/test/log2
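Note: the file*.properties variants all size the memory channel the same way. As a rough budget, assuming Flume's documented memory-channel semantics: byteCapacity = 2147483648 bytes (2 GiB) with byteCapacityBufferPercentage = 40 reserves 40% of that budget as headroom for event headers, leaving about 2147483648 x 0.6 ~ 1.29 GB for event bodies; independently, the channel caps out at capacity = 2,000,000 events, with at most transactionCapacity = 30,000 events per source or sink transaction.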
diff --git a/external-flume/conf/connection/pro/k2f.properties b/external-flume/conf/connection/pro/k2f.properties
new file mode 100644
index 0000000..35d7d68
--- /dev/null
+++ b/external-flume/conf/connection/pro/k2f.properties
@@ -0,0 +1,22 @@
+# Name the channel and sink
+a1.channels = c1
+a1.sinks = k1
+# Use a Kafka channel; omitting the source makes the pipeline more efficient
+a1.channels.c1.type = org.apache.flume.channel.kafka.KafkaChannel
+a1.channels.c1.kafka.bootstrap.servers = 192.168.40.119:9092
+a1.channels.c1.kafka.topic = test
+a1.channels.c1.kafka.consumer.group.id = lxk_0509
+a1.channels.c1.kafka.consumer.auto.offset.reset = latest
+a1.channels.c1.kafka.consumer.enable.auto.commit = true
+a1.channels.c1.kafka.consumer.fetch.max.wait.ms = 1000
+a1.channels.c1.kafka.consumer.fetch.min.bytes = 10485760
+a1.channels.c1.parseAsFlumeEvent = false
+
+# Plain file_roll sink that writes the data to the local disk
+a1.sinks.k1.type = file_roll
+a1.sinks.k1.channel = c1
+a1.sinks.k1.sink.pathManager = default
+a1.sinks.k1.sink.pathManager.extension = dat
+a1.sinks.k1.sink.pathManager.prefix = test-
+a1.sinks.k1.sink.rollInterval = 60
+a1.sinks.k1.sink.directory = /home/test/log
diff --git a/external-flume/conf/connection/pro/kafka.properties b/external-flume/conf/connection/pro/kafka.properties
new file mode 100644
index 0000000..da560b2
--- /dev/null
+++ b/external-flume/conf/connection/pro/kafka.properties
@@ -0,0 +1,25 @@
+a2.sources = s2
+a2.channels = c2
+# Send the data collected by the source to this channel
+a2.sources.s2.channels = c2
+#taildir source
+a2.sources.s2.type = TAILDIR
+a2.sources.s2.channels = c2
+a2.sources.s2.positionFile = /home/test/taildir_position.json
+a2.sources.s2.filegroups = f1
+a2.sources.s2.filegroups.f1 = /home/test/log/.*dat
+a2.sources.s2.filegroups.f2 = /home/test/log2/.*dat
+a2.sources.s2.maxBatchCount = 1000
+
+# The Kafka channel acts as the producer
+a2.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c2.brokerList = 192.168.40.203:9092
+a2.channels.c2.zookeeperConnect=192.168.40.203:2181/kafka
+a2.channels.c2.topic = recv_test
+# false writes records as plain text; true writes them as Flume events, which come out garbled when read as text (the default is true)
+a2.channels.c2.parseAsFlumeEvent = false
+a2.channels.c2.kafka.producer.acks = 1
+a2.channels.c2.producer.type=async
+a2.channels.c2.queue.buffering.max.ms = 5000
+a2.channels.c2.queue.buffering.max.messages=20000
+a2.channels.c2.batch.num.messages=500
diff --git a/external-flume/conf/connection/pro/kafka1.properties b/external-flume/conf/connection/pro/kafka1.properties
new file mode 100644
index 0000000..ed30b16
--- /dev/null
+++ b/external-flume/conf/connection/pro/kafka1.properties
@@ -0,0 +1,25 @@
+a2.sources = s2
+a2.channels = c2
+# Send the data collected by the source to this channel
+a2.sources.s2.channels = c2
+#taildir source
+a2.sources.s2.type = TAILDIR
+a2.sources.s2.channels = c2
+a2.sources.s2.positionFile = /var/taildir_position.json
+a2.sources.s2.filegroups = f1
+a2.sources.s2.filegroups.f1 = /home/test/log1/.*dat
+a2.sources.s2.filegroups.f2 = /home/test/log2/.*dat
+a2.sources.s2.maxBatchCount = 1000
+
+# The Kafka channel acts as the producer
+a2.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c2.brokerList = 192.168.40.203:9092
+a2.channels.c2.zookeeperConnect=192.168.40.203:2181/kafka
+a2.channels.c2.topic = recv_test
+# false writes records as plain text; true writes them as Flume events, which come out garbled when read as text (the default is true)
+a2.channels.c2.parseAsFlumeEvent = false
+a2.channels.c2.kafka.producer.acks = 1
+a2.channels.c2.producer.type=async
+a2.channels.c2.queue.buffering.max.ms = 5000
+a2.channels.c2.queue.buffering.max.messages=20000
+a2.channels.c2.batch.num.messages=500
diff --git a/external-flume/conf/connection/pro/kafka2.properties b/external-flume/conf/connection/pro/kafka2.properties
new file mode 100644
index 0000000..6040a05
--- /dev/null
+++ b/external-flume/conf/connection/pro/kafka2.properties
@@ -0,0 +1,25 @@
+a3.sources = s2
+a3.channels = c2
+# Send the data collected by the source to this channel
+a3.sources.s2.channels = c2
+#taildir source
+a3.sources.s2.type = TAILDIR
+a3.sources.s2.channels = c2
+a3.sources.s2.positionFile = /var/taildir2_position.json
+a3.sources.s2.filegroups = f2
+a3.sources.s2.filegroups.f1 = /home/test/log1/.*dat
+a3.sources.s2.filegroups.f2 = /home/test/log2/.*dat
+a3.sources.s2.maxBatchCount = 1000
+
+# The Kafka channel acts as the producer
+a3.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
+a3.channels.c2.brokerList = 192.168.40.203:9092
+a3.channels.c2.zookeeperConnect=192.168.40.203:2181/kafka
+a3.channels.c2.topic = recv_test
+# false writes records as plain text; true writes them as Flume events, which come out garbled when read as text (the default is true)
+a3.channels.c2.parseAsFlumeEvent = false
+a3.channels.c2.kafka.producer.acks = 1
+a3.channels.c2.producer.type=async
+a3.channels.c2.queue.buffering.max.ms = 5000
+a3.channels.c2.queue.buffering.max.messages=20000
+a3.channels.c2.batch.num.messages=500
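Note: kafka1.properties and kafka2.properties are laid out to run side by side on one host: they use distinct agent names (a2 vs. a3) and distinct position files, and each activates only the groups it lists in filegroups (the extra fN path definitions are inert). A launch sketch mirroring the flume-ng invocation used by the dae_* watchdogs (paths are assumptions):

    nohup bin/flume-ng agent -n a2 -c conf/connection/pro -f conf/connection/pro/kafka1.properties >/dev/null 2>&1 &
    nohup bin/flume-ng agent -n a3 -c conf/connection/pro -f conf/connection/pro/kafka2.properties >/dev/null 2>&1 &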
diff --git a/external-flume/conf/connection/pro/log4j.properties b/external-flume/conf/connection/pro/log4j.properties
new file mode 100644
index 0000000..c948bff
--- /dev/null
+++ b/external-flume/conf/connection/pro/log4j.properties
@@ -0,0 +1,68 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# Define some default values that can be overridden by system properties.
+#
+# For testing, it may also be convenient to specify
+# -Dflume.root.logger=DEBUG,console when launching flume.
+
+#flume.root.logger=DEBUG,console
+flume.root.logger=INFO,LOGFILE
+flume.log.dir=./logs
+flume.log.file=flume.log
+
+log4j.logger.org.apache.flume.lifecycle = INFO
+log4j.logger.org.jboss = WARN
+log4j.logger.org.mortbay = INFO
+log4j.logger.org.apache.avro.ipc.NettyTransceiver = WARN
+log4j.logger.org.apache.hadoop = INFO
+log4j.logger.org.apache.hadoop.hive = ERROR
+
+# Define the root logger to the system property "flume.root.logger".
+log4j.rootLogger=${flume.root.logger}
+
+
+# Stock log4j rolling file appender
+# Default log rotation configuration
+log4j.appender.LOGFILE=org.apache.log4j.RollingFileAppender
+log4j.appender.LOGFILE.MaxFileSize=100MB
+log4j.appender.LOGFILE.MaxBackupIndex=10
+log4j.appender.LOGFILE.File=${flume.log.dir}/${flume.log.file}
+log4j.appender.LOGFILE.layout=org.apache.log4j.PatternLayout
+log4j.appender.LOGFILE.layout.ConversionPattern=%d{dd MMM yyyy HH:mm:ss,SSS} %-5p [%t] (%C.%M:%L) %x - %m%n
+
+
+# Warning: If you enable the following appender it will fill up your disk if you don't have a cleanup job!
+# This uses the updated rolling file appender from log4j-extras that supports a reliable time-based rolling policy.
+# See http://logging.apache.org/log4j/companions/extras/apidocs/org/apache/log4j/rolling/TimeBasedRollingPolicy.html
+# Add "DAILY" to flume.root.logger above if you want to use this
+log4j.appender.DAILY=org.apache.log4j.rolling.RollingFileAppender
+log4j.appender.DAILY.rollingPolicy=org.apache.log4j.rolling.TimeBasedRollingPolicy
+log4j.appender.DAILY.rollingPolicy.ActiveFileName=${flume.log.dir}/${flume.log.file}
+log4j.appender.DAILY.rollingPolicy.FileNamePattern=${flume.log.dir}/${flume.log.file}.%d{yyyy-MM-dd}
+log4j.appender.DAILY.layout=org.apache.log4j.PatternLayout
+log4j.appender.DAILY.layout.ConversionPattern=%d{dd MMM yyyy HH:mm:ss,SSS} %-5p [%t] (%C.%M:%L) %x - %m%n
+
+
+# console
+# Add "console" to flume.root.logger above if you want to use this
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d (%t) [%p - %l] %m%n
diff --git a/external-flume/conf/connection/pro/tail.properties b/external-flume/conf/connection/pro/tail.properties
new file mode 100644
index 0000000..c677318
--- /dev/null
+++ b/external-flume/conf/connection/pro/tail.properties
@@ -0,0 +1,34 @@
+a2.sources = s2
+a2.channels = c2
+a2.sinks = k2
+# Send the data collected by the source to this channel
+a2.sources.s2.channels = c2
+
+a2.sources.s2.type = TAILDIR
+a2.sources.s2.channels = c2
+a2.sources.s2.positionFile = /home/test/taildir_position.json
+a2.sources.s2.filegroups = f1 f2
+a2.sources.s2.filegroups.f1 = /home/test/log1/.*dat
+a2.sources.s2.headers.f1.headerKey1 = value1
+a2.sources.s2.filegroups.f2 = /home/test/log2/.*dat
+a2.sources.s2.headers.f2.headerKey1 = value2
+a2.sources.s2.headers.f2.headerKey2 = value2-2
+a2.sources.s2.fileHeader = true
+a2.sources.s2.maxBatchCount = 1000
+
+
+# Memory channel: all data is buffered in memory
+a2.channels.c2.type = memory
+a2.channels.c2.capacity = 2000000
+a2.channels.c2.transactionCapacity = 30000
+a2.channels.c2.byteCapacityBufferPercentage = 40
+a2.channels.c2.byteCapacity = 2147483648
+
+# Plain file_roll sink that writes the data to the local disk
+a2.sinks.k2.type = file_roll
+a2.sinks.k2.channel = c2
+a2.sinks.k2.sink.pathManager = default
+a2.sinks.k2.sink.pathManager.extension = dat
+a2.sinks.k2.sink.pathManager.prefix = recv-
+a2.sinks.k2.sink.rollInterval = 60
+a2.sinks.k2.sink.directory = /home/test/recv1
diff --git a/external-flume/conf/connection/restart_log/restart_connection_k2f.log b/external-flume/conf/connection/restart_log/restart_connection_k2f.log
new file mode 100644
index 0000000..281d596
--- /dev/null
+++ b/external-flume/conf/connection/restart_log/restart_connection_k2f.log
@@ -0,0 +1,68 @@
+##############################################################
+20200513184750 ---> the connection_k2f_APP restart ---> 1
+20200513184800 ---> the connection_k2f_APP restart ---> 1
+20200513184810 ---> the connection_k2f_APP restart ---> 1
+20200513184820 ---> the connection_k2f_APP restart ---> 1
+20200513184830 ---> the connection_k2f_APP restart ---> 1
+20200513184840 ---> the connection_k2f_APP restart ---> 1
+20200513184850 ---> the connection_k2f_APP restart ---> 1
+20200513184900 ---> the connection_k2f_APP restart ---> 1
+20200513184910 ---> the connection_k2f_APP restart ---> 1
+20200513184920 ---> the connection_k2f_APP restart ---> 1
+20200513184930 ---> the connection_k2f_APP restart ---> 1
+20200513184940 ---> the connection_k2f_APP restart ---> 1
+20200513184950 ---> the connection_k2f_APP restart ---> 1
+20200513185000 ---> the
connection_k2f_APP restart ---> 1 +20200513185010 ---> the connection_k2f_APP restart ---> 1 +20200513185020 ---> the connection_k2f_APP restart ---> 1 +############################################################## +20200513185029 ---> the connection_k2f_APP restart ---> 1 +20200513185030 ---> the connection_k2f_APP restart ---> 1 +############################################################## +20200513185036 ---> the connection_k2f_APP restart ---> 1 +20200513185040 ---> the connection_k2f_APP restart ---> 1 +20200513185050 ---> the connection_k2f_APP restart ---> 1 +20200513185100 ---> the connection_k2f_APP restart ---> 1 +20200513185110 ---> the connection_k2f_APP restart ---> 1 +############################################################## +20200513185120 ---> the connection_k2f_APP restart ---> 1 +20200513185241 ---> the connection_k2f_APP restart ---> 1 +20200513185251 ---> the connection_k2f_APP restart ---> 1 +20200513185301 ---> the connection_k2f_APP restart ---> 1 +20200513185311 ---> the connection_k2f_APP restart ---> 1 +20200513185321 ---> the connection_k2f_APP restart ---> 1 +20200513185331 ---> the connection_k2f_APP restart ---> 1 +############################################################## +20200513185405 ---> the connection_k2f_APP restart ---> 1 +20200513185611 ---> the connection_k2f_APP restart ---> 1 +20200513185621 ---> the connection_k2f_APP restart ---> 1 +20200513185701 ---> the connection_k2f_APP restart ---> 1 +20200513185741 ---> the connection_k2f_APP restart ---> 1 +20200513185752 ---> the connection_k2f_APP restart ---> 1 +20200513185802 ---> the connection_k2f_APP restart ---> 1 +20200513185812 ---> the connection_k2f_APP restart ---> 1 +20200513185822 ---> the connection_k2f_APP restart ---> 1 +20200513185832 ---> the connection_k2f_APP restart ---> 1 +20200513185922 ---> the connection_k2f_APP restart ---> 1 +20200513185932 ---> the connection_k2f_APP restart ---> 1 +20200513185942 ---> the connection_k2f_APP restart ---> 1 +20200513185952 ---> the connection_k2f_APP restart ---> 1 +20200513190002 ---> the connection_k2f_APP restart ---> 1 +20200513190012 ---> the connection_k2f_APP restart ---> 1 +20200513190022 ---> the connection_k2f_APP restart ---> 1 +20200513190032 ---> the connection_k2f_APP restart ---> 1 +20200513190152 ---> the connection_k2f_APP restart ---> 1 +20200513190202 ---> the connection_k2f_APP restart ---> 1 +20200513190212 ---> the connection_k2f_APP restart ---> 1 +20200513190222 ---> the connection_k2f_APP restart ---> 1 +20200513190232 ---> the connection_k2f_APP restart ---> 1 +20200513190242 ---> the connection_k2f_APP restart ---> 1 +20200513190252 ---> the connection_k2f_APP restart ---> 1 +20200513190302 ---> the connection_k2f_APP restart ---> 1 +20200513190312 ---> the connection_k2f_APP restart ---> 1 +############################################################## +20200513190331 ---> the connection_k2f_APP restart ---> 1 +############################################################## +20200513192014 ---> the connection_k2f_APP restart ---> 1 +############################################################## +20200513192212 ---> the connection_k2f_APP restart ---> 1 diff --git a/external-flume/conf/connection/restart_log/restart_f2k.log b/external-flume/conf/connection/restart_log/restart_f2k.log new file mode 100644 index 0000000..9d5fe99 --- /dev/null +++ b/external-flume/conf/connection/restart_log/restart_f2k.log @@ -0,0 +1,213 @@ +############################################################## +20200509174727 ---> 
the f2k_APP restart ---> 0 +20200509174737 ---> the f2k_APP restart ---> 1 +############################################################## +############################################################## +############################################################## +############################################################## +############################################################## +20200511183420 ---> the f2k_APP restart ---> 1 +20200511183430 ---> the f2k_APP restart ---> 2 +############################################################## +20200511183639 ---> the f2k_APP restart ---> 1 +20200511183649 ---> the f2k_APP restart ---> 2 +20200511183659 ---> the f2k_APP restart ---> 3 +20200511183709 ---> the f2k_APP restart ---> 4 +20200511183719 ---> the f2k_APP restart ---> 5 +20200511183729 ---> the f2k_APP restart ---> 6 +20200511183739 ---> the f2k_APP restart ---> 7 +20200511183749 ---> the f2k_APP restart ---> 8 +20200511183759 ---> the f2k_APP restart ---> 9 +20200511183809 ---> the f2k_APP restart ---> 10 +20200511183819 ---> the f2k_APP restart ---> 11 +20200511183829 ---> the f2k_APP restart ---> 12 +20200511183839 ---> the f2k_APP restart ---> 13 +20200511183849 ---> the f2k_APP restart ---> 14 +20200511183859 ---> the f2k_APP restart ---> 15 +20200511183909 ---> the f2k_APP restart ---> 16 +20200511183919 ---> the f2k_APP restart ---> 17 +20200511183930 ---> the f2k_APP restart ---> 18 +20200511183940 ---> the f2k_APP restart ---> 19 +20200511183950 ---> the f2k_APP restart ---> 20 +20200511184000 ---> the f2k_APP restart ---> 21 +20200511184010 ---> the f2k_APP restart ---> 22 +20200511184020 ---> the f2k_APP restart ---> 23 +20200511184030 ---> the f2k_APP restart ---> 24 +20200511184040 ---> the f2k_APP restart ---> 25 +20200511184050 ---> the f2k_APP restart ---> 26 +20200511184100 ---> the f2k_APP restart ---> 27 +20200511184110 ---> the f2k_APP restart ---> 28 +20200511184120 ---> the f2k_APP restart ---> 29 +20200511184130 ---> the f2k_APP restart ---> 30 +20200511184140 ---> the f2k_APP restart ---> 31 +20200511184150 ---> the f2k_APP restart ---> 32 +20200511184200 ---> the f2k_APP restart ---> 33 +20200511184210 ---> the f2k_APP restart ---> 34 +20200511184220 ---> the f2k_APP restart ---> 35 +20200511184230 ---> the f2k_APP restart ---> 36 +20200511184240 ---> the f2k_APP restart ---> 37 +20200511184250 ---> the f2k_APP restart ---> 38 +20200511184300 ---> the f2k_APP restart ---> 39 +20200511184310 ---> the f2k_APP restart ---> 40 +20200511184321 ---> the f2k_APP restart ---> 41 +20200511184331 ---> the f2k_APP restart ---> 42 +20200511184341 ---> the f2k_APP restart ---> 43 +20200511184351 ---> the f2k_APP restart ---> 44 +20200511184401 ---> the f2k_APP restart ---> 45 +20200511184411 ---> the f2k_APP restart ---> 46 +20200511184421 ---> the f2k_APP restart ---> 47 +20200511184431 ---> the f2k_APP restart ---> 48 +20200511184441 ---> the f2k_APP restart ---> 49 +20200511184451 ---> the f2k_APP restart ---> 50 +20200511184501 ---> the f2k_APP restart ---> 51 +20200511184511 ---> the f2k_APP restart ---> 52 +20200511184521 ---> the f2k_APP restart ---> 53 +20200511184531 ---> the f2k_APP restart ---> 54 +20200511184541 ---> the f2k_APP restart ---> 55 +20200511184551 ---> the f2k_APP restart ---> 56 +20200511184601 ---> the f2k_APP restart ---> 57 +20200511184611 ---> the f2k_APP restart ---> 58 +20200511184621 ---> the f2k_APP restart ---> 59 +20200511184631 ---> the f2k_APP restart ---> 60 +20200511184641 ---> the f2k_APP restart ---> 61 +20200511184651 
---> the f2k_APP restart ---> 62 +20200511184701 ---> the f2k_APP restart ---> 63 +20200511184711 ---> the f2k_APP restart ---> 64 +20200511184721 ---> the f2k_APP restart ---> 65 +20200511184732 ---> the f2k_APP restart ---> 66 +20200511184742 ---> the f2k_APP restart ---> 67 +20200511184752 ---> the f2k_APP restart ---> 68 +20200511184802 ---> the f2k_APP restart ---> 69 +20200511184812 ---> the f2k_APP restart ---> 70 +20200511184822 ---> the f2k_APP restart ---> 71 +############################################################## +20200511185311 ---> the f2k_APP restart ---> 1 +############################################################## +20200511185421 ---> the f2k_APP restart ---> 1 +############################################################## +20200511185532 ---> the f2k_APP restart ---> 1 +20200511185542 ---> the f2k_APP restart ---> 1 +20200511185552 ---> the f2k_APP restart ---> 1 +############################################################## +20200511185706 ---> the f2k_APP restart ---> 1 +20200511185716 ---> the f2k_APP restart ---> 1+1 +20200511185726 ---> the f2k_APP restart ---> 2+1 +############################################################## +20200511185837 ---> the f2k_APP restart ---> 1 +############################################################## +20200511185938 ---> the f2k_APP restart ---> 1 +############################################################## +20200511190054 ---> the f2k_APP restart ---> 1 +20200511190104 ---> the f2k_APP restart ---> 1+1 +############################################################## +20200511190604 ---> the f2k_APP restart ---> 1 +20200511190614 ---> the f2k_APP restart ---> 0 +20200511190624 ---> the f2k_APP restart ---> 1 +20200511190634 ---> the f2k_APP restart ---> 0 +############################################################## +20200511190729 ---> the f2k_APP restart ---> 1 +20200511190739 ---> the f2k_APP restart ---> 1 +20200511190749 ---> the f2k_APP restart ---> 1 +############################################################## +20200511190950 ---> the f2k_APP restart ---> 1 +20200511191000 ---> the f2k_APP restart ---> 2 +20200511191010 ---> the f2k_APP restart ---> 1 +############################################################## +20200511191834 ---> the f2k_APP restart ---> 1 +20200511191844 ---> the f2k_APP restart ---> 2 +############################################################## +20200511192013 ---> the f2k_APP restart ---> 1 +############################################################## +20200511192134 ---> the f2k_APP restart ---> 1 +20200511192144 ---> the f2k_APP restart ---> 2 +20200511192154 ---> the f2k_APP restart ---> 1 +20200511192204 ---> the f2k_APP restart ---> 2 +############################################################## +############################################################## +############################################################## +############################################################## +20200511193630 ---> the f2k_APP restart ---> 1 +20200511193640 ---> the f2k_APP restart ---> 2 +############################################################## +20200512101249 ---> the f2k_APP restart ---> 1 +20200512101259 ---> the f2k_APP restart ---> 2 +20200512101309 ---> the f2k_APP restart ---> 1 +############################################################## +20200512101433 ---> the f2k_APP restart ---> 1 +############################################################## +20200512101537 ---> the f2k_APP restart ---> 1 +20200512101547 ---> the f2k_APP restart ---> 2 +20200512101557 
---> the f2k_APP restart ---> 3 +20200512101707 ---> the f2k_APP restart ---> 1 +############################################################## +20200512102643 ---> the f2k_APP restart ---> 1 +20200512102653 ---> the f2k_APP restart ---> 2 +############################################################## +20200512102723 ---> the f2k_APP restart ---> 1 +20200512102733 ---> the f2k_APP restart ---> 2 +20200512102743 ---> the f2k_APP restart ---> 3 +############################################################## +20200512102936 ---> the f2k_APP restart ---> 1 +20200512102946 ---> the f2k_APP restart ---> 2 +20200512102956 ---> the f2k_APP restart ---> 3 +############################################################## +20200512103250 ---> the f2k_APP restart ---> 1 +20200512103300 ---> the f2k_APP restart ---> 2 +20200512103310 ---> the f2k_APP restart ---> 3 +############################################################## +20200512113819 ---> the f2k_APP restart ---> 1 +20200512113829 ---> the f2k_APP restart ---> 2 +20200512113839 ---> the f2k_APP restart ---> 3 +############################################################## +############################################################## +20200512114211 ---> the f2k_APP restart ---> 1 +20200512114241 ---> the f2k_APP restart ---> 2 +############################################################## +20200512114550 ---> the f2k_APP restart ---> 1 +20200512114600 ---> the f2k_APP restart ---> 2 +20200512114610 ---> the f2k_APP restart ---> 3 +############################################################## +############################################################## +20200512115341 ---> the f2k_APP restart ---> 1 +20200512115351 ---> the f2k_APP restart ---> 2 +20200512115401 ---> the f2k_APP restart ---> 3 +20200512115452 ---> the f2k_APP restart ---> 1 +20200512115502 ---> the f2k_APP restart ---> 2 +20200512115512 ---> the f2k_APP restart ---> 3 +20200512115712 ---> the f2k_APP restart ---> 1 +20200512115722 ---> the f2k_APP restart ---> 2 +20200512115732 ---> the f2k_APP restart ---> 3 +############################################################## +20200512115933 ---> the f2k_APP restart ---> 1 +20200512115943 ---> the f2k_APP restart ---> 2 +20200512115953 ---> the f2k_APP restart ---> 3 +############################################################## +20200512134239 ---> the f2k_APP restart ---> 1 +20200512134249 ---> the f2k_APP restart ---> 2 +20200512134259 ---> the f2k_APP restart ---> 3 +20200512134509 ---> the f2k_APP restart ---> 1 +20200512134519 ---> the f2k_APP restart ---> 2 +20200512134530 ---> the f2k_APP restart ---> 3 +20200512140003 ---> the f2k_APP restart ---> 1 +20200512140013 ---> the f2k_APP restart ---> 2 +20200512141056 ---> the f2k_APP restart ---> 1 +20200512141106 ---> the f2k_APP restart ---> 2 +20200512141116 ---> the f2k_APP restart ---> 3 +20200512142500 ---> the f2k_APP restart ---> 1 +20200512142510 ---> the f2k_APP restart ---> 2 +20200512142941 ---> the f2k_APP restart ---> 1 +20200512142951 ---> the f2k_APP restart ---> 2 +20200512143001 ---> the f2k_APP restart ---> 3 +20200512143031 ---> the f2k_APP restart ---> 1 +20200512143041 ---> the f2k_APP restart ---> 2 +20200512143051 ---> the f2k_APP restart ---> 3 +20200512144224 ---> the f2k_APP restart ---> 1 +20200512144234 ---> the f2k_APP restart ---> 2 +20200512144314 ---> the f2k_APP restart ---> 1 +20200512144324 ---> the f2k_APP restart ---> 2 +20200512144355 ---> the f2k_APP restart ---> 1 +20200512144405 ---> the f2k_APP restart ---> 2 +20200512144415 
---> the f2k_APP restart ---> 3 +20200512144635 ---> the f2k_APP restart ---> 1 +20200512144645 ---> the f2k_APP restart ---> 2 +20200512144655 ---> the f2k_APP restart ---> 3 diff --git a/external-flume/conf/connection/restart_log/restart_security_f2k.log b/external-flume/conf/connection/restart_log/restart_security_f2k.log new file mode 100644 index 0000000..690385d --- /dev/null +++ b/external-flume/conf/connection/restart_log/restart_security_f2k.log @@ -0,0 +1,134 @@ +############################################################## +20200512180055 ---> the security_f2k_APP restart ---> 1 +20200512180105 ---> the security_f2k_APP restart ---> 2 +############################################################## +20200512180108 ---> the security_f2k_APP restart ---> 1 +20200512180115 ---> the security_f2k_APP restart ---> 3 +20200512180118 ---> the security_f2k_APP restart ---> 2 +20200512180125 ---> the security_f2k_APP restart ---> 1 +20200512180128 ---> the security_f2k_APP restart ---> 3 +20200512180135 ---> the security_f2k_APP restart ---> 2 +20200512180138 ---> the security_f2k_APP restart ---> 1 +20200512180145 ---> the security_f2k_APP restart ---> 3 +20200512180148 ---> the security_f2k_APP restart ---> 2 +20200512180155 ---> the security_f2k_APP restart ---> 1 +20200512180158 ---> the security_f2k_APP restart ---> 3 +20200512180206 ---> the security_f2k_APP restart ---> 2 +20200512180208 ---> the security_f2k_APP restart ---> 1 +20200512180216 ---> the security_f2k_APP restart ---> 3 +20200512180218 ---> the security_f2k_APP restart ---> 2 +20200512180226 ---> the security_f2k_APP restart ---> 1 +20200512180228 ---> the security_f2k_APP restart ---> 3 +20200512180236 ---> the security_f2k_APP restart ---> 2 +20200512180238 ---> the security_f2k_APP restart ---> 1 +20200512180246 ---> the security_f2k_APP restart ---> 3 +20200512180248 ---> the security_f2k_APP restart ---> 2 +20200512180256 ---> the security_f2k_APP restart ---> 1 +20200512180258 ---> the security_f2k_APP restart ---> 3 +############################################################## +20200512180513 ---> the security_f2k_APP restart ---> 1 +20200512180523 ---> the security_f2k_APP restart ---> 2 +20200512180533 ---> the security_f2k_APP restart ---> 3 +############################################################## +20200512180536 ---> the security_f2k_APP restart ---> 1 +20200512180546 ---> the security_f2k_APP restart ---> 2 +20200512180556 ---> the security_f2k_APP restart ---> 3 +20200512180626 ---> the security_f2k_APP restart ---> 1 +20200512180637 ---> the security_f2k_APP restart ---> 2 +20200512180707 ---> the security_f2k_APP restart ---> 1 +20200512180717 ---> the security_f2k_APP restart ---> 2 +20200512180747 ---> the security_f2k_APP restart ---> 1 +20200512180757 ---> the security_f2k_APP restart ---> 2 +20200512180827 ---> the security_f2k_APP restart ---> 1 +20200512180837 ---> the security_f2k_APP restart ---> 2 +20200512180907 ---> the security_f2k_APP restart ---> 1 +20200512180917 ---> the security_f2k_APP restart ---> 2 +20200512180947 ---> the security_f2k_APP restart ---> 1 +20200512180957 ---> the security_f2k_APP restart ---> 2 +20200512181028 ---> the security_f2k_APP restart ---> 1 +20200512181038 ---> the security_f2k_APP restart ---> 2 +20200512181108 ---> the security_f2k_APP restart ---> 1 +20200512181118 ---> the security_f2k_APP restart ---> 2 +20200512181148 ---> the security_f2k_APP restart ---> 1 +20200512181158 ---> the security_f2k_APP restart ---> 2 +20200512181228 ---> the 
security_f2k_APP restart ---> 1 +20200512181238 ---> the security_f2k_APP restart ---> 2 +20200512181308 ---> the security_f2k_APP restart ---> 1 +20200512181318 ---> the security_f2k_APP restart ---> 2 +20200512181348 ---> the security_f2k_APP restart ---> 1 +20200512181358 ---> the security_f2k_APP restart ---> 2 +20200512181429 ---> the security_f2k_APP restart ---> 1 +20200512181439 ---> the security_f2k_APP restart ---> 2 +20200512181449 ---> the security_f2k_APP restart ---> 3 +20200512181519 ---> the security_f2k_APP restart ---> 1 +20200512181529 ---> the security_f2k_APP restart ---> 2 +20200512181539 ---> the security_f2k_APP restart ---> 3 +20200512181609 ---> the security_f2k_APP restart ---> 1 +20200512181619 ---> the security_f2k_APP restart ---> 2 +20200512181649 ---> the security_f2k_APP restart ---> 1 +20200512181659 ---> the security_f2k_APP restart ---> 2 +20200512181729 ---> the security_f2k_APP restart ---> 1 +20200512181739 ---> the security_f2k_APP restart ---> 2 +20200512181809 ---> the security_f2k_APP restart ---> 1 +20200512181820 ---> the security_f2k_APP restart ---> 2 +20200512181850 ---> the security_f2k_APP restart ---> 1 +20200512181900 ---> the security_f2k_APP restart ---> 2 +20200512181910 ---> the security_f2k_APP restart ---> 3 +20200512181920 ---> the security_f2k_APP restart ---> 1 +20200512181930 ---> the security_f2k_APP restart ---> 2 +20200512181940 ---> the security_f2k_APP restart ---> 3 +20200512181950 ---> the security_f2k_APP restart ---> 1 +20200512182000 ---> the security_f2k_APP restart ---> 2 +20200512182010 ---> the security_f2k_APP restart ---> 3 +20200512182020 ---> the security_f2k_APP restart ---> 1 +20200512182030 ---> the security_f2k_APP restart ---> 2 +20200512182040 ---> the security_f2k_APP restart ---> 3 +20200512182050 ---> the security_f2k_APP restart ---> 1 +20200512182100 ---> the security_f2k_APP restart ---> 2 +20200512182110 ---> the security_f2k_APP restart ---> 3 +20200512182120 ---> the security_f2k_APP restart ---> 1 +20200512182130 ---> the security_f2k_APP restart ---> 2 +20200512182140 ---> the security_f2k_APP restart ---> 3 +20200512182150 ---> the security_f2k_APP restart ---> 1 +20200512182200 ---> the security_f2k_APP restart ---> 2 +20200512182211 ---> the security_f2k_APP restart ---> 3 +############################################################## +20200512183204 ---> the security_f2k_APP restart ---> 1 +20200512183215 ---> the security_f2k_APP restart ---> 2 +20200512183225 ---> the security_f2k_APP restart ---> 3 +20200512183255 ---> the security_f2k_APP restart ---> 1 +20200512183305 ---> the security_f2k_APP restart ---> 2 +20200512183335 ---> the security_f2k_APP restart ---> 1 +20200512183345 ---> the security_f2k_APP restart ---> 2 +20200512183415 ---> the security_f2k_APP restart ---> 1 +20200512183425 ---> the security_f2k_APP restart ---> 2 +20200512183455 ---> the security_f2k_APP restart ---> 1 +20200512183505 ---> the security_f2k_APP restart ---> 2 +20200512183535 ---> the security_f2k_APP restart ---> 1 +20200512183545 ---> the security_f2k_APP restart ---> 2 +20200512183615 ---> the security_f2k_APP restart ---> 1 +20200512183626 ---> the security_f2k_APP restart ---> 2 +20200512183656 ---> the security_f2k_APP restart ---> 1 +20200512183706 ---> the security_f2k_APP restart ---> 2 +20200512183716 ---> the security_f2k_APP restart ---> 3 +20200512183726 ---> the security_f2k_APP restart ---> 1 +20200512183736 ---> the security_f2k_APP restart ---> 2 +20200512183746 ---> the 
security_f2k_APP restart ---> 3
+20200512183756 ---> the security_f2k_APP restart ---> 1
+20200512183806 ---> the security_f2k_APP restart ---> 2
+20200512183836 ---> the security_f2k_APP restart ---> 1
+20200512183846 ---> the security_f2k_APP restart ---> 2
+20200512183916 ---> the security_f2k_APP restart ---> 1
+20200512183926 ---> the security_f2k_APP restart ---> 2
+20200512183936 ---> the security_f2k_APP restart ---> 3
+20200512184006 ---> the security_f2k_APP restart ---> 1
+20200512184016 ---> the security_f2k_APP restart ---> 2
+20200512184027 ---> the security_f2k_APP restart ---> 3
+20200512184037 ---> the security_f2k_APP restart ---> 1
+20200512184047 ---> the security_f2k_APP restart ---> 2
+20200512184057 ---> the security_f2k_APP restart ---> 3
+20200512184107 ---> the security_f2k_APP restart ---> 1
+20200512184117 ---> the security_f2k_APP restart ---> 2
+20200512184127 ---> the security_f2k_APP restart ---> 3
+20200512184157 ---> the security_f2k_APP restart ---> 1
+20200512184207 ---> the security_f2k_APP restart ---> 2
diff --git a/external-flume/conf/connection/start_flume.sh b/external-flume/conf/connection/start_flume.sh
new file mode 100755
index 0000000..01dde8b
--- /dev/null
+++ b/external-flume/conf/connection/start_flume.sh
@@ -0,0 +1,5 @@
+#!/bin/sh
+
+BASE_DIR=$(cd $(dirname $0); pwd)
+DAE_FILE=$(cd $(dirname $0); ls | grep '^dae_')
+nohup ${BASE_DIR}/$DAE_FILE >/dev/null 2>&1 &
diff --git a/external-flume/conf/connection/stop_flume.sh b/external-flume/conf/connection/stop_flume.sh
new file mode 100755
index 0000000..e4417aa
--- /dev/null
+++ b/external-flume/conf/connection/stop_flume.sh
@@ -0,0 +1,28 @@
+#!/bin/sh
+
+#DAE_NAME=dae_k2ha.sh
+#JAR_NAME=k2ha.properties
+BASE_DIR=$(cd $(dirname $0); pwd)
+
+DAE_NAME=`ls ${BASE_DIR} | grep '^dae_' | grep '\.sh$'`
+JAR_NAME=`ls ${BASE_DIR} | grep 'k2' | grep '\.properties$'`
+
+NUM1=`ps -ef | grep ${DAE_NAME} | grep -v grep | wc -l`
+pids1=$(ps -ef | grep ${DAE_NAME} | grep -v grep | awk '{print $2}')
+if [ "${NUM1}" -ge "1" ];then
+    for pid1 in $pids1
+    do
+        kill -9 $pid1
+        echo 'killed '${DAE_NAME}' process-->'$pid1
+    done
+fi
+
+NUM2=`ps -ef | grep ${JAR_NAME} | grep -v grep | wc -l`
+pids2=$(ps -ef | grep ${JAR_NAME} | grep -v grep | awk '{print $2}')
+if [ "${NUM2}" -ge "1" ];then
+    for pid2 in $pids2
+    do
+        kill -9 $pid2
+        echo 'killed '${JAR_NAME}' process-->'$pid2
+    done
+fi
diff --git a/external-flume/conf/proxy/clean_start_flume.sh b/external-flume/conf/proxy/clean_start_flume.sh
new file mode 100755
index 0000000..ef811a5
--- /dev/null
+++ b/external-flume/conf/proxy/clean_start_flume.sh
@@ -0,0 +1,10 @@
+#!/bin/sh
+
+BASE_DIR=$(cd $(dirname $0); pwd)
+DAE_NAME=`ls ${BASE_DIR} | grep '^dae_' | grep '\.sh$'`
+
+
+# Clean up the logs, checkpoints, and historical data
+rm -rf ${BASE_DIR}/logs/*
+
+nohup ${BASE_DIR}/${DAE_NAME} $1 $2 >/dev/null 2>&1 &
diff --git a/external-flume/conf/proxy/count_flume.sh b/external-flume/conf/proxy/count_flume.sh
new file mode 100755
index 0000000..cc8452b
--- /dev/null
+++ b/external-flume/conf/proxy/count_flume.sh
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+BASE_DIR=$(cd $(dirname $0); pwd)
+
+JAR_NAME=`ls ${BASE_DIR} | grep 'k2' | grep '\.properties$'`
+
+NUM1=`ps -ef | grep ${JAR_NAME} | grep -v grep | wc -l`
+pids1=$(ps -ef | grep ${JAR_NAME} | grep -v grep | awk '{print $2}')
+echo 'flume '${JAR_NAME}' total process-->'${NUM1}
+if [ "${NUM1}" -ge "1" ];then
+    for pid1 in $pids1
+    do
+        echo 'flume '${JAR_NAME}' process-->'$pid1
+    done
+fi
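Note: stop_flume.sh has to kill the dae_* watchdog before the agent, since the watchdog would otherwise respawn the agent within ten seconds. It does, however, use kill -9 immediately, which skips Flume's shutdown hooks and can leave a half-written file_roll output file. A gentler variant for the agent loop, as a sketch:

    for pid2 in $pids2
    do
        kill $pid2                  # SIGTERM first: let Flume run its shutdown hooks
    done
    sleep 5
    for pid2 in $pids2
    do
        kill -9 $pid2 2>/dev/null   # force-kill anything still alive
    done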
diff --git a/external-flume/conf/proxy/dae_proxy.sh b/external-flume/conf/proxy/dae_proxy.sh
new file mode 100755
index 0000000..5716c9d
--- /dev/null
+++ b/external-flume/conf/proxy/dae_proxy.sh
@@ -0,0 +1,39 @@
+#!/bin/sh
+
+#JAR_NAME=ktk_ip_asn.properties
+PROPERTIES_NAME=proxy_k2f
+# Flume agent name
+FLUME_NAME=proxy
+# Flume root directory
+BASE_DIR=$(cd $(dirname $0); cd ../../; pwd)
+# Current directory
+CONF_DIR=$(cd $(dirname $0); pwd)
+# Total number of processes
+PROCESS_SUM=1
+
+echo "##############################################################" >> ${CONF_DIR}/restart_log/restart_${PROPERTIES_NAME}.log
+id=0 # serial number written after the restart message; no special purpose
+flag=0 # 0 = initial state; 1 = all processes started successfully; 2 = a process died unexpectedly, so the remaining processes were killed and everything is being restarted
+while true ; do
+    NUM=`ps -ef | grep ${PROPERTIES_NAME} | grep -v grep | grep -v dae | wc -l`
+    pids=$(ps -ef | grep ${PROPERTIES_NAME} | grep properties | awk '{print $2}')
+    time_stamp=$(date +%Y%m%d%H%M%S)
+    # If the count differs from the configured number of processes after a full start, kill them all and restart
+    if [[ "${NUM}" -ne ${PROCESS_SUM} && $flag -eq "1" ]];then
+        for pid in $pids
+        do
+            kill -9 $pid
+        done
+        flag=2
+    # If fewer processes are running than defined, start one
+    elif [ "${NUM}" -lt ${PROCESS_SUM} ];then
+        id=$(( ( ($id) % $PROCESS_SUM ) + 1 ))
+        nohup ${BASE_DIR}/bin/flume-ng agent -n ${FLUME_NAME} -c ${CONF_DIR} -f ${CONF_DIR}/${PROPERTIES_NAME}.properties >/dev/null 2>&1 &
+        echo "${time_stamp} ---> the ${PROPERTIES_NAME}_APP restart ---> $id" >> ${CONF_DIR}/restart_log/restart_${PROPERTIES_NAME}.log
+    # If the process count equals the configured number, set flag to 1 and id to 0
+    elif [ "${NUM}" -eq ${PROCESS_SUM} ];then
+        flag=1
+        id=0
+    fi
+    sleep 10
+done
diff --git a/external-flume/conf/proxy/flume-env.sh b/external-flume/conf/proxy/flume-env.sh
new file mode 100755
index 0000000..63979aa
--- /dev/null
+++ b/external-flume/conf/proxy/flume-env.sh
@@ -0,0 +1,35 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# If this file is placed at FLUME_CONF_DIR/flume-env.sh, it will be sourced
+# during Flume startup.
+
+# Environment variables can be set here.
+
+export JAVA_HOME=/usr/lib/jvm/jdk1.8.0_73
+
+# Give Flume more memory and pre-allocate, enable remote monitoring via JMX
+#export JAVA_OPTS="-Xms1024m -Xmx3072m -Dcom.sun.management.jmxremote"
+export JAVA_OPTS="-Xms512m -Xmx2048m -Xss256k -Xmn1g -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:-UseGCOverheadLimit -Dcom.sun.management.jmxremote"
+
+# Let Flume write raw event data and configuration information to its log files for debugging
+# purposes. Enabling these flags is not recommended in production,
+# as it may result in logging sensitive user information or encryption secrets.
+# export JAVA_OPTS="$JAVA_OPTS -Dorg.apache.flume.log.rawdata=true -Dorg.apache.flume.log.printconfig=true "
+
+# Note that the Flume conf directory is always included in the classpath.
+#FLUME_CLASSPATH="" + diff --git a/external-flume/conf/proxy/log4j.properties b/external-flume/conf/proxy/log4j.properties new file mode 100644 index 0000000..c948bff --- /dev/null +++ b/external-flume/conf/proxy/log4j.properties @@ -0,0 +1,68 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +# Define some default values that can be overridden by system properties. +# +# For testing, it may also be convenient to specify +# -Dflume.root.logger=DEBUG,console when launching flume. + +#flume.root.logger=DEBUG,console +flume.root.logger=INFO,LOGFILE +flume.log.dir=./logs +flume.log.file=flume.log + +log4j.logger.org.apache.flume.lifecycle = INFO +log4j.logger.org.jboss = WARN +log4j.logger.org.mortbay = INFO +log4j.logger.org.apache.avro.ipc.NettyTransceiver = WARN +log4j.logger.org.apache.hadoop = INFO +log4j.logger.org.apache.hadoop.hive = ERROR + +# Define the root logger to the system property "flume.root.logger". +log4j.rootLogger=${flume.root.logger} + + +# Stock log4j rolling file appender +# Default log rotation configuration +log4j.appender.LOGFILE=org.apache.log4j.RollingFileAppender +log4j.appender.LOGFILE.MaxFileSize=100MB +log4j.appender.LOGFILE.MaxBackupIndex=10 +log4j.appender.LOGFILE.File=${flume.log.dir}/${flume.log.file} +log4j.appender.LOGFILE.layout=org.apache.log4j.PatternLayout +log4j.appender.LOGFILE.layout.ConversionPattern=%d{dd MMM yyyy HH:mm:ss,SSS} %-5p [%t] (%C.%M:%L) %x - %m%n + + +# Warning: If you enable the following appender it will fill up your disk if you don't have a cleanup job! +# This uses the updated rolling file appender from log4j-extras that supports a reliable time-based rolling policy. 
diff --git a/external-flume/conf/proxy/log4j.properties b/external-flume/conf/proxy/log4j.properties
new file mode 100644
index 0000000..c948bff
--- /dev/null
+++ b/external-flume/conf/proxy/log4j.properties
@@ -0,0 +1,68 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# Define some default values that can be overridden by system properties.
+#
+# For testing, it may also be convenient to specify
+# -Dflume.root.logger=DEBUG,console when launching flume.
+
+#flume.root.logger=DEBUG,console
+flume.root.logger=INFO,LOGFILE
+flume.log.dir=./logs
+flume.log.file=flume.log
+
+log4j.logger.org.apache.flume.lifecycle = INFO
+log4j.logger.org.jboss = WARN
+log4j.logger.org.mortbay = INFO
+log4j.logger.org.apache.avro.ipc.NettyTransceiver = WARN
+log4j.logger.org.apache.hadoop = INFO
+log4j.logger.org.apache.hadoop.hive = ERROR
+
+# Define the root logger to the system property "flume.root.logger".
+log4j.rootLogger=${flume.root.logger}
+
+
+# Stock log4j rolling file appender
+# Default log rotation configuration
+log4j.appender.LOGFILE=org.apache.log4j.RollingFileAppender
+log4j.appender.LOGFILE.MaxFileSize=100MB
+log4j.appender.LOGFILE.MaxBackupIndex=10
+log4j.appender.LOGFILE.File=${flume.log.dir}/${flume.log.file}
+log4j.appender.LOGFILE.layout=org.apache.log4j.PatternLayout
+log4j.appender.LOGFILE.layout.ConversionPattern=%d{dd MMM yyyy HH:mm:ss,SSS} %-5p [%t] (%C.%M:%L) %x - %m%n
+
+
+# Warning: If you enable the following appender it will fill up your disk if you don't have a cleanup job!
+# This uses the updated rolling file appender from log4j-extras that supports a reliable time-based rolling policy.
+# See http://logging.apache.org/log4j/companions/extras/apidocs/org/apache/log4j/rolling/TimeBasedRollingPolicy.html
+# Add "DAILY" to flume.root.logger above if you want to use this
+log4j.appender.DAILY=org.apache.log4j.rolling.RollingFileAppender
+log4j.appender.DAILY.rollingPolicy=org.apache.log4j.rolling.TimeBasedRollingPolicy
+log4j.appender.DAILY.rollingPolicy.ActiveFileName=${flume.log.dir}/${flume.log.file}
+log4j.appender.DAILY.rollingPolicy.FileNamePattern=${flume.log.dir}/${flume.log.file}.%d{yyyy-MM-dd}
+log4j.appender.DAILY.layout=org.apache.log4j.PatternLayout
+log4j.appender.DAILY.layout.ConversionPattern=%d{dd MMM yyyy HH:mm:ss,SSS} %-5p [%t] (%C.%M:%L) %x - %m%n
+
+
+# console
+# Add "console" to flume.root.logger above if you want to use this
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d (%t) [%p - %l] %m%n
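[editor's note] As the comments above say, the logger can be overridden per launch without editing this file; mirroring the flume-ng invocation used in dae_proxy.sh:

    ${BASE_DIR}/bin/flume-ng agent -n proxy -c ${CONF_DIR} \
        -f ${CONF_DIR}/proxy_k2f.properties \
        -Dflume.root.logger=DEBUG,console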
diff --git a/external-flume/conf/proxy/pro/2f2k.properties b/external-flume/conf/proxy/pro/2f2k.properties
new file mode 100644
index 0000000..5e5c573
--- /dev/null
+++ b/external-flume/conf/proxy/pro/2f2k.properties
@@ -0,0 +1,24 @@
+a2.sources = s2
+a2.channels = c2
+# Send the data collected by the source to this channel
+a2.sources.s2.channels = c2
+#taildir source
+a2.sources.s2.type = TAILDIR
+a2.sources.s2.channels = c2
+a2.sources.s2.positionFile = /home/test/2taildir_position.json
+a2.sources.s2.filegroups = f2
+a2.sources.s2.filegroups.f2 = /home/test/log2/.*dat
+a2.sources.s2.maxBatchCount = 1000
+
+# Kafka channel acting as the producer
+a2.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c2.brokerList = 192.168.40.222:9092
+a2.channels.c2.zookeeperConnect=192.168.40.222:2181/kafka
+a2.channels.c2.topic = recv_test
+# false: write as plain text; true: write as serialized Flume events (which show up as garbled bytes); default is true
+a2.channels.c2.parseAsFlumeEvent = false
+a2.channels.c2.kafka.producer.acks = 1
+a2.channels.c2.producer.type=async
+a2.channels.c2.queue.buffering.max.ms = 5000
+a2.channels.c2.queue.buffering.max.messages=20000
+a2.channels.c2.batch.num.messages=500
diff --git a/external-flume/conf/proxy/pro/4f2k.properties b/external-flume/conf/proxy/pro/4f2k.properties
new file mode 100644
index 0000000..bf0408b
--- /dev/null
+++ b/external-flume/conf/proxy/pro/4f2k.properties
@@ -0,0 +1,58 @@
+a2.sources = s2
+a2.channels = c2 c3 c4
+# Send the data collected by the source to this channel
+a2.sources.s2.channels = c2
+#taildir source
+a2.sources.s2.type = TAILDIR
+a2.sources.s2.positionFile = /home/test/4taildir_position.json
+a2.sources.s2.filegroups = f3
+a2.sources.s2.filegroups.f3 = /home/test/recv/.*dat
+#a2.sources.s2.headers.f3.headerKey1 = channel2
+#a2.sources.s2.filegroups.f4 = /home/test/log/.*dat
+#a2.sources.s2.headers.f4.headerKey1 = channel3
+a2.sources.s2.maxBatchCount = 500
+#a2.sources.s2.selector.type = multiplexing
+
+#channel selector
+#a2.sources.s2.selector.header = headerKey1
+#a2.sources.s2.selector.mapping.channel2=c2
+#a2.sources.s2.selector.mapping.channel3=c3
+
+# Kafka channel acting as the producer
+a2.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c2.brokerList = 192.168.40.222:9093
+a2.channels.c2.topic = recv_test
+# false: write as plain text; true: write as serialized Flume events (which show up as garbled bytes); default is true
+a2.channels.c2.parseAsFlumeEvent = false
+a2.channels.c2.kafka.producer.acks = 1
+a2.channels.c2.producer.type=async
+a2.channels.c2.queue.buffering.max.ms = 5000
+a2.channels.c2.queue.buffering.max.messages=200000
+a2.channels.c2.batch.num.messages=5000
+
+# Second channel
+a2.channels.c3.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c3.brokerList = 192.168.40.222:9093
+a2.channels.c3.topic = recv_test
+# false: write as plain text; true: write as serialized Flume events (which show up as garbled bytes); default is true
+a2.channels.c3.parseAsFlumeEvent = false
+a2.channels.c3.kafka.producer.acks = 1
+a2.channels.c3.producer.type=async
+a2.channels.c3.queue.buffering.max.ms = 5000
+a2.channels.c3.queue.buffering.max.messages=20000
+a2.channels.c3.batch.num.messages=500
+
+
+
+# Third channel
+a2.channels.c4.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c4.brokerList = 192.168.40.222:9093
+a2.channels.c4.topic = recv_test
+# false: write as plain text; true: write as serialized Flume events (which show up as garbled bytes); default is true
+a2.channels.c4.parseAsFlumeEvent = false
+a2.channels.c4.kafka.producer.acks = 1
+a2.channels.c4.producer.type=async
+a2.channels.c4.queue.buffering.max.ms = 5000
+a2.channels.c4.queue.buffering.max.messages=20000
+a2.channels.c4.batch.num.messages=500
+
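[editor's note] 4f2k.properties declares a2.channels = c2 c3 c4 but binds the source only to c2, so c3 and c4 receive nothing unless the commented-out multiplexing selector is restored. If the intent is instead to duplicate every event into all three channels, Flume's default replicating selector only needs the wider binding; a sketch of the two lines to change:

    a2.sources.s2.channels = c2 c3 c4
    a2.sources.s2.selector.type = replicating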
diff --git a/external-flume/conf/proxy/pro/f2k.properties b/external-flume/conf/proxy/pro/f2k.properties
new file mode 100644
index 0000000..7047e42
--- /dev/null
+++ b/external-flume/conf/proxy/pro/f2k.properties
@@ -0,0 +1,24 @@
+a2.sources = s2
+a2.channels = c2
+# Send the data collected by the source to this channel
+a2.sources.s2.channels = c2
+#taildir source
+a2.sources.s2.type = TAILDIR
+a2.sources.s2.channels = c2
+a2.sources.s2.positionFile = /home/test/taildir_position.json
+a2.sources.s2.filegroups = f0
+a2.sources.s2.filegroups.f0 = /home/test/log/.*dat
+a2.sources.s2.maxBatchCount = 1000
+
+# Kafka channel acting as the producer
+a2.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c2.brokerList = 192.168.40.222:9092
+a2.channels.c2.zookeeperConnect=192.168.40.222:2181/kafka
+a2.channels.c2.topic = recv_test
+# false: write as plain text; true: write as serialized Flume events (which show up as garbled bytes); default is true
+a2.channels.c2.parseAsFlumeEvent = false
+a2.channels.c2.kafka.producer.acks = 1
+a2.channels.c2.producer.type=async
+a2.channels.c2.queue.buffering.max.ms = 5000
+a2.channels.c2.queue.buffering.max.messages=20000
+a2.channels.c2.batch.num.messages=500
diff --git a/external-flume/conf/proxy/pro/f2k1.properties b/external-flume/conf/proxy/pro/f2k1.properties
new file mode 100644
index 0000000..2dadced
--- /dev/null
+++ b/external-flume/conf/proxy/pro/f2k1.properties
@@ -0,0 +1,23 @@
+a2.sources = s2
+a2.channels = c2
+# Send the data collected by the source to this channel
+a2.sources.s2.channels = c2
+#taildir source
+a2.sources.s2.type = TAILDIR
+a2.sources.s2.channels = c2
+a2.sources.s2.positionFile = /home/test/1taildir_position.json
+a2.sources.s2.filegroups = f1
+a2.sources.s2.filegroups.f1 = /home/test/log/.*[0-2].dat
+a2.sources.s2.maxBatchCount = 1000
+
+# Kafka channel acting as the producer
+a2.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c2.brokerList = 192.168.40.222:9093
+a2.channels.c2.topic = recv_test
+# false: write as plain text; true: write as serialized Flume events (which show up as garbled bytes); default is true
+a2.channels.c2.parseAsFlumeEvent = false
+a2.channels.c2.kafka.producer.acks = 1
+a2.channels.c2.producer.type=async
+a2.channels.c2.queue.buffering.max.ms = 5000
+a2.channels.c2.queue.buffering.max.messages=20000
+a2.channels.c2.batch.num.messages=500
diff --git a/external-flume/conf/proxy/pro/f2k2.properties b/external-flume/conf/proxy/pro/f2k2.properties
new file mode 100644
index 0000000..e17c3e2
--- /dev/null
+++ b/external-flume/conf/proxy/pro/f2k2.properties
@@ -0,0 +1,23 @@
+a2.sources = s2
+a2.channels = c2
+# Send the data collected by the source to this channel
+a2.sources.s2.channels = c2
+#taildir source
+a2.sources.s2.type = TAILDIR
+a2.sources.s2.channels = c2
+a2.sources.s2.positionFile = /home/test/2taildir_position.json
+a2.sources.s2.filegroups = f1
+a2.sources.s2.filegroups.f1 = /home/test/log/.*[3-4].dat
+a2.sources.s2.maxBatchCount = 1000
+
+# Kafka channel acting as the producer
+a2.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c2.brokerList = 192.168.40.222:9093
+a2.channels.c2.topic = recv_test
+# false: write as plain text; true: write as serialized Flume events (which show up as garbled bytes); default is true
+a2.channels.c2.parseAsFlumeEvent = false
+a2.channels.c2.kafka.producer.acks = 1
+a2.channels.c2.producer.type=async
+a2.channels.c2.queue.buffering.max.ms = 5000
+a2.channels.c2.queue.buffering.max.messages=20000
+a2.channels.c2.batch.num.messages=500
diff --git a/external-flume/conf/proxy/pro/f2k3.properties b/external-flume/conf/proxy/pro/f2k3.properties
new file mode 100644
index 0000000..2dadced
--- /dev/null
+++ b/external-flume/conf/proxy/pro/f2k3.properties
@@ -0,0 +1,23 @@
+a2.sources = s2
+a2.channels = c2
+# Send the data collected by the source to this channel
+a2.sources.s2.channels = c2
+#taildir source
+a2.sources.s2.type = TAILDIR
+a2.sources.s2.channels = c2
+a2.sources.s2.positionFile = /home/test/1taildir_position.json
+a2.sources.s2.filegroups = f1
+a2.sources.s2.filegroups.f1 = /home/test/log/.*[0-2].dat
+a2.sources.s2.maxBatchCount = 1000
+
+# Kafka channel acting as the producer
+a2.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c2.brokerList = 192.168.40.222:9093
+a2.channels.c2.topic = recv_test
+# false: write as plain text; true: write as serialized Flume events (which show up as garbled bytes); default is true
+a2.channels.c2.parseAsFlumeEvent = false
+a2.channels.c2.kafka.producer.acks = 1
+a2.channels.c2.producer.type=async
+a2.channels.c2.queue.buffering.max.ms = 5000
+a2.channels.c2.queue.buffering.max.messages=20000
+a2.channels.c2.batch.num.messages=500
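[editor's note] f2k1.properties and f2k3.properties are byte-identical (both blobs are 2dadced): same filegroup /home/test/log/.*[0-2].dat and same positionFile /home/test/1taildir_position.json. Running both agents at once would ship every matching file twice and let two processes fight over the shared offset file; if a third agent is really wanted, it needs its own position file, e.g. (a sketch):

    a2.sources.s2.positionFile = /home/test/3taildir_position.json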
diff --git a/external-flume/conf/proxy/pro/file.properties b/external-flume/conf/proxy/pro/file.properties
new file mode 100644
index 0000000..f6813e7
--- /dev/null
+++ b/external-flume/conf/proxy/pro/file.properties
@@ -0,0 +1,29 @@
+# Name the source, channel, and sink
+a1.sources = s1
+a1.channels = c1
+a1.sinks = k1
+# Send the data collected by the source to this channel
+a1.sources.s1.channels = c1
+# Source type / collection strategy
+a1.sources.s1.type = org.apache.flume.source.kafka.KafkaSource
+a1.sources.s1.batchSize = 3000
+a1.sources.s1.batchDurationMillis = 100
+a1.sources.s1.kafka.bootstrap.servers = 192.168.40.203:9092
+a1.sources.s1.kafka.topics = test
+a1.sources.s1.kafka.consumer.group.id = lxk_0429
+
+# Memory channel: all data is buffered in memory
+a1.channels.c1.type = memory
+a1.channels.c1.capacity = 2000000
+a1.channels.c1.transactionCapacity = 30000
+a1.channels.c1.byteCapacityBufferPercentage = 40
+a1.channels.c1.byteCapacity = 2147483648
+
+# Plain file_roll sink writing data to local disk
+a1.sinks.k1.type = file_roll
+a1.sinks.k1.channel = c1
+a1.sinks.k1.sink.pathManager = default
+a1.sinks.k1.sink.pathManager.extension = dat
+a1.sinks.k1.sink.pathManager.prefix = test-
+a1.sinks.k1.sink.rollInterval = 30
+a1.sinks.k1.sink.directory = /home/test/log2
diff --git a/external-flume/conf/proxy/pro/file1.properties b/external-flume/conf/proxy/pro/file1.properties
new file mode 100644
index 0000000..17dac7f
--- /dev/null
+++ b/external-flume/conf/proxy/pro/file1.properties
@@ -0,0 +1,29 @@
+# Name the source, channel, and sink
+a1.sources = s1
+a1.channels = c1
+a1.sinks = k1
+# Send the data collected by the source to this channel
+a1.sources.s1.channels = c1
+# Source type / collection strategy
+a1.sources.s1.type = org.apache.flume.source.kafka.KafkaSource
+a1.sources.s1.batchSize = 3000
+a1.sources.s1.batchDurationMillis = 100
+a1.sources.s1.kafka.bootstrap.servers = 192.168.40.152:9092
+a1.sources.s1.kafka.topics = CONNECTION-RECORD-LOG
+a1.sources.s1.kafka.consumer.group.id = source_0514
+
+# Memory channel: all data is buffered in memory
+a1.channels.c1.type = memory
+a1.channels.c1.capacity = 2000000
+a1.channels.c1.transactionCapacity = 30000
+a1.channels.c1.byteCapacityBufferPercentage = 40
+a1.channels.c1.byteCapacity = 2147483648
+
+# Plain file_roll sink writing data to local disk
+a1.sinks.k1.type = file_roll
+a1.sinks.k1.channel = c1
+a1.sinks.k1.sink.pathManager = default
+a1.sinks.k1.sink.pathManager.extension = dat
+a1.sinks.k1.sink.pathManager.prefix = test-
+a1.sinks.k1.sink.rollInterval = 30
+a1.sinks.k1.sink.directory = /home/192.168.60.102/CONNECTION-RECORD-LOG
diff --git a/external-flume/conf/proxy/pro/file2.properties b/external-flume/conf/proxy/pro/file2.properties
new file mode 100644
index 0000000..f6813e7
--- /dev/null
+++ b/external-flume/conf/proxy/pro/file2.properties
@@ -0,0 +1,29 @@
+# Name the source, channel, and sink
+a1.sources = s1
+a1.channels = c1
+a1.sinks = k1
+# Send the data collected by the source to this channel
+a1.sources.s1.channels = c1
+# Source type / collection strategy
+a1.sources.s1.type = org.apache.flume.source.kafka.KafkaSource
+a1.sources.s1.batchSize = 3000
+a1.sources.s1.batchDurationMillis = 100
+a1.sources.s1.kafka.bootstrap.servers = 192.168.40.203:9092
+a1.sources.s1.kafka.topics = test
+a1.sources.s1.kafka.consumer.group.id = lxk_0429
+
+# Memory channel: all data is buffered in memory
+a1.channels.c1.type = memory
+a1.channels.c1.capacity = 2000000
+a1.channels.c1.transactionCapacity = 30000
+a1.channels.c1.byteCapacityBufferPercentage = 40
+a1.channels.c1.byteCapacity = 2147483648
+
+# Plain file_roll sink writing data to local disk
+a1.sinks.k1.type = file_roll
+a1.sinks.k1.channel = c1
+a1.sinks.k1.sink.pathManager = default
+a1.sinks.k1.sink.pathManager.extension = dat
+a1.sinks.k1.sink.pathManager.prefix = test-
+a1.sinks.k1.sink.rollInterval = 30
+a1.sinks.k1.sink.directory = /home/test/log2
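[editor's note] Sizing arithmetic for the memory channels above: byteCapacity = 2147483648 bytes (2 GiB) with byteCapacityBufferPercentage = 40 reserves 40% of the budget for event headers, leaving roughly 1.29 GB for event bodies before the channel blocks; capacity = 2,000,000 independently caps the event count, and transactionCapacity = 30,000 bounds each put/take batch.

    echo $(( 2147483648 * (100 - 40) / 100 ))   # 1288490188 bytes usable for event bodies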
diff --git a/external-flume/conf/proxy/pro/k2f.properties b/external-flume/conf/proxy/pro/k2f.properties
new file mode 100644
index 0000000..35d7d68
--- /dev/null
+++ b/external-flume/conf/proxy/pro/k2f.properties
@@ -0,0 +1,22 @@
+# Name the channel and sink (no source is used)
+a1.channels = c1
+a1.sinks = k1
+# Kafka channel; omitting the source improves efficiency
+a1.channels.c1.type = org.apache.flume.channel.kafka.KafkaChannel
+a1.channels.c1.kafka.bootstrap.servers = 192.168.40.119:9092
+a1.channels.c1.kafka.topic = test
+a1.channels.c1.kafka.consumer.group.id = lxk_0509
+a1.channels.c1.kafka.consumer.auto.offset.reset = latest
+a1.channels.c1.kafka.consumer.enable.auto.commit = true
+a1.channels.c1.kafka.consumer.fetch.max.wait.ms = 1000
+a1.channels.c1.kafka.consumer.fetch.min.bytes = 10485760
+a1.channels.c1.parseAsFlumeEvent = false
+
+# Plain file_roll sink writing data to local disk
+a1.sinks.k1.type = file_roll
+a1.sinks.k1.channel = c1
+a1.sinks.k1.sink.pathManager = default
+a1.sinks.k1.sink.pathManager.extension = dat
+a1.sinks.k1.sink.pathManager.prefix = test-
+a1.sinks.k1.sink.rollInterval = 60
+a1.sinks.k1.sink.directory = /home/test/log
diff --git a/external-flume/conf/proxy/pro/kafka.properties b/external-flume/conf/proxy/pro/kafka.properties
new file mode 100644
index 0000000..da560b2
--- /dev/null
+++ b/external-flume/conf/proxy/pro/kafka.properties
@@ -0,0 +1,26 @@
+a2.sources = s2
+a2.channels = c2
+# Send the data collected by the source to this channel
+a2.sources.s2.channels = c2
+#taildir source
+a2.sources.s2.type = TAILDIR
+a2.sources.s2.channels = c2
+a2.sources.s2.positionFile = /home/test/taildir_position.json
+# list both filegroups; f2 was previously defined below but not listed here, so it was silently ignored
+a2.sources.s2.filegroups = f1 f2
+a2.sources.s2.filegroups.f1 = /home/test/log/.*dat
+a2.sources.s2.filegroups.f2 = /home/test/log2/.*dat
+a2.sources.s2.maxBatchCount = 1000
+
+# Kafka channel acting as the producer
+a2.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c2.brokerList = 192.168.40.203:9092
+a2.channels.c2.zookeeperConnect=192.168.40.203:2181/kafka
+a2.channels.c2.topic = recv_test
+# false: write as plain text; true: write as serialized Flume events (which show up as garbled bytes); default is true
+a2.channels.c2.parseAsFlumeEvent = false
+a2.channels.c2.kafka.producer.acks = 1
+a2.channels.c2.producer.type=async
+a2.channels.c2.queue.buffering.max.ms = 5000
+a2.channels.c2.queue.buffering.max.messages=20000
+a2.channels.c2.batch.num.messages=500
diff --git a/external-flume/conf/proxy/pro/kafka1.properties b/external-flume/conf/proxy/pro/kafka1.properties
new file mode 100644
index 0000000..ed30b16
--- /dev/null
+++ b/external-flume/conf/proxy/pro/kafka1.properties
@@ -0,0 +1,26 @@
+a2.sources = s2
+a2.channels = c2
+# Send the data collected by the source to this channel
+a2.sources.s2.channels = c2
+#taildir source
+a2.sources.s2.type = TAILDIR
+a2.sources.s2.channels = c2
+a2.sources.s2.positionFile = /var/taildir_position.json
+# list both filegroups; f2 was previously defined below but not listed here, so it was silently ignored
+a2.sources.s2.filegroups = f1 f2
+a2.sources.s2.filegroups.f1 = /home/test/log1/.*dat
+a2.sources.s2.filegroups.f2 = /home/test/log2/.*dat
+a2.sources.s2.maxBatchCount = 1000
+
+# Kafka channel acting as the producer
+a2.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c2.brokerList = 192.168.40.203:9092
+a2.channels.c2.zookeeperConnect=192.168.40.203:2181/kafka
+a2.channels.c2.topic = recv_test
+# false: write as plain text; true: write as serialized Flume events (which show up as garbled bytes); default is true
+a2.channels.c2.parseAsFlumeEvent = false
+a2.channels.c2.kafka.producer.acks = 1
+a2.channels.c2.producer.type=async
+a2.channels.c2.queue.buffering.max.ms = 5000
+a2.channels.c2.queue.buffering.max.messages=20000
+a2.channels.c2.batch.num.messages=500
diff --git a/external-flume/conf/proxy/pro/kafka2.properties b/external-flume/conf/proxy/pro/kafka2.properties
new file mode 100644
index 0000000..6040a05
--- /dev/null
+++ b/external-flume/conf/proxy/pro/kafka2.properties
@@ -0,0 +1,26 @@
+a3.sources = s2
+a3.channels = c2
+# Send the data collected by the source to this channel
+a3.sources.s2.channels = c2
+#taildir source
+a3.sources.s2.type = TAILDIR
+a3.sources.s2.channels = c2
+a3.sources.s2.positionFile = /var/taildir2_position.json
+# only f2 is active here; the f1 entry below is defined but never listed, so it is unused
+a3.sources.s2.filegroups = f2
+a3.sources.s2.filegroups.f1 = /home/test/log1/.*dat
+a3.sources.s2.filegroups.f2 = /home/test/log2/.*dat
+a3.sources.s2.maxBatchCount = 1000
+
+# Kafka channel acting as the producer
+a3.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
+a3.channels.c2.brokerList = 192.168.40.203:9092
+a3.channels.c2.zookeeperConnect=192.168.40.203:2181/kafka
+a3.channels.c2.topic = recv_test
+# false: write as plain text; true: write as serialized Flume events (which show up as garbled bytes); default is true
+a3.channels.c2.parseAsFlumeEvent = false
+a3.channels.c2.kafka.producer.acks = 1
+a3.channels.c2.producer.type=async
+a3.channels.c2.queue.buffering.max.ms = 5000
+a3.channels.c2.queue.buffering.max.messages=20000
+a3.channels.c2.batch.num.messages=500
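[editor's note] The kafka*.properties channels still use the old brokerList / zookeeperConnect key names, while k2f.properties and the top-level *_k2f.properties use the newer kafka.bootstrap.servers spelling; recent Flume releases treat the old keys as deprecated and, to my understanding, map them onto the new ones with a startup warning. A quick audit of which configs still carry the old keys:

    grep -rl 'brokerList\|zookeeperConnect' external-flume/conf | sort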
diff --git a/external-flume/conf/proxy/pro/log4j.properties b/external-flume/conf/proxy/pro/log4j.properties
new file mode 100644
index 0000000..c948bff
--- /dev/null
+++ b/external-flume/conf/proxy/pro/log4j.properties
@@ -0,0 +1,68 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# Define some default values that can be overridden by system properties.
+#
+# For testing, it may also be convenient to specify
+# -Dflume.root.logger=DEBUG,console when launching flume.
+
+#flume.root.logger=DEBUG,console
+flume.root.logger=INFO,LOGFILE
+flume.log.dir=./logs
+flume.log.file=flume.log
+
+log4j.logger.org.apache.flume.lifecycle = INFO
+log4j.logger.org.jboss = WARN
+log4j.logger.org.mortbay = INFO
+log4j.logger.org.apache.avro.ipc.NettyTransceiver = WARN
+log4j.logger.org.apache.hadoop = INFO
+log4j.logger.org.apache.hadoop.hive = ERROR
+
+# Define the root logger to the system property "flume.root.logger".
+log4j.rootLogger=${flume.root.logger}
+
+
+# Stock log4j rolling file appender
+# Default log rotation configuration
+log4j.appender.LOGFILE=org.apache.log4j.RollingFileAppender
+log4j.appender.LOGFILE.MaxFileSize=100MB
+log4j.appender.LOGFILE.MaxBackupIndex=10
+log4j.appender.LOGFILE.File=${flume.log.dir}/${flume.log.file}
+log4j.appender.LOGFILE.layout=org.apache.log4j.PatternLayout
+log4j.appender.LOGFILE.layout.ConversionPattern=%d{dd MMM yyyy HH:mm:ss,SSS} %-5p [%t] (%C.%M:%L) %x - %m%n
+
+
+# Warning: If you enable the following appender it will fill up your disk if you don't have a cleanup job!
+# This uses the updated rolling file appender from log4j-extras that supports a reliable time-based rolling policy.
+# See http://logging.apache.org/log4j/companions/extras/apidocs/org/apache/log4j/rolling/TimeBasedRollingPolicy.html
+# Add "DAILY" to flume.root.logger above if you want to use this
+log4j.appender.DAILY=org.apache.log4j.rolling.RollingFileAppender
+log4j.appender.DAILY.rollingPolicy=org.apache.log4j.rolling.TimeBasedRollingPolicy
+log4j.appender.DAILY.rollingPolicy.ActiveFileName=${flume.log.dir}/${flume.log.file}
+log4j.appender.DAILY.rollingPolicy.FileNamePattern=${flume.log.dir}/${flume.log.file}.%d{yyyy-MM-dd}
+log4j.appender.DAILY.layout=org.apache.log4j.PatternLayout
+log4j.appender.DAILY.layout.ConversionPattern=%d{dd MMM yyyy HH:mm:ss,SSS} %-5p [%t] (%C.%M:%L) %x - %m%n
+
+
+# console
+# Add "console" to flume.root.logger above if you want to use this
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d (%t) [%p - %l] %m%n
diff --git a/external-flume/conf/proxy/pro/tail.properties b/external-flume/conf/proxy/pro/tail.properties
new file mode 100644
index 0000000..c677318
--- /dev/null
+++ b/external-flume/conf/proxy/pro/tail.properties
@@ -0,0 +1,34 @@
+a2.sources = s2
+a2.channels = c2
+a2.sinks = k2
+# Send the data collected by the source to this channel
+a2.sources.s2.channels = c2
+
+a2.sources.s2.type = TAILDIR
+a2.sources.s2.channels = c2
+a2.sources.s2.positionFile = /home/test/taildir_position.json
+a2.sources.s2.filegroups = f1 f2
+a2.sources.s2.filegroups.f1 = /home/test/log1/.*dat
+a2.sources.s2.headers.f1.headerKey1 = value1
+a2.sources.s2.filegroups.f2 = /home/test/log2/.*dat
+a2.sources.s2.headers.f2.headerKey1 = value2
+a2.sources.s2.headers.f2.headerKey2 = value2-2
+a2.sources.s2.fileHeader = true
+a2.sources.s2.maxBatchCount = 1000
+
+
+# Memory channel: all data is buffered in memory
+a2.channels.c2.type = memory
+a2.channels.c2.capacity = 2000000
+a2.channels.c2.transactionCapacity = 30000
+a2.channels.c2.byteCapacityBufferPercentage = 40
+a2.channels.c2.byteCapacity = 2147483648
+
+# Plain file_roll sink writing data to local disk
+a2.sinks.k2.type = file_roll
+a2.sinks.k2.channel = c2
+a2.sinks.k2.sink.pathManager = default
+a2.sinks.k2.sink.pathManager.extension = dat
+a2.sinks.k2.sink.pathManager.prefix = recv-
+a2.sinks.k2.sink.rollInterval = 60
+a2.sinks.k2.sink.directory = /home/test/recv1
diff --git a/external-flume/conf/proxy/proxy_k2f.properties b/external-flume/conf/proxy/proxy_k2f.properties
new file mode 100644
index 0000000..155b433
--- /dev/null
+++ b/external-flume/conf/proxy/proxy_k2f.properties
@@ -0,0 +1,22 @@
+# Name the channel and sink (no source is used)
+proxy.channels = c1
+proxy.sinks = k1
+# Kafka channel; omitting the source improves efficiency
+proxy.channels.c1.type = org.apache.flume.channel.kafka.KafkaChannel
+proxy.channels.c1.kafka.bootstrap.servers = 192.168.40.152:9092
+proxy.channels.c1.kafka.topic = PROXY-EVENT-LOG
+proxy.channels.c1.kafka.consumer.group.id = lxk_0514
+proxy.channels.c1.kafka.consumer.auto.offset.reset = latest
+proxy.channels.c1.kafka.consumer.enable.auto.commit = true
+proxy.channels.c1.kafka.consumer.fetch.max.wait.ms = 1000
+proxy.channels.c1.kafka.consumer.fetch.min.bytes = 1048576
+proxy.channels.c1.parseAsFlumeEvent = false
+
+# Plain file_roll sink writing data to local disk
+proxy.sinks.k1.type = file_roll
+proxy.sinks.k1.channel = c1
+proxy.sinks.k1.sink.pathManager = default
+proxy.sinks.k1.sink.pathManager.extension = dat
+proxy.sinks.k1.sink.pathManager.prefix = test-
+proxy.sinks.k1.sink.rollInterval = 30
+proxy.sinks.k1.sink.directory = /home/192.168.60.101/PROXY-EVENT-LOG
diff --git a/external-flume/conf/proxy/start_flume.sh b/external-flume/conf/proxy/start_flume.sh
new file mode 100755
index 0000000..01dde8b
--- /dev/null
+++ b/external-flume/conf/proxy/start_flume.sh
@@ -0,0 +1,5 @@
+#!/bin/sh
+
+BASE_DIR=$(cd $(dirname $0); pwd)
+DAE_FILE=$(cd $(dirname $0); ls | grep '^dae_.*\.sh$')  # was: grep dae* (an unquoted glob, fragile)
+nohup ${BASE_DIR}/$DAE_FILE >/dev/null 2>&1 &
diff --git a/external-flume/conf/proxy/stop_flume.sh b/external-flume/conf/proxy/stop_flume.sh
new file mode 100755
index 0000000..e4417aa
--- /dev/null
+++ b/external-flume/conf/proxy/stop_flume.sh
@@ -0,0 +1,28 @@
+#!/bin/sh
+
+#DAE_NAME=dae_k2ha.sh
+#JAR_NAME=k2ha.properties
+BASE_DIR=$(cd $(dirname $0); pwd)
+
+DAE_NAME=`ls ${BASE_DIR} | grep '^dae_.*\.sh$'`
+JAR_NAME=`ls ${BASE_DIR} | grep 'k2f\.properties$'`  # was: grep ^f2*, which never matches proxy_k2f.properties
+
+NUM1=`ps -ef | grep ${DAE_NAME} | grep -v grep | wc -l`
+pids1=$(ps -ef | grep ${DAE_NAME} | grep -v grep | awk '{print $2}')
+if [ "${NUM1}" -ge "1" ];then
+    for pid1 in $pids1
+    do
+        kill -9 $pid1
+        echo 'killed '${DAE_NAME}' process-->'$pid1
+    done
+fi
+
+NUM2=`ps -ef | grep ${JAR_NAME} | grep -v grep | wc -l`
+pids2=$(ps -ef | grep ${JAR_NAME} | grep -v grep | awk '{print $2}')
+if [ "${NUM2}" -ge "1" ];then
+    for pid2 in $pids2
+    do
+        kill -9 $pid2
+        echo 'killed '${JAR_NAME}' process-->'$pid2
+    done
+fi
diff --git a/external-flume/conf/radius/clean_start_flume.sh b/external-flume/conf/radius/clean_start_flume.sh
new file mode 100755
index 0000000..ef811a5
--- /dev/null
+++ b/external-flume/conf/radius/clean_start_flume.sh
@@ -0,0 +1,10 @@
+#!/bin/sh
+
+BASE_DIR=$(cd $(dirname $0); pwd)
+DAE_NAME=`ls ${BASE_DIR} | grep '^dae_.*\.sh$'`
+
+
+# Clean up logs, checkpoints, and historical data
+rm -rf ${BASE_DIR}/logs/*
+
+nohup ${BASE_DIR}/${DAE_NAME} $1 $2 >/dev/null 2>&1 &
diff --git a/external-flume/conf/radius/count_flume.sh b/external-flume/conf/radius/count_flume.sh
new file mode 100755
index 0000000..cc8452b
--- /dev/null
+++ b/external-flume/conf/radius/count_flume.sh
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+BASE_DIR=$(cd $(dirname $0); pwd)
+
+JAR_NAME=`ls ${BASE_DIR} | grep 'k2f\.properties$'`  # was: grep ^k2*, which never matches radius_k2f.properties
+
+NUM1=`ps -ef | grep ${JAR_NAME} | grep -v grep | wc -l`
+pids1=$(ps -ef | grep ${JAR_NAME} | grep -v grep | awk '{print $2}')
+echo 'flume '${JAR_NAME}' total process-->'${NUM1}
+if [ "${NUM1}" -ge "1" ];then
+    for pid1 in $pids1
+    do
+        echo 'flume '${JAR_NAME}' process-->'$pid1
+    done
+fi
diff --git a/external-flume/conf/radius/dae_radius.sh b/external-flume/conf/radius/dae_radius.sh
new file mode 100755
index 0000000..04563ce
--- /dev/null
+++ b/external-flume/conf/radius/dae_radius.sh
@@ -0,0 +1,39 @@
+#!/bin/sh
+
+#JAR_NAME=ktk_ip_asn.properties
+PROPERTIES_NAME=radius_k2f
+# Flume agent name
+FLUME_NAME=radius
+# Flume root directory
+BASE_DIR=$(cd $(dirname $0); cd ../../; pwd)
+# Current directory
+CONF_DIR=$(cd $(dirname $0); pwd)
+# Total number of processes
+PROCESS_SUM=1
+
+echo "##############################################################" >> ${CONF_DIR}/restart_log/restart_${PROPERTIES_NAME}.log
+id=0  # sequence number appended to the log entry; no special meaning
+flag=0  # 0 = initializing; 1 = all processes started successfully; 2 = a process died unexpectedly, so the rest were killed and everything is being restarted
+while true ; do
+    NUM=`ps -ef | grep ${PROPERTIES_NAME} | grep -v grep | grep -v dae |wc -l`
+    pids=$(ps -ef | grep ${PROPERTIES_NAME}\* | grep properties | awk '{print $2}')
+    time_stamp=$(date +%Y%m%d%H%M%S)
+    # Count differs from the configured number after a full start: kill everything and restart
+    if [[ "${NUM}" -ne ${PROCESS_SUM} && $flag -eq "1" ]];then
+        for pid in $pids
+        do
+            kill -9 $pid
+        done
+        flag=2
+    # Fewer processes running than defined: start another one
+    elif [ "${NUM}" -lt ${PROCESS_SUM} ];then
+        id=$(( ( ($id) % $PROCESS_SUM ) + 1 ))
+        nohup ${BASE_DIR}/bin/flume-ng agent -n ${FLUME_NAME} -c ${CONF_DIR} -f ${CONF_DIR}/${PROPERTIES_NAME}.properties >/dev/null 2>&1 &
+        echo "${time_stamp} ---> the ${PROPERTIES_NAME}_APP restart ---> $id" >> ${CONF_DIR}/restart_log/restart_${PROPERTIES_NAME}.log
+    # Count equals the configured number: set flag to 1 and reset id to 0
+    elif [ "${NUM}" -eq ${PROCESS_SUM} ];then
+        flag=1
+        id=0
+    fi
+    sleep 10
+done
diff --git a/external-flume/conf/radius/flume-env.sh b/external-flume/conf/radius/flume-env.sh
new file mode 100755
index 0000000..63979aa
--- /dev/null
+++ b/external-flume/conf/radius/flume-env.sh
@@ -0,0 +1,35 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# If this file is placed at FLUME_CONF_DIR/flume-env.sh, it will be sourced
+# during Flume startup.
+
+# Environment variables can be set here.
+
+export JAVA_HOME=/usr/lib/jvm/jdk1.8.0_73
+
+# Give Flume more memory and pre-allocate, enable remote monitoring via JMX
+#export JAVA_OPTS="-Xms1024m -Xmx3072m -Dcom.sun.management.jmxremote"
+export JAVA_OPTS="-Xms512m -Xmx2048m -Xss256k -Xmn1g -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:-UseGCOverheadLimit -Dcom.sun.management.jmxremote"
+
+# Let Flume write raw event data and configuration information to its log files for debugging
+# purposes. Enabling these flags is not recommended in production,
+# as it may result in logging sensitive user information or encryption secrets.
+# export JAVA_OPTS="$JAVA_OPTS -Dorg.apache.flume.log.rawdata=true -Dorg.apache.flume.log.printconfig=true "
+
+# Note that the Flume conf directory is always included in the classpath.
+#FLUME_CLASSPATH=""
+
diff --git a/external-flume/conf/radius/log4j.properties b/external-flume/conf/radius/log4j.properties
new file mode 100644
index 0000000..c948bff
--- /dev/null
+++ b/external-flume/conf/radius/log4j.properties
@@ -0,0 +1,68 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# Define some default values that can be overridden by system properties.
+#
+# For testing, it may also be convenient to specify
+# -Dflume.root.logger=DEBUG,console when launching flume.
+
+#flume.root.logger=DEBUG,console
+flume.root.logger=INFO,LOGFILE
+flume.log.dir=./logs
+flume.log.file=flume.log
+
+log4j.logger.org.apache.flume.lifecycle = INFO
+log4j.logger.org.jboss = WARN
+log4j.logger.org.mortbay = INFO
+log4j.logger.org.apache.avro.ipc.NettyTransceiver = WARN
+log4j.logger.org.apache.hadoop = INFO
+log4j.logger.org.apache.hadoop.hive = ERROR
+
+# Define the root logger to the system property "flume.root.logger".
+log4j.rootLogger=${flume.root.logger}
+
+
+# Stock log4j rolling file appender
+# Default log rotation configuration
+log4j.appender.LOGFILE=org.apache.log4j.RollingFileAppender
+log4j.appender.LOGFILE.MaxFileSize=100MB
+log4j.appender.LOGFILE.MaxBackupIndex=10
+log4j.appender.LOGFILE.File=${flume.log.dir}/${flume.log.file}
+log4j.appender.LOGFILE.layout=org.apache.log4j.PatternLayout
+log4j.appender.LOGFILE.layout.ConversionPattern=%d{dd MMM yyyy HH:mm:ss,SSS} %-5p [%t] (%C.%M:%L) %x - %m%n
+
+
+# Warning: If you enable the following appender it will fill up your disk if you don't have a cleanup job!
+# This uses the updated rolling file appender from log4j-extras that supports a reliable time-based rolling policy.
+# See http://logging.apache.org/log4j/companions/extras/apidocs/org/apache/log4j/rolling/TimeBasedRollingPolicy.html
+# Add "DAILY" to flume.root.logger above if you want to use this
+log4j.appender.DAILY=org.apache.log4j.rolling.RollingFileAppender
+log4j.appender.DAILY.rollingPolicy=org.apache.log4j.rolling.TimeBasedRollingPolicy
+log4j.appender.DAILY.rollingPolicy.ActiveFileName=${flume.log.dir}/${flume.log.file}
+log4j.appender.DAILY.rollingPolicy.FileNamePattern=${flume.log.dir}/${flume.log.file}.%d{yyyy-MM-dd}
+log4j.appender.DAILY.layout=org.apache.log4j.PatternLayout
+log4j.appender.DAILY.layout.ConversionPattern=%d{dd MMM yyyy HH:mm:ss,SSS} %-5p [%t] (%C.%M:%L) %x - %m%n
+
+
+# console
+# Add "console" to flume.root.logger above if you want to use this
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d (%t) [%p - %l] %m%n
diff --git a/external-flume/conf/radius/pro/2f2k.properties b/external-flume/conf/radius/pro/2f2k.properties
new file mode 100644
index 0000000..5e5c573
--- /dev/null
+++ b/external-flume/conf/radius/pro/2f2k.properties
@@ -0,0 +1,24 @@
+a2.sources = s2
+a2.channels = c2
+# Send the data collected by the source to this channel
+a2.sources.s2.channels = c2
+#taildir source
+a2.sources.s2.type = TAILDIR
+a2.sources.s2.channels = c2
+a2.sources.s2.positionFile = /home/test/2taildir_position.json
+a2.sources.s2.filegroups = f2
+a2.sources.s2.filegroups.f2 = /home/test/log2/.*dat
+a2.sources.s2.maxBatchCount = 1000
+
+# Kafka channel acting as the producer
+a2.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c2.brokerList = 192.168.40.222:9092
+a2.channels.c2.zookeeperConnect=192.168.40.222:2181/kafka
+a2.channels.c2.topic = recv_test
+# false: write as plain text; true: write as serialized Flume events (which show up as garbled bytes); default is true
+a2.channels.c2.parseAsFlumeEvent = false
+a2.channels.c2.kafka.producer.acks = 1
+a2.channels.c2.producer.type=async
+a2.channels.c2.queue.buffering.max.ms = 5000
+a2.channels.c2.queue.buffering.max.messages=20000
+a2.channels.c2.batch.num.messages=500
diff --git a/external-flume/conf/radius/pro/4f2k.properties b/external-flume/conf/radius/pro/4f2k.properties
new file mode 100644
index 0000000..bf0408b
--- /dev/null
+++ b/external-flume/conf/radius/pro/4f2k.properties
@@ -0,0 +1,58 @@
+a2.sources = s2
+a2.channels = c2 c3 c4
+# Send the data collected by the source to this channel
+a2.sources.s2.channels = c2
+#taildir source
+a2.sources.s2.type = TAILDIR
+a2.sources.s2.positionFile = /home/test/4taildir_position.json
+a2.sources.s2.filegroups = f3
+a2.sources.s2.filegroups.f3 = /home/test/recv/.*dat
+#a2.sources.s2.headers.f3.headerKey1 = channel2
+#a2.sources.s2.filegroups.f4 = /home/test/log/.*dat
+#a2.sources.s2.headers.f4.headerKey1 = channel3
+a2.sources.s2.maxBatchCount = 500
+#a2.sources.s2.selector.type = multiplexing
+
+#channel selector
+#a2.sources.s2.selector.header = headerKey1
+#a2.sources.s2.selector.mapping.channel2=c2
+#a2.sources.s2.selector.mapping.channel3=c3
+
+# Kafka channel acting as the producer
+a2.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c2.brokerList = 192.168.40.222:9093
+a2.channels.c2.topic = recv_test
+# false: write as plain text; true: write as serialized Flume events (which show up as garbled bytes); default is true
+a2.channels.c2.parseAsFlumeEvent = false
+a2.channels.c2.kafka.producer.acks = 1
+a2.channels.c2.producer.type=async
+a2.channels.c2.queue.buffering.max.ms = 5000
+a2.channels.c2.queue.buffering.max.messages=200000
+a2.channels.c2.batch.num.messages=5000
+
+# Second channel
+a2.channels.c3.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c3.brokerList = 192.168.40.222:9093
+a2.channels.c3.topic = recv_test
+# false: write as plain text; true: write as serialized Flume events (which show up as garbled bytes); default is true
+a2.channels.c3.parseAsFlumeEvent = false
+a2.channels.c3.kafka.producer.acks = 1
+a2.channels.c3.producer.type=async
+a2.channels.c3.queue.buffering.max.ms = 5000
+a2.channels.c3.queue.buffering.max.messages=20000
+a2.channels.c3.batch.num.messages=500
+
+
+
+# Third channel
+a2.channels.c4.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c4.brokerList = 192.168.40.222:9093
+a2.channels.c4.topic = recv_test
+# false: write as plain text; true: write as serialized Flume events (which show up as garbled bytes); default is true
+a2.channels.c4.parseAsFlumeEvent = false
+a2.channels.c4.kafka.producer.acks = 1
+a2.channels.c4.producer.type=async
+a2.channels.c4.queue.buffering.max.ms = 5000
+a2.channels.c4.queue.buffering.max.messages=20000
+a2.channels.c4.batch.num.messages=500
+
diff --git a/external-flume/conf/radius/pro/f2k.properties b/external-flume/conf/radius/pro/f2k.properties
new file mode 100644
index 0000000..7047e42
--- /dev/null
+++ b/external-flume/conf/radius/pro/f2k.properties
@@ -0,0 +1,24 @@
+a2.sources = s2
+a2.channels = c2
+# Send the data collected by the source to this channel
+a2.sources.s2.channels = c2
+#taildir source
+a2.sources.s2.type = TAILDIR
+a2.sources.s2.channels = c2
+a2.sources.s2.positionFile = /home/test/taildir_position.json
+a2.sources.s2.filegroups = f0
+a2.sources.s2.filegroups.f0 = /home/test/log/.*dat
+a2.sources.s2.maxBatchCount = 1000
+
+# Kafka channel acting as the producer
+a2.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c2.brokerList = 192.168.40.222:9092
+a2.channels.c2.zookeeperConnect=192.168.40.222:2181/kafka
+a2.channels.c2.topic = recv_test
+# false: write as plain text; true: write as serialized Flume events (which show up as garbled bytes); default is true
+a2.channels.c2.parseAsFlumeEvent = false
+a2.channels.c2.kafka.producer.acks = 1
+a2.channels.c2.producer.type=async
+a2.channels.c2.queue.buffering.max.ms = 5000
+a2.channels.c2.queue.buffering.max.messages=20000
+a2.channels.c2.batch.num.messages=500
diff --git a/external-flume/conf/radius/pro/f2k1.properties b/external-flume/conf/radius/pro/f2k1.properties
new file mode 100644
index 0000000..2dadced
--- /dev/null
+++ b/external-flume/conf/radius/pro/f2k1.properties
@@ -0,0 +1,23 @@
+a2.sources = s2
+a2.channels = c2
+# Send the data collected by the source to this channel
+a2.sources.s2.channels = c2
+#taildir source
+a2.sources.s2.type = TAILDIR
+a2.sources.s2.channels = c2
+a2.sources.s2.positionFile = /home/test/1taildir_position.json
+a2.sources.s2.filegroups = f1
+a2.sources.s2.filegroups.f1 = /home/test/log/.*[0-2].dat
+a2.sources.s2.maxBatchCount = 1000
+
+# Kafka channel acting as the producer
+a2.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c2.brokerList = 192.168.40.222:9093
+a2.channels.c2.topic = recv_test
+# false: write as plain text; true: write as serialized Flume events (which show up as garbled bytes); default is true
+a2.channels.c2.parseAsFlumeEvent = false
+a2.channels.c2.kafka.producer.acks = 1
+a2.channels.c2.producer.type=async
+a2.channels.c2.queue.buffering.max.ms = 5000
+a2.channels.c2.queue.buffering.max.messages=20000
+a2.channels.c2.batch.num.messages=500
diff --git a/external-flume/conf/radius/pro/f2k2.properties b/external-flume/conf/radius/pro/f2k2.properties
new file mode 100644
index 0000000..e17c3e2
--- /dev/null
+++ b/external-flume/conf/radius/pro/f2k2.properties
@@ -0,0 +1,23 @@
+a2.sources = s2
+a2.channels = c2
+# Send the data collected by the source to this channel
+a2.sources.s2.channels = c2
+#taildir source
+a2.sources.s2.type = TAILDIR
+a2.sources.s2.channels = c2
+a2.sources.s2.positionFile = /home/test/2taildir_position.json
+a2.sources.s2.filegroups = f1
+a2.sources.s2.filegroups.f1 = /home/test/log/.*[3-4].dat
+a2.sources.s2.maxBatchCount = 1000
+
+# Kafka channel acting as the producer
+a2.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c2.brokerList = 192.168.40.222:9093
+a2.channels.c2.topic = recv_test
+# false: write as plain text; true: write as serialized Flume events (which show up as garbled bytes); default is true
+a2.channels.c2.parseAsFlumeEvent = false
+a2.channels.c2.kafka.producer.acks = 1
+a2.channels.c2.producer.type=async
+a2.channels.c2.queue.buffering.max.ms = 5000
+a2.channels.c2.queue.buffering.max.messages=20000
+a2.channels.c2.batch.num.messages=500
diff --git a/external-flume/conf/radius/pro/f2k3.properties b/external-flume/conf/radius/pro/f2k3.properties
new file mode 100644
index 0000000..2dadced
--- /dev/null
+++ b/external-flume/conf/radius/pro/f2k3.properties
@@ -0,0 +1,23 @@
+a2.sources = s2
+a2.channels = c2
+# Send the data collected by the source to this channel
+a2.sources.s2.channels = c2
+#taildir source
+a2.sources.s2.type = TAILDIR
+a2.sources.s2.channels = c2
+a2.sources.s2.positionFile = /home/test/1taildir_position.json
+a2.sources.s2.filegroups = f1
+a2.sources.s2.filegroups.f1 = /home/test/log/.*[0-2].dat
+a2.sources.s2.maxBatchCount = 1000
+
+# Kafka channel acting as the producer
+a2.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c2.brokerList = 192.168.40.222:9093
+a2.channels.c2.topic = recv_test
+# false: write as plain text; true: write as serialized Flume events (which show up as garbled bytes); default is true
+a2.channels.c2.parseAsFlumeEvent = false
+a2.channels.c2.kafka.producer.acks = 1
+a2.channels.c2.producer.type=async
+a2.channels.c2.queue.buffering.max.ms = 5000
+a2.channels.c2.queue.buffering.max.messages=20000
+a2.channels.c2.batch.num.messages=500
diff --git a/external-flume/conf/radius/pro/file.properties b/external-flume/conf/radius/pro/file.properties
new file mode 100644
index 0000000..f6813e7
--- /dev/null
+++ b/external-flume/conf/radius/pro/file.properties
@@ -0,0 +1,29 @@
+# Name the source, channel, and sink
+a1.sources = s1
+a1.channels = c1
+a1.sinks = k1
+# Send the data collected by the source to this channel
+a1.sources.s1.channels = c1
+# Source type / collection strategy
+a1.sources.s1.type = org.apache.flume.source.kafka.KafkaSource
+a1.sources.s1.batchSize = 3000
+a1.sources.s1.batchDurationMillis = 100
+a1.sources.s1.kafka.bootstrap.servers = 192.168.40.203:9092
+a1.sources.s1.kafka.topics = test
+a1.sources.s1.kafka.consumer.group.id = lxk_0429
+
+# Memory channel: all data is buffered in memory
+a1.channels.c1.type = memory
+a1.channels.c1.capacity = 2000000
+a1.channels.c1.transactionCapacity = 30000
+a1.channels.c1.byteCapacityBufferPercentage = 40
+a1.channels.c1.byteCapacity = 2147483648
+
+# Plain file_roll sink writing data to local disk
+a1.sinks.k1.type = file_roll
+a1.sinks.k1.channel = c1
+a1.sinks.k1.sink.pathManager = default
+a1.sinks.k1.sink.pathManager.extension = dat
+a1.sinks.k1.sink.pathManager.prefix = test-
+a1.sinks.k1.sink.rollInterval = 30
+a1.sinks.k1.sink.directory = /home/test/log2
diff --git a/external-flume/conf/radius/pro/file1.properties b/external-flume/conf/radius/pro/file1.properties
new file mode 100644
index 0000000..17dac7f
--- /dev/null
+++ b/external-flume/conf/radius/pro/file1.properties
@@ -0,0 +1,29 @@
+# Name the source, channel, and sink
+a1.sources = s1
+a1.channels = c1
+a1.sinks = k1
+# Send the data collected by the source to this channel
+a1.sources.s1.channels = c1
+# Source type / collection strategy
+a1.sources.s1.type = org.apache.flume.source.kafka.KafkaSource
+a1.sources.s1.batchSize = 3000
+a1.sources.s1.batchDurationMillis = 100
+a1.sources.s1.kafka.bootstrap.servers = 192.168.40.152:9092
+a1.sources.s1.kafka.topics = CONNECTION-RECORD-LOG
+a1.sources.s1.kafka.consumer.group.id = source_0514
+
+# Memory channel: all data is buffered in memory
+a1.channels.c1.type = memory
+a1.channels.c1.capacity = 2000000
+a1.channels.c1.transactionCapacity = 30000
+a1.channels.c1.byteCapacityBufferPercentage = 40
+a1.channels.c1.byteCapacity = 2147483648
+
+# Plain file_roll sink writing data to local disk
+a1.sinks.k1.type = file_roll
+a1.sinks.k1.channel = c1
+a1.sinks.k1.sink.pathManager = default
+a1.sinks.k1.sink.pathManager.extension = dat
+a1.sinks.k1.sink.pathManager.prefix = test-
+a1.sinks.k1.sink.rollInterval = 30
+a1.sinks.k1.sink.directory = /home/192.168.60.102/CONNECTION-RECORD-LOG
diff --git a/external-flume/conf/radius/pro/file2.properties b/external-flume/conf/radius/pro/file2.properties
new file mode 100644
index 0000000..f6813e7
--- /dev/null
+++ b/external-flume/conf/radius/pro/file2.properties
@@ -0,0 +1,29 @@
+# Name the source, channel, and sink
+a1.sources = s1
+a1.channels = c1
+a1.sinks = k1
+# Send the data collected by the source to this channel
+a1.sources.s1.channels = c1
+# Source type / collection strategy
+a1.sources.s1.type = org.apache.flume.source.kafka.KafkaSource
+a1.sources.s1.batchSize = 3000
+a1.sources.s1.batchDurationMillis = 100
+a1.sources.s1.kafka.bootstrap.servers = 192.168.40.203:9092
+a1.sources.s1.kafka.topics = test
+a1.sources.s1.kafka.consumer.group.id = lxk_0429
+
+# Memory channel: all data is buffered in memory
+a1.channels.c1.type = memory
+a1.channels.c1.capacity = 2000000
+a1.channels.c1.transactionCapacity = 30000
+a1.channels.c1.byteCapacityBufferPercentage = 40
+a1.channels.c1.byteCapacity = 2147483648
+
+# Plain file_roll sink writing data to local disk
+a1.sinks.k1.type = file_roll
+a1.sinks.k1.channel = c1
+a1.sinks.k1.sink.pathManager = default
+a1.sinks.k1.sink.pathManager.extension = dat
+a1.sinks.k1.sink.pathManager.prefix = test-
+a1.sinks.k1.sink.rollInterval = 30
+a1.sinks.k1.sink.directory = /home/test/log2
diff --git a/external-flume/conf/radius/pro/k2f.properties b/external-flume/conf/radius/pro/k2f.properties
new file mode 100644
index 0000000..35d7d68
--- /dev/null
+++ b/external-flume/conf/radius/pro/k2f.properties
@@ -0,0 +1,22 @@
+# Name the channel and sink (no source is used)
+a1.channels = c1
+a1.sinks = k1
+# Kafka channel; omitting the source improves efficiency
+a1.channels.c1.type = org.apache.flume.channel.kafka.KafkaChannel
+a1.channels.c1.kafka.bootstrap.servers = 192.168.40.119:9092
+a1.channels.c1.kafka.topic = test
+a1.channels.c1.kafka.consumer.group.id = lxk_0509
+a1.channels.c1.kafka.consumer.auto.offset.reset = latest
+a1.channels.c1.kafka.consumer.enable.auto.commit = true
+a1.channels.c1.kafka.consumer.fetch.max.wait.ms = 1000
+a1.channels.c1.kafka.consumer.fetch.min.bytes = 10485760
+a1.channels.c1.parseAsFlumeEvent = false
+
+# Plain file_roll sink writing data to local disk
+a1.sinks.k1.type = file_roll
+a1.sinks.k1.channel = c1
+a1.sinks.k1.sink.pathManager = default
+a1.sinks.k1.sink.pathManager.extension = dat
+a1.sinks.k1.sink.pathManager.prefix = test-
+a1.sinks.k1.sink.rollInterval = 60
+a1.sinks.k1.sink.directory = /home/test/log
diff --git a/external-flume/conf/radius/pro/kafka.properties b/external-flume/conf/radius/pro/kafka.properties
new file mode 100644
index 0000000..da560b2
--- /dev/null
+++ b/external-flume/conf/radius/pro/kafka.properties
@@ -0,0 +1,26 @@
+a2.sources = s2
+a2.channels = c2
+# Send the data collected by the source to this channel
+a2.sources.s2.channels = c2
+#taildir source
+a2.sources.s2.type = TAILDIR
+a2.sources.s2.channels = c2
+a2.sources.s2.positionFile = /home/test/taildir_position.json
+# list both filegroups; f2 was previously defined below but not listed here, so it was silently ignored
+a2.sources.s2.filegroups = f1 f2
+a2.sources.s2.filegroups.f1 = /home/test/log/.*dat
+a2.sources.s2.filegroups.f2 = /home/test/log2/.*dat
+a2.sources.s2.maxBatchCount = 1000
+
+# Kafka channel acting as the producer
+a2.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c2.brokerList = 192.168.40.203:9092
+a2.channels.c2.zookeeperConnect=192.168.40.203:2181/kafka
+a2.channels.c2.topic = recv_test
+# false: write as plain text; true: write as serialized Flume events (which show up as garbled bytes); default is true
+a2.channels.c2.parseAsFlumeEvent = false
+a2.channels.c2.kafka.producer.acks = 1
+a2.channels.c2.producer.type=async
+a2.channels.c2.queue.buffering.max.ms = 5000
+a2.channels.c2.queue.buffering.max.messages=20000
+a2.channels.c2.batch.num.messages=500
diff --git a/external-flume/conf/radius/pro/kafka1.properties b/external-flume/conf/radius/pro/kafka1.properties
new file mode 100644
index 0000000..ed30b16
--- /dev/null
+++ b/external-flume/conf/radius/pro/kafka1.properties
@@ -0,0 +1,26 @@
+a2.sources = s2
+a2.channels = c2
+# Send the data collected by the source to this channel
+a2.sources.s2.channels = c2
+#taildir source
+a2.sources.s2.type = TAILDIR
+a2.sources.s2.channels = c2
+a2.sources.s2.positionFile = /var/taildir_position.json
+# list both filegroups; f2 was previously defined below but not listed here, so it was silently ignored
+a2.sources.s2.filegroups = f1 f2
+a2.sources.s2.filegroups.f1 = /home/test/log1/.*dat
+a2.sources.s2.filegroups.f2 = /home/test/log2/.*dat
+a2.sources.s2.maxBatchCount = 1000
+
+# Kafka channel acting as the producer
+a2.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c2.brokerList = 192.168.40.203:9092
+a2.channels.c2.zookeeperConnect=192.168.40.203:2181/kafka
+a2.channels.c2.topic = recv_test
+# false: write as plain text; true: write as serialized Flume events (which show up as garbled bytes); default is true
+a2.channels.c2.parseAsFlumeEvent = false
+a2.channels.c2.kafka.producer.acks = 1
+a2.channels.c2.producer.type=async
+a2.channels.c2.queue.buffering.max.ms = 5000
+a2.channels.c2.queue.buffering.max.messages=20000
+a2.channels.c2.batch.num.messages=500
diff --git a/external-flume/conf/radius/pro/kafka2.properties b/external-flume/conf/radius/pro/kafka2.properties
new file mode 100644
index 0000000..6040a05
--- /dev/null
+++ b/external-flume/conf/radius/pro/kafka2.properties
@@ -0,0 +1,26 @@
+a3.sources = s2
+a3.channels = c2
+# Send the data collected by the source to this channel
+a3.sources.s2.channels = c2
+#taildir source
+a3.sources.s2.type = TAILDIR
+a3.sources.s2.channels = c2
+a3.sources.s2.positionFile = /var/taildir2_position.json
+# only f2 is active here; the f1 entry below is defined but never listed, so it is unused
+a3.sources.s2.filegroups = f2
+a3.sources.s2.filegroups.f1 = /home/test/log1/.*dat
+a3.sources.s2.filegroups.f2 = /home/test/log2/.*dat
+a3.sources.s2.maxBatchCount = 1000
+
+# Kafka channel acting as the producer
+a3.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
+a3.channels.c2.brokerList = 192.168.40.203:9092
+a3.channels.c2.zookeeperConnect=192.168.40.203:2181/kafka
+a3.channels.c2.topic = recv_test
+# false: write as plain text; true: write as serialized Flume events (which show up as garbled bytes); default is true
+a3.channels.c2.parseAsFlumeEvent = false
+a3.channels.c2.kafka.producer.acks = 1
+a3.channels.c2.producer.type=async
+a3.channels.c2.queue.buffering.max.ms = 5000
+a3.channels.c2.queue.buffering.max.messages=20000
+a3.channels.c2.batch.num.messages=500
diff --git a/external-flume/conf/radius/pro/log4j.properties b/external-flume/conf/radius/pro/log4j.properties
new file mode 100644
index 0000000..c948bff
--- /dev/null
+++ b/external-flume/conf/radius/pro/log4j.properties
@@ -0,0 +1,68 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# Define some default values that can be overridden by system properties.
+#
+# For testing, it may also be convenient to specify
+# -Dflume.root.logger=DEBUG,console when launching flume.
+
+#flume.root.logger=DEBUG,console
+flume.root.logger=INFO,LOGFILE
+flume.log.dir=./logs
+flume.log.file=flume.log
+
+log4j.logger.org.apache.flume.lifecycle = INFO
+log4j.logger.org.jboss = WARN
+log4j.logger.org.mortbay = INFO
+log4j.logger.org.apache.avro.ipc.NettyTransceiver = WARN
+log4j.logger.org.apache.hadoop = INFO
+log4j.logger.org.apache.hadoop.hive = ERROR
+
+# Define the root logger to the system property "flume.root.logger".
+log4j.rootLogger=${flume.root.logger}
+
+
+# Stock log4j rolling file appender
+# Default log rotation configuration
+log4j.appender.LOGFILE=org.apache.log4j.RollingFileAppender
+log4j.appender.LOGFILE.MaxFileSize=100MB
+log4j.appender.LOGFILE.MaxBackupIndex=10
+log4j.appender.LOGFILE.File=${flume.log.dir}/${flume.log.file}
+log4j.appender.LOGFILE.layout=org.apache.log4j.PatternLayout
+log4j.appender.LOGFILE.layout.ConversionPattern=%d{dd MMM yyyy HH:mm:ss,SSS} %-5p [%t] (%C.%M:%L) %x - %m%n
+
+
+# Warning: If you enable the following appender it will fill up your disk if you don't have a cleanup job!
+# This uses the updated rolling file appender from log4j-extras that supports a reliable time-based rolling policy.
+# See http://logging.apache.org/log4j/companions/extras/apidocs/org/apache/log4j/rolling/TimeBasedRollingPolicy.html
+# Add "DAILY" to flume.root.logger above if you want to use this
+log4j.appender.DAILY=org.apache.log4j.rolling.RollingFileAppender
+log4j.appender.DAILY.rollingPolicy=org.apache.log4j.rolling.TimeBasedRollingPolicy
+log4j.appender.DAILY.rollingPolicy.ActiveFileName=${flume.log.dir}/${flume.log.file}
+log4j.appender.DAILY.rollingPolicy.FileNamePattern=${flume.log.dir}/${flume.log.file}.%d{yyyy-MM-dd}
+log4j.appender.DAILY.layout=org.apache.log4j.PatternLayout
+log4j.appender.DAILY.layout.ConversionPattern=%d{dd MMM yyyy HH:mm:ss,SSS} %-5p [%t] (%C.%M:%L) %x - %m%n
+
+
+# console
+# Add "console" to flume.root.logger above if you want to use this
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d (%t) [%p - %l] %m%n
diff --git a/external-flume/conf/radius/pro/tail.properties b/external-flume/conf/radius/pro/tail.properties
new file mode 100644
index 0000000..c677318
--- /dev/null
+++ b/external-flume/conf/radius/pro/tail.properties
@@ -0,0 +1,34 @@
+a2.sources = s2
+a2.channels = c2
+a2.sinks = k2
+# Send the data collected by the source to this channel
+a2.sources.s2.channels = c2
+
+a2.sources.s2.type = TAILDIR
+a2.sources.s2.channels = c2
+a2.sources.s2.positionFile = /home/test/taildir_position.json
+a2.sources.s2.filegroups = f1 f2
+a2.sources.s2.filegroups.f1 = /home/test/log1/.*dat
+a2.sources.s2.headers.f1.headerKey1 = value1
+a2.sources.s2.filegroups.f2 = /home/test/log2/.*dat
+a2.sources.s2.headers.f2.headerKey1 = value2
+a2.sources.s2.headers.f2.headerKey2 = value2-2
+a2.sources.s2.fileHeader = true
+a2.sources.s2.maxBatchCount = 1000
+
+
+# Memory channel: all data is buffered in memory
+a2.channels.c2.type = memory
+a2.channels.c2.capacity = 2000000
+a2.channels.c2.transactionCapacity = 30000
+a2.channels.c2.byteCapacityBufferPercentage = 40
+a2.channels.c2.byteCapacity = 2147483648
+
+# Plain file_roll sink writing data to local disk
+a2.sinks.k2.type = file_roll
+a2.sinks.k2.channel = c2
+a2.sinks.k2.sink.pathManager = default
+a2.sinks.k2.sink.pathManager.extension = dat
+a2.sinks.k2.sink.pathManager.prefix = recv-
+a2.sinks.k2.sink.rollInterval = 60
+a2.sinks.k2.sink.directory = /home/test/recv1
diff --git a/external-flume/conf/radius/radius_k2f.properties b/external-flume/conf/radius/radius_k2f.properties
new file mode 100644
index 0000000..895414f
--- /dev/null
+++ b/external-flume/conf/radius/radius_k2f.properties
@@ -0,0 +1,22 @@
+# Name the channel and sink (no source is used)
+radius.channels = c1
+radius.sinks = k1
+# Kafka channel; omitting the source improves efficiency
+radius.channels.c1.type = org.apache.flume.channel.kafka.KafkaChannel
+radius.channels.c1.kafka.bootstrap.servers = 192.168.40.152:9092
+radius.channels.c1.kafka.topic = RADIUS-RECORD-LOG
+radius.channels.c1.kafka.consumer.group.id = lxk_0514
+radius.channels.c1.kafka.consumer.auto.offset.reset = latest
+radius.channels.c1.kafka.consumer.enable.auto.commit = true
+radius.channels.c1.kafka.consumer.fetch.max.wait.ms = 1000
+radius.channels.c1.kafka.consumer.fetch.min.bytes = 1048576
+radius.channels.c1.parseAsFlumeEvent = false
+
+# Plain file_roll sink writing data to local disk
+radius.sinks.k1.type = file_roll
+radius.sinks.k1.channel = c1
+radius.sinks.k1.sink.pathManager = default
+radius.sinks.k1.sink.pathManager.extension = dat
+radius.sinks.k1.sink.pathManager.prefix = test-
+radius.sinks.k1.sink.rollInterval = 30
+radius.sinks.k1.sink.directory = /home/192.168.60.101/RADIUS-RECORD-LOG diff --git a/external-flume/conf/radius/restart_log/restart_connection_k2f.log b/external-flume/conf/radius/restart_log/restart_connection_k2f.log new file mode 100644 index 0000000..281d596 --- /dev/null +++ b/external-flume/conf/radius/restart_log/restart_connection_k2f.log @@ -0,0 +1,68 @@ +############################################################## +20200513184750 ---> the connection_k2f_APP restart ---> 1 +20200513184800 ---> the connection_k2f_APP restart ---> 1 +20200513184810 ---> the connection_k2f_APP restart ---> 1 +20200513184820 ---> the connection_k2f_APP restart ---> 1 +20200513184830 ---> the connection_k2f_APP restart ---> 1 +20200513184840 ---> the connection_k2f_APP restart ---> 1 +20200513184850 ---> the connection_k2f_APP restart ---> 1 +20200513184900 ---> the connection_k2f_APP restart ---> 1 +20200513184910 ---> the connection_k2f_APP restart ---> 1 +20200513184920 ---> the connection_k2f_APP restart ---> 1 +20200513184930 ---> the connection_k2f_APP restart ---> 1 +20200513184940 ---> the connection_k2f_APP restart ---> 1 +20200513184950 ---> the connection_k2f_APP restart ---> 1 +20200513185000 ---> the connection_k2f_APP restart ---> 1 +20200513185010 ---> the connection_k2f_APP restart ---> 1 +20200513185020 ---> the connection_k2f_APP restart ---> 1 +############################################################## +20200513185029 ---> the connection_k2f_APP restart ---> 1 +20200513185030 ---> the connection_k2f_APP restart ---> 1 +############################################################## +20200513185036 ---> the connection_k2f_APP restart ---> 1 +20200513185040 ---> the connection_k2f_APP restart ---> 1 +20200513185050 ---> the connection_k2f_APP restart ---> 1 +20200513185100 ---> the connection_k2f_APP restart ---> 1 +20200513185110 ---> the connection_k2f_APP restart ---> 1 +############################################################## +20200513185120 ---> the connection_k2f_APP restart ---> 1 +20200513185241 ---> the connection_k2f_APP restart ---> 1 +20200513185251 ---> the connection_k2f_APP restart ---> 1 +20200513185301 ---> the connection_k2f_APP restart ---> 1 +20200513185311 ---> the connection_k2f_APP restart ---> 1 +20200513185321 ---> the connection_k2f_APP restart ---> 1 +20200513185331 ---> the connection_k2f_APP restart ---> 1 +############################################################## +20200513185405 ---> the connection_k2f_APP restart ---> 1 +20200513185611 ---> the connection_k2f_APP restart ---> 1 +20200513185621 ---> the connection_k2f_APP restart ---> 1 +20200513185701 ---> the connection_k2f_APP restart ---> 1 +20200513185741 ---> the connection_k2f_APP restart ---> 1 +20200513185752 ---> the connection_k2f_APP restart ---> 1 +20200513185802 ---> the connection_k2f_APP restart ---> 1 +20200513185812 ---> the connection_k2f_APP restart ---> 1 +20200513185822 ---> the connection_k2f_APP restart ---> 1 +20200513185832 ---> the connection_k2f_APP restart ---> 1 +20200513185922 ---> the connection_k2f_APP restart ---> 1 +20200513185932 ---> the connection_k2f_APP restart ---> 1 +20200513185942 ---> the connection_k2f_APP restart ---> 1 +20200513185952 ---> the connection_k2f_APP restart ---> 1 +20200513190002 ---> the connection_k2f_APP restart ---> 1 +20200513190012 ---> the connection_k2f_APP restart ---> 1 +20200513190022 ---> the connection_k2f_APP restart ---> 1 +20200513190032 ---> the connection_k2f_APP restart ---> 1 +20200513190152 ---> the 
connection_k2f_APP restart ---> 1 +20200513190202 ---> the connection_k2f_APP restart ---> 1 +20200513190212 ---> the connection_k2f_APP restart ---> 1 +20200513190222 ---> the connection_k2f_APP restart ---> 1 +20200513190232 ---> the connection_k2f_APP restart ---> 1 +20200513190242 ---> the connection_k2f_APP restart ---> 1 +20200513190252 ---> the connection_k2f_APP restart ---> 1 +20200513190302 ---> the connection_k2f_APP restart ---> 1 +20200513190312 ---> the connection_k2f_APP restart ---> 1 +############################################################## +20200513190331 ---> the connection_k2f_APP restart ---> 1 +############################################################## +20200513192014 ---> the connection_k2f_APP restart ---> 1 +############################################################## +20200513192212 ---> the connection_k2f_APP restart ---> 1 diff --git a/external-flume/conf/radius/restart_log/restart_f2k.log b/external-flume/conf/radius/restart_log/restart_f2k.log new file mode 100644 index 0000000..9d5fe99 --- /dev/null +++ b/external-flume/conf/radius/restart_log/restart_f2k.log @@ -0,0 +1,213 @@ +############################################################## +20200509174727 ---> the f2k_APP restart ---> 0 +20200509174737 ---> the f2k_APP restart ---> 1 +############################################################## +############################################################## +############################################################## +############################################################## +############################################################## +20200511183420 ---> the f2k_APP restart ---> 1 +20200511183430 ---> the f2k_APP restart ---> 2 +############################################################## +20200511183639 ---> the f2k_APP restart ---> 1 +20200511183649 ---> the f2k_APP restart ---> 2 +20200511183659 ---> the f2k_APP restart ---> 3 +20200511183709 ---> the f2k_APP restart ---> 4 +20200511183719 ---> the f2k_APP restart ---> 5 +20200511183729 ---> the f2k_APP restart ---> 6 +20200511183739 ---> the f2k_APP restart ---> 7 +20200511183749 ---> the f2k_APP restart ---> 8 +20200511183759 ---> the f2k_APP restart ---> 9 +20200511183809 ---> the f2k_APP restart ---> 10 +20200511183819 ---> the f2k_APP restart ---> 11 +20200511183829 ---> the f2k_APP restart ---> 12 +20200511183839 ---> the f2k_APP restart ---> 13 +20200511183849 ---> the f2k_APP restart ---> 14 +20200511183859 ---> the f2k_APP restart ---> 15 +20200511183909 ---> the f2k_APP restart ---> 16 +20200511183919 ---> the f2k_APP restart ---> 17 +20200511183930 ---> the f2k_APP restart ---> 18 +20200511183940 ---> the f2k_APP restart ---> 19 +20200511183950 ---> the f2k_APP restart ---> 20 +20200511184000 ---> the f2k_APP restart ---> 21 +20200511184010 ---> the f2k_APP restart ---> 22 +20200511184020 ---> the f2k_APP restart ---> 23 +20200511184030 ---> the f2k_APP restart ---> 24 +20200511184040 ---> the f2k_APP restart ---> 25 +20200511184050 ---> the f2k_APP restart ---> 26 +20200511184100 ---> the f2k_APP restart ---> 27 +20200511184110 ---> the f2k_APP restart ---> 28 +20200511184120 ---> the f2k_APP restart ---> 29 +20200511184130 ---> the f2k_APP restart ---> 30 +20200511184140 ---> the f2k_APP restart ---> 31 +20200511184150 ---> the f2k_APP restart ---> 32 +20200511184200 ---> the f2k_APP restart ---> 33 +20200511184210 ---> the f2k_APP restart ---> 34 +20200511184220 ---> the f2k_APP restart ---> 35 +20200511184230 ---> the f2k_APP restart ---> 36 +20200511184240 ---> 
the f2k_APP restart ---> 37 +20200511184250 ---> the f2k_APP restart ---> 38 +20200511184300 ---> the f2k_APP restart ---> 39 +20200511184310 ---> the f2k_APP restart ---> 40 +20200511184321 ---> the f2k_APP restart ---> 41 +20200511184331 ---> the f2k_APP restart ---> 42 +20200511184341 ---> the f2k_APP restart ---> 43 +20200511184351 ---> the f2k_APP restart ---> 44 +20200511184401 ---> the f2k_APP restart ---> 45 +20200511184411 ---> the f2k_APP restart ---> 46 +20200511184421 ---> the f2k_APP restart ---> 47 +20200511184431 ---> the f2k_APP restart ---> 48 +20200511184441 ---> the f2k_APP restart ---> 49 +20200511184451 ---> the f2k_APP restart ---> 50 +20200511184501 ---> the f2k_APP restart ---> 51 +20200511184511 ---> the f2k_APP restart ---> 52 +20200511184521 ---> the f2k_APP restart ---> 53 +20200511184531 ---> the f2k_APP restart ---> 54 +20200511184541 ---> the f2k_APP restart ---> 55 +20200511184551 ---> the f2k_APP restart ---> 56 +20200511184601 ---> the f2k_APP restart ---> 57 +20200511184611 ---> the f2k_APP restart ---> 58 +20200511184621 ---> the f2k_APP restart ---> 59 +20200511184631 ---> the f2k_APP restart ---> 60 +20200511184641 ---> the f2k_APP restart ---> 61 +20200511184651 ---> the f2k_APP restart ---> 62 +20200511184701 ---> the f2k_APP restart ---> 63 +20200511184711 ---> the f2k_APP restart ---> 64 +20200511184721 ---> the f2k_APP restart ---> 65 +20200511184732 ---> the f2k_APP restart ---> 66 +20200511184742 ---> the f2k_APP restart ---> 67 +20200511184752 ---> the f2k_APP restart ---> 68 +20200511184802 ---> the f2k_APP restart ---> 69 +20200511184812 ---> the f2k_APP restart ---> 70 +20200511184822 ---> the f2k_APP restart ---> 71 +############################################################## +20200511185311 ---> the f2k_APP restart ---> 1 +############################################################## +20200511185421 ---> the f2k_APP restart ---> 1 +############################################################## +20200511185532 ---> the f2k_APP restart ---> 1 +20200511185542 ---> the f2k_APP restart ---> 1 +20200511185552 ---> the f2k_APP restart ---> 1 +############################################################## +20200511185706 ---> the f2k_APP restart ---> 1 +20200511185716 ---> the f2k_APP restart ---> 1+1 +20200511185726 ---> the f2k_APP restart ---> 2+1 +############################################################## +20200511185837 ---> the f2k_APP restart ---> 1 +############################################################## +20200511185938 ---> the f2k_APP restart ---> 1 +############################################################## +20200511190054 ---> the f2k_APP restart ---> 1 +20200511190104 ---> the f2k_APP restart ---> 1+1 +############################################################## +20200511190604 ---> the f2k_APP restart ---> 1 +20200511190614 ---> the f2k_APP restart ---> 0 +20200511190624 ---> the f2k_APP restart ---> 1 +20200511190634 ---> the f2k_APP restart ---> 0 +############################################################## +20200511190729 ---> the f2k_APP restart ---> 1 +20200511190739 ---> the f2k_APP restart ---> 1 +20200511190749 ---> the f2k_APP restart ---> 1 +############################################################## +20200511190950 ---> the f2k_APP restart ---> 1 +20200511191000 ---> the f2k_APP restart ---> 2 +20200511191010 ---> the f2k_APP restart ---> 1 +############################################################## +20200511191834 ---> the f2k_APP restart ---> 1 +20200511191844 ---> the f2k_APP restart ---> 
2 +############################################################## +20200511192013 ---> the f2k_APP restart ---> 1 +############################################################## +20200511192134 ---> the f2k_APP restart ---> 1 +20200511192144 ---> the f2k_APP restart ---> 2 +20200511192154 ---> the f2k_APP restart ---> 1 +20200511192204 ---> the f2k_APP restart ---> 2 +############################################################## +############################################################## +############################################################## +############################################################## +20200511193630 ---> the f2k_APP restart ---> 1 +20200511193640 ---> the f2k_APP restart ---> 2 +############################################################## +20200512101249 ---> the f2k_APP restart ---> 1 +20200512101259 ---> the f2k_APP restart ---> 2 +20200512101309 ---> the f2k_APP restart ---> 1 +############################################################## +20200512101433 ---> the f2k_APP restart ---> 1 +############################################################## +20200512101537 ---> the f2k_APP restart ---> 1 +20200512101547 ---> the f2k_APP restart ---> 2 +20200512101557 ---> the f2k_APP restart ---> 3 +20200512101707 ---> the f2k_APP restart ---> 1 +############################################################## +20200512102643 ---> the f2k_APP restart ---> 1 +20200512102653 ---> the f2k_APP restart ---> 2 +############################################################## +20200512102723 ---> the f2k_APP restart ---> 1 +20200512102733 ---> the f2k_APP restart ---> 2 +20200512102743 ---> the f2k_APP restart ---> 3 +############################################################## +20200512102936 ---> the f2k_APP restart ---> 1 +20200512102946 ---> the f2k_APP restart ---> 2 +20200512102956 ---> the f2k_APP restart ---> 3 +############################################################## +20200512103250 ---> the f2k_APP restart ---> 1 +20200512103300 ---> the f2k_APP restart ---> 2 +20200512103310 ---> the f2k_APP restart ---> 3 +############################################################## +20200512113819 ---> the f2k_APP restart ---> 1 +20200512113829 ---> the f2k_APP restart ---> 2 +20200512113839 ---> the f2k_APP restart ---> 3 +############################################################## +############################################################## +20200512114211 ---> the f2k_APP restart ---> 1 +20200512114241 ---> the f2k_APP restart ---> 2 +############################################################## +20200512114550 ---> the f2k_APP restart ---> 1 +20200512114600 ---> the f2k_APP restart ---> 2 +20200512114610 ---> the f2k_APP restart ---> 3 +############################################################## +############################################################## +20200512115341 ---> the f2k_APP restart ---> 1 +20200512115351 ---> the f2k_APP restart ---> 2 +20200512115401 ---> the f2k_APP restart ---> 3 +20200512115452 ---> the f2k_APP restart ---> 1 +20200512115502 ---> the f2k_APP restart ---> 2 +20200512115512 ---> the f2k_APP restart ---> 3 +20200512115712 ---> the f2k_APP restart ---> 1 +20200512115722 ---> the f2k_APP restart ---> 2 +20200512115732 ---> the f2k_APP restart ---> 3 +############################################################## +20200512115933 ---> the f2k_APP restart ---> 1 +20200512115943 ---> the f2k_APP restart ---> 2 +20200512115953 ---> the f2k_APP restart ---> 3 +############################################################## 
+20200512134239 ---> the f2k_APP restart ---> 1 +20200512134249 ---> the f2k_APP restart ---> 2 +20200512134259 ---> the f2k_APP restart ---> 3 +20200512134509 ---> the f2k_APP restart ---> 1 +20200512134519 ---> the f2k_APP restart ---> 2 +20200512134530 ---> the f2k_APP restart ---> 3 +20200512140003 ---> the f2k_APP restart ---> 1 +20200512140013 ---> the f2k_APP restart ---> 2 +20200512141056 ---> the f2k_APP restart ---> 1 +20200512141106 ---> the f2k_APP restart ---> 2 +20200512141116 ---> the f2k_APP restart ---> 3 +20200512142500 ---> the f2k_APP restart ---> 1 +20200512142510 ---> the f2k_APP restart ---> 2 +20200512142941 ---> the f2k_APP restart ---> 1 +20200512142951 ---> the f2k_APP restart ---> 2 +20200512143001 ---> the f2k_APP restart ---> 3 +20200512143031 ---> the f2k_APP restart ---> 1 +20200512143041 ---> the f2k_APP restart ---> 2 +20200512143051 ---> the f2k_APP restart ---> 3 +20200512144224 ---> the f2k_APP restart ---> 1 +20200512144234 ---> the f2k_APP restart ---> 2 +20200512144314 ---> the f2k_APP restart ---> 1 +20200512144324 ---> the f2k_APP restart ---> 2 +20200512144355 ---> the f2k_APP restart ---> 1 +20200512144405 ---> the f2k_APP restart ---> 2 +20200512144415 ---> the f2k_APP restart ---> 3 +20200512144635 ---> the f2k_APP restart ---> 1 +20200512144645 ---> the f2k_APP restart ---> 2 +20200512144655 ---> the f2k_APP restart ---> 3 diff --git a/external-flume/conf/radius/restart_log/restart_security_f2k.log b/external-flume/conf/radius/restart_log/restart_security_f2k.log new file mode 100644 index 0000000..690385d --- /dev/null +++ b/external-flume/conf/radius/restart_log/restart_security_f2k.log @@ -0,0 +1,134 @@ +############################################################## +20200512180055 ---> the security_f2k_APP restart ---> 1 +20200512180105 ---> the security_f2k_APP restart ---> 2 +############################################################## +20200512180108 ---> the security_f2k_APP restart ---> 1 +20200512180115 ---> the security_f2k_APP restart ---> 3 +20200512180118 ---> the security_f2k_APP restart ---> 2 +20200512180125 ---> the security_f2k_APP restart ---> 1 +20200512180128 ---> the security_f2k_APP restart ---> 3 +20200512180135 ---> the security_f2k_APP restart ---> 2 +20200512180138 ---> the security_f2k_APP restart ---> 1 +20200512180145 ---> the security_f2k_APP restart ---> 3 +20200512180148 ---> the security_f2k_APP restart ---> 2 +20200512180155 ---> the security_f2k_APP restart ---> 1 +20200512180158 ---> the security_f2k_APP restart ---> 3 +20200512180206 ---> the security_f2k_APP restart ---> 2 +20200512180208 ---> the security_f2k_APP restart ---> 1 +20200512180216 ---> the security_f2k_APP restart ---> 3 +20200512180218 ---> the security_f2k_APP restart ---> 2 +20200512180226 ---> the security_f2k_APP restart ---> 1 +20200512180228 ---> the security_f2k_APP restart ---> 3 +20200512180236 ---> the security_f2k_APP restart ---> 2 +20200512180238 ---> the security_f2k_APP restart ---> 1 +20200512180246 ---> the security_f2k_APP restart ---> 3 +20200512180248 ---> the security_f2k_APP restart ---> 2 +20200512180256 ---> the security_f2k_APP restart ---> 1 +20200512180258 ---> the security_f2k_APP restart ---> 3 +############################################################## +20200512180513 ---> the security_f2k_APP restart ---> 1 +20200512180523 ---> the security_f2k_APP restart ---> 2 +20200512180533 ---> the security_f2k_APP restart ---> 3 +############################################################## +20200512180536 ---> 
the security_f2k_APP restart ---> 1 +20200512180546 ---> the security_f2k_APP restart ---> 2 +20200512180556 ---> the security_f2k_APP restart ---> 3 +20200512180626 ---> the security_f2k_APP restart ---> 1 +20200512180637 ---> the security_f2k_APP restart ---> 2 +20200512180707 ---> the security_f2k_APP restart ---> 1 +20200512180717 ---> the security_f2k_APP restart ---> 2 +20200512180747 ---> the security_f2k_APP restart ---> 1 +20200512180757 ---> the security_f2k_APP restart ---> 2 +20200512180827 ---> the security_f2k_APP restart ---> 1 +20200512180837 ---> the security_f2k_APP restart ---> 2 +20200512180907 ---> the security_f2k_APP restart ---> 1 +20200512180917 ---> the security_f2k_APP restart ---> 2 +20200512180947 ---> the security_f2k_APP restart ---> 1 +20200512180957 ---> the security_f2k_APP restart ---> 2 +20200512181028 ---> the security_f2k_APP restart ---> 1 +20200512181038 ---> the security_f2k_APP restart ---> 2 +20200512181108 ---> the security_f2k_APP restart ---> 1 +20200512181118 ---> the security_f2k_APP restart ---> 2 +20200512181148 ---> the security_f2k_APP restart ---> 1 +20200512181158 ---> the security_f2k_APP restart ---> 2 +20200512181228 ---> the security_f2k_APP restart ---> 1 +20200512181238 ---> the security_f2k_APP restart ---> 2 +20200512181308 ---> the security_f2k_APP restart ---> 1 +20200512181318 ---> the security_f2k_APP restart ---> 2 +20200512181348 ---> the security_f2k_APP restart ---> 1 +20200512181358 ---> the security_f2k_APP restart ---> 2 +20200512181429 ---> the security_f2k_APP restart ---> 1 +20200512181439 ---> the security_f2k_APP restart ---> 2 +20200512181449 ---> the security_f2k_APP restart ---> 3 +20200512181519 ---> the security_f2k_APP restart ---> 1 +20200512181529 ---> the security_f2k_APP restart ---> 2 +20200512181539 ---> the security_f2k_APP restart ---> 3 +20200512181609 ---> the security_f2k_APP restart ---> 1 +20200512181619 ---> the security_f2k_APP restart ---> 2 +20200512181649 ---> the security_f2k_APP restart ---> 1 +20200512181659 ---> the security_f2k_APP restart ---> 2 +20200512181729 ---> the security_f2k_APP restart ---> 1 +20200512181739 ---> the security_f2k_APP restart ---> 2 +20200512181809 ---> the security_f2k_APP restart ---> 1 +20200512181820 ---> the security_f2k_APP restart ---> 2 +20200512181850 ---> the security_f2k_APP restart ---> 1 +20200512181900 ---> the security_f2k_APP restart ---> 2 +20200512181910 ---> the security_f2k_APP restart ---> 3 +20200512181920 ---> the security_f2k_APP restart ---> 1 +20200512181930 ---> the security_f2k_APP restart ---> 2 +20200512181940 ---> the security_f2k_APP restart ---> 3 +20200512181950 ---> the security_f2k_APP restart ---> 1 +20200512182000 ---> the security_f2k_APP restart ---> 2 +20200512182010 ---> the security_f2k_APP restart ---> 3 +20200512182020 ---> the security_f2k_APP restart ---> 1 +20200512182030 ---> the security_f2k_APP restart ---> 2 +20200512182040 ---> the security_f2k_APP restart ---> 3 +20200512182050 ---> the security_f2k_APP restart ---> 1 +20200512182100 ---> the security_f2k_APP restart ---> 2 +20200512182110 ---> the security_f2k_APP restart ---> 3 +20200512182120 ---> the security_f2k_APP restart ---> 1 +20200512182130 ---> the security_f2k_APP restart ---> 2 +20200512182140 ---> the security_f2k_APP restart ---> 3 +20200512182150 ---> the security_f2k_APP restart ---> 1 +20200512182200 ---> the security_f2k_APP restart ---> 2 +20200512182211 ---> the security_f2k_APP restart ---> 3 
+############################################################## +20200512183204 ---> the security_f2k_APP restart ---> 1 +20200512183215 ---> the security_f2k_APP restart ---> 2 +20200512183225 ---> the security_f2k_APP restart ---> 3 +20200512183255 ---> the security_f2k_APP restart ---> 1 +20200512183305 ---> the security_f2k_APP restart ---> 2 +20200512183335 ---> the security_f2k_APP restart ---> 1 +20200512183345 ---> the security_f2k_APP restart ---> 2 +20200512183415 ---> the security_f2k_APP restart ---> 1 +20200512183425 ---> the security_f2k_APP restart ---> 2 +20200512183455 ---> the security_f2k_APP restart ---> 1 +20200512183505 ---> the security_f2k_APP restart ---> 2 +20200512183535 ---> the security_f2k_APP restart ---> 1 +20200512183545 ---> the security_f2k_APP restart ---> 2 +20200512183615 ---> the security_f2k_APP restart ---> 1 +20200512183626 ---> the security_f2k_APP restart ---> 2 +20200512183656 ---> the security_f2k_APP restart ---> 1 +20200512183706 ---> the security_f2k_APP restart ---> 2 +20200512183716 ---> the security_f2k_APP restart ---> 3 +20200512183726 ---> the security_f2k_APP restart ---> 1 +20200512183736 ---> the security_f2k_APP restart ---> 2 +20200512183746 ---> the security_f2k_APP restart ---> 3 +20200512183756 ---> the security_f2k_APP restart ---> 1 +20200512183806 ---> the security_f2k_APP restart ---> 2 +20200512183836 ---> the security_f2k_APP restart ---> 1 +20200512183846 ---> the security_f2k_APP restart ---> 2 +20200512183916 ---> the security_f2k_APP restart ---> 1 +20200512183926 ---> the security_f2k_APP restart ---> 2 +20200512183936 ---> the security_f2k_APP restart ---> 3 +20200512184006 ---> the security_f2k_APP restart ---> 1 +20200512184016 ---> the security_f2k_APP restart ---> 2 +20200512184027 ---> the security_f2k_APP restart ---> 3 +20200512184037 ---> the security_f2k_APP restart ---> 1 +20200512184047 ---> the security_f2k_APP restart ---> 2 +20200512184057 ---> the security_f2k_APP restart ---> 3 +20200512184107 ---> the security_f2k_APP restart ---> 1 +20200512184117 ---> the security_f2k_APP restart ---> 2 +20200512184127 ---> the security_f2k_APP restart ---> 3 +20200512184157 ---> the security_f2k_APP restart ---> 1 +20200512184207 ---> the security_f2k_APP restart ---> 2 diff --git a/external-flume/conf/radius/start_flume.sh b/external-flume/conf/radius/start_flume.sh new file mode 100755 index 0000000..01dde8b --- /dev/null +++ b/external-flume/conf/radius/start_flume.sh @@ -0,0 +1,5 @@ +#!/bin/sh + +BASE_DIR=$(cd $(dirname $0); pwd) +DAE_FILE=$(cd $(dirname $0); ls | grep dae*) +nohup ${BASE_DIR}/$DAE_FILE >/dev/null 2>&1 & diff --git a/external-flume/conf/radius/stop_flume.sh b/external-flume/conf/radius/stop_flume.sh new file mode 100755 index 0000000..e4417aa --- /dev/null +++ b/external-flume/conf/radius/stop_flume.sh @@ -0,0 +1,28 @@ +#!/bin/sh + +#DAE_NAME=dae_k2ha.sh +#JAR_NAME=k2ha.properties +BASE_DIR=$(cd $(dirname $0); pwd) + +DAE_NAME=`ls ${BASE_DIR} | grep ^dae_* | grep .sh$` +JAR_NAME=`ls ${BASE_DIR} | grep ^f2* | grep .properties$` + +NUM1=`ps -ef | grep ${DAE_NAME} | grep -v grep | wc -l` +pids1=$(ps -ef | grep ${DAE_NAME} | grep -v grep | awk '{print $2}') +if [ "${NUM1}" -ge "1" ];then + for pid1 in $pids1 + do + kill -9 $pid1 + echo 'killed '${DAE_NAME}' process-->'$pid1 + done +fi + +NUM2=`ps -ef | grep ${JAR_NAME} | grep -v grep | wc -l` +pids2=$(ps -ef | grep ${JAR_NAME} | grep -v grep | awk '{print $2}') +if [ "${NUM2}" -ge "1" ];then + for pid2 in $pids2 + do + kill -9 $pid2 + echo 
'killed '${JAR_NAME}' process-->'$pid2
+	done
+fi
diff --git a/external-flume/conf/security/clean_start_flume.sh b/external-flume/conf/security/clean_start_flume.sh
new file mode 100755
index 0000000..ef811a5
--- /dev/null
+++ b/external-flume/conf/security/clean_start_flume.sh
@@ -0,0 +1,10 @@
+#!/bin/sh
+
+BASE_DIR=$(cd $(dirname $0); pwd)
+DAE_NAME=`ls ${BASE_DIR} | grep ^dae_* | grep .sh$`
+
+
+#Remove logs, checkpoints, and historical data
+rm -rf ${BASE_DIR}/logs/*
+
+nohup ${BASE_DIR}/${DAE_NAME} $1 $2 >/dev/null 2>&1 &
diff --git a/external-flume/conf/security/count_flume.sh b/external-flume/conf/security/count_flume.sh
new file mode 100755
index 0000000..cc8452b
--- /dev/null
+++ b/external-flume/conf/security/count_flume.sh
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+BASE_DIR=$(cd $(dirname $0); pwd)
+
+JAR_NAME=`ls ${BASE_DIR} | grep ^k2* | grep .properties$`
+
+NUM1=`ps -ef | grep ${JAR_NAME} | grep -v grep | wc -l`
+pids1=$(ps -ef | grep ${JAR_NAME} | grep -v grep | awk '{print $2}')
+echo 'flume '${JAR_NAME}' total process-->'${NUM1}
+if [ "${NUM1}" -ge "1" ];then
+	for pid1 in $pids1
+	do
+		echo 'flume '${JAR_NAME}' process-->'$pid1
+	done
+fi
diff --git a/external-flume/conf/security/dae_security.sh b/external-flume/conf/security/dae_security.sh
new file mode 100755
index 0000000..cf0e1f0
--- /dev/null
+++ b/external-flume/conf/security/dae_security.sh
@@ -0,0 +1,39 @@
+#!/bin/sh
+
+#JAR_NAME=ktk_ip_asn.properties
+PROPERTIES_NAME=security_k2f
+#Flume agent name
+FLUME_NAME=security
+#Flume home directory
+BASE_DIR=$(cd $(dirname $0); cd ../../; pwd)
+#Current directory
+CONF_DIR=$(cd $(dirname $0); pwd)
+#Target number of processes
+PROCESS_SUM=1
+
+echo "##############################################################" >> ${CONF_DIR}/restart_log/restart_${PROPERTIES_NAME}.log
+id=0 #sequence number appended to the config file name; no special meaning
+flag=0 # flag=0: initial state; flag=1: all processes started successfully; flag=2: a process died unexpectedly, so the remaining ones were killed and everything is being restarted
+while true ; do
+	NUM=`ps -ef | grep ${PROPERTIES_NAME} | grep -v grep | grep -v dae |wc -l`
+	pids=$(ps -ef | grep ${PROPERTIES_NAME}\* | grep properties | awk '{print $2}')
+	time_stamp=$(date +%Y%m%d%H%M%S)
+	#More processes than configured: kill them all so they can be restarted
+	if [[ "${NUM}" -ne ${PROCESS_SUM} && $flag -eq "1" ]];then
+		for pid in $pids
+		do
+			kill -9 $pid
+		done
+		flag=2
+	#Fewer processes running than configured: start one
+	elif [ "${NUM}" -lt ${PROCESS_SUM} ];then
+		id=$(( ( ($id) % $PROCESS_SUM ) + 1 ))
+		nohup ${BASE_DIR}/bin/flume-ng agent -n ${FLUME_NAME} -c ${CONF_DIR} -f ${CONF_DIR}/${PROPERTIES_NAME}.properties >/dev/null 2>&1 &
+		echo "${time_stamp} ---> the ${PROPERTIES_NAME}_APP restart ---> $id" >> ${CONF_DIR}/restart_log/restart_${PROPERTIES_NAME}.log
+	#Process count equals the target: set flag=1 and id=0
+	elif [ "${NUM}" -eq ${PROCESS_SUM} ];then
+		flag=1
+		id=0
+	fi
+	sleep 10
+done
diff --git a/external-flume/conf/security/flume-env.sh b/external-flume/conf/security/flume-env.sh
new file mode 100755
index 0000000..63979aa
--- /dev/null
+++ b/external-flume/conf/security/flume-env.sh
@@ -0,0 +1,35 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# If this file is placed at FLUME_CONF_DIR/flume-env.sh, it will be sourced
+# during Flume startup.
+
+# Environment variables can be set here.
+
+export JAVA_HOME=/usr/lib/jvm/jdk1.8.0_73
+
+# Give Flume more memory and pre-allocate, enable remote monitoring via JMX
+#export JAVA_OPTS="-Xms1024m -Xmx3072m -Dcom.sun.management.jmxremote"
+export JAVA_OPTS="-Xms512m -Xmx2048m -Xss256k -Xmn1g -XX:+UseParNewGC -XX:+UseConcMarkSweepGC -XX:-UseGCOverheadLimit -Dcom.sun.management.jmxremote"
+
+# Let Flume write raw event data and configuration information to its log files for debugging
+# purposes. Enabling these flags is not recommended in production,
+# as it may result in logging sensitive user information or encryption secrets.
+# export JAVA_OPTS="$JAVA_OPTS -Dorg.apache.flume.log.rawdata=true -Dorg.apache.flume.log.printconfig=true "
+
+# Note that the Flume conf directory is always included in the classpath.
+#FLUME_CLASSPATH=""
+
diff --git a/external-flume/conf/security/log4j.properties b/external-flume/conf/security/log4j.properties
new file mode 100644
index 0000000..c948bff
--- /dev/null
+++ b/external-flume/conf/security/log4j.properties
@@ -0,0 +1,68 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# Define some default values that can be overridden by system properties.
+#
+# For testing, it may also be convenient to specify
+# -Dflume.root.logger=DEBUG,console when launching flume.
+
+#flume.root.logger=DEBUG,console
+flume.root.logger=INFO,LOGFILE
+flume.log.dir=./logs
+flume.log.file=flume.log
+
+log4j.logger.org.apache.flume.lifecycle = INFO
+log4j.logger.org.jboss = WARN
+log4j.logger.org.mortbay = INFO
+log4j.logger.org.apache.avro.ipc.NettyTransceiver = WARN
+log4j.logger.org.apache.hadoop = INFO
+log4j.logger.org.apache.hadoop.hive = ERROR
+
+# Define the root logger to the system property "flume.root.logger".
+log4j.rootLogger=${flume.root.logger}
+
+
+# Stock log4j rolling file appender
+# Default log rotation configuration
+log4j.appender.LOGFILE=org.apache.log4j.RollingFileAppender
+log4j.appender.LOGFILE.MaxFileSize=100MB
+log4j.appender.LOGFILE.MaxBackupIndex=10
+log4j.appender.LOGFILE.File=${flume.log.dir}/${flume.log.file}
+log4j.appender.LOGFILE.layout=org.apache.log4j.PatternLayout
+log4j.appender.LOGFILE.layout.ConversionPattern=%d{dd MMM yyyy HH:mm:ss,SSS} %-5p [%t] (%C.%M:%L) %x - %m%n
+
+
+# Warning: If you enable the following appender it will fill up your disk if you don't have a cleanup job!
+# This uses the updated rolling file appender from log4j-extras that supports a reliable time-based rolling policy.
+# See http://logging.apache.org/log4j/companions/extras/apidocs/org/apache/log4j/rolling/TimeBasedRollingPolicy.html
+# Add "DAILY" to flume.root.logger above if you want to use this
+log4j.appender.DAILY=org.apache.log4j.rolling.RollingFileAppender
+log4j.appender.DAILY.rollingPolicy=org.apache.log4j.rolling.TimeBasedRollingPolicy
+log4j.appender.DAILY.rollingPolicy.ActiveFileName=${flume.log.dir}/${flume.log.file}
+log4j.appender.DAILY.rollingPolicy.FileNamePattern=${flume.log.dir}/${flume.log.file}.%d{yyyy-MM-dd}
+log4j.appender.DAILY.layout=org.apache.log4j.PatternLayout
+log4j.appender.DAILY.layout.ConversionPattern=%d{dd MMM yyyy HH:mm:ss,SSS} %-5p [%t] (%C.%M:%L) %x - %m%n
+
+
+# console
+# Add "console" to flume.root.logger above if you want to use this
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d (%t) [%p - %l] %m%n
diff --git a/external-flume/conf/security/pro/2f2k.properties b/external-flume/conf/security/pro/2f2k.properties
new file mode 100644
index 0000000..5e5c573
--- /dev/null
+++ b/external-flume/conf/security/pro/2f2k.properties
@@ -0,0 +1,24 @@
+a2.sources = s2
+a2.channels = c2
+#Specify which channel the source delivers its data to
+a2.sources.s2.channels = c2
+#taildir source
+a2.sources.s2.type = TAILDIR
+a2.sources.s2.channels = c2
+a2.sources.s2.positionFile = /home/test/2taildir_position.json
+a2.sources.s2.filegroups = f2
+a2.sources.s2.filegroups.f2 = /home/test/log2/.*dat
+a2.sources.s2.maxBatchCount = 1000
+
+# The Kafka channel acts as the producer
+a2.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c2.brokerList = 192.168.40.222:9092
+a2.channels.c2.zookeeperConnect=192.168.40.222:2181/kafka
+a2.channels.c2.topic = recv_test
+#false writes the payload as plain text; true wraps it as a Flume event, which shows up garbled in the topic. Default is true
+a2.channels.c2.parseAsFlumeEvent = false
+a2.channels.c2.kafka.producer.acks = 1
+a2.channels.c2.producer.type=async
+a2.channels.c2.queue.buffering.max.ms = 5000
+a2.channels.c2.queue.buffering.max.messages=20000
+a2.channels.c2.batch.num.messages=500
diff --git a/external-flume/conf/security/pro/4f2k.properties b/external-flume/conf/security/pro/4f2k.properties
new file mode 100644
index 0000000..bf0408b
--- /dev/null
+++ b/external-flume/conf/security/pro/4f2k.properties
@@ -0,0 +1,58 @@
+a2.sources = s2
+a2.channels = c2 c3 c4
+#Specify which channel the source delivers its data to
+a2.sources.s2.channels = c2
+#taildir source
+a2.sources.s2.type = TAILDIR
+a2.sources.s2.positionFile = /home/test/4taildir_position.json
+a2.sources.s2.filegroups = f3
+a2.sources.s2.filegroups.f3 = /home/test/recv/.*dat
+#a2.sources.s2.headers.f3.headerKey1 = channel2
+#a2.sources.s2.filegroups.f4 = /home/test/log/.*dat
+#a2.sources.s2.headers.f4.headerKey1 = channel3
+a2.sources.s2.maxBatchCount = 500
+#a2.sources.s2.selector.type = multiplexing
+
+#channel selector
+#a2.sources.s2.selector.header = headerKey1
+#a2.sources.s2.selector.mapping.channel2=c2
+#a2.sources.s2.selector.mapping.channel3=c3
+
+# The Kafka channel acts as the producer
+a2.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c2.brokerList = 192.168.40.222:9093
+a2.channels.c2.topic = recv_test
+#false writes the payload as plain text; true wraps it as a Flume event, which shows up garbled in the topic. Default is true
+a2.channels.c2.parseAsFlumeEvent = false
+a2.channels.c2.kafka.producer.acks = 1
+a2.channels.c2.producer.type=async
+a2.channels.c2.queue.buffering.max.ms = 5000
+a2.channels.c2.queue.buffering.max.messages=200000
+a2.channels.c2.batch.num.messages=5000
+
+#Second channel
+a2.channels.c3.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c3.brokerList = 192.168.40.222:9093
+a2.channels.c3.topic = recv_test
+#false writes the payload as plain text; true wraps it as a Flume event, which shows up garbled in the topic. Default is true
+a2.channels.c3.parseAsFlumeEvent = false
+a2.channels.c3.kafka.producer.acks = 1
+a2.channels.c3.producer.type=async
+a2.channels.c3.queue.buffering.max.ms = 5000
+a2.channels.c3.queue.buffering.max.messages=20000
+a2.channels.c3.batch.num.messages=500
+
+
+
+#Third channel
+a2.channels.c4.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c4.brokerList = 192.168.40.222:9093
+a2.channels.c4.topic = recv_test
+#false writes the payload as plain text; true wraps it as a Flume event, which shows up garbled in the topic. Default is true
+a2.channels.c4.parseAsFlumeEvent = false
+a2.channels.c4.kafka.producer.acks = 1
+a2.channels.c4.producer.type=async
+a2.channels.c4.queue.buffering.max.ms = 5000
+a2.channels.c4.queue.buffering.max.messages=20000
+a2.channels.c4.batch.num.messages=500
+
diff --git a/external-flume/conf/security/pro/f2k.properties b/external-flume/conf/security/pro/f2k.properties
new file mode 100644
index 0000000..7047e42
--- /dev/null
+++ b/external-flume/conf/security/pro/f2k.properties
@@ -0,0 +1,24 @@
+a2.sources = s2
+a2.channels = c2
+#Specify which channel the source delivers its data to
+a2.sources.s2.channels = c2
+#taildir source
+a2.sources.s2.type = TAILDIR
+a2.sources.s2.channels = c2
+a2.sources.s2.positionFile = /home/test/taildir_position.json
+a2.sources.s2.filegroups = f0
+a2.sources.s2.filegroups.f0 = /home/test/log/.*dat
+a2.sources.s2.maxBatchCount = 1000
+
+# The Kafka channel acts as the producer
+a2.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c2.brokerList = 192.168.40.222:9092
+a2.channels.c2.zookeeperConnect=192.168.40.222:2181/kafka
+a2.channels.c2.topic = recv_test
+#false writes the payload as plain text; true wraps it as a Flume event, which shows up garbled in the topic. Default is true
+a2.channels.c2.parseAsFlumeEvent = false
+a2.channels.c2.kafka.producer.acks = 1
+a2.channels.c2.producer.type=async
+a2.channels.c2.queue.buffering.max.ms = 5000
+a2.channels.c2.queue.buffering.max.messages=20000
+a2.channels.c2.batch.num.messages=500
diff --git a/external-flume/conf/security/pro/f2k1.properties b/external-flume/conf/security/pro/f2k1.properties
new file mode 100644
index 0000000..2dadced
--- /dev/null
+++ b/external-flume/conf/security/pro/f2k1.properties
@@ -0,0 +1,23 @@
+a2.sources = s2
+a2.channels = c2
+#Specify which channel the source delivers its data to
+a2.sources.s2.channels = c2
+#taildir source
+a2.sources.s2.type = TAILDIR
+a2.sources.s2.channels = c2
+a2.sources.s2.positionFile = /home/test/1taildir_position.json
+a2.sources.s2.filegroups = f1
+a2.sources.s2.filegroups.f1 = /home/test/log/.*[0-2].dat
+a2.sources.s2.maxBatchCount = 1000
+
+# The Kafka channel acts as the producer
+a2.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
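+#brokerList/zookeeperConnect below are the legacy (pre Flume 1.7) KafkaChannel
+#property names; on newer releases the equivalent setting is kafka.bootstrap.servers
+#and the channel no longer needs ZooKeeper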
+a2.channels.c2.brokerList = 192.168.40.222:9093
+a2.channels.c2.topic = recv_test
+#false writes the payload as plain text; true wraps it as a Flume event, which shows up garbled in the topic. Default is true
+a2.channels.c2.parseAsFlumeEvent = false
+a2.channels.c2.kafka.producer.acks = 1
+a2.channels.c2.producer.type=async
+a2.channels.c2.queue.buffering.max.ms = 5000
+a2.channels.c2.queue.buffering.max.messages=20000
+a2.channels.c2.batch.num.messages=500
diff --git a/external-flume/conf/security/pro/f2k2.properties b/external-flume/conf/security/pro/f2k2.properties
new file mode 100644
index 0000000..e17c3e2
--- /dev/null
+++ b/external-flume/conf/security/pro/f2k2.properties
@@ -0,0 +1,23 @@
+a2.sources = s2
+a2.channels = c2
+#Specify which channel the source delivers its data to
+a2.sources.s2.channels = c2
+#taildir source
+a2.sources.s2.type = TAILDIR
+a2.sources.s2.channels = c2
+a2.sources.s2.positionFile = /home/test/2taildir_position.json
+a2.sources.s2.filegroups = f1
+a2.sources.s2.filegroups.f1 = /home/test/log/.*[3-4].dat
+a2.sources.s2.maxBatchCount = 1000
+
+# The Kafka channel acts as the producer
+a2.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c2.brokerList = 192.168.40.222:9093
+a2.channels.c2.topic = recv_test
+#false writes the payload as plain text; true wraps it as a Flume event, which shows up garbled in the topic. Default is true
+a2.channels.c2.parseAsFlumeEvent = false
+a2.channels.c2.kafka.producer.acks = 1
+a2.channels.c2.producer.type=async
+a2.channels.c2.queue.buffering.max.ms = 5000
+a2.channels.c2.queue.buffering.max.messages=20000
+a2.channels.c2.batch.num.messages=500
diff --git a/external-flume/conf/security/pro/f2k3.properties b/external-flume/conf/security/pro/f2k3.properties
new file mode 100644
index 0000000..2dadced
--- /dev/null
+++ b/external-flume/conf/security/pro/f2k3.properties
@@ -0,0 +1,23 @@
+a2.sources = s2
+a2.channels = c2
+#Specify which channel the source delivers its data to
+a2.sources.s2.channels = c2
+#taildir source
+a2.sources.s2.type = TAILDIR
+a2.sources.s2.channels = c2
+a2.sources.s2.positionFile = /home/test/1taildir_position.json
+a2.sources.s2.filegroups = f1
+a2.sources.s2.filegroups.f1 = /home/test/log/.*[0-2].dat
+a2.sources.s2.maxBatchCount = 1000
+
+# The Kafka channel acts as the producer
+a2.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c2.brokerList = 192.168.40.222:9093
+a2.channels.c2.topic = recv_test
+#false writes the payload as plain text; true wraps it as a Flume event, which shows up garbled in the topic. Default is true
+a2.channels.c2.parseAsFlumeEvent = false
+a2.channels.c2.kafka.producer.acks = 1
+a2.channels.c2.producer.type=async
+a2.channels.c2.queue.buffering.max.ms = 5000
+a2.channels.c2.queue.buffering.max.messages=20000
+a2.channels.c2.batch.num.messages=500
diff --git a/external-flume/conf/security/pro/file.properties b/external-flume/conf/security/pro/file.properties
new file mode 100644
index 0000000..f6813e7
--- /dev/null
+++ b/external-flume/conf/security/pro/file.properties
@@ -0,0 +1,29 @@
+#Name the source, channel, and sink
+a1.sources = s1
+a1.channels = c1
+a1.sinks = k1
+#Specify which channel the source delivers its data to
+a1.sources.s1.channels = c1
+#Specify the source's collection strategy
+a1.sources.s1.type = org.apache.flume.source.kafka.KafkaSource
+a1.sources.s1.batchSize = 3000
+a1.sources.s1.batchDurationMillis = 100
+a1.sources.s1.kafka.bootstrap.servers = 192.168.40.203:9092
+a1.sources.s1.kafka.topics = test
+a1.sources.s1.kafka.consumer.group.id = lxk_0429
+
+#Use a memory channel, i.e. all data is buffered in memory
+a1.channels.c1.type = memory
+a1.channels.c1.capacity = 2000000
+a1.channels.c1.transactionCapacity = 30000
+a1.channels.c1.byteCapacityBufferPercentage = 40
+a1.channels.c1.byteCapacity = 2147483648
+
+#Plain file_roll sink; writes the data to local disk
+a1.sinks.k1.type = file_roll
+a1.sinks.k1.channel = c1
+a1.sinks.k1.sink.pathManager = default
+a1.sinks.k1.sink.pathManager.extension = dat
+a1.sinks.k1.sink.pathManager.prefix = test-
+a1.sinks.k1.sink.rollInterval = 30
+a1.sinks.k1.sink.directory = /home/test/log2
diff --git a/external-flume/conf/security/pro/file1.properties b/external-flume/conf/security/pro/file1.properties
new file mode 100644
index 0000000..17dac7f
--- /dev/null
+++ b/external-flume/conf/security/pro/file1.properties
@@ -0,0 +1,29 @@
+#Name the source, channel, and sink
+a1.sources = s1
+a1.channels = c1
+a1.sinks = k1
+#Specify which channel the source delivers its data to
+a1.sources.s1.channels = c1
+#Specify the source's collection strategy
+a1.sources.s1.type = org.apache.flume.source.kafka.KafkaSource
+a1.sources.s1.batchSize = 3000
+a1.sources.s1.batchDurationMillis = 100
+a1.sources.s1.kafka.bootstrap.servers = 192.168.40.152:9092
+a1.sources.s1.kafka.topics = CONNECTION-RECORD-LOG
+a1.sources.s1.kafka.consumer.group.id = source_0514
+
+#Use a memory channel, i.e. all data is buffered in memory
+a1.channels.c1.type = memory
+a1.channels.c1.capacity = 2000000
+a1.channels.c1.transactionCapacity = 30000
+a1.channels.c1.byteCapacityBufferPercentage = 40
+a1.channels.c1.byteCapacity = 2147483648
+
+#Plain file_roll sink; writes the data to local disk
+a1.sinks.k1.type = file_roll
+a1.sinks.k1.channel = c1
+a1.sinks.k1.sink.pathManager = default
+a1.sinks.k1.sink.pathManager.extension = dat
+a1.sinks.k1.sink.pathManager.prefix = test-
+a1.sinks.k1.sink.rollInterval = 30
+a1.sinks.k1.sink.directory = /home/192.168.60.102/CONNECTION-RECORD-LOG
diff --git a/external-flume/conf/security/pro/file2.properties b/external-flume/conf/security/pro/file2.properties
new file mode 100644
index 0000000..f6813e7
--- /dev/null
+++ b/external-flume/conf/security/pro/file2.properties
@@ -0,0 +1,29 @@
+#Name the source, channel, and sink
+a1.sources = s1
+a1.channels = c1
+a1.sinks = k1
+#Specify which channel the source delivers its data to
+a1.sources.s1.channels = c1
+#Specify the source's collection strategy
+a1.sources.s1.type = org.apache.flume.source.kafka.KafkaSource
+a1.sources.s1.batchSize = 3000
+a1.sources.s1.batchDurationMillis = 100
+a1.sources.s1.kafka.bootstrap.servers = 192.168.40.203:9092
+a1.sources.s1.kafka.topics = test
+a1.sources.s1.kafka.consumer.group.id = lxk_0429
+
+#Use a memory channel, i.e. all data is buffered in memory
+a1.channels.c1.type = memory
+a1.channels.c1.capacity = 2000000
+a1.channels.c1.transactionCapacity = 30000
+a1.channels.c1.byteCapacityBufferPercentage = 40
+a1.channels.c1.byteCapacity = 2147483648
+
+#Plain file_roll sink; writes the data to local disk
+a1.sinks.k1.type = file_roll
+a1.sinks.k1.channel = c1
+a1.sinks.k1.sink.pathManager = default
+a1.sinks.k1.sink.pathManager.extension = dat
+a1.sinks.k1.sink.pathManager.prefix = test-
+a1.sinks.k1.sink.rollInterval = 30
+a1.sinks.k1.sink.directory = /home/test/log2
diff --git a/external-flume/conf/security/pro/k2f.properties b/external-flume/conf/security/pro/k2f.properties
new file mode 100644
index 0000000..35d7d68
--- /dev/null
+++ b/external-flume/conf/security/pro/k2f.properties
@@ -0,0 +1,22 @@
+#Name the source, channel, and sink
+a1.channels = c1
+a1.sinks = k1
+#Use a Kafka channel; omitting the source makes the agent more efficient
+a1.channels.c1.type = org.apache.flume.channel.kafka.KafkaChannel
+a1.channels.c1.kafka.bootstrap.servers = 192.168.40.119:9092
+a1.channels.c1.kafka.topic = test
+a1.channels.c1.kafka.consumer.group.id = lxk_0509
+a1.channels.c1.kafka.consumer.auto.offset.reset = latest
+a1.channels.c1.kafka.consumer.enable.auto.commit = true
+a1.channels.c1.kafka.consumer.fetch.max.wait.ms = 1000
+a1.channels.c1.kafka.consumer.fetch.min.bytes = 10485760
+a1.channels.c1.parseAsFlumeEvent = false
+
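+#To sanity-check that this channel's consumer group is draining the topic configured
+#above, the stock Kafka CLIs can be pointed at the same broker, e.g.:
+#  kafka-console-consumer.sh --bootstrap-server 192.168.40.119:9092 --topic test
+#  kafka-consumer-groups.sh --bootstrap-server 192.168.40.119:9092 --describe --group lxk_0509
+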
+#Plain file_roll sink; writes the data to local disk
+a1.sinks.k1.type = file_roll
+a1.sinks.k1.channel = c1
+a1.sinks.k1.sink.pathManager = default
+a1.sinks.k1.sink.pathManager.extension = dat
+a1.sinks.k1.sink.pathManager.prefix = test-
+a1.sinks.k1.sink.rollInterval = 60
+a1.sinks.k1.sink.directory = /home/test/log
diff --git a/external-flume/conf/security/pro/kafka.properties b/external-flume/conf/security/pro/kafka.properties
new file mode 100644
index 0000000..da560b2
--- /dev/null
+++ b/external-flume/conf/security/pro/kafka.properties
@@ -0,0 +1,25 @@
+a2.sources = s2
+a2.channels = c2
+#Specify which channel the source delivers its data to
+a2.sources.s2.channels = c2
+#taildir source
+a2.sources.s2.type = TAILDIR
+a2.sources.s2.channels = c2
+a2.sources.s2.positionFile = /home/test/taildir_position.json
+a2.sources.s2.filegroups = f1
+a2.sources.s2.filegroups.f1 = /home/test/log/.*dat
+a2.sources.s2.filegroups.f2 = /home/test/log2/.*dat
+a2.sources.s2.maxBatchCount = 1000
+
+# The Kafka channel acts as the producer
+a2.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c2.brokerList = 192.168.40.203:9092
+a2.channels.c2.zookeeperConnect=192.168.40.203:2181/kafka
+a2.channels.c2.topic = recv_test
+#false writes the payload as plain text; true wraps it as a Flume event, which shows up garbled in the topic. Default is true
+a2.channels.c2.parseAsFlumeEvent = false
+a2.channels.c2.kafka.producer.acks = 1
+a2.channels.c2.producer.type=async
+a2.channels.c2.queue.buffering.max.ms = 5000
+a2.channels.c2.queue.buffering.max.messages=20000
+a2.channels.c2.batch.num.messages=500
diff --git a/external-flume/conf/security/pro/kafka1.properties b/external-flume/conf/security/pro/kafka1.properties
new file mode 100644
index 0000000..ed30b16
--- /dev/null
+++ b/external-flume/conf/security/pro/kafka1.properties
@@ -0,0 +1,25 @@
+a2.sources = s2
+a2.channels = c2
+#Specify which channel the source delivers its data to
+a2.sources.s2.channels = c2
+#taildir source
+a2.sources.s2.type = TAILDIR
+a2.sources.s2.channels = c2
+a2.sources.s2.positionFile = /var/taildir_position.json
+a2.sources.s2.filegroups = f1
+a2.sources.s2.filegroups.f1 = /home/test/log1/.*dat
+a2.sources.s2.filegroups.f2 = /home/test/log2/.*dat
+a2.sources.s2.maxBatchCount = 1000
+
+# The Kafka channel acts as the producer
+a2.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
+a2.channels.c2.brokerList = 192.168.40.203:9092
+a2.channels.c2.zookeeperConnect=192.168.40.203:2181/kafka
+a2.channels.c2.topic = recv_test
+#false writes the payload as plain text; true wraps it as a Flume event, which shows up garbled in the topic. Default is true
+a2.channels.c2.parseAsFlumeEvent = false
+a2.channels.c2.kafka.producer.acks = 1
+a2.channels.c2.producer.type=async
+a2.channels.c2.queue.buffering.max.ms = 5000
+a2.channels.c2.queue.buffering.max.messages=20000
+a2.channels.c2.batch.num.messages=500
diff --git a/external-flume/conf/security/pro/kafka2.properties b/external-flume/conf/security/pro/kafka2.properties
new file mode 100644
index 0000000..6040a05
--- /dev/null
+++ b/external-flume/conf/security/pro/kafka2.properties
@@ -0,0 +1,25 @@
+a3.sources = s2
+a3.channels = c2
+#Specify which channel the source delivers its data to
+a3.sources.s2.channels = c2
+#taildir source
+a3.sources.s2.type = TAILDIR
+a3.sources.s2.channels = c2
+a3.sources.s2.positionFile = /var/taildir2_position.json
+a3.sources.s2.filegroups = f2
+a3.sources.s2.filegroups.f1 = /home/test/log1/.*dat
+a3.sources.s2.filegroups.f2 = /home/test/log2/.*dat
+a3.sources.s2.maxBatchCount = 1000
+
+# The Kafka channel acts as the producer
+a3.channels.c2.type = org.apache.flume.channel.kafka.KafkaChannel
+a3.channels.c2.brokerList = 192.168.40.203:9092
+a3.channels.c2.zookeeperConnect=192.168.40.203:2181/kafka
+a3.channels.c2.topic = recv_test
+#false writes the payload as plain text; true wraps it as a Flume event, which shows up garbled in the topic. Default is true
+a3.channels.c2.parseAsFlumeEvent = false
+a3.channels.c2.kafka.producer.acks = 1
+a3.channels.c2.producer.type=async
+a3.channels.c2.queue.buffering.max.ms = 5000
+a3.channels.c2.queue.buffering.max.messages=20000
+a3.channels.c2.batch.num.messages=500
diff --git a/external-flume/conf/security/pro/log4j.properties b/external-flume/conf/security/pro/log4j.properties
new file mode 100644
index 0000000..c948bff
--- /dev/null
+++ b/external-flume/conf/security/pro/log4j.properties
@@ -0,0 +1,68 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# Define some default values that can be overridden by system properties.
+#
+# For testing, it may also be convenient to specify
+# -Dflume.root.logger=DEBUG,console when launching flume.
+
+#flume.root.logger=DEBUG,console
+flume.root.logger=INFO,LOGFILE
+flume.log.dir=./logs
+flume.log.file=flume.log
+
+log4j.logger.org.apache.flume.lifecycle = INFO
+log4j.logger.org.jboss = WARN
+log4j.logger.org.mortbay = INFO
+log4j.logger.org.apache.avro.ipc.NettyTransceiver = WARN
+log4j.logger.org.apache.hadoop = INFO
+log4j.logger.org.apache.hadoop.hive = ERROR
+
+# Define the root logger to the system property "flume.root.logger".
+log4j.rootLogger=${flume.root.logger}
+
+
+# Stock log4j rolling file appender
+# Default log rotation configuration
+log4j.appender.LOGFILE=org.apache.log4j.RollingFileAppender
+log4j.appender.LOGFILE.MaxFileSize=100MB
+log4j.appender.LOGFILE.MaxBackupIndex=10
+log4j.appender.LOGFILE.File=${flume.log.dir}/${flume.log.file}
+log4j.appender.LOGFILE.layout=org.apache.log4j.PatternLayout
+log4j.appender.LOGFILE.layout.ConversionPattern=%d{dd MMM yyyy HH:mm:ss,SSS} %-5p [%t] (%C.%M:%L) %x - %m%n
+
+
+# Warning: If you enable the following appender it will fill up your disk if you don't have a cleanup job!
+# This uses the updated rolling file appender from log4j-extras that supports a reliable time-based rolling policy.
+# See http://logging.apache.org/log4j/companions/extras/apidocs/org/apache/log4j/rolling/TimeBasedRollingPolicy.html
+# Add "DAILY" to flume.root.logger above if you want to use this
+log4j.appender.DAILY=org.apache.log4j.rolling.RollingFileAppender
+log4j.appender.DAILY.rollingPolicy=org.apache.log4j.rolling.TimeBasedRollingPolicy
+log4j.appender.DAILY.rollingPolicy.ActiveFileName=${flume.log.dir}/${flume.log.file}
+log4j.appender.DAILY.rollingPolicy.FileNamePattern=${flume.log.dir}/${flume.log.file}.%d{yyyy-MM-dd}
+log4j.appender.DAILY.layout=org.apache.log4j.PatternLayout
+log4j.appender.DAILY.layout.ConversionPattern=%d{dd MMM yyyy HH:mm:ss,SSS} %-5p [%t] (%C.%M:%L) %x - %m%n
+
+
+# console
+# Add "console" to flume.root.logger above if you want to use this
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d (%t) [%p - %l] %m%n
diff --git a/external-flume/conf/security/pro/tail.properties b/external-flume/conf/security/pro/tail.properties
new file mode 100644
index 0000000..c677318
--- /dev/null
+++ b/external-flume/conf/security/pro/tail.properties
@@ -0,0 +1,34 @@
+a2.sources = s2
+a2.channels = c2
+a2.sinks = k2
+#Specify which channel the source delivers its data to
+a2.sources.s2.channels = c2
+
+a2.sources.s2.type = TAILDIR
+a2.sources.s2.channels = c2
+a2.sources.s2.positionFile = /home/test/taildir_position.json
+a2.sources.s2.filegroups = f1 f2
+a2.sources.s2.filegroups.f1 = /home/test/log1/.*dat
+a2.sources.s2.headers.f1.headerKey1 = value1
+a2.sources.s2.filegroups.f2 = /home/test/log2/.*dat
+a2.sources.s2.headers.f2.headerKey1 = value2
+a2.sources.s2.headers.f2.headerKey2 = value2-2
+a2.sources.s2.fileHeader = true
+a2.sources.s2.maxBatchCount = 1000
+
+
+#Use a memory channel, i.e. all data is buffered in memory
+a2.channels.c2.type = memory
+a2.channels.c2.capacity = 2000000
+a2.channels.c2.transactionCapacity = 30000
+a2.channels.c2.byteCapacityBufferPercentage = 40
+a2.channels.c2.byteCapacity = 2147483648
+
+#Plain file_roll sink; writes the data to local disk
+a2.sinks.k2.type = file_roll
+a2.sinks.k2.channel = c2
+a2.sinks.k2.sink.pathManager = default
+a2.sinks.k2.sink.pathManager.extension = dat
+a2.sinks.k2.sink.pathManager.prefix = recv-
+a2.sinks.k2.sink.rollInterval = 60
+a2.sinks.k2.sink.directory = /home/test/recv1
diff --git a/external-flume/conf/security/restart_log/restart_connection_k2f.log b/external-flume/conf/security/restart_log/restart_connection_k2f.log
new file mode 100644
index 0000000..281d596
--- /dev/null
+++ b/external-flume/conf/security/restart_log/restart_connection_k2f.log
@@ -0,0 +1,68 @@
+##############################################################
+20200513184750 ---> the connection_k2f_APP restart ---> 1
+20200513184800 ---> the connection_k2f_APP restart ---> 1
+20200513184810 ---> the connection_k2f_APP restart ---> 1
+20200513184820 ---> the connection_k2f_APP restart ---> 1
+20200513184830 ---> the connection_k2f_APP restart ---> 1
+20200513184840 ---> the connection_k2f_APP restart ---> 1
+20200513184850 ---> the connection_k2f_APP restart ---> 1
+20200513184900 ---> the connection_k2f_APP restart ---> 1
+20200513184910 ---> the connection_k2f_APP restart ---> 1
+20200513184920 ---> the connection_k2f_APP restart ---> 1
+20200513184930 ---> the connection_k2f_APP restart ---> 1
+20200513184940 ---> the connection_k2f_APP restart ---> 1
+20200513184950 ---> the connection_k2f_APP restart ---> 1
+20200513185000 ---> the connection_k2f_APP restart
---> 1 +20200513185010 ---> the connection_k2f_APP restart ---> 1 +20200513185020 ---> the connection_k2f_APP restart ---> 1 +############################################################## +20200513185029 ---> the connection_k2f_APP restart ---> 1 +20200513185030 ---> the connection_k2f_APP restart ---> 1 +############################################################## +20200513185036 ---> the connection_k2f_APP restart ---> 1 +20200513185040 ---> the connection_k2f_APP restart ---> 1 +20200513185050 ---> the connection_k2f_APP restart ---> 1 +20200513185100 ---> the connection_k2f_APP restart ---> 1 +20200513185110 ---> the connection_k2f_APP restart ---> 1 +############################################################## +20200513185120 ---> the connection_k2f_APP restart ---> 1 +20200513185241 ---> the connection_k2f_APP restart ---> 1 +20200513185251 ---> the connection_k2f_APP restart ---> 1 +20200513185301 ---> the connection_k2f_APP restart ---> 1 +20200513185311 ---> the connection_k2f_APP restart ---> 1 +20200513185321 ---> the connection_k2f_APP restart ---> 1 +20200513185331 ---> the connection_k2f_APP restart ---> 1 +############################################################## +20200513185405 ---> the connection_k2f_APP restart ---> 1 +20200513185611 ---> the connection_k2f_APP restart ---> 1 +20200513185621 ---> the connection_k2f_APP restart ---> 1 +20200513185701 ---> the connection_k2f_APP restart ---> 1 +20200513185741 ---> the connection_k2f_APP restart ---> 1 +20200513185752 ---> the connection_k2f_APP restart ---> 1 +20200513185802 ---> the connection_k2f_APP restart ---> 1 +20200513185812 ---> the connection_k2f_APP restart ---> 1 +20200513185822 ---> the connection_k2f_APP restart ---> 1 +20200513185832 ---> the connection_k2f_APP restart ---> 1 +20200513185922 ---> the connection_k2f_APP restart ---> 1 +20200513185932 ---> the connection_k2f_APP restart ---> 1 +20200513185942 ---> the connection_k2f_APP restart ---> 1 +20200513185952 ---> the connection_k2f_APP restart ---> 1 +20200513190002 ---> the connection_k2f_APP restart ---> 1 +20200513190012 ---> the connection_k2f_APP restart ---> 1 +20200513190022 ---> the connection_k2f_APP restart ---> 1 +20200513190032 ---> the connection_k2f_APP restart ---> 1 +20200513190152 ---> the connection_k2f_APP restart ---> 1 +20200513190202 ---> the connection_k2f_APP restart ---> 1 +20200513190212 ---> the connection_k2f_APP restart ---> 1 +20200513190222 ---> the connection_k2f_APP restart ---> 1 +20200513190232 ---> the connection_k2f_APP restart ---> 1 +20200513190242 ---> the connection_k2f_APP restart ---> 1 +20200513190252 ---> the connection_k2f_APP restart ---> 1 +20200513190302 ---> the connection_k2f_APP restart ---> 1 +20200513190312 ---> the connection_k2f_APP restart ---> 1 +############################################################## +20200513190331 ---> the connection_k2f_APP restart ---> 1 +############################################################## +20200513192014 ---> the connection_k2f_APP restart ---> 1 +############################################################## +20200513192212 ---> the connection_k2f_APP restart ---> 1 diff --git a/external-flume/conf/security/restart_log/restart_f2k.log b/external-flume/conf/security/restart_log/restart_f2k.log new file mode 100644 index 0000000..9d5fe99 --- /dev/null +++ b/external-flume/conf/security/restart_log/restart_f2k.log @@ -0,0 +1,213 @@ +############################################################## +20200509174727 ---> the f2k_APP restart ---> 0 
+20200509174737 ---> the f2k_APP restart ---> 1
+##############################################################
+##############################################################
+##############################################################
+##############################################################
+##############################################################
+20200511183420 ---> the f2k_APP restart ---> 1
+20200511183430 ---> the f2k_APP restart ---> 2
+##############################################################
+20200511183639 ---> the f2k_APP restart ---> 1
+20200511183649 ---> the f2k_APP restart ---> 2
+20200511183659 ---> the f2k_APP restart ---> 3
+20200511183709 ---> the f2k_APP restart ---> 4
+20200511183719 ---> the f2k_APP restart ---> 5
+20200511183729 ---> the f2k_APP restart ---> 6
+20200511183739 ---> the f2k_APP restart ---> 7
+20200511183749 ---> the f2k_APP restart ---> 8
+20200511183759 ---> the f2k_APP restart ---> 9
+20200511183809 ---> the f2k_APP restart ---> 10
+20200511183819 ---> the f2k_APP restart ---> 11
+20200511183829 ---> the f2k_APP restart ---> 12
+20200511183839 ---> the f2k_APP restart ---> 13
+20200511183849 ---> the f2k_APP restart ---> 14
+20200511183859 ---> the f2k_APP restart ---> 15
+20200511183909 ---> the f2k_APP restart ---> 16
+20200511183919 ---> the f2k_APP restart ---> 17
+20200511183930 ---> the f2k_APP restart ---> 18
+20200511183940 ---> the f2k_APP restart ---> 19
+20200511183950 ---> the f2k_APP restart ---> 20
+20200511184000 ---> the f2k_APP restart ---> 21
+20200511184010 ---> the f2k_APP restart ---> 22
+20200511184020 ---> the f2k_APP restart ---> 23
+20200511184030 ---> the f2k_APP restart ---> 24
+20200511184040 ---> the f2k_APP restart ---> 25
+20200511184050 ---> the f2k_APP restart ---> 26
+20200511184100 ---> the f2k_APP restart ---> 27
+20200511184110 ---> the f2k_APP restart ---> 28
+20200511184120 ---> the f2k_APP restart ---> 29
+20200511184130 ---> the f2k_APP restart ---> 30
+20200511184140 ---> the f2k_APP restart ---> 31
+20200511184150 ---> the f2k_APP restart ---> 32
+20200511184200 ---> the f2k_APP restart ---> 33
+20200511184210 ---> the f2k_APP restart ---> 34
+20200511184220 ---> the f2k_APP restart ---> 35
+20200511184230 ---> the f2k_APP restart ---> 36
+20200511184240 ---> the f2k_APP restart ---> 37
+20200511184250 ---> the f2k_APP restart ---> 38
+20200511184300 ---> the f2k_APP restart ---> 39
+20200511184310 ---> the f2k_APP restart ---> 40
+20200511184321 ---> the f2k_APP restart ---> 41
+20200511184331 ---> the f2k_APP restart ---> 42
+20200511184341 ---> the f2k_APP restart ---> 43
+20200511184351 ---> the f2k_APP restart ---> 44
+20200511184401 ---> the f2k_APP restart ---> 45
+20200511184411 ---> the f2k_APP restart ---> 46
+20200511184421 ---> the f2k_APP restart ---> 47
+20200511184431 ---> the f2k_APP restart ---> 48
+20200511184441 ---> the f2k_APP restart ---> 49
+20200511184451 ---> the f2k_APP restart ---> 50
+20200511184501 ---> the f2k_APP restart ---> 51
+20200511184511 ---> the f2k_APP restart ---> 52
+20200511184521 ---> the f2k_APP restart ---> 53
+20200511184531 ---> the f2k_APP restart ---> 54
+20200511184541 ---> the f2k_APP restart ---> 55
+20200511184551 ---> the f2k_APP restart ---> 56
+20200511184601 ---> the f2k_APP restart ---> 57
+20200511184611 ---> the f2k_APP restart ---> 58
+20200511184621 ---> the f2k_APP restart ---> 59
+20200511184631 ---> the f2k_APP restart ---> 60
+20200511184641 ---> the f2k_APP restart ---> 61
+20200511184651 ---> the f2k_APP restart ---> 62
+20200511184701 ---> the f2k_APP restart ---> 63
+20200511184711 ---> the f2k_APP restart ---> 64
+20200511184721 ---> the f2k_APP restart ---> 65
+20200511184732 ---> the f2k_APP restart ---> 66
+20200511184742 ---> the f2k_APP restart ---> 67
+20200511184752 ---> the f2k_APP restart ---> 68
+20200511184802 ---> the f2k_APP restart ---> 69
+20200511184812 ---> the f2k_APP restart ---> 70
+20200511184822 ---> the f2k_APP restart ---> 71
+##############################################################
+20200511185311 ---> the f2k_APP restart ---> 1
+##############################################################
+20200511185421 ---> the f2k_APP restart ---> 1
+##############################################################
+20200511185532 ---> the f2k_APP restart ---> 1
+20200511185542 ---> the f2k_APP restart ---> 1
+20200511185552 ---> the f2k_APP restart ---> 1
+##############################################################
+20200511185706 ---> the f2k_APP restart ---> 1
+20200511185716 ---> the f2k_APP restart ---> 1+1
+20200511185726 ---> the f2k_APP restart ---> 2+1
+##############################################################
+20200511185837 ---> the f2k_APP restart ---> 1
+##############################################################
+20200511185938 ---> the f2k_APP restart ---> 1
+##############################################################
+20200511190054 ---> the f2k_APP restart ---> 1
+20200511190104 ---> the f2k_APP restart ---> 1+1
+##############################################################
+20200511190604 ---> the f2k_APP restart ---> 1
+20200511190614 ---> the f2k_APP restart ---> 0
+20200511190624 ---> the f2k_APP restart ---> 1
+20200511190634 ---> the f2k_APP restart ---> 0
+##############################################################
+20200511190729 ---> the f2k_APP restart ---> 1
+20200511190739 ---> the f2k_APP restart ---> 1
+20200511190749 ---> the f2k_APP restart ---> 1
+##############################################################
+20200511190950 ---> the f2k_APP restart ---> 1
+20200511191000 ---> the f2k_APP restart ---> 2
+20200511191010 ---> the f2k_APP restart ---> 1
+##############################################################
+20200511191834 ---> the f2k_APP restart ---> 1
+20200511191844 ---> the f2k_APP restart ---> 2
+##############################################################
+20200511192013 ---> the f2k_APP restart ---> 1
+##############################################################
+20200511192134 ---> the f2k_APP restart ---> 1
+20200511192144 ---> the f2k_APP restart ---> 2
+20200511192154 ---> the f2k_APP restart ---> 1
+20200511192204 ---> the f2k_APP restart ---> 2
+##############################################################
+##############################################################
+##############################################################
+##############################################################
+20200511193630 ---> the f2k_APP restart ---> 1
+20200511193640 ---> the f2k_APP restart ---> 2
+##############################################################
+20200512101249 ---> the f2k_APP restart ---> 1
+20200512101259 ---> the f2k_APP restart ---> 2
+20200512101309 ---> the f2k_APP restart ---> 1
+##############################################################
+20200512101433 ---> the f2k_APP restart ---> 1
+##############################################################
+20200512101537 ---> the f2k_APP restart ---> 1
+20200512101547 ---> the f2k_APP restart ---> 2
+20200512101557 ---> the f2k_APP restart ---> 3
+20200512101707 ---> the f2k_APP restart ---> 1
+##############################################################
+20200512102643 ---> the f2k_APP restart ---> 1
+20200512102653 ---> the f2k_APP restart ---> 2
+##############################################################
+20200512102723 ---> the f2k_APP restart ---> 1
+20200512102733 ---> the f2k_APP restart ---> 2
+20200512102743 ---> the f2k_APP restart ---> 3
+##############################################################
+20200512102936 ---> the f2k_APP restart ---> 1
+20200512102946 ---> the f2k_APP restart ---> 2
+20200512102956 ---> the f2k_APP restart ---> 3
+##############################################################
+20200512103250 ---> the f2k_APP restart ---> 1
+20200512103300 ---> the f2k_APP restart ---> 2
+20200512103310 ---> the f2k_APP restart ---> 3
+##############################################################
+20200512113819 ---> the f2k_APP restart ---> 1
+20200512113829 ---> the f2k_APP restart ---> 2
+20200512113839 ---> the f2k_APP restart ---> 3
+##############################################################
+##############################################################
+20200512114211 ---> the f2k_APP restart ---> 1
+20200512114241 ---> the f2k_APP restart ---> 2
+##############################################################
+20200512114550 ---> the f2k_APP restart ---> 1
+20200512114600 ---> the f2k_APP restart ---> 2
+20200512114610 ---> the f2k_APP restart ---> 3
+##############################################################
+##############################################################
+20200512115341 ---> the f2k_APP restart ---> 1
+20200512115351 ---> the f2k_APP restart ---> 2
+20200512115401 ---> the f2k_APP restart ---> 3
+20200512115452 ---> the f2k_APP restart ---> 1
+20200512115502 ---> the f2k_APP restart ---> 2
+20200512115512 ---> the f2k_APP restart ---> 3
+20200512115712 ---> the f2k_APP restart ---> 1
+20200512115722 ---> the f2k_APP restart ---> 2
+20200512115732 ---> the f2k_APP restart ---> 3
+##############################################################
+20200512115933 ---> the f2k_APP restart ---> 1
+20200512115943 ---> the f2k_APP restart ---> 2
+20200512115953 ---> the f2k_APP restart ---> 3
+##############################################################
+20200512134239 ---> the f2k_APP restart ---> 1
+20200512134249 ---> the f2k_APP restart ---> 2
+20200512134259 ---> the f2k_APP restart ---> 3
+20200512134509 ---> the f2k_APP restart ---> 1
+20200512134519 ---> the f2k_APP restart ---> 2
+20200512134530 ---> the f2k_APP restart ---> 3
+20200512140003 ---> the f2k_APP restart ---> 1
+20200512140013 ---> the f2k_APP restart ---> 2
+20200512141056 ---> the f2k_APP restart ---> 1
+20200512141106 ---> the f2k_APP restart ---> 2
+20200512141116 ---> the f2k_APP restart ---> 3
+20200512142500 ---> the f2k_APP restart ---> 1
+20200512142510 ---> the f2k_APP restart ---> 2
+20200512142941 ---> the f2k_APP restart ---> 1
+20200512142951 ---> the f2k_APP restart ---> 2
+20200512143001 ---> the f2k_APP restart ---> 3
+20200512143031 ---> the f2k_APP restart ---> 1
+20200512143041 ---> the f2k_APP restart ---> 2
+20200512143051 ---> the f2k_APP restart ---> 3
+20200512144224 ---> the f2k_APP restart ---> 1
+20200512144234 ---> the f2k_APP restart ---> 2
+20200512144314 ---> the f2k_APP restart ---> 1
+20200512144324 ---> the f2k_APP restart ---> 2
+20200512144355 ---> the f2k_APP restart ---> 1
+20200512144405 ---> the f2k_APP restart ---> 2
+20200512144415 ---> the f2k_APP restart ---> 3
+20200512144635 ---> the f2k_APP restart ---> 1
+20200512144645 ---> the f2k_APP restart ---> 2
+20200512144655 ---> the f2k_APP restart ---> 3
diff --git a/external-flume/conf/security/restart_log/restart_security_f2k.log b/external-flume/conf/security/restart_log/restart_security_f2k.log
new file mode 100644
index 0000000..690385d
--- /dev/null
+++ b/external-flume/conf/security/restart_log/restart_security_f2k.log
@@ -0,0 +1,134 @@
+##############################################################
+20200512180055 ---> the security_f2k_APP restart ---> 1
+20200512180105 ---> the security_f2k_APP restart ---> 2
+##############################################################
+20200512180108 ---> the security_f2k_APP restart ---> 1
+20200512180115 ---> the security_f2k_APP restart ---> 3
+20200512180118 ---> the security_f2k_APP restart ---> 2
+20200512180125 ---> the security_f2k_APP restart ---> 1
+20200512180128 ---> the security_f2k_APP restart ---> 3
+20200512180135 ---> the security_f2k_APP restart ---> 2
+20200512180138 ---> the security_f2k_APP restart ---> 1
+20200512180145 ---> the security_f2k_APP restart ---> 3
+20200512180148 ---> the security_f2k_APP restart ---> 2
+20200512180155 ---> the security_f2k_APP restart ---> 1
+20200512180158 ---> the security_f2k_APP restart ---> 3
+20200512180206 ---> the security_f2k_APP restart ---> 2
+20200512180208 ---> the security_f2k_APP restart ---> 1
+20200512180216 ---> the security_f2k_APP restart ---> 3
+20200512180218 ---> the security_f2k_APP restart ---> 2
+20200512180226 ---> the security_f2k_APP restart ---> 1
+20200512180228 ---> the security_f2k_APP restart ---> 3
+20200512180236 ---> the security_f2k_APP restart ---> 2
+20200512180238 ---> the security_f2k_APP restart ---> 1
+20200512180246 ---> the security_f2k_APP restart ---> 3
+20200512180248 ---> the security_f2k_APP restart ---> 2
+20200512180256 ---> the security_f2k_APP restart ---> 1
+20200512180258 ---> the security_f2k_APP restart ---> 3
+##############################################################
+20200512180513 ---> the security_f2k_APP restart ---> 1
+20200512180523 ---> the security_f2k_APP restart ---> 2
+20200512180533 ---> the security_f2k_APP restart ---> 3
+##############################################################
+20200512180536 ---> the security_f2k_APP restart ---> 1
+20200512180546 ---> the security_f2k_APP restart ---> 2
+20200512180556 ---> the security_f2k_APP restart ---> 3
+20200512180626 ---> the security_f2k_APP restart ---> 1
+20200512180637 ---> the security_f2k_APP restart ---> 2
+20200512180707 ---> the security_f2k_APP restart ---> 1
+20200512180717 ---> the security_f2k_APP restart ---> 2
+20200512180747 ---> the security_f2k_APP restart ---> 1
+20200512180757 ---> the security_f2k_APP restart ---> 2
+20200512180827 ---> the security_f2k_APP restart ---> 1
+20200512180837 ---> the security_f2k_APP restart ---> 2
+20200512180907 ---> the security_f2k_APP restart ---> 1
+20200512180917 ---> the security_f2k_APP restart ---> 2
+20200512180947 ---> the security_f2k_APP restart ---> 1
+20200512180957 ---> the security_f2k_APP restart ---> 2
+20200512181028 ---> the security_f2k_APP restart ---> 1
+20200512181038 ---> the security_f2k_APP restart ---> 2
+20200512181108 ---> the security_f2k_APP restart ---> 1
+20200512181118 ---> the security_f2k_APP restart ---> 2
+20200512181148 ---> the security_f2k_APP restart ---> 1
+20200512181158 ---> the security_f2k_APP restart ---> 2
+20200512181228 ---> the security_f2k_APP restart ---> 1
+20200512181238 ---> the security_f2k_APP restart ---> 2
+20200512181308 ---> the security_f2k_APP restart ---> 1
+20200512181318 ---> the security_f2k_APP restart ---> 2
+20200512181348 ---> the security_f2k_APP restart ---> 1
+20200512181358 ---> the security_f2k_APP restart ---> 2
+20200512181429 ---> the security_f2k_APP restart ---> 1
+20200512181439 ---> the security_f2k_APP restart ---> 2
+20200512181449 ---> the security_f2k_APP restart ---> 3
+20200512181519 ---> the security_f2k_APP restart ---> 1
+20200512181529 ---> the security_f2k_APP restart ---> 2
+20200512181539 ---> the security_f2k_APP restart ---> 3
+20200512181609 ---> the security_f2k_APP restart ---> 1
+20200512181619 ---> the security_f2k_APP restart ---> 2
+20200512181649 ---> the security_f2k_APP restart ---> 1
+20200512181659 ---> the security_f2k_APP restart ---> 2
+20200512181729 ---> the security_f2k_APP restart ---> 1
+20200512181739 ---> the security_f2k_APP restart ---> 2
+20200512181809 ---> the security_f2k_APP restart ---> 1
+20200512181820 ---> the security_f2k_APP restart ---> 2
+20200512181850 ---> the security_f2k_APP restart ---> 1
+20200512181900 ---> the security_f2k_APP restart ---> 2
+20200512181910 ---> the security_f2k_APP restart ---> 3
+20200512181920 ---> the security_f2k_APP restart ---> 1
+20200512181930 ---> the security_f2k_APP restart ---> 2
+20200512181940 ---> the security_f2k_APP restart ---> 3
+20200512181950 ---> the security_f2k_APP restart ---> 1
+20200512182000 ---> the security_f2k_APP restart ---> 2
+20200512182010 ---> the security_f2k_APP restart ---> 3
+20200512182020 ---> the security_f2k_APP restart ---> 1
+20200512182030 ---> the security_f2k_APP restart ---> 2
+20200512182040 ---> the security_f2k_APP restart ---> 3
+20200512182050 ---> the security_f2k_APP restart ---> 1
+20200512182100 ---> the security_f2k_APP restart ---> 2
+20200512182110 ---> the security_f2k_APP restart ---> 3
+20200512182120 ---> the security_f2k_APP restart ---> 1
+20200512182130 ---> the security_f2k_APP restart ---> 2
+20200512182140 ---> the security_f2k_APP restart ---> 3
+20200512182150 ---> the security_f2k_APP restart ---> 1
+20200512182200 ---> the security_f2k_APP restart ---> 2
+20200512182211 ---> the security_f2k_APP restart ---> 3
+##############################################################
+20200512183204 ---> the security_f2k_APP restart ---> 1
+20200512183215 ---> the security_f2k_APP restart ---> 2
+20200512183225 ---> the security_f2k_APP restart ---> 3
+20200512183255 ---> the security_f2k_APP restart ---> 1
+20200512183305 ---> the security_f2k_APP restart ---> 2
+20200512183335 ---> the security_f2k_APP restart ---> 1
+20200512183345 ---> the security_f2k_APP restart ---> 2
+20200512183415 ---> the security_f2k_APP restart ---> 1
+20200512183425 ---> the security_f2k_APP restart ---> 2
+20200512183455 ---> the security_f2k_APP restart ---> 1
+20200512183505 ---> the security_f2k_APP restart ---> 2
+20200512183535 ---> the security_f2k_APP restart ---> 1
+20200512183545 ---> the security_f2k_APP restart ---> 2
+20200512183615 ---> the security_f2k_APP restart ---> 1
+20200512183626 ---> the security_f2k_APP restart ---> 2
+20200512183656 ---> the security_f2k_APP restart ---> 1
+20200512183706 ---> the security_f2k_APP restart ---> 2
+20200512183716 ---> the security_f2k_APP restart ---> 3
+20200512183726 ---> the security_f2k_APP restart ---> 1
+20200512183736 ---> the security_f2k_APP restart ---> 2
+20200512183746 ---> the security_f2k_APP restart ---> 3
+20200512183756 ---> the security_f2k_APP restart ---> 1
+20200512183806 ---> the security_f2k_APP restart ---> 2
+20200512183836 ---> the security_f2k_APP restart ---> 1
+20200512183846 ---> the security_f2k_APP restart ---> 2
+20200512183916 ---> the security_f2k_APP restart ---> 1
+20200512183926 ---> the security_f2k_APP restart ---> 2
+20200512183936 ---> the security_f2k_APP restart ---> 3
+20200512184006 ---> the security_f2k_APP restart ---> 1
+20200512184016 ---> the security_f2k_APP restart ---> 2
+20200512184027 ---> the security_f2k_APP restart ---> 3
+20200512184037 ---> the security_f2k_APP restart ---> 1
+20200512184047 ---> the security_f2k_APP restart ---> 2
+20200512184057 ---> the security_f2k_APP restart ---> 3
+20200512184107 ---> the security_f2k_APP restart ---> 1
+20200512184117 ---> the security_f2k_APP restart ---> 2
+20200512184127 ---> the security_f2k_APP restart ---> 3
+20200512184157 ---> the security_f2k_APP restart ---> 1
+20200512184207 ---> the security_f2k_APP restart ---> 2
diff --git a/external-flume/conf/security/security_k2f.properties b/external-flume/conf/security/security_k2f.properties
new file mode 100644
index 0000000..b60c2e7
--- /dev/null
+++ b/external-flume/conf/security/security_k2f.properties
@@ -0,0 +1,22 @@
+# Name the channel and sink (this agent deliberately defines no source)
+security.channels = c1
+security.sinks = k1
+# Use a Kafka channel; omitting the source makes the flow more efficient
+security.channels.c1.type = org.apache.flume.channel.kafka.KafkaChannel
+security.channels.c1.kafka.bootstrap.servers = 192.168.40.152:9092
+security.channels.c1.kafka.topic = SECURITY-EVENT-LOG
+security.channels.c1.kafka.consumer.group.id = lxk_0514
+security.channels.c1.kafka.consumer.auto.offset.reset = latest
+security.channels.c1.kafka.consumer.enable.auto.commit = true
+security.channels.c1.kafka.consumer.fetch.max.wait.ms = 1000
+security.channels.c1.kafka.consumer.fetch.min.bytes = 1048576
+security.channels.c1.parseAsFlumeEvent = false
+
+# Plain file_roll sink that writes the data to local disk
+security.sinks.k1.type = file_roll
+security.sinks.k1.channel = c1
+security.sinks.k1.sink.pathManager = default
+security.sinks.k1.sink.pathManager.extension = dat
+security.sinks.k1.sink.pathManager.prefix = test-
+security.sinks.k1.sink.rollInterval = 30
+security.sinks.k1.sink.directory = /home/192.168.60.101/SECURITY-EVENT-LOG
diff --git a/external-flume/conf/security/start_flume.sh b/external-flume/conf/security/start_flume.sh
new file mode 100755
index 0000000..01dde8b
--- /dev/null
+++ b/external-flume/conf/security/start_flume.sh
@@ -0,0 +1,5 @@
+#!/bin/sh
+
+BASE_DIR=$(cd $(dirname $0); pwd)
+DAE_FILE=$(cd $(dirname $0); ls | grep '^dae_' | grep '\.sh$')
+nohup ${BASE_DIR}/$DAE_FILE >/dev/null 2>&1 &
diff --git a/external-flume/conf/security/stop_flume.sh b/external-flume/conf/security/stop_flume.sh
new file mode 100755
index 0000000..e4417aa
--- /dev/null
+++ b/external-flume/conf/security/stop_flume.sh
@@ -0,0 +1,29 @@
+#!/bin/sh
+
+#DAE_NAME=dae_k2ha.sh
+#JAR_NAME=k2ha.properties
+BASE_DIR=$(cd $(dirname $0); pwd)
+
+# Quote the patterns so the shell cannot glob-expand them before grep runs
+DAE_NAME=`ls ${BASE_DIR} | grep '^dae_' | grep '\.sh$'`
+JAR_NAME=`ls ${BASE_DIR} | grep '^f2' | grep '\.properties$'`
+
+NUM1=`ps -ef | grep ${DAE_NAME} | grep -v grep | wc -l`
+pids1=$(ps -ef | grep ${DAE_NAME} | grep -v grep | awk '{print $2}')
+if [ "${NUM1}" -ge "1" ];then
+    for pid1 in $pids1
+    do
+        kill -9 $pid1
+        echo 'killed '${DAE_NAME}' process-->'$pid1
+    done
+fi
+
+NUM2=`ps -ef | grep ${JAR_NAME} | grep -v grep | wc -l`
+pids2=$(ps -ef | grep ${JAR_NAME} | grep -v grep | awk '{print $2}')
+if [ "${NUM2}" -ge "1" ];then
+    for pid2 in $pids2
+    do
+        kill -9 $pid2
+        echo 'killed '${JAR_NAME}' process-->'$pid2
+    done
+fi
diff --git a/external-flume/docker-compose.yml b/external-flume/docker-compose.yml
new file mode 100644
index 0000000..c83cc01
--- /dev/null
+++ b/external-flume/docker-compose.yml
@@ -0,0 +1,16 @@
+version: '2'
+
+services:
+  flume:
+    image: 192.168.40.153:9080/tsg/flume:1.9.0
+    restart: always
+    container_name: flume
+    volumes:
+      - "/home/tsg3.0-volumes/flume/conf:/home/apache-flume-1.9.0-bin/conf"
+      - "/home/tsg3.0-volumes/flume/flumeController:/home/apache-flume-1.9.0-bin/flumeController"
+      - "/home/tsg3.0-volumes/flume/json:/home/test"
+      - "/home/tsg3.0-volumes/flume/data:/home/192.168.60.101"
+      - "/home/tsg3.0-volumes/flume/flumeMonitor:/home/apache-flume-1.9.0-bin/flumeMonitor"
+      - "/etc/localtime:/etc/localtime:ro"
+      - "/etc/timezone:/etc/timezone:ro"
+    network_mode: "host"
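
For reference, the pieces above can be exercised end to end with a minimal smoke test, sketched below. The compose service name, volume mappings, broker address, topic, sink directory, roll interval, and file prefix are all taken from the files in this change; the flume-ng path inside the image, the availability of Kafka's console producer on the host, and the test record itself are assumptions made purely for illustration.

#!/bin/sh
# Smoke-test sketch (not part of the change): assumes the compose file above
# is deployed as-is and that Kafka's CLI tools are on the host PATH.

# 1. Make sure the sink directory exists; the file_roll sink does not create
#    it. On the host this lives under the mounted "data" volume.
mkdir -p /home/tsg3.0-volumes/flume/data/SECURITY-EVENT-LOG

# 2. Start the Flume container (host networking, conf/ and data dirs mounted).
docker-compose -f external-flume/docker-compose.yml up -d

# 3. Launch the agent by hand (normally the dae_*.sh watchdog started via
#    start_flume.sh does this). The -n value must match the "security"
#    property prefix; the flume-ng path inside the image is an assumption.
docker exec -d flume /home/apache-flume-1.9.0-bin/bin/flume-ng agent \
    -n security \
    -c /home/apache-flume-1.9.0-bin/conf/security \
    -f /home/apache-flume-1.9.0-bin/conf/security/security_k2f.properties

# 4. Publish a test record: the Kafka channel consumes it directly (no
#    source is configured) and hands it straight to the file_roll sink.
echo 'smoke-test-event' | kafka-console-producer.sh \
    --broker-list 192.168.40.152:9092 --topic SECURITY-EVENT-LOG

# 5. After the 30 s rollInterval, a rolled test-*.dat file should appear:
ls /home/tsg3.0-volumes/flume/data/SECURITY-EVENT-LOG/

Note on the path in step 5: inside the container the sink writes to /home/192.168.60.101/SECURITY-EVENT-LOG, which the volume mapping in docker-compose.yml exposes on the host as /home/tsg3.0-volumes/flume/data/SECURITY-EVENT-LOG.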