ansible test3
This commit is contained in:
30
bigdata-scripts_test3/roles/kafka/files/dae-kafka.sh
Executable file
30
bigdata-scripts_test3/roles/kafka/files/dae-kafka.sh
Executable file
@@ -0,0 +1,30 @@
#!/bin/sh
#
# Kafka watchdog daemon.
#
# Usage: dae-kafka.sh <BASE_DIR>
#   $1 - BASE_DIR: installation path that contains the Kafka distribution
#        directory ($BASE_DIR/$VERSION).
#
# Every 60 seconds: if no Kafka broker JVM is running, start one and bump a
# restart counter; if more than one is running, kill them all so the next
# iteration brings up a single clean instance.
#
# The JMX port used on startup (JMX_PORT=9191) can be changed below.

PRO_NAME=Kafka                 # main-class name as reported by jps
BASE_DIR=$1
VERSION="kafka_2.11-1.0.0"

# Refuse to run with an empty BASE_DIR: every path below would otherwise
# resolve relative to "/" (e.g. "/kafka_2.11-1.0.0/logs/...").
if [ -z "$BASE_DIR" ]; then
    echo "Usage: $0 <kafka-install-base-dir>" >&2
    exit 1
fi

# jps and kafka-server-start.sh need JAVA_HOME etc. from the login profile.
. /etc/profile

while true ; do
    # Count running broker JVMs; jps prints "<pid> Kafka" per broker.
    # (No "grep -v grep" needed: jps lists JVMs, not this pipeline.)
    NUM=$(jps | grep -cw "${PRO_NAME}")

    if [ "${NUM}" -lt 1 ]; then
        # Broker is down: start it detached, logging discarded (Kafka keeps
        # its own logs under $BASE_DIR/$VERSION/logs).
        JMX_PORT=9191 nohup "$BASE_DIR/$VERSION/bin/kafka-server-start.sh" \
            "$BASE_DIR/$VERSION/config/server.properties" > /dev/null 2>&1 &

        # Persist a restart counter; default to 0 the first time, when the
        # counter file does not exist yet (the old `expr` call crashed here).
        OLD_NUM=$(cat "$BASE_DIR/$VERSION/logs/restart_sum.log" 2>/dev/null)
        RESTART_NUM=$(( ${OLD_NUM:-0} + 1 ))
        echo "$RESTART_NUM" > "$BASE_DIR/$VERSION/logs/restart_sum.log"
        echo "$(date "+%Y-%m-%d %H:%M:%S") - kafka服务启动/异常重启 - 重启次数 -> $RESTART_NUM" >> "$BASE_DIR/$VERSION/restart.log"

    # More than one broker on this host: kill every instance and let the
    # next loop iteration restart a single one.
    elif [ "${NUM}" -gt 1 ]; then
        # NOTE: the previous `killall -9 Kafka` never matched anything —
        # killall matches the OS process name ("java"), while "Kafka" is
        # only the jps main-class name. Kill the jps-reported PIDs instead.
        jps | awk -v name="${PRO_NAME}" '$2 == name { print $1 }' \
            | xargs -r kill -9
    fi

    sleep 60
done
BIN
bigdata-scripts_test3/roles/kafka/files/kafka-message.tar.gz
Normal file
BIN
bigdata-scripts_test3/roles/kafka/files/kafka-message.tar.gz
Normal file
Binary file not shown.
BIN
bigdata-scripts_test3/roles/kafka/files/kafka_2.11-1.0.0.tgz
Normal file
BIN
bigdata-scripts_test3/roles/kafka/files/kafka_2.11-1.0.0.tgz
Normal file
Binary file not shown.
14
bigdata-scripts_test3/roles/kafka/files/kflogdelete.sh
Executable file
14
bigdata-scripts_test3/roles/kafka/files/kflogdelete.sh
Executable file
@@ -0,0 +1,14 @@
#!/bin/sh
#
# Delete rotated Kafka log files older than three days.
#
# Keeps only the most recent three days of logs; to retain more, change the
# "-3 days" offset in the `day=` line below.
#
# Intended to be scheduled from /etc/crontab. Cron runs with a minimal
# environment, so the login profile is sourced to pick up KAFKA_HOME
# (the original left this commented out, which made KAFKA_HOME empty
# under cron and turned the rm below into "rm -rf /logs/...").
. /etc/profile

# Date stamp embedded in rotated file names, e.g. server.log.2024-01-01
# (GNU date relative-date syntax).
day=$(date +"%Y-%m-%d" -d "-3 days")

# Only prune while exactly one broker JVM is running (jps lists it as "Kafka").
kafka=$(jps | grep -cw Kafka)

if [ "$kafka" -eq 1 ]; then
    # ${KAFKA_HOME:?} aborts the script with an error instead of expanding
    # to "" and deleting from the filesystem root when the var is unset.
    rm -rf "${KAFKA_HOME:?KAFKA_HOME is not set}/logs/"*."$day"*
fi
95
bigdata-scripts_test3/roles/kafka/files/log4j.properties
Normal file
95
bigdata-scripts_test3/roles/kafka/files/log4j.properties
Normal file
@@ -0,0 +1,95 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Kafka broker log4j configuration. All file appenders roll daily
# ('.'yyyy-MM-dd suffix) and write under ${kafka.logs.dir}, which the
# broker start scripts set via -Dkafka.logs.dir.

# Unspecified loggers and loggers with additivity=true output to server.log and stdout
# Note that INFO only applies to unspecified loggers, the log level of the child logger is used otherwise
log4j.rootLogger=INFO, stdout, kafkaAppender

# Console output (captured by nohup/stdout redirection of the start script).
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n

# Main broker log: server.log
log4j.appender.kafkaAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.kafkaAppender.DatePattern='.'yyyy-MM-dd
log4j.appender.kafkaAppender.File=${kafka.logs.dir}/server.log
log4j.appender.kafkaAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.kafkaAppender.layout.ConversionPattern=[%d] %p %m (%c)%n

# Partition leader/ISR state transitions: state-change.log
log4j.appender.stateChangeAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.stateChangeAppender.DatePattern='.'yyyy-MM-dd
log4j.appender.stateChangeAppender.File=${kafka.logs.dir}/state-change.log
log4j.appender.stateChangeAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.stateChangeAppender.layout.ConversionPattern=[%d] %p %m (%c)%n

# Per-request logging (only emitted at DEBUG/TRACE, see loggers below).
log4j.appender.requestAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.requestAppender.DatePattern='.'yyyy-MM-dd
log4j.appender.requestAppender.File=${kafka.logs.dir}/kafka-request.log
log4j.appender.requestAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.requestAppender.layout.ConversionPattern=[%d] %p %m (%c)%n

# Log-compaction (cleaner thread) activity: log-cleaner.log
log4j.appender.cleanerAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.cleanerAppender.DatePattern='.'yyyy-MM-dd
log4j.appender.cleanerAppender.File=${kafka.logs.dir}/log-cleaner.log
log4j.appender.cleanerAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.cleanerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n

# Controller (broker coordination) activity: controller.log
log4j.appender.controllerAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.controllerAppender.DatePattern='.'yyyy-MM-dd
log4j.appender.controllerAppender.File=${kafka.logs.dir}/controller.log
log4j.appender.controllerAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.controllerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n

# ACL authorizer decisions: kafka-authorizer.log
log4j.appender.authorizerAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.authorizerAppender.DatePattern='.'yyyy-MM-dd
log4j.appender.authorizerAppender.File=${kafka.logs.dir}/kafka-authorizer.log
log4j.appender.authorizerAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.authorizerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n

# Change the two lines below to adjust ZK client logging
log4j.logger.org.I0Itec.zkclient.ZkClient=INFO
log4j.logger.org.apache.zookeeper=INFO

# Change the two lines below to adjust the general broker logging level (output to server.log and stdout)
log4j.logger.kafka=INFO
log4j.logger.org.apache.kafka=INFO

# Change to DEBUG or TRACE to enable request logging
log4j.logger.kafka.request.logger=WARN, requestAppender
log4j.additivity.kafka.request.logger=false

# Uncomment the lines below and change log4j.logger.kafka.network.RequestChannel$ to TRACE for additional output
# related to the handling of requests
#log4j.logger.kafka.network.Processor=TRACE, requestAppender
#log4j.logger.kafka.server.KafkaApis=TRACE, requestAppender
#log4j.additivity.kafka.server.KafkaApis=false
#log4j.logger.kafka.network.RequestChannel$=WARN, requestAppender
log4j.logger.kafka.network.RequestChannel$=INFO, requestAppender
log4j.additivity.kafka.network.RequestChannel$=false

#log4j.logger.kafka.controller=TRACE, controllerAppender
log4j.logger.kafka.controller=INFO, controllerAppender
log4j.additivity.kafka.controller=false

log4j.logger.kafka.log.LogCleaner=INFO, cleanerAppender
log4j.additivity.kafka.log.LogCleaner=false

#log4j.logger.state.change.logger=TRACE, stateChangeAppender
log4j.logger.state.change.logger=INFO, stateChangeAppender
log4j.additivity.state.change.logger=false

# Access denials are logged at INFO level, change to DEBUG to also log allowed accesses
log4j.logger.kafka.authorizer.logger=INFO, authorizerAppender
log4j.additivity.kafka.authorizer.logger=false
||||
Reference in New Issue
Block a user