修复EAL4中低级警告版本

This commit is contained in:
qidaijie
2021-03-25 14:27:41 +08:00
parent 85f8978887
commit 0023a1455f
18 changed files with 124 additions and 110 deletions

View File

@@ -137,7 +137,7 @@
<groupId>org.apache.storm</groupId> <groupId>org.apache.storm</groupId>
<artifactId>storm-core</artifactId> <artifactId>storm-core</artifactId>
<version>${storm.version}</version> <version>${storm.version}</version>
<scope>provided</scope> <!--<scope>provided</scope>-->
<exclusions> <exclusions>
<exclusion> <exclusion>
<artifactId>slf4j-log4j12</artifactId> <artifactId>slf4j-log4j12</artifactId>

View File

@@ -13,7 +13,7 @@ zookeeper.servers=192.168.44.12:2181
hbase.zookeeper.servers=192.168.44.12:2181 hbase.zookeeper.servers=192.168.44.12:2181
#定位库地址 #定位库地址
ip.library=D:\\K18-Phase2\\tsgSpace\\dat\\dll\\ ip.library=D:\\K18-Phase2\\tsgSpace\\dat\\
#ip.library=/home/bigdata/topology/dat/ #ip.library=/home/bigdata/topology/dat/
#网关的schema位置 #网关的schema位置

View File

@@ -4,6 +4,7 @@ import com.zdjizhi.common.FlowWriteConfig;
import cn.hutool.log.Log; import cn.hutool.log.Log;
import cn.hutool.log.LogFactory; import cn.hutool.log.LogFactory;
import com.zdjizhi.utils.StringUtil; import com.zdjizhi.utils.StringUtil;
import com.zdjizhi.utils.exception.StreamCompletionException;
import org.apache.storm.task.TopologyContext; import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.BasicOutputCollector; import org.apache.storm.topology.BasicOutputCollector;
import org.apache.storm.topology.OutputFieldsDeclarer; import org.apache.storm.topology.OutputFieldsDeclarer;
@@ -38,9 +39,8 @@ public class CompletionBolt extends BaseBasicBolt {
if (StringUtil.isNotBlank(message)) { if (StringUtil.isNotBlank(message)) {
basicOutputCollector.emit(new Values(dealCommonMessage(message))); basicOutputCollector.emit(new Values(dealCommonMessage(message)));
} }
} catch (Exception e) { } catch (StreamCompletionException e) {
logger.error(FlowWriteConfig.KAFKA_TOPIC + "接收/解析过程出现异常"); logger.error(FlowWriteConfig.KAFKA_TOPIC + "接收/解析过程出现异常");
e.printStackTrace();
} }
} }

View File

@@ -1,6 +1,7 @@
package com.zdjizhi.bolt.kafka; package com.zdjizhi.bolt.kafka;
import com.zdjizhi.common.FlowWriteConfig; import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.utils.exception.StreamCompletionException;
import com.zdjizhi.utils.kafka.KafkaLogSend; import com.zdjizhi.utils.kafka.KafkaLogSend;
import com.zdjizhi.utils.system.TupleUtils; import com.zdjizhi.utils.system.TupleUtils;
import cn.hutool.log.Log; import cn.hutool.log.Log;
@@ -52,9 +53,8 @@ public class LogSendBolt extends BaseBasicBolt {
list.clear(); list.clear();
} }
} }
} catch (Exception e) { } catch (StreamCompletionException e) {
logger.error(FlowWriteConfig.KAFKA_TOPIC + "日志发送Kafka过程出现异常"); logger.error(FlowWriteConfig.KAFKA_TOPIC + "日志发送Kafka过程出现异常");
e.printStackTrace();
} }
} }

View File

@@ -1,8 +1,10 @@
package com.zdjizhi.spout; package com.zdjizhi.spout;
import cn.hutool.core.thread.ThreadUtil;
import com.zdjizhi.common.FlowWriteConfig; import com.zdjizhi.common.FlowWriteConfig;
import cn.hutool.log.Log; import cn.hutool.log.Log;
import cn.hutool.log.LogFactory; import cn.hutool.log.LogFactory;
import com.zdjizhi.utils.exception.StreamCompletionException;
import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords; import org.apache.kafka.clients.consumer.ConsumerRecords;
@@ -71,13 +73,12 @@ public class CustomizedKafkaSpout extends BaseRichSpout {
try { try {
// TODO Auto-generated method stub // TODO Auto-generated method stub
ConsumerRecords<String, String> records = consumer.poll(10000L); ConsumerRecords<String, String> records = consumer.poll(10000L);
Thread.sleep(FlowWriteConfig.TOPOLOGY_SPOUT_SLEEP_TIME); ThreadUtil.sleep(FlowWriteConfig.TOPOLOGY_SPOUT_SLEEP_TIME);
for (ConsumerRecord<String, String> record : records) { for (ConsumerRecord<String, String> record : records) {
this.collector.emit(new Values(record.value())); this.collector.emit(new Values(record.value()));
} }
} catch (Exception e) { } catch (StreamCompletionException e) {
logger.error("KafkaSpout发送消息出现异常!", e); logger.error("KafkaSpout发送消息出现异常!", e);
e.printStackTrace();
} }
} }

View File

@@ -7,6 +7,7 @@ import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.spout.CustomizedKafkaSpout; import com.zdjizhi.spout.CustomizedKafkaSpout;
import cn.hutool.log.Log; import cn.hutool.log.Log;
import cn.hutool.log.LogFactory; import cn.hutool.log.LogFactory;
import com.zdjizhi.utils.exception.StreamCompletionException;
import org.apache.storm.Config; import org.apache.storm.Config;
import org.apache.storm.generated.AlreadyAliveException; import org.apache.storm.generated.AlreadyAliveException;
import org.apache.storm.generated.AuthorizationException; import org.apache.storm.generated.AuthorizationException;
@@ -70,8 +71,9 @@ public class LogFlowWriteTopology {
} }
} }
public static void main(String[] args) throws Exception { public static void main(String[] args) {
LogFlowWriteTopology flowWriteTopology; LogFlowWriteTopology flowWriteTopology;
try {
boolean runLocally = true; boolean runLocally = true;
int size = 2; int size = 2;
if (args.length >= size && FlowWriteConfig.MODEL.equalsIgnoreCase(args[1])) { if (args.length >= size && FlowWriteConfig.MODEL.equalsIgnoreCase(args[1])) {
@@ -90,5 +92,8 @@ public class LogFlowWriteTopology {
logger.info("执行远程部署模式..."); logger.info("执行远程部署模式...");
flowWriteTopology.runRemotely(); flowWriteTopology.runRemotely();
} }
} catch (StreamCompletionException | InterruptedException | InvalidTopologyException | AlreadyAliveException | AuthorizationException e) {
logger.error("Topology Start ERROR! message is:" + e);
}
} }
} }

View File

@@ -1,6 +1,7 @@
package com.zdjizhi.topology; package com.zdjizhi.topology;
import cn.hutool.core.thread.ThreadUtil;
import org.apache.storm.Config; import org.apache.storm.Config;
import org.apache.storm.LocalCluster; import org.apache.storm.LocalCluster;
import org.apache.storm.StormSubmitter; import org.apache.storm.StormSubmitter;
@@ -21,7 +22,7 @@ public final class StormRunner{
LocalCluster localCluster = new LocalCluster(); LocalCluster localCluster = new LocalCluster();
localCluster.submitTopology(topologyName, conf, builder.createTopology()); localCluster.submitTopology(topologyName, conf, builder.createTopology());
Thread.sleep((long) runtimeInSeconds * MILLS_IN_SEC); ThreadUtil.sleep((long) runtimeInSeconds * MILLS_IN_SEC);
localCluster.shutdown(); localCluster.shutdown();
} }

View File

@@ -0,0 +1,18 @@
package com.zdjizhi.utils.exception;

/**
 * Unchecked exception signalling a failure inside the log flow-write
 * (stream completion) pipeline — e.g. Kafka receive/send, log parsing,
 * or Zookeeper lock handling errors caught by the bolts and spouts.
 *
 * @author qidaijie
 * @Package com.zdjizhi.utils.exception
 * @Description: runtime wrapper exception for stream-completion failures
 * @date 2021/3/25 10:14
 */
public class StreamCompletionException extends RuntimeException {

    // Throwable is Serializable; declare an explicit id so serialized form
    // does not drift with compiler-generated defaults.
    private static final long serialVersionUID = 1L;

    /**
     * Wraps an underlying exception, preserving it as the cause so the
     * original stack trace is not lost.
     *
     * @param e the exception that triggered this failure
     */
    public StreamCompletionException(Exception e) {
        super(e);
    }

    /**
     * Creates an exception carrying only a descriptive message.
     *
     * @param e the detail message
     */
    public StreamCompletionException(String e) {
        super(e);
    }
}

View File

@@ -1,6 +1,7 @@
package com.zdjizhi.utils.general; package com.zdjizhi.utils.general;
import com.zdjizhi.common.FlowWriteConfig; import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.utils.exception.StreamCompletionException;
import com.zdjizhi.utils.zookeeper.DistributedLock; import com.zdjizhi.utils.zookeeper.DistributedLock;
import com.zdjizhi.utils.zookeeper.ZookeeperUtils; import com.zdjizhi.utils.zookeeper.ZookeeperUtils;
import cn.hutool.log.Log; import cn.hutool.log.Log;
@@ -126,8 +127,7 @@ public class SnowflakeId {
} }
this.workerId = tmpWorkerId; this.workerId = tmpWorkerId;
this.dataCenterId = dataCenterIdNum; this.dataCenterId = dataCenterIdNum;
} catch (Exception e) { } catch (StreamCompletionException e) {
e.printStackTrace();
logger.error("This is not usual error!!!===>>>" + e + "<<<==="); logger.error("This is not usual error!!!===>>>" + e + "<<<===");
}finally { }finally {
lock.unlock(); lock.unlock();

View File

@@ -2,6 +2,7 @@ package com.zdjizhi.utils.general;
import com.zdjizhi.common.FlowWriteConfig; import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.utils.exception.StreamCompletionException;
import com.zdjizhi.utils.json.JsonParseUtil; import com.zdjizhi.utils.json.JsonParseUtil;
import cn.hutool.log.Log; import cn.hutool.log.Log;
import cn.hutool.log.LogFactory; import cn.hutool.log.LogFactory;
@@ -79,9 +80,8 @@ public class TransFormUtils {
functionSet(function, object, appendToKeyName, appendTo, name, param); functionSet(function, object, appendToKeyName, appendTo, name, param);
} }
return JSONObject.toJSONString(object); return JSONObject.toJSONString(object);
} catch (Exception e) { } catch (StreamCompletionException e) {
logger.error(FlowWriteConfig.KAFKA_TOPIC + "日志预处理过程出现异常"); logger.error(FlowWriteConfig.KAFKA_TOPIC + "日志预处理过程出现异常");
e.printStackTrace();
return ""; return "";
} }
} }

View File

@@ -1,9 +1,9 @@
package com.zdjizhi.utils.general; package com.zdjizhi.utils.general;
import cn.hutool.core.codec.Base64;
import com.zdjizhi.common.FlowWriteConfig; import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.utils.hbase.HBaseUtils; import com.zdjizhi.utils.hbase.HBaseUtils;
import com.zdjizhi.utils.json.JsonParseUtil; import com.zdjizhi.utils.json.JsonParseUtil;
import com.zdjizhi.utils.system.LogPrintUtil;
import cn.hutool.log.Log; import cn.hutool.log.Log;
import cn.hutool.log.LogFactory; import cn.hutool.log.LogFactory;
import com.jayway.jsonpath.InvalidPathException; import com.jayway.jsonpath.InvalidPathException;
@@ -12,7 +12,6 @@ import com.zdjizhi.utils.Encodes;
import com.zdjizhi.utils.FormatUtils; import com.zdjizhi.utils.FormatUtils;
import com.zdjizhi.utils.IpLookup; import com.zdjizhi.utils.IpLookup;
import com.zdjizhi.utils.StringUtil; import com.zdjizhi.utils.StringUtil;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
@@ -24,8 +23,6 @@ class TransFunction {
private static final Log logger = LogFactory.get(); private static final Log logger = LogFactory.get();
private static final Log log = LogFactory.get();
private static final Pattern PATTERN = Pattern.compile("[0-9]*"); private static final Pattern PATTERN = Pattern.compile("[0-9]*");
/** /**
@@ -78,7 +75,7 @@ class TransFunction {
static String radiusMatch(String ip) { static String radiusMatch(String ip) {
String account = HBaseUtils.getAccount(ip.trim()); String account = HBaseUtils.getAccount(ip.trim());
if (StringUtil.isBlank(account)) { if (StringUtil.isBlank(account)) {
log.warn("HashMap get account is null, Ip is :{}", ip); logger.warn("HashMap get account is null, Ip is :{}", ip);
} }
return account; return account;
} }
@@ -93,8 +90,7 @@ class TransFunction {
try { try {
return FormatUtils.getTopPrivateDomain(domain); return FormatUtils.getTopPrivateDomain(domain);
} catch (StringIndexOutOfBoundsException outException) { } catch (StringIndexOutOfBoundsException outException) {
log.error("解析顶级域名异常,异常域名:{}" + domain); logger.error("解析顶级域名异常,异常域名:{}" + domain);
logger.error(LogPrintUtil.print(outException));
return ""; return "";
} }
} }
@@ -111,13 +107,13 @@ class TransFunction {
try { try {
if (StringUtil.isNotBlank(message)) { if (StringUtil.isNotBlank(message)) {
if (StringUtil.isNotBlank(charset)) { if (StringUtil.isNotBlank(charset)) {
result = Encodes.decodeBase64String(message, charset); result = Base64.decodeStr(message, charset);
} else { } else {
result = Encodes.decodeBase64String(message, FlowWriteConfig.MAIL_DEFAULT_CHARSET); result = Base64.decodeStr(message, FlowWriteConfig.MAIL_DEFAULT_CHARSET);
} }
} }
} catch (Exception e) { } catch (RuntimeException rune) {
logger.error("解析 Base64 异常,异常信息:" + LogPrintUtil.print(e)); logger.error("解析 Base64 异常,异常信息:" + rune);
} }
return result; return result;
} }
@@ -137,7 +133,7 @@ class TransFunction {
flattenResult = read.get(0); flattenResult = read.get(0);
} }
} catch (ClassCastException | InvalidPathException e) { } catch (ClassCastException | InvalidPathException e) {
log.error("设备标签解析异常,[ " + expr + " ]解析表达式错误" + LogPrintUtil.print(e)); logger.error("设备标签解析异常,[ " + expr + " ]解析表达式错误" + e);
} }
return flattenResult; return flattenResult;
} }
@@ -188,8 +184,8 @@ class TransFunction {
} }
} }
} }
} catch (Exception e) { } catch (RuntimeException e) {
logger.error("IF 函数执行异常,异常信息:" + LogPrintUtil.print(e)); logger.error("IF 函数执行异常,异常信息:" + e);
} }
return null; return null;
} }
@@ -208,8 +204,8 @@ class TransFunction {
} else { } else {
return param; return param;
} }
} catch (Exception e) { } catch (RuntimeException e) {
logger.error("SetValue 函数异常,异常信息:" + LogPrintUtil.print(e)); logger.error("SetValue 函数异常,异常信息:" + e);
} }
return null; return null;
} }

View File

@@ -68,10 +68,8 @@ public class HBaseUtils {
logger.warn("HBaseUtils get HBase connection,now to getAll()."); logger.warn("HBaseUtils get HBase connection,now to getAll().");
} catch (IOException ioe) { } catch (IOException ioe) {
logger.error("HBaseUtils getHbaseConn() IOException===>{" + ioe + "}<==="); logger.error("HBaseUtils getHbaseConn() IOException===>{" + ioe + "}<===");
ioe.printStackTrace(); } catch (RuntimeException e) {
} catch (Exception e) {
logger.error("HBaseUtils getHbaseConn() Exception===>{" + e + "}<==="); logger.error("HBaseUtils getHbaseConn() Exception===>{" + e + "}<===");
e.printStackTrace();
} }
} }
@@ -122,10 +120,8 @@ public class HBaseUtils {
time = endTime; time = endTime;
} catch (IOException ioe) { } catch (IOException ioe) {
logger.error("HBaseUtils timestampsFilter is IOException===>{" + ioe + "}<==="); logger.error("HBaseUtils timestampsFilter is IOException===>{" + ioe + "}<===");
ioe.printStackTrace(); } catch (RuntimeException e) {
} catch (Exception e) {
logger.error("HBaseUtils timestampsFilter is Exception===>{" + e + "}<==="); logger.error("HBaseUtils timestampsFilter is Exception===>{" + e + "}<===");
e.printStackTrace();
} finally { } finally {
if (scanner != null) { if (scanner != null) {
scanner.close(); scanner.close();
@@ -134,7 +130,7 @@ public class HBaseUtils {
try { try {
table.close(); table.close();
} catch (IOException e) { } catch (IOException e) {
e.printStackTrace(); logger.error("HBase Table Close ERROR! Exception message is:" + e);
} }
} }
} }
@@ -160,10 +156,8 @@ public class HBaseUtils {
scanner.close(); scanner.close();
} catch (IOException ioe) { } catch (IOException ioe) {
logger.error("HBaseUtils getAll() is IOException===>{" + ioe + "}<==="); logger.error("HBaseUtils getAll() is IOException===>{" + ioe + "}<===");
ioe.printStackTrace(); } catch (RuntimeException e) {
} catch (Exception e) {
logger.error("HBaseUtils getAll() is Exception===>{" + e + "}<==="); logger.error("HBaseUtils getAll() is Exception===>{" + e + "}<===");
e.printStackTrace();
} }
} }
@@ -179,26 +173,11 @@ public class HBaseUtils {
public void run() { public void run() {
try { try {
change(); change();
} catch (Exception e) { } catch (RuntimeException e) {
logger.error("HBaseUtils update hbaseCache is error===>{" + e + "}<==="); logger.error("HBaseUtils update hbaseCache is error===>{" + e + "}<===");
e.printStackTrace();
} }
} }
}, 1, FlowWriteConfig.HBASE_TICK_TUPLE_FREQ_SECS, TimeUnit.SECONDS); }, 1, FlowWriteConfig.HBASE_TICK_TUPLE_FREQ_SECS, TimeUnit.SECONDS);
//
// Timer timer = new Timer();
// timer.scheduleAtFixedRate(new TimerTask() {
// @Override
// public void run() {
// try {
// change();
// } catch (Exception e) {
// logger.error("HBaseUtils update hbaseCache is error===>{" + e + "}<===");
// e.printStackTrace();
// }
// }
// }, 1, 1000 * FlowWriteConfig.HBASE_TICK_TUPLE_FREQ_SECS);//起始1ms,以后每隔60s
} }

View File

@@ -1,6 +1,5 @@
package com.zdjizhi.utils.http; package com.zdjizhi.utils.http;
import com.zdjizhi.utils.system.LogPrintUtil;
import cn.hutool.log.Log; import cn.hutool.log.Log;
import cn.hutool.log.LogFactory; import cn.hutool.log.LogFactory;
import org.apache.http.HttpEntity; import org.apache.http.HttpEntity;
@@ -19,46 +18,60 @@ import java.io.InputStreamReader;
* @author qidaijie * @author qidaijie
*/ */
public class HttpClientUtil { public class HttpClientUtil {
// private static final int MAX_STR_LEN = 512000;
private static final Log logger = LogFactory.get(); private static final Log logger = LogFactory.get();
/** /**
* 请求网关获取schema * 请求网关获取schema
*
* @param http 网关url * @param http 网关url
* @return schema * @return schema
*/ */
public static String requestByGetMethod(String http) { public static String requestByGetMethod(String http) {
CloseableHttpClient httpClient = HttpClients.createDefault(); CloseableHttpClient httpClient = HttpClients.createDefault();
StringBuilder entityStringBuilder = null; StringBuilder entityStringBuilder;
HttpGet get = new HttpGet(http); HttpGet get = new HttpGet(http);
BufferedReader bufferedReader = null; BufferedReader bufferedReader = null;
try (CloseableHttpResponse httpResponse = httpClient.execute(get)) { CloseableHttpResponse httpResponse = null;
try {
httpResponse = httpClient.execute(get);
HttpEntity entity = httpResponse.getEntity(); HttpEntity entity = httpResponse.getEntity();
entityStringBuilder = new StringBuilder(); entityStringBuilder = new StringBuilder();
if (null != entity) { if (null != entity) {
bufferedReader = new BufferedReader(new InputStreamReader(httpResponse.getEntity().getContent(), "UTF-8"), 8 * 1024); bufferedReader = new BufferedReader(new InputStreamReader(httpResponse.getEntity().getContent(), "UTF-8"), 8 * 1024);
String line; int intC;
while ((line = bufferedReader.readLine()) != null) { while ((intC = bufferedReader.read()) != -1) {
entityStringBuilder.append(line); char c = (char) intC;
if (c == '\n') {
break;
} }
entityStringBuilder.append(c);
} }
} catch (Exception e) {
logger.error(LogPrintUtil.print(e)); return entityStringBuilder.toString();
}
} catch (IOException e) {
logger.error("Get Schema from Query engine ERROR! Exception message is:" + e);
} finally { } finally {
if (httpClient != null) { if (httpClient != null) {
try { try {
httpClient.close(); httpClient.close();
} catch (IOException e) { } catch (IOException e) {
logger.error(LogPrintUtil.print(e)); logger.error("Close HTTP Client ERROR! Exception message is:" + e);
}
}
if (httpResponse != null) {
try {
httpResponse.close();
} catch (IOException e) {
logger.error("Close httpResponse ERROR! Exception message is:" + e);
} }
} }
if (bufferedReader != null) { if (bufferedReader != null) {
// bufferedReader.close();
org.apache.commons.io.IOUtils.closeQuietly(bufferedReader); org.apache.commons.io.IOUtils.closeQuietly(bufferedReader);
} }
} }
return entityStringBuilder.toString(); return "";
} }
} }

View File

@@ -135,13 +135,8 @@ public class JsonParseUtil {
if (checkKeepField(filedStr)) { if (checkKeepField(filedStr)) {
String name = JsonPath.read(filedStr, "$.name").toString(); String name = JsonPath.read(filedStr, "$.name").toString();
String type = JsonPath.read(filedStr, "$.type").toString(); String type = JsonPath.read(filedStr, "$.type").toString();
// boolean contains = type.contains("[");
// if (contains) {
// map.put(name, Integer.class);
// } else {
//组合用来生成实体类的map //组合用来生成实体类的map
map.put(name, getClassName(type)); map.put(name, getClassName(type));
// }
} }
} }
return map; return map;

View File

@@ -54,7 +54,6 @@ public class KafkaLogSend {
} }
}); });
if (errorSum[0] > FlowWriteConfig.MAX_FAILURE_NUM) { if (errorSum[0] > FlowWriteConfig.MAX_FAILURE_NUM) {
// InfluxDbUtils.sendKafkaFail(list.size());
list.clear(); list.clear();
} }
} }

View File

@@ -1,5 +1,10 @@
package com.zdjizhi.utils.system; package com.zdjizhi.utils.system;
import com.zdjizhi.utils.StringUtil;
import com.zdjizhi.utils.exception.StreamCompletionException;
import java.io.IOException;
import java.util.Locale;
import java.util.Properties; import java.util.Properties;
@@ -46,9 +51,9 @@ public final class FlowWriteConfigurations {
public static Boolean getBooleanProperty(Integer type, String key) { public static Boolean getBooleanProperty(Integer type, String key) {
if (type == 0) { if (type == 0) {
return "true".equals(propService.getProperty(key).toLowerCase().trim()); return StringUtil.equals(propService.getProperty(key).toLowerCase().trim().toUpperCase(Locale.ENGLISH), "true");
} else if (type == 1) { } else if (type == 1) {
return "true".equals(propKafka.getProperty(key).toLowerCase().trim()); return StringUtil.equals(propKafka.getProperty(key).toLowerCase().trim().toUpperCase(Locale.ENGLISH), "true");
} else { } else {
return null; return null;
} }
@@ -58,7 +63,7 @@ public final class FlowWriteConfigurations {
try { try {
propService.load(FlowWriteConfigurations.class.getClassLoader().getResourceAsStream("service_flow_config.properties")); propService.load(FlowWriteConfigurations.class.getClassLoader().getResourceAsStream("service_flow_config.properties"));
propKafka.load(FlowWriteConfigurations.class.getClassLoader().getResourceAsStream("kafka_config.properties")); propKafka.load(FlowWriteConfigurations.class.getClassLoader().getResourceAsStream("kafka_config.properties"));
} catch (Exception e) { } catch (IOException | StreamCompletionException e) {
propKafka = null; propKafka = null;
propService = null; propService = null;
} }

View File

@@ -2,6 +2,7 @@ package com.zdjizhi.utils.zookeeper;
import cn.hutool.log.Log; import cn.hutool.log.Log;
import cn.hutool.log.LogFactory; import cn.hutool.log.LogFactory;
import com.zdjizhi.utils.exception.StreamCompletionException;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
import org.apache.zookeeper.*; import org.apache.zookeeper.*;
import org.apache.zookeeper.data.Stat; import org.apache.zookeeper.data.Stat;
@@ -89,7 +90,7 @@ public class DistributedLock implements Lock, Watcher {
waitForLock(waitLock, sessionTimeout); waitForLock(waitLock, sessionTimeout);
} }
} catch (InterruptedException | KeeperException e) { } catch (InterruptedException | KeeperException e) {
e.printStackTrace(); logger.error("获取锁异常" + e);
} }
} }
@@ -122,7 +123,7 @@ public class DistributedLock implements Lock, Watcher {
String prevNode = currentLock.substring(currentLock.lastIndexOf("/") + 1); String prevNode = currentLock.substring(currentLock.lastIndexOf("/") + 1);
waitLock = lockObjects.get(Collections.binarySearch(lockObjects, prevNode) - 1); waitLock = lockObjects.get(Collections.binarySearch(lockObjects, prevNode) - 1);
} catch (InterruptedException | KeeperException e) { } catch (InterruptedException | KeeperException e) {
e.printStackTrace(); logger.error("获取锁过程异常" + e);
} }
return false; return false;
} }
@@ -135,8 +136,8 @@ public class DistributedLock implements Lock, Watcher {
return true; return true;
} }
return waitForLock(waitLock, timeout); return waitForLock(waitLock, timeout);
} catch (Exception e) { } catch (KeeperException | InterruptedException | StreamCompletionException e) {
e.printStackTrace(); logger.error("判断是否锁定异常" + e);
} }
return false; return false;
} }
@@ -161,7 +162,7 @@ public class DistributedLock implements Lock, Watcher {
currentLock = null; currentLock = null;
zk.close(); zk.close();
} catch (InterruptedException | KeeperException e) { } catch (InterruptedException | KeeperException e) {
e.printStackTrace(); logger.error("关闭锁异常" + e);
} }
} }

View File

@@ -1,6 +1,6 @@
package com.zdjizhi.utils.zookeeper; package com.zdjizhi.utils.zookeeper;
import com.zdjizhi.utils.system.LogPrintUtil; import cn.hutool.core.util.StrUtil;
import cn.hutool.log.Log; import cn.hutool.log.Log;
import cn.hutool.log.LogFactory; import cn.hutool.log.LogFactory;
import org.apache.zookeeper.*; import org.apache.zookeeper.*;
@@ -58,7 +58,7 @@ public class ZookeeperUtils implements Watcher {
} }
} }
} catch (KeeperException | InterruptedException e) { } catch (KeeperException | InterruptedException e) {
logger.error("modify error Can't modify," + LogPrintUtil.print(e)); logger.error("modify error Can't modify," + e);
} finally { } finally {
closeConn(); closeConn();
} }
@@ -76,7 +76,7 @@ public class ZookeeperUtils implements Watcher {
zookeeper = new ZooKeeper(host, SESSION_TIME_OUT, this); zookeeper = new ZooKeeper(host, SESSION_TIME_OUT, this);
countDownLatch.await(); countDownLatch.await();
} catch (IOException | InterruptedException e) { } catch (IOException | InterruptedException e) {
e.printStackTrace(); logger.error("Connection to the Zookeeper Exception! message:" + e);
} }
} }
@@ -89,7 +89,7 @@ public class ZookeeperUtils implements Watcher {
zookeeper.close(); zookeeper.close();
} }
} catch (InterruptedException e) { } catch (InterruptedException e) {
logger.error(LogPrintUtil.print(e)); logger.error("Close the Zookeeper connection Exception! message:" + e);
} }
} }
@@ -104,9 +104,10 @@ public class ZookeeperUtils implements Watcher {
Stat stat = new Stat(); Stat stat = new Stat();
try { try {
byte[] resByte = zookeeper.getData(path, true, stat); byte[] resByte = zookeeper.getData(path, true, stat);
result = new String(resByte);
result = StrUtil.str(resByte, "UTF-8");
} catch (KeeperException | InterruptedException e) { } catch (KeeperException | InterruptedException e) {
logger.error("Get node information exception" + LogPrintUtil.print(e)); logger.error("Get node information exception" + e);
} }
return result; return result;
} }
@@ -130,7 +131,7 @@ public class ZookeeperUtils implements Watcher {
logger.warn("Node already exists ! Don't need to create"); logger.warn("Node already exists ! Don't need to create");
} }
} catch (KeeperException | InterruptedException e) { } catch (KeeperException | InterruptedException e) {
logger.error(LogPrintUtil.print(e)); logger.error(e);
} finally { } finally {
closeConn(); closeConn();
} }