diff --git a/pom.xml b/pom.xml
index a4efe6e..b517dfa 100644
--- a/pom.xml
+++ b/pom.xml
@@ -137,7 +137,7 @@
org.apache.storm
storm-core
${storm.version}
- provided
+
slf4j-log4j12
diff --git a/properties/service_flow_config.properties b/properties/service_flow_config.properties
index 181435f..42106bf 100644
--- a/properties/service_flow_config.properties
+++ b/properties/service_flow_config.properties
@@ -13,7 +13,7 @@ zookeeper.servers=192.168.44.12:2181
hbase.zookeeper.servers=192.168.44.12:2181
#定位库地址
-ip.library=D:\\K18-Phase2\\tsgSpace\\dat\\dll\\
+ip.library=D:\\K18-Phase2\\tsgSpace\\dat\\
#ip.library=/home/bigdata/topology/dat/
#网关的schema位置
diff --git a/src/main/java/com/zdjizhi/bolt/CompletionBolt.java b/src/main/java/com/zdjizhi/bolt/CompletionBolt.java
index 309cf56..b20858f 100644
--- a/src/main/java/com/zdjizhi/bolt/CompletionBolt.java
+++ b/src/main/java/com/zdjizhi/bolt/CompletionBolt.java
@@ -4,6 +4,7 @@ import com.zdjizhi.common.FlowWriteConfig;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.utils.StringUtil;
+import com.zdjizhi.utils.exception.StreamCompletionException;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.BasicOutputCollector;
import org.apache.storm.topology.OutputFieldsDeclarer;
@@ -34,13 +35,12 @@ public class CompletionBolt extends BaseBasicBolt {
@Override
public void execute(Tuple tuple, BasicOutputCollector basicOutputCollector) {
try {
- String message = tuple.getString(0);
- if (StringUtil.isNotBlank(message)) {
- basicOutputCollector.emit(new Values(dealCommonMessage(message)));
- }
- } catch (Exception e) {
+ String message = tuple.getString(0);
+ if (StringUtil.isNotBlank(message)) {
+ basicOutputCollector.emit(new Values(dealCommonMessage(message)));
+ }
+ } catch (StreamCompletionException e) {
logger.error(FlowWriteConfig.KAFKA_TOPIC + "接收/解析过程出现异常");
- e.printStackTrace();
}
}
diff --git a/src/main/java/com/zdjizhi/bolt/kafka/LogSendBolt.java b/src/main/java/com/zdjizhi/bolt/kafka/LogSendBolt.java
index 9cf14b8..a61edcf 100644
--- a/src/main/java/com/zdjizhi/bolt/kafka/LogSendBolt.java
+++ b/src/main/java/com/zdjizhi/bolt/kafka/LogSendBolt.java
@@ -1,6 +1,7 @@
package com.zdjizhi.bolt.kafka;
import com.zdjizhi.common.FlowWriteConfig;
+import com.zdjizhi.utils.exception.StreamCompletionException;
import com.zdjizhi.utils.kafka.KafkaLogSend;
import com.zdjizhi.utils.system.TupleUtils;
import cn.hutool.log.Log;
@@ -52,9 +53,8 @@ public class LogSendBolt extends BaseBasicBolt {
list.clear();
}
}
- } catch (Exception e) {
+ } catch (StreamCompletionException e) {
logger.error(FlowWriteConfig.KAFKA_TOPIC + "日志发送Kafka过程出现异常");
- e.printStackTrace();
}
}
diff --git a/src/main/java/com/zdjizhi/spout/CustomizedKafkaSpout.java b/src/main/java/com/zdjizhi/spout/CustomizedKafkaSpout.java
index d8fde9a..ecc6932 100644
--- a/src/main/java/com/zdjizhi/spout/CustomizedKafkaSpout.java
+++ b/src/main/java/com/zdjizhi/spout/CustomizedKafkaSpout.java
@@ -1,8 +1,10 @@
package com.zdjizhi.spout;
+import cn.hutool.core.thread.ThreadUtil;
import com.zdjizhi.common.FlowWriteConfig;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
+import com.zdjizhi.utils.exception.StreamCompletionException;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
@@ -71,13 +73,12 @@ public class CustomizedKafkaSpout extends BaseRichSpout {
try {
// TODO Auto-generated method stub
ConsumerRecords records = consumer.poll(10000L);
- Thread.sleep(FlowWriteConfig.TOPOLOGY_SPOUT_SLEEP_TIME);
+ ThreadUtil.sleep(FlowWriteConfig.TOPOLOGY_SPOUT_SLEEP_TIME);
for (ConsumerRecord record : records) {
this.collector.emit(new Values(record.value()));
}
- } catch (Exception e) {
+ } catch (StreamCompletionException e) {
logger.error("KafkaSpout发送消息出现异常!", e);
- e.printStackTrace();
}
}
diff --git a/src/main/java/com/zdjizhi/topology/LogFlowWriteTopology.java b/src/main/java/com/zdjizhi/topology/LogFlowWriteTopology.java
index f8146a2..ccde5df 100644
--- a/src/main/java/com/zdjizhi/topology/LogFlowWriteTopology.java
+++ b/src/main/java/com/zdjizhi/topology/LogFlowWriteTopology.java
@@ -7,6 +7,7 @@ import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.spout.CustomizedKafkaSpout;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
+import com.zdjizhi.utils.exception.StreamCompletionException;
import org.apache.storm.Config;
import org.apache.storm.generated.AlreadyAliveException;
import org.apache.storm.generated.AuthorizationException;
@@ -70,25 +71,29 @@ public class LogFlowWriteTopology {
}
}
- public static void main(String[] args) throws Exception {
+ public static void main(String[] args) {
LogFlowWriteTopology flowWriteTopology;
- boolean runLocally = true;
- int size = 2;
- if (args.length >= size && FlowWriteConfig.MODEL.equalsIgnoreCase(args[1])) {
- runLocally = false;
- flowWriteTopology = new LogFlowWriteTopology(args[0]);
- } else {
- flowWriteTopology = new LogFlowWriteTopology();
- }
+ try {
+ boolean runLocally = true;
+ int size = 2;
+ if (args.length >= size && FlowWriteConfig.MODEL.equalsIgnoreCase(args[1])) {
+ runLocally = false;
+ flowWriteTopology = new LogFlowWriteTopology(args[0]);
+ } else {
+ flowWriteTopology = new LogFlowWriteTopology();
+ }
- flowWriteTopology.buildTopology();
+ flowWriteTopology.buildTopology();
- if (runLocally) {
- logger.info("执行本地模式...");
- flowWriteTopology.runLocally();
- } else {
- logger.info("执行远程部署模式...");
- flowWriteTopology.runRemotely();
+ if (runLocally) {
+ logger.info("执行本地模式...");
+ flowWriteTopology.runLocally();
+ } else {
+ logger.info("执行远程部署模式...");
+ flowWriteTopology.runRemotely();
+ }
+ } catch (StreamCompletionException | InterruptedException | InvalidTopologyException | AlreadyAliveException | AuthorizationException e) {
+ logger.error("Topology Start ERROR! message is:" + e);
}
}
}
diff --git a/src/main/java/com/zdjizhi/topology/StormRunner.java b/src/main/java/com/zdjizhi/topology/StormRunner.java
index 472cde7..85024fd 100644
--- a/src/main/java/com/zdjizhi/topology/StormRunner.java
+++ b/src/main/java/com/zdjizhi/topology/StormRunner.java
@@ -1,6 +1,7 @@
package com.zdjizhi.topology;
+import cn.hutool.core.thread.ThreadUtil;
import org.apache.storm.Config;
import org.apache.storm.LocalCluster;
import org.apache.storm.StormSubmitter;
@@ -21,7 +22,7 @@ public final class StormRunner{
LocalCluster localCluster = new LocalCluster();
localCluster.submitTopology(topologyName, conf, builder.createTopology());
- Thread.sleep((long) runtimeInSeconds * MILLS_IN_SEC);
+ ThreadUtil.sleep((long) runtimeInSeconds * MILLS_IN_SEC);
localCluster.shutdown();
}
diff --git a/src/main/java/com/zdjizhi/utils/exception/StreamCompletionException.java b/src/main/java/com/zdjizhi/utils/exception/StreamCompletionException.java
new file mode 100644
index 0000000..2a31b11
--- /dev/null
+++ b/src/main/java/com/zdjizhi/utils/exception/StreamCompletionException.java
@@ -0,0 +1,18 @@
+package com.zdjizhi.utils.exception;
+
+/**
+ * @author qidaijie
+ * @Package com.zdjizhi.utils.exception
+ * @Description:
+ * @date 2021/3/25 10:14
+ */
+public class StreamCompletionException extends RuntimeException {
+
+ public StreamCompletionException(Exception e) {
+ super(e);
+ }
+
+ public StreamCompletionException(String e) {
+ super(e);
+ }
+}
diff --git a/src/main/java/com/zdjizhi/utils/general/SnowflakeId.java b/src/main/java/com/zdjizhi/utils/general/SnowflakeId.java
index 0d50226..9663dc6 100644
--- a/src/main/java/com/zdjizhi/utils/general/SnowflakeId.java
+++ b/src/main/java/com/zdjizhi/utils/general/SnowflakeId.java
@@ -1,6 +1,7 @@
package com.zdjizhi.utils.general;
import com.zdjizhi.common.FlowWriteConfig;
+import com.zdjizhi.utils.exception.StreamCompletionException;
import com.zdjizhi.utils.zookeeper.DistributedLock;
import com.zdjizhi.utils.zookeeper.ZookeeperUtils;
import cn.hutool.log.Log;
@@ -126,8 +127,7 @@ public class SnowflakeId {
}
this.workerId = tmpWorkerId;
this.dataCenterId = dataCenterIdNum;
- } catch (Exception e) {
- e.printStackTrace();
+ } catch (StreamCompletionException e) {
logger.error("This is not usual error!!!===>>>" + e + "<<<===");
}finally {
lock.unlock();
diff --git a/src/main/java/com/zdjizhi/utils/general/TransFormUtils.java b/src/main/java/com/zdjizhi/utils/general/TransFormUtils.java
index 823c19b..c2911f5 100644
--- a/src/main/java/com/zdjizhi/utils/general/TransFormUtils.java
+++ b/src/main/java/com/zdjizhi/utils/general/TransFormUtils.java
@@ -2,6 +2,7 @@ package com.zdjizhi.utils.general;
import com.zdjizhi.common.FlowWriteConfig;
+import com.zdjizhi.utils.exception.StreamCompletionException;
import com.zdjizhi.utils.json.JsonParseUtil;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
@@ -79,9 +80,8 @@ public class TransFormUtils {
functionSet(function, object, appendToKeyName, appendTo, name, param);
}
return JSONObject.toJSONString(object);
- } catch (Exception e) {
+ } catch (StreamCompletionException e) {
logger.error(FlowWriteConfig.KAFKA_TOPIC + "日志预处理过程出现异常");
- e.printStackTrace();
return "";
}
}
diff --git a/src/main/java/com/zdjizhi/utils/general/TransFunction.java b/src/main/java/com/zdjizhi/utils/general/TransFunction.java
index 76b2643..6eb839f 100644
--- a/src/main/java/com/zdjizhi/utils/general/TransFunction.java
+++ b/src/main/java/com/zdjizhi/utils/general/TransFunction.java
@@ -1,9 +1,9 @@
package com.zdjizhi.utils.general;
+import cn.hutool.core.codec.Base64;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.utils.hbase.HBaseUtils;
import com.zdjizhi.utils.json.JsonParseUtil;
-import com.zdjizhi.utils.system.LogPrintUtil;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.jayway.jsonpath.InvalidPathException;
@@ -12,7 +12,6 @@ import com.zdjizhi.utils.Encodes;
import com.zdjizhi.utils.FormatUtils;
import com.zdjizhi.utils.IpLookup;
import com.zdjizhi.utils.StringUtil;
-
import java.util.ArrayList;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@@ -24,8 +23,6 @@ class TransFunction {
private static final Log logger = LogFactory.get();
- private static final Log log = LogFactory.get();
-
private static final Pattern PATTERN = Pattern.compile("[0-9]*");
/**
@@ -78,7 +75,7 @@ class TransFunction {
static String radiusMatch(String ip) {
String account = HBaseUtils.getAccount(ip.trim());
if (StringUtil.isBlank(account)) {
- log.warn("HashMap get account is null, Ip is :{}", ip);
+ logger.warn("HashMap get account is null, Ip is :{}", ip);
}
return account;
}
@@ -93,8 +90,7 @@ class TransFunction {
try {
return FormatUtils.getTopPrivateDomain(domain);
} catch (StringIndexOutOfBoundsException outException) {
- log.error("解析顶级域名异常,异常域名:{}" + domain);
- logger.error(LogPrintUtil.print(outException));
+ logger.error("解析顶级域名异常,异常域名:{}" + domain);
return "";
}
}
@@ -111,13 +107,13 @@ class TransFunction {
try {
if (StringUtil.isNotBlank(message)) {
if (StringUtil.isNotBlank(charset)) {
- result = Encodes.decodeBase64String(message, charset);
+ result = Base64.decodeStr(message, charset);
} else {
- result = Encodes.decodeBase64String(message, FlowWriteConfig.MAIL_DEFAULT_CHARSET);
+ result = Base64.decodeStr(message, FlowWriteConfig.MAIL_DEFAULT_CHARSET);
}
}
- } catch (Exception e) {
- logger.error("解析 Base64 异常,异常信息:" + LogPrintUtil.print(e));
+ } catch (RuntimeException rune) {
+ logger.error("解析 Base64 异常,异常信息:" + rune);
}
return result;
}
@@ -137,7 +133,7 @@ class TransFunction {
flattenResult = read.get(0);
}
} catch (ClassCastException | InvalidPathException e) {
- log.error("设备标签解析异常,[ " + expr + " ]解析表达式错误" + LogPrintUtil.print(e));
+ logger.error("设备标签解析异常,[ " + expr + " ]解析表达式错误" + e);
}
return flattenResult;
}
@@ -188,8 +184,8 @@ class TransFunction {
}
}
}
- } catch (Exception e) {
- logger.error("IF 函数执行异常,异常信息:" + LogPrintUtil.print(e));
+ } catch (RuntimeException e) {
+ logger.error("IF 函数执行异常,异常信息:" + e);
}
return null;
}
@@ -208,8 +204,8 @@ class TransFunction {
} else {
return param;
}
- } catch (Exception e) {
- logger.error("SetValue 函数异常,异常信息:" + LogPrintUtil.print(e));
+ } catch (RuntimeException e) {
+ logger.error("SetValue 函数异常,异常信息:" + e);
}
return null;
}
diff --git a/src/main/java/com/zdjizhi/utils/hbase/HBaseUtils.java b/src/main/java/com/zdjizhi/utils/hbase/HBaseUtils.java
index 4be1683..042a930 100644
--- a/src/main/java/com/zdjizhi/utils/hbase/HBaseUtils.java
+++ b/src/main/java/com/zdjizhi/utils/hbase/HBaseUtils.java
@@ -68,10 +68,8 @@ public class HBaseUtils {
logger.warn("HBaseUtils get HBase connection,now to getAll().");
} catch (IOException ioe) {
logger.error("HBaseUtils getHbaseConn() IOException===>{" + ioe + "}<===");
- ioe.printStackTrace();
- } catch (Exception e) {
+ } catch (RuntimeException e) {
logger.error("HBaseUtils getHbaseConn() Exception===>{" + e + "}<===");
- e.printStackTrace();
}
}
@@ -122,10 +120,8 @@ public class HBaseUtils {
time = endTime;
} catch (IOException ioe) {
logger.error("HBaseUtils timestampsFilter is IOException===>{" + ioe + "}<===");
- ioe.printStackTrace();
- } catch (Exception e) {
+ } catch (RuntimeException e) {
logger.error("HBaseUtils timestampsFilter is Exception===>{" + e + "}<===");
- e.printStackTrace();
} finally {
if (scanner != null) {
scanner.close();
@@ -134,7 +130,7 @@ public class HBaseUtils {
try {
table.close();
} catch (IOException e) {
- e.printStackTrace();
+ logger.error("HBase Table Close ERROR! Exception message is:" + e);
}
}
}
@@ -160,10 +156,8 @@ public class HBaseUtils {
scanner.close();
} catch (IOException ioe) {
logger.error("HBaseUtils getAll() is IOException===>{" + ioe + "}<===");
- ioe.printStackTrace();
- } catch (Exception e) {
+ } catch (RuntimeException e) {
logger.error("HBaseUtils getAll() is Exception===>{" + e + "}<===");
- e.printStackTrace();
}
}
@@ -179,26 +173,11 @@ public class HBaseUtils {
public void run() {
try {
change();
- } catch (Exception e) {
+ } catch (RuntimeException e) {
logger.error("HBaseUtils update hbaseCache is error===>{" + e + "}<===");
- e.printStackTrace();
}
}
}, 1, FlowWriteConfig.HBASE_TICK_TUPLE_FREQ_SECS, TimeUnit.SECONDS);
-
-//
-// Timer timer = new Timer();
-// timer.scheduleAtFixedRate(new TimerTask() {
-// @Override
-// public void run() {
-// try {
-// change();
-// } catch (Exception e) {
-// logger.error("HBaseUtils update hbaseCache is error===>{" + e + "}<===");
-// e.printStackTrace();
-// }
-// }
-// }, 1, 1000 * FlowWriteConfig.HBASE_TICK_TUPLE_FREQ_SECS);//起始1ms,以后每隔60s
}
diff --git a/src/main/java/com/zdjizhi/utils/http/HttpClientUtil.java b/src/main/java/com/zdjizhi/utils/http/HttpClientUtil.java
index 77725ee..31162f4 100644
--- a/src/main/java/com/zdjizhi/utils/http/HttpClientUtil.java
+++ b/src/main/java/com/zdjizhi/utils/http/HttpClientUtil.java
@@ -1,6 +1,5 @@
package com.zdjizhi.utils.http;
-import com.zdjizhi.utils.system.LogPrintUtil;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import org.apache.http.HttpEntity;
@@ -19,46 +18,60 @@ import java.io.InputStreamReader;
* @author qidaijie
*/
public class HttpClientUtil {
-
+ // private static final int MAX_STR_LEN = 512000;
private static final Log logger = LogFactory.get();
/**
* 请求网关获取schema
+ *
* @param http 网关url
* @return schema
*/
public static String requestByGetMethod(String http) {
CloseableHttpClient httpClient = HttpClients.createDefault();
- StringBuilder entityStringBuilder = null;
+ StringBuilder entityStringBuilder;
HttpGet get = new HttpGet(http);
BufferedReader bufferedReader = null;
- try (CloseableHttpResponse httpResponse = httpClient.execute(get)) {
+ CloseableHttpResponse httpResponse = null;
+ try {
+ httpResponse = httpClient.execute(get);
HttpEntity entity = httpResponse.getEntity();
entityStringBuilder = new StringBuilder();
if (null != entity) {
bufferedReader = new BufferedReader(new InputStreamReader(httpResponse.getEntity().getContent(), "UTF-8"), 8 * 1024);
- String line;
- while ((line = bufferedReader.readLine()) != null) {
- entityStringBuilder.append(line);
+ int intC;
+ while ((intC = bufferedReader.read()) != -1) {
+ char c = (char) intC;
+ if (c == '\n') {
+ break;
+ }
+ entityStringBuilder.append(c);
}
+
+ return entityStringBuilder.toString();
}
- } catch (Exception e) {
- logger.error(LogPrintUtil.print(e));
+ } catch (IOException e) {
+ logger.error("Get Schema from Query engine ERROR! Exception message is:" + e);
} finally {
if (httpClient != null) {
try {
httpClient.close();
} catch (IOException e) {
- logger.error(LogPrintUtil.print(e));
+ logger.error("Close HTTP Client ERROR! Exception message is:" + e);
+ }
+ }
+ if (httpResponse != null) {
+ try {
+ httpResponse.close();
+ } catch (IOException e) {
+ logger.error("Close httpResponse ERROR! Exception message is:" + e);
}
}
if (bufferedReader != null) {
-// bufferedReader.close();
org.apache.commons.io.IOUtils.closeQuietly(bufferedReader);
}
}
- return entityStringBuilder.toString();
+ return "";
}
-
}
diff --git a/src/main/java/com/zdjizhi/utils/json/JsonParseUtil.java b/src/main/java/com/zdjizhi/utils/json/JsonParseUtil.java
index 058c6f8..10e07d1 100644
--- a/src/main/java/com/zdjizhi/utils/json/JsonParseUtil.java
+++ b/src/main/java/com/zdjizhi/utils/json/JsonParseUtil.java
@@ -135,13 +135,8 @@ public class JsonParseUtil {
if (checkKeepField(filedStr)) {
String name = JsonPath.read(filedStr, "$.name").toString();
String type = JsonPath.read(filedStr, "$.type").toString();
-// boolean contains = type.contains("[");
-// if (contains) {
-// map.put(name, Integer.class);
-// } else {
//组合用来生成实体类的map
map.put(name, getClassName(type));
-// }
}
}
return map;
diff --git a/src/main/java/com/zdjizhi/utils/kafka/KafkaLogSend.java b/src/main/java/com/zdjizhi/utils/kafka/KafkaLogSend.java
index b7c270b..87509b4 100644
--- a/src/main/java/com/zdjizhi/utils/kafka/KafkaLogSend.java
+++ b/src/main/java/com/zdjizhi/utils/kafka/KafkaLogSend.java
@@ -54,7 +54,6 @@ public class KafkaLogSend {
}
});
if (errorSum[0] > FlowWriteConfig.MAX_FAILURE_NUM) {
-// InfluxDbUtils.sendKafkaFail(list.size());
list.clear();
}
}
diff --git a/src/main/java/com/zdjizhi/utils/system/FlowWriteConfigurations.java b/src/main/java/com/zdjizhi/utils/system/FlowWriteConfigurations.java
index ecfd0c5..837d881 100644
--- a/src/main/java/com/zdjizhi/utils/system/FlowWriteConfigurations.java
+++ b/src/main/java/com/zdjizhi/utils/system/FlowWriteConfigurations.java
@@ -1,5 +1,10 @@
package com.zdjizhi.utils.system;
+import com.zdjizhi.utils.StringUtil;
+import com.zdjizhi.utils.exception.StreamCompletionException;
+
+import java.io.IOException;
+import java.util.Locale;
import java.util.Properties;
@@ -46,9 +51,9 @@ public final class FlowWriteConfigurations {
public static Boolean getBooleanProperty(Integer type, String key) {
if (type == 0) {
- return "true".equals(propService.getProperty(key).toLowerCase().trim());
+ return StringUtil.equals(propService.getProperty(key).trim().toLowerCase(Locale.ENGLISH), "true");
} else if (type == 1) {
- return "true".equals(propKafka.getProperty(key).toLowerCase().trim());
+ return StringUtil.equals(propKafka.getProperty(key).trim().toLowerCase(Locale.ENGLISH), "true");
} else {
return null;
}
@@ -58,7 +63,7 @@ public final class FlowWriteConfigurations {
try {
propService.load(FlowWriteConfigurations.class.getClassLoader().getResourceAsStream("service_flow_config.properties"));
propKafka.load(FlowWriteConfigurations.class.getClassLoader().getResourceAsStream("kafka_config.properties"));
- } catch (Exception e) {
+ } catch (IOException | StreamCompletionException e) {
propKafka = null;
propService = null;
}
diff --git a/src/main/java/com/zdjizhi/utils/zookeeper/DistributedLock.java b/src/main/java/com/zdjizhi/utils/zookeeper/DistributedLock.java
index 04658fb..a8a7312 100644
--- a/src/main/java/com/zdjizhi/utils/zookeeper/DistributedLock.java
+++ b/src/main/java/com/zdjizhi/utils/zookeeper/DistributedLock.java
@@ -2,6 +2,7 @@ package com.zdjizhi.utils.zookeeper;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
+import com.zdjizhi.utils.exception.StreamCompletionException;
import org.apache.log4j.Logger;
import org.apache.zookeeper.*;
import org.apache.zookeeper.data.Stat;
@@ -89,7 +90,7 @@ public class DistributedLock implements Lock, Watcher {
waitForLock(waitLock, sessionTimeout);
}
} catch (InterruptedException | KeeperException e) {
- e.printStackTrace();
+ logger.error("获取锁异常" + e);
}
}
@@ -122,7 +123,7 @@ public class DistributedLock implements Lock, Watcher {
String prevNode = currentLock.substring(currentLock.lastIndexOf("/") + 1);
waitLock = lockObjects.get(Collections.binarySearch(lockObjects, prevNode) - 1);
} catch (InterruptedException | KeeperException e) {
- e.printStackTrace();
+ logger.error("获取锁过程异常" + e);
}
return false;
}
@@ -135,8 +136,8 @@ public class DistributedLock implements Lock, Watcher {
return true;
}
return waitForLock(waitLock, timeout);
- } catch (Exception e) {
- e.printStackTrace();
+ } catch (KeeperException | InterruptedException | StreamCompletionException e) {
+ logger.error("判断是否锁定异常" + e);
}
return false;
}
@@ -161,7 +162,7 @@ public class DistributedLock implements Lock, Watcher {
currentLock = null;
zk.close();
} catch (InterruptedException | KeeperException e) {
- e.printStackTrace();
+ logger.error("关闭锁异常" + e);
}
}
diff --git a/src/main/java/com/zdjizhi/utils/zookeeper/ZookeeperUtils.java b/src/main/java/com/zdjizhi/utils/zookeeper/ZookeeperUtils.java
index fb1c3eb..ebf4368 100644
--- a/src/main/java/com/zdjizhi/utils/zookeeper/ZookeeperUtils.java
+++ b/src/main/java/com/zdjizhi/utils/zookeeper/ZookeeperUtils.java
@@ -1,6 +1,6 @@
package com.zdjizhi.utils.zookeeper;
-import com.zdjizhi.utils.system.LogPrintUtil;
+import cn.hutool.core.util.StrUtil;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import org.apache.zookeeper.*;
@@ -58,7 +58,7 @@ public class ZookeeperUtils implements Watcher {
}
}
} catch (KeeperException | InterruptedException e) {
- logger.error("modify error Can't modify," + LogPrintUtil.print(e));
+ logger.error("modify error Can't modify," + e);
} finally {
closeConn();
}
@@ -76,7 +76,7 @@ public class ZookeeperUtils implements Watcher {
zookeeper = new ZooKeeper(host, SESSION_TIME_OUT, this);
countDownLatch.await();
} catch (IOException | InterruptedException e) {
- e.printStackTrace();
+ logger.error("Connection to the Zookeeper Exception! message:" + e);
}
}
@@ -89,7 +89,7 @@ public class ZookeeperUtils implements Watcher {
zookeeper.close();
}
} catch (InterruptedException e) {
- logger.error(LogPrintUtil.print(e));
+ logger.error("Close the Zookeeper connection Exception! message:" + e);
}
}
@@ -104,9 +104,10 @@ public class ZookeeperUtils implements Watcher {
Stat stat = new Stat();
try {
byte[] resByte = zookeeper.getData(path, true, stat);
- result = new String(resByte);
+
+ result = StrUtil.str(resByte, "UTF-8");
} catch (KeeperException | InterruptedException e) {
- logger.error("Get node information exception" + LogPrintUtil.print(e));
+ logger.error("Get node information exception" + e);
}
return result;
}
@@ -130,7 +131,7 @@ public class ZookeeperUtils implements Watcher {
logger.warn("Node already exists ! Don't need to create");
}
} catch (KeeperException | InterruptedException e) {
- logger.error(LogPrintUtil.print(e));
+ logger.error(e);
} finally {
closeConn();
}