Fix medium- and low-severity warnings for EAL4

This commit is contained in:
qidaijie
2021-03-25 14:27:41 +08:00
parent 85f8978887
commit 0023a1455f
18 changed files with 124 additions and 110 deletions

View File

@@ -137,7 +137,7 @@
<groupId>org.apache.storm</groupId>
<artifactId>storm-core</artifactId>
<version>${storm.version}</version>
-<scope>provided</scope>
+<!--<scope>provided</scope>-->
<exclusions>
<exclusion>
<artifactId>slf4j-log4j12</artifactId>

View File

@@ -13,7 +13,7 @@ zookeeper.servers=192.168.44.12:2181
hbase.zookeeper.servers=192.168.44.12:2181
#定位库地址
-ip.library=D:\\K18-Phase2\\tsgSpace\\dat\\dll\\
+ip.library=D:\\K18-Phase2\\tsgSpace\\dat\\
#ip.library=/home/bigdata/topology/dat/
#网关的schema位置

View File

@@ -4,6 +4,7 @@ import com.zdjizhi.common.FlowWriteConfig;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.utils.StringUtil;
+import com.zdjizhi.utils.exception.StreamCompletionException;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.BasicOutputCollector;
import org.apache.storm.topology.OutputFieldsDeclarer;
@@ -34,13 +35,12 @@ public class CompletionBolt extends BaseBasicBolt {
@Override
public void execute(Tuple tuple, BasicOutputCollector basicOutputCollector) {
try {
-String message = tuple.getString(0);
-if (StringUtil.isNotBlank(message)) {
-basicOutputCollector.emit(new Values(dealCommonMessage(message)));
-}
-} catch (Exception e) {
+String message = tuple.getString(0);
+if (StringUtil.isNotBlank(message)) {
+basicOutputCollector.emit(new Values(dealCommonMessage(message)));
+}
+} catch (StreamCompletionException e) {
logger.error(FlowWriteConfig.KAFKA_TOPIC + "接收/解析过程出现异常");
-e.printStackTrace();
}
}
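
The narrowed catch above only fires if dealCommonMessage surfaces its failures as StreamCompletionException. A minimal sketch of that wrap-at-source pattern (the body of dealCommonMessage here is a hypothetical stand-in, not the project's actual parsing logic):

import com.zdjizhi.utils.exception.StreamCompletionException;

final class MessageDealing {
    // Hypothetical stand-in for CompletionBolt.dealCommonMessage(): do the
    // parsing work, and rethrow any failure as the project's unchecked type
    // so the bolt's catch (StreamCompletionException e) clause sees it.
    static String dealCommonMessage(String message) {
        try {
            return message.trim(); // placeholder for the real completion logic
        } catch (RuntimeException e) {
            throw new StreamCompletionException(e);
        }
    }
}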

View File

@@ -1,6 +1,7 @@
package com.zdjizhi.bolt.kafka;
import com.zdjizhi.common.FlowWriteConfig;
+import com.zdjizhi.utils.exception.StreamCompletionException;
import com.zdjizhi.utils.kafka.KafkaLogSend;
import com.zdjizhi.utils.system.TupleUtils;
import cn.hutool.log.Log;
@@ -52,9 +53,8 @@ public class LogSendBolt extends BaseBasicBolt {
list.clear();
}
}
-} catch (Exception e) {
+} catch (StreamCompletionException e) {
logger.error(FlowWriteConfig.KAFKA_TOPIC + "日志发送Kafka过程出现异常");
-e.printStackTrace();
}
}

View File

@@ -1,8 +1,10 @@
package com.zdjizhi.spout;
+import cn.hutool.core.thread.ThreadUtil;
import com.zdjizhi.common.FlowWriteConfig;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
+import com.zdjizhi.utils.exception.StreamCompletionException;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
@@ -71,13 +73,12 @@ public class CustomizedKafkaSpout extends BaseRichSpout {
try {
// TODO Auto-generated method stub
ConsumerRecords<String, String> records = consumer.poll(10000L);
-Thread.sleep(FlowWriteConfig.TOPOLOGY_SPOUT_SLEEP_TIME);
+ThreadUtil.sleep(FlowWriteConfig.TOPOLOGY_SPOUT_SLEEP_TIME);
for (ConsumerRecord<String, String> record : records) {
this.collector.emit(new Values(record.value()));
}
-} catch (Exception e) {
+} catch (StreamCompletionException e) {
logger.error("KafkaSpout发送消息出现异常!", e);
-e.printStackTrace();
}
}
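
ThreadUtil.sleep is hutool's wrapper that absorbs the checked InterruptedException that Thread.sleep forces callers to handle, which is why the surrounding catch can narrow to StreamCompletionException. Roughly (a sketch of the behavior, not hutool's exact source; restoring the interrupt flag is this sketch's own choice):

final class QuietSleep {
    // Sleep without propagating the checked InterruptedException.
    static boolean sleep(long millis) {
        try {
            Thread.sleep(millis);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // keep the interrupt visible
            return false;
        }
        return true;
    }
}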

View File

@@ -7,6 +7,7 @@ import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.spout.CustomizedKafkaSpout;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
+import com.zdjizhi.utils.exception.StreamCompletionException;
import org.apache.storm.Config;
import org.apache.storm.generated.AlreadyAliveException;
import org.apache.storm.generated.AuthorizationException;
@@ -70,25 +71,29 @@ public class LogFlowWriteTopology {
}
}
-public static void main(String[] args) throws Exception {
+public static void main(String[] args) {
LogFlowWriteTopology flowWriteTopology;
-boolean runLocally = true;
-int size = 2;
-if (args.length >= size && FlowWriteConfig.MODEL.equalsIgnoreCase(args[1])) {
-runLocally = false;
-flowWriteTopology = new LogFlowWriteTopology(args[0]);
-} else {
-flowWriteTopology = new LogFlowWriteTopology();
-}
+try {
+boolean runLocally = true;
+int size = 2;
+if (args.length >= size && FlowWriteConfig.MODEL.equalsIgnoreCase(args[1])) {
+runLocally = false;
+flowWriteTopology = new LogFlowWriteTopology(args[0]);
+} else {
+flowWriteTopology = new LogFlowWriteTopology();
+}
-flowWriteTopology.buildTopology();
+flowWriteTopology.buildTopology();
-if (runLocally) {
-logger.info("执行本地模式...");
-flowWriteTopology.runLocally();
-} else {
-logger.info("执行远程部署模式...");
-flowWriteTopology.runRemotely();
+if (runLocally) {
+logger.info("执行本地模式...");
+flowWriteTopology.runLocally();
+} else {
+logger.info("执行远程部署模式...");
+flowWriteTopology.runRemotely();
+}
+} catch (StreamCompletionException | InterruptedException | InvalidTopologyException | AlreadyAliveException | AuthorizationException e) {
+logger.error("Topology Start ERROR! message is:" + e);
}
}
}

View File

@@ -1,6 +1,7 @@
package com.zdjizhi.topology;
+import cn.hutool.core.thread.ThreadUtil;
import org.apache.storm.Config;
import org.apache.storm.LocalCluster;
import org.apache.storm.StormSubmitter;
@@ -21,7 +22,7 @@ public final class StormRunner{
LocalCluster localCluster = new LocalCluster();
localCluster.submitTopology(topologyName, conf, builder.createTopology());
-Thread.sleep((long) runtimeInSeconds * MILLS_IN_SEC);
+ThreadUtil.sleep((long) runtimeInSeconds * MILLS_IN_SEC);
localCluster.shutdown();
}

View File

@@ -0,0 +1,18 @@
package com.zdjizhi.utils.exception;
/**
* @author qidaijie
* @Package com.zdjizhi.utils.exception
* @Description:
* @date 2021/3/25 10:14
*/
public class StreamCompletionException extends RuntimeException {
public StreamCompletionException(Exception e) {
super(e);
}
public StreamCompletionException(String e) {
super(e);
}
}
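
Since the class extends RuntimeException, checked failures can be converted once at the point they occur and then caught as this single unchecked type in the bolts above. A minimal usage sketch (readSchema and its file-based body are hypothetical):

import com.zdjizhi.utils.exception.StreamCompletionException;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;

final class SchemaLoader {
    static String readSchema(String path) {
        try {
            return new String(Files.readAllBytes(Paths.get(path)), StandardCharsets.UTF_8);
        } catch (IOException e) {
            throw new StreamCompletionException(e); // callers catch the unchecked type
        }
    }
}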

View File

@@ -1,6 +1,7 @@
package com.zdjizhi.utils.general;
import com.zdjizhi.common.FlowWriteConfig;
+import com.zdjizhi.utils.exception.StreamCompletionException;
import com.zdjizhi.utils.zookeeper.DistributedLock;
import com.zdjizhi.utils.zookeeper.ZookeeperUtils;
import cn.hutool.log.Log;
@@ -126,8 +127,7 @@ public class SnowflakeId {
}
this.workerId = tmpWorkerId;
this.dataCenterId = dataCenterIdNum;
-} catch (Exception e) {
-e.printStackTrace();
+} catch (StreamCompletionException e) {
+logger.error("This is not usual error!!!===>>>" + e + "<<<===");
}finally {
lock.unlock();

View File

@@ -2,6 +2,7 @@ package com.zdjizhi.utils.general;
import com.zdjizhi.common.FlowWriteConfig;
+import com.zdjizhi.utils.exception.StreamCompletionException;
import com.zdjizhi.utils.json.JsonParseUtil;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
@@ -79,9 +80,8 @@ public class TransFormUtils {
functionSet(function, object, appendToKeyName, appendTo, name, param);
}
return JSONObject.toJSONString(object);
-} catch (Exception e) {
+} catch (StreamCompletionException e) {
logger.error(FlowWriteConfig.KAFKA_TOPIC + "日志预处理过程出现异常");
-e.printStackTrace();
return "";
}
}

View File

@@ -1,9 +1,9 @@
package com.zdjizhi.utils.general;
+import cn.hutool.core.codec.Base64;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.utils.hbase.HBaseUtils;
import com.zdjizhi.utils.json.JsonParseUtil;
-import com.zdjizhi.utils.system.LogPrintUtil;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.jayway.jsonpath.InvalidPathException;
@@ -12,7 +12,6 @@ import com.zdjizhi.utils.Encodes;
import com.zdjizhi.utils.FormatUtils;
import com.zdjizhi.utils.IpLookup;
import com.zdjizhi.utils.StringUtil;
-import java.util.ArrayList;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@@ -24,8 +23,6 @@ class TransFunction {
private static final Log logger = LogFactory.get();
-private static final Log log = LogFactory.get();
private static final Pattern PATTERN = Pattern.compile("[0-9]*");
/**
@@ -78,7 +75,7 @@ class TransFunction {
static String radiusMatch(String ip) {
String account = HBaseUtils.getAccount(ip.trim());
if (StringUtil.isBlank(account)) {
-log.warn("HashMap get account is null, Ip is :{}", ip);
+logger.warn("HashMap get account is null, Ip is :{}", ip);
}
return account;
}
@@ -93,8 +90,7 @@ class TransFunction {
try {
return FormatUtils.getTopPrivateDomain(domain);
} catch (StringIndexOutOfBoundsException outException) {
-log.error("解析顶级域名异常,异常域名:{}" + domain);
-logger.error(LogPrintUtil.print(outException));
+logger.error("解析顶级域名异常,异常域名:{}" + domain);
return "";
}
}
@@ -111,13 +107,13 @@ class TransFunction {
try {
if (StringUtil.isNotBlank(message)) {
if (StringUtil.isNotBlank(charset)) {
-result = Encodes.decodeBase64String(message, charset);
+result = Base64.decodeStr(message, charset);
} else {
-result = Encodes.decodeBase64String(message, FlowWriteConfig.MAIL_DEFAULT_CHARSET);
+result = Base64.decodeStr(message, FlowWriteConfig.MAIL_DEFAULT_CHARSET);
}
}
-} catch (Exception e) {
-logger.error("解析 Base64 异常,异常信息:" + LogPrintUtil.print(e));
+} catch (RuntimeException rune) {
+logger.error("解析 Base64 异常,异常信息:" + rune);
}
return result;
}
@@ -137,7 +133,7 @@ class TransFunction {
flattenResult = read.get(0);
}
} catch (ClassCastException | InvalidPathException e) {
-log.error("设备标签解析异常,[ " + expr + " ]解析表达式错误" + LogPrintUtil.print(e));
+logger.error("设备标签解析异常,[ " + expr + " ]解析表达式错误" + e);
}
return flattenResult;
}
@@ -188,8 +184,8 @@ class TransFunction {
}
}
}
-} catch (Exception e) {
-logger.error("IF 函数执行异常,异常信息:" + LogPrintUtil.print(e));
+} catch (RuntimeException e) {
+logger.error("IF 函数执行异常,异常信息:" + e);
}
return null;
}
@@ -208,8 +204,8 @@ class TransFunction {
} else {
return param;
}
-} catch (Exception e) {
-logger.error("SetValue 函数异常,异常信息:" + LogPrintUtil.print(e));
+} catch (RuntimeException e) {
+logger.error("SetValue 函数异常,异常信息:" + e);
}
return null;
}
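
The hutool call keeps the old Encodes contract: decode a Base64 string with an explicit charset. A small usage sketch of the two-argument decodeStr form introduced above (sample input invented for illustration):

import cn.hutool.core.codec.Base64;

final class Base64Demo {
    public static void main(String[] args) {
        // "5L2g5aW9" is the Base64 encoding of the UTF-8 bytes of 你好
        System.out.println(Base64.decodeStr("5L2g5aW9", "UTF-8")); // prints 你好
    }
}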

View File

@@ -68,10 +68,8 @@ public class HBaseUtils {
logger.warn("HBaseUtils get HBase connection,now to getAll().");
} catch (IOException ioe) {
logger.error("HBaseUtils getHbaseConn() IOException===>{" + ioe + "}<===");
-ioe.printStackTrace();
-} catch (Exception e) {
+} catch (RuntimeException e) {
logger.error("HBaseUtils getHbaseConn() Exception===>{" + e + "}<===");
-e.printStackTrace();
}
}
@@ -122,10 +120,8 @@ public class HBaseUtils {
time = endTime;
} catch (IOException ioe) {
logger.error("HBaseUtils timestampsFilter is IOException===>{" + ioe + "}<===");
-ioe.printStackTrace();
-} catch (Exception e) {
+} catch (RuntimeException e) {
logger.error("HBaseUtils timestampsFilter is Exception===>{" + e + "}<===");
-e.printStackTrace();
} finally {
if (scanner != null) {
scanner.close();
@@ -134,7 +130,7 @@ public class HBaseUtils {
try {
table.close();
} catch (IOException e) {
-e.printStackTrace();
+logger.error("HBase Table Close ERROR! Exception message is:" + e);
}
}
}
@@ -160,10 +156,8 @@ public class HBaseUtils {
scanner.close();
} catch (IOException ioe) {
logger.error("HBaseUtils getAll() is IOException===>{" + ioe + "}<===");
-ioe.printStackTrace();
-} catch (Exception e) {
+} catch (RuntimeException e) {
logger.error("HBaseUtils getAll() is Exception===>{" + e + "}<===");
-e.printStackTrace();
}
}
@@ -179,26 +173,11 @@ public class HBaseUtils {
public void run() {
try {
change();
-} catch (Exception e) {
+} catch (RuntimeException e) {
logger.error("HBaseUtils update hbaseCache is error===>{" + e + "}<===");
-e.printStackTrace();
}
}
}, 1, FlowWriteConfig.HBASE_TICK_TUPLE_FREQ_SECS, TimeUnit.SECONDS);
-//
-// Timer timer = new Timer();
-// timer.scheduleAtFixedRate(new TimerTask() {
-// @Override
-// public void run() {
-// try {
-// change();
-// } catch (Exception e) {
-// logger.error("HBaseUtils update hbaseCache is error===>{" + e + "}<===");
-// e.printStackTrace();
-// }
-// }
-// }, 1, 1000 * FlowWriteConfig.HBASE_TICK_TUPLE_FREQ_SECS);//起始1ms,以后每隔60s
}
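
The scheduleAtFixedRate call that replaced the commented-out Timer follows the standard periodic-refresh pattern; catching inside the task matters because a ScheduledExecutorService silently cancels future runs once a task throws. A self-contained sketch under that assumption (names are illustrative, not the class's actual fields):

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

final class CacheRefresher {
    private static final ScheduledExecutorService POOL =
            Executors.newSingleThreadScheduledExecutor();

    // Stand-in for HBaseUtils.change(): reload the cache every period,
    // logging failures so one bad run does not end the schedule.
    static void start(Runnable change, long periodSeconds) {
        POOL.scheduleAtFixedRate(() -> {
            try {
                change.run();
            } catch (RuntimeException e) {
                System.err.println("cache refresh failed: " + e);
            }
        }, 1, periodSeconds, TimeUnit.SECONDS);
    }
}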

View File

@@ -1,6 +1,5 @@
package com.zdjizhi.utils.http;
-import com.zdjizhi.utils.system.LogPrintUtil;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import org.apache.http.HttpEntity;
@@ -19,46 +18,60 @@ import java.io.InputStreamReader;
* @author qidaijie
*/
public class HttpClientUtil {
// private static final int MAX_STR_LEN = 512000;
private static final Log logger = LogFactory.get();
/**
* 请求网关获取schema
*
* @param http 网关url
* @return schema
*/
public static String requestByGetMethod(String http) {
CloseableHttpClient httpClient = HttpClients.createDefault();
-StringBuilder entityStringBuilder = null;
+StringBuilder entityStringBuilder;
HttpGet get = new HttpGet(http);
BufferedReader bufferedReader = null;
-try (CloseableHttpResponse httpResponse = httpClient.execute(get)) {
+CloseableHttpResponse httpResponse = null;
+try {
+httpResponse = httpClient.execute(get);
HttpEntity entity = httpResponse.getEntity();
entityStringBuilder = new StringBuilder();
if (null != entity) {
bufferedReader = new BufferedReader(new InputStreamReader(httpResponse.getEntity().getContent(), "UTF-8"), 8 * 1024);
-String line;
-while ((line = bufferedReader.readLine()) != null) {
-entityStringBuilder.append(line);
+int intC;
+while ((intC = bufferedReader.read()) != -1) {
+char c = (char) intC;
+if (c == '\n') {
+break;
+}
+entityStringBuilder.append(c);
}
+return entityStringBuilder.toString();
}
-} catch (Exception e) {
-logger.error(LogPrintUtil.print(e));
+} catch (IOException e) {
+logger.error("Get Schema from Query engine ERROR! Exception message is:" + e);
} finally {
if (httpClient != null) {
try {
httpClient.close();
} catch (IOException e) {
-logger.error(LogPrintUtil.print(e));
+logger.error("Close HTTP Client ERROR! Exception messgae is:" + e);
}
}
+if (httpResponse != null) {
+try {
+httpResponse.close();
+} catch (IOException e) {
+logger.error("Close httpResponse ERROR! Exception messgae is:" + e);
+}
+}
if (bufferedReader != null) {
// bufferedReader.close();
org.apache.commons.io.IOUtils.closeQuietly(bufferedReader);
}
}
-return entityStringBuilder.toString();
+return "";
}
}
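
Note the behavior change in the new read loop: it stops at the first '\n' and returns immediately, so only the first line of the response entity is accumulated. A standalone illustration of that read-to-first-newline loop (sample input invented):

import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;

final class FirstLineDemo {
    public static void main(String[] args) throws IOException {
        BufferedReader bufferedReader = new BufferedReader(new StringReader("line1\nline2\n"));
        StringBuilder entityStringBuilder = new StringBuilder();
        int intC;
        while ((intC = bufferedReader.read()) != -1) {
            char c = (char) intC;
            if (c == '\n') {
                break; // stop at the first newline, as in the diff above
            }
            entityStringBuilder.append(c);
        }
        System.out.println(entityStringBuilder); // prints line1
    }
}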

View File

@@ -135,13 +135,8 @@ public class JsonParseUtil {
if (checkKeepField(filedStr)) {
String name = JsonPath.read(filedStr, "$.name").toString();
String type = JsonPath.read(filedStr, "$.type").toString();
-// boolean contains = type.contains("[");
-// if (contains) {
-// map.put(name, Integer.class);
-// } else {
+//组合用来生成实体类的map
map.put(name, getClassName(type));
-// }
}
}
return map;

View File

@@ -54,7 +54,6 @@ public class KafkaLogSend {
}
});
if (errorSum[0] > FlowWriteConfig.MAX_FAILURE_NUM) {
-// InfluxDbUtils.sendKafkaFail(list.size());
list.clear();
}
}

View File

@@ -1,5 +1,10 @@
package com.zdjizhi.utils.system;
+import com.zdjizhi.utils.StringUtil;
+import com.zdjizhi.utils.exception.StreamCompletionException;
+import java.io.IOException;
+import java.util.Locale;
import java.util.Properties;
@@ -46,9 +51,9 @@ public final class FlowWriteConfigurations {
public static Boolean getBooleanProperty(Integer type, String key) {
if (type == 0) {
return "true".equals(propService.getProperty(key).toLowerCase().trim());
return StringUtil.equals(propService.getProperty(key).toLowerCase().trim().toUpperCase(Locale.ENGLISH), "true");
} else if (type == 1) {
return "true".equals(propKafka.getProperty(key).toLowerCase().trim());
return StringUtil.equals(propKafka.getProperty(key).toLowerCase().trim().toUpperCase(Locale.ENGLISH), "true");
} else {
return null;
}
@@ -58,7 +63,7 @@ public final class FlowWriteConfigurations {
try {
propService.load(FlowWriteConfigurations.class.getClassLoader().getResourceAsStream("service_flow_config.properties"));
propKafka.load(FlowWriteConfigurations.class.getClassLoader().getResourceAsStream("kafka_config.properties"));
-} catch (Exception e) {
+} catch (IOException | StreamCompletionException e) {
propKafka = null;
propService = null;
}

View File

@@ -2,6 +2,7 @@ package com.zdjizhi.utils.zookeeper;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
+import com.zdjizhi.utils.exception.StreamCompletionException;
import org.apache.log4j.Logger;
import org.apache.zookeeper.*;
import org.apache.zookeeper.data.Stat;
@@ -89,7 +90,7 @@ public class DistributedLock implements Lock, Watcher {
waitForLock(waitLock, sessionTimeout);
}
} catch (InterruptedException | KeeperException e) {
-e.printStackTrace();
+logger.error("获取锁异常" + e);
}
}
@@ -122,7 +123,7 @@ public class DistributedLock implements Lock, Watcher {
String prevNode = currentLock.substring(currentLock.lastIndexOf("/") + 1);
waitLock = lockObjects.get(Collections.binarySearch(lockObjects, prevNode) - 1);
} catch (InterruptedException | KeeperException e) {
-e.printStackTrace();
+logger.error("获取锁过程异常" + e);
}
return false;
}
@@ -135,8 +136,8 @@ public class DistributedLock implements Lock, Watcher {
return true;
}
return waitForLock(waitLock, timeout);
-} catch (Exception e) {
-e.printStackTrace();
+} catch (KeeperException | InterruptedException | StreamCompletionException e) {
+logger.error("判断是否锁定异常" + e);
}
return false;
}
@@ -161,7 +162,7 @@ public class DistributedLock implements Lock, Watcher {
currentLock = null;
zk.close();
} catch (InterruptedException | KeeperException e) {
-e.printStackTrace();
+logger.error("关闭锁异常" + e);
}
}

View File

@@ -1,6 +1,6 @@
package com.zdjizhi.utils.zookeeper;
-import com.zdjizhi.utils.system.LogPrintUtil;
+import cn.hutool.core.util.StrUtil;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import org.apache.zookeeper.*;
@@ -58,7 +58,7 @@ public class ZookeeperUtils implements Watcher {
}
}
} catch (KeeperException | InterruptedException e) {
logger.error("modify error Can't modify," + LogPrintUtil.print(e));
logger.error("modify error Can't modify," + e);
} finally {
closeConn();
}
@@ -76,7 +76,7 @@ public class ZookeeperUtils implements Watcher {
zookeeper = new ZooKeeper(host, SESSION_TIME_OUT, this);
countDownLatch.await();
} catch (IOException | InterruptedException e) {
-e.printStackTrace();
+logger.error("Connection to the Zookeeper Exception! message:" + e);
}
}
@@ -89,7 +89,7 @@ public class ZookeeperUtils implements Watcher {
zookeeper.close();
}
} catch (InterruptedException e) {
-logger.error(LogPrintUtil.print(e));
+logger.error("Close the Zookeeper connection Exception! message:" + e);
}
}
@@ -104,9 +104,10 @@ public class ZookeeperUtils implements Watcher {
Stat stat = new Stat();
try {
byte[] resByte = zookeeper.getData(path, true, stat);
-result = new String(resByte);
+result = StrUtil.str(resByte, "UTF-8");
} catch (KeeperException | InterruptedException e) {
logger.error("Get node information exception" + LogPrintUtil.print(e));
logger.error("Get node information exception" + e);
}
return result;
}
@@ -130,7 +131,7 @@ public class ZookeeperUtils implements Watcher {
logger.warn("Node already exists ! Don't need to create");
}
} catch (KeeperException | InterruptedException e) {
-logger.error(LogPrintUtil.print(e));
+logger.error(e);
} finally {
closeConn();
}
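
StrUtil.str decodes the raw ZooKeeper byte[] with an explicit charset, where the old new String(resByte) silently used the platform default. A small usage sketch (byte values invented for illustration):

import cn.hutool.core.util.StrUtil;

final class StrDemo {
    public static void main(String[] args) {
        byte[] resByte = {(byte) 0xE4, (byte) 0xBD, (byte) 0xA0}; // UTF-8 bytes of 你
        System.out.println(StrUtil.str(resByte, "UTF-8")); // prints 你
    }
}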