GAL-349 Optimize the knowledge base update flow of the DoS detection program

unknown
2023-06-08 16:57:19 +08:00
parent 6fb37324ff
commit b9a694ddb9
6 changed files with 256 additions and 144 deletions

DosDetection.java

@@ -24,7 +24,7 @@ import java.util.concurrent.TimeUnit;
/**
* @author wlh
*/
public class DosDetection extends BroadcastProcessFunction<DosSketchLog,Map<String, byte[]>, DosEventLog> {
public class DosDetection extends BroadcastProcessFunction<DosSketchLog,Map<String, String>, DosEventLog> {
private static final Log logger = LogFactory.get();
private static Map<String, Map<String, DosBaselineThreshold>> baselineMap = new HashMap<>();
@@ -100,8 +100,10 @@ public class DosDetection extends BroadcastProcessFunction<DosSketchLog,Map<Stri
}
@Override
public void processBroadcastElement(Map<String, byte[]> value, Context ctx, Collector<DosEventLog> out) throws Exception {
IpUtils.updateIpLook(value);
public void processBroadcastElement(Map<String, String> value, Context ctx, Collector<DosEventLog> out) throws Exception {
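// the broadcast map carries knowledge file name -> sha256 entries; reload the IP lookup only when an update actually arrived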
if (!value.isEmpty()){
IpUtils.updateIpLook(value);
}
}
private DosEventLog getDosEventLogBySensitivityThreshold(DosSketchLog value) {
@@ -147,23 +149,6 @@ public class DosDetection extends BroadcastProcessFunction<DosSketchLog,Map<Stri
result = getDosEventLog(value, bitBase, diffByte, profileId, STATIC_CONDITION_TYPE, BITS_TAG);
}
// long base = threshold.getSessionsPerSec();
// long diff = value.getSketch_sessions() - base;
// long profileId = threshold.getProfileId();
// DosEventLog result = getDosEventLog(value, base, diff, profileId, STATIC_CONDITION_TYPE, SESSIONS_TAG);
// if (result == null) {
// base = threshold.getPacketsPerSec();
// diff = value.getSketch_packets() - base;
// profileId = threshold.getProfileId();
// result = getDosEventLog(value, base, diff,profileId, STATIC_CONDITION_TYPE, PACKETS_TAG);
// if (result == null) {
// base = threshold.getBitsPerSec();
// diff = value.getSketch_bytes() - base;
// profileId=threshold.getProfileId();
// result = getDosEventLog(value, base, diff, profileId, STATIC_CONDITION_TYPE, BITS_TAG);
// }
// }
/*
ArrayList<DosEventLog> dosEventLogs = new ArrayList<>();
if (result != null){

OutputStreamSink.java

@@ -29,7 +29,7 @@ import java.util.Properties;
* @author 94976
*/
public class OutputStreamSink {
// private static final Logger logger = LoggerFactory.getLogger(OutputStreamSink.class);
// private static final Logger logger = LoggerFactory.getLogger(OutputStreamSink.class);
private static final Log logger = LogFactory.get();
public static OutputTag<DosMetricsLog> outputTag = new OutputTag<DosMetricsLog>("traffic server ip metrics"){};
@@ -46,7 +46,7 @@ public class OutputStreamSink {
}
private static SingleOutputStreamOperator<DosEventLog> getEventSinkStream(SingleOutputStreamOperator<DosSketchLog> middleStream){
DataStreamSource<Map<String, byte[]>> broadcastSource=null;
DataStreamSource<Map<String, String>> broadcastSource=null;
Properties nacosProperties = new Properties();
nacosProperties.put(PropertyKeyConst.SERVER_ADDR,CommonConfig.NACOS_SERVER_ADDR);
@@ -55,7 +55,7 @@ public class OutputStreamSink {
nacosProperties.setProperty(PropertyKeyConst.NAMESPACE, CommonConfig.NACOS_NAMESPACE);
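// in CLUSTER mode the knowledge files are staged on HDFS; in single mode they are kept in the local download directory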
if ("CLUSTER".equals(CommonConfig.CLUSTER_OR_SINGLE)){
broadcastSource = DosSketchSource.broadcastSource(nacosProperties,CommonConfig.HDFS_PATH);
broadcastSource = DosSketchSource.broadcastSource(nacosProperties);
}else {
broadcastSource= DosSketchSource.singleBroadcastSource(nacosProperties);
}
@@ -63,7 +63,7 @@ public class OutputStreamSink {
MapStateDescriptor<String,Map> descriptor =
new MapStateDescriptor<>("descriptorTest", Types.STRING, TypeInformation.of(Map.class));
BroadcastStream<Map<String, byte[]>> broadcast = broadcastSource.broadcast(descriptor);
BroadcastStream<Map<String, String>> broadcast = broadcastSource.broadcast(descriptor);
return middleStream
.connect(broadcast)

DosSketchSource.java

@@ -36,11 +36,11 @@ public class DosSketchSource {
}
public static DataStreamSource<Map<String, byte[]>> broadcastSource(Properties nacosProperties, String STORE_PATH){
return streamExeEnv.addSource(new HttpSource(nacosProperties, CommonConfig.NACOS_DATA_ID, CommonConfig.NACOS_GROUP, CommonConfig.NACOS_READ_TIMEOUT,STORE_PATH));
public static DataStreamSource<Map<String, String>> broadcastSource(Properties nacosProperties){
return streamExeEnv.addSource(new HttpSource(nacosProperties, CommonConfig.NACOS_DATA_ID, CommonConfig.NACOS_GROUP, CommonConfig.NACOS_READ_TIMEOUT));
}
public static DataStreamSource<Map<String, byte[]>> singleBroadcastSource(Properties nacosProperties){
public static DataStreamSource<Map<String, String>> singleBroadcastSource(Properties nacosProperties){
return streamExeEnv.addSource(new SingleHttpSource(nacosProperties, CommonConfig.NACOS_DATA_ID, CommonConfig.NACOS_GROUP, CommonConfig.NACOS_READ_TIMEOUT));
}
}

HttpSource.java

@@ -9,6 +9,7 @@ import com.alibaba.nacos.api.NacosFactory;
import com.alibaba.nacos.api.PropertyKeyConst;
import com.alibaba.nacos.api.config.ConfigService;
import com.alibaba.nacos.api.config.listener.Listener;
import com.alibaba.nacos.api.exception.NacosException;
import com.fasterxml.jackson.databind.JavaType;
import com.google.common.base.Joiner;
import com.jayway.jsonpath.JsonPath;
@@ -29,17 +30,16 @@ import java.util.*;
import java.util.concurrent.Executor;
public class HttpSource extends RichHttpSourceFunction<Map<String, byte[]>> {
public class HttpSource extends RichHttpSourceFunction<Map<String, String>> {
private static final Logger logger = LoggerFactory.getLogger(HttpSource.class);
private static final int TRY_TIMES = 3;
private static final String EXPR = "$.[?(@.version=='latest' && @.name in ['ip_v4_built_in','ip_v6_built_in','ip_v4_user_defined','ip_v6_user_defined'])].['name','sha256','format','path']";
// private static final String EXPR = "$.[?(@.version=='latest' && @.name in ['ip_v4_user_defined'])].['name','sha256','format','path']";
private static Map<String, String> knowledgeMetaCache = new HashMap<>();
private static HashMap<String, byte[]> knowledgeFileCache;
private static HashMap<String, String> knowledgeUpdateCache;
private static final int TRY_TIMES = 3;
private static HttpClientUtils2 httpClientUtils;
@@ -55,9 +55,6 @@ public class HttpSource extends RichHttpSourceFunction<Map<String, byte[]>> {
// Nacos connection timeout
private long NACOS_READ_TIMEOUT;
// path for uploads to HDFS
private String STORE_PATH;
private ConfigService configService;
private static Header header;
@@ -68,17 +65,11 @@ public class HttpSource extends RichHttpSourceFunction<Map<String, byte[]>> {
private boolean isSending = false;
// private boolean isRunning = true;
public HttpSource(Properties nacosProperties, String NACOS_DATA_ID, String NACOS_GROUP, long NACOS_READ_TIMEOUT, String storePath) {
public HttpSource(Properties nacosProperties, String NACOS_DATA_ID, String NACOS_GROUP, long NACOS_READ_TIMEOUT) {
this.nacosProperties = nacosProperties;
this.NACOS_DATA_ID = NACOS_DATA_ID;
this.NACOS_GROUP = NACOS_GROUP;
this.NACOS_READ_TIMEOUT = NACOS_READ_TIMEOUT;
this.STORE_PATH = storePath;
}
@Override
@@ -88,22 +79,29 @@ public class HttpSource extends RichHttpSourceFunction<Map<String, byte[]>> {
// initialize the metadata cache
knowledgeMetaCache = new HashMap<>(16);
// initialize the location database file cache
knowledgeFileCache = new HashMap<>(16);
knowledgeUpdateCache = new HashMap<>(16);
header = new BasicHeader("token", CommonConfig.HOS_TOKEN);
// connect to the Nacos configuration service
try {
configService = NacosFactory.createConfigService(nacosProperties);
}catch (NacosException e){
logger.error("Get Schema config from Nacos error,The exception message is :{}", e.getMessage());
}
// initialize the knowledge base
initKnowledge();
logger.info("Connecting to Nacos " + nacosProperties.getProperty(PropertyKeyConst.SERVER_ADDR));
configService = NacosFactory.createConfigService(nacosProperties);
}
@Override
public void run(SourceContext ctx) throws Exception {
// ctx.emitWatermark(new Watermark(Long.MAX_VALUE));
String config = configService.getConfig(NACOS_DATA_ID, NACOS_GROUP, NACOS_READ_TIMEOUT);
if (StringUtil.isNotBlank(config)) {
ArrayList<Object> metaList = JsonPath.parse(config).read(EXPR);
loadKnowledge(metaList);
if (isSending){
ctx.collect(knowledgeFileCache);
}
if (!knowledgeUpdateCache.isEmpty()){
ctx.collect(knowledgeUpdateCache);
knowledgeUpdateCache.clear();
}
// }
configService.addListener(NACOS_DATA_ID, NACOS_GROUP, new Listener() {
@@ -118,7 +116,7 @@ public class HttpSource extends RichHttpSourceFunction<Map<String, byte[]>> {
logger.info("receive update config:" + configMsg);
if (StringUtil.isNotBlank(configMsg)) {
ArrayList<Object> metaList = JsonPath.parse(configMsg).read(EXPR);
if (metaList.size() >= 1) {
if (metaList.size() > 0) {
for (Object metadata : metaList) {
JSONObject knowledgeJson = new JSONObject(metadata, false, true);
String fileName = Joiner.on(CommonConfig.LOCATION_SEPARATOR).useForNull("").join(knowledgeJson.getStr("name"),
@@ -130,8 +128,9 @@ public class HttpSource extends RichHttpSourceFunction<Map<String, byte[]>> {
updateKnowledge(fileName, filePath,sha256);
}
}
if (isSending){
ctx.collect(knowledgeFileCache);
if (!knowledgeUpdateCache.isEmpty()){
ctx.collect(knowledgeUpdateCache);
knowledgeUpdateCache.clear();
}
}
}
@@ -139,7 +138,6 @@ public class HttpSource extends RichHttpSourceFunction<Map<String, byte[]>> {
} catch (Exception e) {
logger.error("Failed to listen for Nacos configuration updates", e);
}
System.out.println(configMsg);
}
});
@@ -153,10 +151,17 @@ public class HttpSource extends RichHttpSourceFunction<Map<String, byte[]>> {
}
private void loadKnowledge(ArrayList<Object> metaList) {
InputStream inputStream = null;
private void initKnowledge(){
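// fetch the knowledge metadata from Nacos at startup and sync any local file whose sha256 no longer matches the record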
String configMsg = "";
try {
if (metaList.size() >= 1) {
configMsg=configService.getConfig(NACOS_DATA_ID, NACOS_GROUP, NACOS_READ_TIMEOUT);
} catch (NacosException e) {
logger.error("Failed to fetch the knowledge base metadata configuration from Nacos, exception message: {}", e.getMessage());
}
if (StringUtil.isNotBlank(configMsg)){
ArrayList<Object> metaList = JsonPath.parse(configMsg).read(EXPR);
if (metaList.size() > 0) {
for (Object metadata : metaList) {
JSONObject knowledgeJson = new JSONObject(metadata, false, true);
String fileName = Joiner.on(CommonConfig.LOCATION_SEPARATOR).useForNull("").join(knowledgeJson.getStr("name"),
@@ -168,8 +173,6 @@ public class HttpSource extends RichHttpSourceFunction<Map<String, byte[]>> {
if (sha256.equals(localFileSha256Hex)){
logger.info("Local file {} sha256: {}, Nacos record: {}, the sha256 values match", fileName, localFileSha256Hex, sha256);
knowledgeMetaCache.put(fileName, sha256);
// knowledgeFileCache.put(fileName, IOUtils.toByteArray(inputStream));
knowledgeFileCache.put(fileName, localFileByte);
}else {
logger.info("Local file {} sha256: {}, Nacos record: {}, the sha256 values differ, updating the local file and cache", fileName, localFileSha256Hex, sha256);
updateKnowledge(fileName,filePath,sha256);
@@ -177,55 +180,54 @@ public class HttpSource extends RichHttpSourceFunction<Map<String, byte[]>> {
}
}
// } catch (IOException ioException) {
} catch (Exception ioException) {
ioException.printStackTrace();
} finally {
IOUtils.closeQuietly(inputStream);
}
}
private void updateKnowledge(String fileName, String filePath,String sha256) {
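// download the file from HOS and verify its sha256 against the Nacos record, retrying up to TRY_TIMES with a 10s pause between attempts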
InputStream inputStream = null;
// FileOutputStream outputStream = null;
int retryNum = 0;
try {
while (retryNum < TRY_TIMES){
inputStream = httpClientUtils.httpGetInputStream(filePath, 90000, header);
if (inputStream !=null){
byte[] downloadBytes = IOUtils.toByteArray(inputStream);
if (downloadBytes.length>0){
String downloadFileSha256Hex = DigestUtil.sha256Hex(downloadBytes);
if (sha256.equals(downloadFileSha256Hex)){
logger.info("Downloaded {} from HOS with sha256: {}, Nacos record: {}, the sha256 values match", fileName, downloadFileSha256Hex, sha256);
// HdfsUtils.uploadFileByBytes(CommonConfig.HDFS_PATH + fileName, downloadBytes);
String downloadFileSha256Hex = DigestUtil.sha256Hex(downloadBytes);
if (sha256.equals(downloadFileSha256Hex)&& downloadBytes.length > 0 ){
logger.info("Downloaded {} from HOS with sha256: {}, Nacos record: {}, the sha256 values match", fileName, downloadFileSha256Hex, sha256);
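// persist the verified file first; only on success record the new sha256 and queue it in knowledgeUpdateCache for the broadcast stream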
boolean updateStatus = updateLocalFile(fileName, downloadBytes);
if (updateStatus){
knowledgeMetaCache.put(fileName,sha256);
knowledgeFileCache.put(fileName, downloadBytes);
updateLocalFile(fileName);
knowledgeUpdateCache.put(fileName, sha256);
retryNum = TRY_TIMES;
isSending = true;
}else {
logger.error("Downloaded {} from HOS with sha256: {}, Nacos record: {}, the sha256 values differ, starting retry {} of the download", fileName, downloadFileSha256Hex, sha256, retryNum);
}else {
retryNum++;
// avoid requesting HOS too frequently
Thread.sleep(10000);
}
}else {
logger.error("Downloaded {} from HOS with sha256: {}, Nacos record: {}, the sha256 values differ, starting retry {} of the download", fileName, downloadFileSha256Hex, sha256, retryNum);
retryNum++;
// avoid requesting HOS too frequently
Thread.sleep(10000);
}
}
}
} catch (IOException ioException) {
ioException.printStackTrace();
} catch (InterruptedException e) {
e.printStackTrace();
} finally {
IOUtils.closeQuietly(inputStream);
// IOUtils.closeQuietly(outputStream);
}
}
private void updateLocalFile(String fileName) {
private boolean updateLocalFile(String fileName,byte[] downloadBytes) {
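// upload the downloaded bytes to HDFS and report whether the write succeeded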
FileOutputStream outputStream = null;
boolean updateStatus = false;
try {
HdfsUtils.uploadFileByBytes(CommonConfig.HDFS_PATH + fileName, knowledgeFileCache.get(fileName));
HdfsUtils.uploadFileByBytes(CommonConfig.HDFS_PATH + fileName, downloadBytes);
updateStatus=true;
} catch (IOException ioe) {
logger.error("IO exception while updating local file {}, exception message: {}", fileName, ioe.getMessage());
ioe.printStackTrace();
@@ -235,7 +237,7 @@ public class HttpSource extends RichHttpSourceFunction<Map<String, byte[]>> {
} finally {
IOUtils.closeQuietly(outputStream);
}
return updateStatus;
}
private static byte[] getLocalFile(String name) {

SingleHttpSource.java

@@ -9,6 +9,7 @@ import com.alibaba.nacos.api.NacosFactory;
import com.alibaba.nacos.api.PropertyKeyConst;
import com.alibaba.nacos.api.config.ConfigService;
import com.alibaba.nacos.api.config.listener.Listener;
import com.alibaba.nacos.api.exception.NacosException;
import com.fasterxml.jackson.databind.JavaType;
import com.google.common.base.Joiner;
import com.jayway.jsonpath.JsonPath;
@@ -29,10 +30,19 @@ import java.text.SimpleDateFormat;
import java.util.*;
import java.util.concurrent.Executor;
public class SingleHttpSource extends RichHttpSourceFunction<Map<String, byte[]>> {
public class SingleHttpSource extends RichHttpSourceFunction<Map<String, String>> {
private static final Logger logger = LoggerFactory.getLogger(SingleHttpSource.class);
private static final String EXPR = "$.[?(@.version=='latest' && @.name in ['ip_v4_built_in','ip_v6_built_in','ip_v4_user_defined','ip_v6_user_defined'])].['name','sha256','format','path']";
private static Map<String, String> knowledgeMetaCache = new HashMap<>();
private static HashMap<String, String> knowledgeUpdateCache;
private static final int TRY_TIMES = 3;
private Properties nacosProperties;
private String NACOS_DATA_ID;
@@ -41,25 +51,14 @@ public class SingleHttpSource extends RichHttpSourceFunction<Map<String, byte[]>
private long NACOS_READ_TIMEOUT;
private static String STORE_PATH;
private static HttpClientUtils2 httpClientUtils ;
private ConfigService configService;
private static Header header;
private static final String EXPR = "$.[?(@.version=='latest' && @.name in ['ip_v4_built_in','ip_v6_built_in','ip_v4_user_defined','ip_v6_user_defined'])].['name','sha256','format','path']";
private static Map<String, String> knowledgeMetaCache = new HashMap<>();
private static HashMap<String, byte[]> knowledgeFileCache;
private boolean isRunning = true;
// whether to emit downstream; defaults to not sending
private boolean isSending = false;
private static final int TRY_TIMES = 3;
public SingleHttpSource(Properties nacosProperties, String NACOS_DATA_ID, String NACOS_GROUP, long NACOS_READ_TIMEOUT) {
@@ -75,33 +74,32 @@ public class SingleHttpSource extends RichHttpSourceFunction<Map<String, byte[]>
public void open(Configuration parameters) throws Exception {
super.open(parameters);
httpClientUtils = new HttpClientUtils2();
logger.info("Connecting to Nacos " + nacosProperties.getProperty(PropertyKeyConst.SERVER_ADDR));
configService = NacosFactory.createConfigService(nacosProperties);
// initialize the metadata cache
knowledgeMetaCache = new HashMap<>(16);
// initialize the location database file cache
knowledgeFileCache = new HashMap<>(16);
knowledgeUpdateCache = new HashMap<>(16);
header = new BasicHeader("token", CommonConfig.HOS_TOKEN);
// connect to the Nacos configuration service
try {
configService = NacosFactory.createConfigService(nacosProperties);
}catch (NacosException e){
logger.error("Get Schema config from Nacos error,The exception message is :{}", e.getMessage());
}
// initialize the knowledge base
initKnowledge();
logger.info("Connecting to Nacos " + nacosProperties.getProperty(PropertyKeyConst.SERVER_ADDR));
}
@Override
public void run(SourceContext ctx) throws Exception {
// ctx.emitWatermark(new Watermark(Long.MAX_VALUE));
String config = configService.getConfig(NACOS_DATA_ID, NACOS_GROUP, NACOS_READ_TIMEOUT);
SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
String format = formatter.format(new Date());
logger.info(format + "receive config from nacos:" + config);
System.out.println(format + "receive config from nacos:" + config);
if (StringUtil.isNotBlank(config)) {
ArrayList<Object> metaList = JsonPath.parse(config).read(EXPR);
loadKnowledge(metaList);
if (isSending){
ctx.collect(knowledgeFileCache);
}
if (!knowledgeUpdateCache.isEmpty()){
ctx.collect(knowledgeUpdateCache);
knowledgeUpdateCache.clear();
}
configService.addListener(NACOS_DATA_ID, NACOS_GROUP, new Listener() {
@Override
public Executor getExecutor() {
@@ -114,7 +112,7 @@ public class SingleHttpSource extends RichHttpSourceFunction<Map<String, byte[]>
logger.info("receive update config:" + configMsg);
if (StringUtil.isNotBlank(configMsg)) {
ArrayList<Object> metaList = JsonPath.parse(configMsg).read(EXPR);
if (metaList.size() >= 1) {
if (metaList.size() > 0) {
for (Object metadata : metaList) {
JSONObject knowledgeJson = new JSONObject(metadata, false, true);
String fileName = Joiner.on(CommonConfig.LOCATION_SEPARATOR).useForNull("").join(knowledgeJson.getStr("name"),
@@ -126,8 +124,9 @@ public class SingleHttpSource extends RichHttpSourceFunction<Map<String, byte[]>
updateKnowledge(fileName, filePath,sha256);
}
}
if (isSending){
ctx.collect(knowledgeFileCache);
if (!knowledgeUpdateCache.isEmpty()){
ctx.collect(knowledgeUpdateCache);
knowledgeUpdateCache.clear();
}
}
}
@@ -140,17 +139,31 @@ public class SingleHttpSource extends RichHttpSourceFunction<Map<String, byte[]>
});
while (isRunning) {
Thread.sleep(10000);
try {
Thread.sleep(10000);
}catch (InterruptedException e){
e.printStackTrace();
}
}
}
private void loadKnowledge(ArrayList<Object> metaList) {
// InputStream inputStream = null;
private void initKnowledge(){
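// fetch the knowledge metadata from Nacos at startup and sync any local file whose sha256 no longer matches the record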
String configMsg = "";
try {
if (metaList.size() >= 1) {
configMsg=configService.getConfig(NACOS_DATA_ID, NACOS_GROUP, NACOS_READ_TIMEOUT);
} catch (NacosException e) {
logger.error("Failed to fetch the knowledge base metadata configuration from Nacos, exception message: {}", e.getMessage());
}
if (StringUtil.isNotBlank(configMsg)){
ArrayList<Object> metaList = JsonPath.parse(configMsg).read(EXPR);
if (metaList.size() > 0) {
for (Object metadata : metaList) {
JSONObject knowledgeJson = new JSONObject(metadata, false, true);
String fileName = Joiner.on(CommonConfig.LOCATION_SEPARATOR).useForNull("").join(knowledgeJson.getStr("name"),
@@ -162,51 +175,53 @@ public class SingleHttpSource extends RichHttpSourceFunction<Map<String, byte[]>
if (sha256.equals(localFileSha256Hex)){
logger.info("Local file {} sha256: {}, Nacos record: {}, the sha256 values match", fileName, localFileSha256Hex, sha256);
knowledgeMetaCache.put(fileName, sha256);
knowledgeFileCache.put(fileName, localFileByte);
}else {
logger.info("Local file {} sha256: {}, Nacos record: {}, the sha256 values differ, updating the local file and cache", fileName, localFileSha256Hex, sha256);
updateKnowledge(fileName,filePath,sha256);
}
}
}
} catch (RuntimeException exception) {
exception.printStackTrace();
}
// finally {
// IOUtils.closeQuietly(inputStream);
// }
}
private void updateKnowledge(String fileName, String filePath,String sha256) {
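// download the file from HOS and verify its sha256 against the Nacos record, retrying up to TRY_TIMES with a 10s pause between attempts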
InputStream inputStream = null;
// FileOutputStream outputStream = null;
int retryNum = 0;
try {
while (retryNum < TRY_TIMES){
inputStream = httpClientUtils.httpGetInputStream(filePath, 3000, header);
if (inputStream !=null){
byte[] downloadBytes = IOUtils.toByteArray(inputStream);
if (downloadBytes.length>0){
String downloadFileSha256Hex = DigestUtil.sha256Hex(downloadBytes);
if (sha256.equals(downloadFileSha256Hex)){
logger.info("Downloaded {} from HOS with sha256: {}, Nacos record: {}, the sha256 values match", fileName, downloadFileSha256Hex, sha256);
knowledgeMetaCache.put(fileName, sha256);
knowledgeFileCache.put(fileName, downloadBytes);
updateLocalFile(fileName);
if (sha256.equals(downloadFileSha256Hex)&& downloadBytes.length > 0 ){
logger.info("Downloaded {} from HOS with sha256: {}, Nacos record: {}, the sha256 values match", fileName, downloadFileSha256Hex, sha256);
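// persist the verified file first; only on success record the new sha256 and queue it in knowledgeUpdateCache for the broadcast stream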
boolean updateStatus = updateLocalFile(fileName, downloadBytes);
if (updateStatus){
knowledgeMetaCache.put(fileName,sha256);
knowledgeUpdateCache.put(fileName, sha256);
retryNum = TRY_TIMES;
isSending = true;
}else {
retryNum++;
// avoid requesting HOS too frequently
Thread.sleep(10000);
}
// isSending = true;
}else {
logger.error("Downloaded {} from HOS with sha256: {}, Nacos record: {}, the sha256 values differ, starting retry {} of the download", fileName, downloadFileSha256Hex, sha256, retryNum);
retryNum++;
// avoid requesting HOS too frequently
Thread.sleep(10000);
}
}
}
}
} catch (IOException ioException) {
ioException.printStackTrace();
} catch (InterruptedException e) {
e.printStackTrace();
} finally {
IOUtils.closeQuietly(inputStream);
}
@@ -214,14 +229,15 @@ public class SingleHttpSource extends RichHttpSourceFunction<Map<String, byte[]>
private void updateLocalFile(String fileName) {
// InputStream inputStream = null;
private boolean updateLocalFile(String fileName,byte[] downloadBytes) {
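// write the downloaded bytes to the local download directory and report whether the write succeeded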
FileOutputStream outputStream = null;
boolean updateStatus = false;
try {
FileUtil.mkdir(CommonConfig.DOWNLOAD_PATH);
File file = new File(CommonConfig.DOWNLOAD_PATH.concat(File.separator).concat(fileName));
outputStream = new FileOutputStream(file);
IoUtil.copy(new ByteArrayInputStream(knowledgeFileCache.get(fileName)), outputStream);
IoUtil.copy(new ByteArrayInputStream(downloadBytes), outputStream);
updateStatus=true;
} catch (IOException ioe) {
logger.error("IO exception while updating local file {}, exception message: {}", fileName, ioe.getMessage());
ioe.printStackTrace();
@@ -231,7 +247,7 @@ public class SingleHttpSource extends RichHttpSourceFunction<Map<String, byte[]>
} finally {
IOUtils.closeQuietly(outputStream);
}
return updateStatus;
}
private static byte[] getLocalFile(String name) {

IpUtils.java

@@ -1,11 +1,14 @@
package com.zdjizhi.utils;
import cn.hutool.core.io.file.FileReader;
import cn.hutool.crypto.digest.DigestUtil;
import com.zdjizhi.common.CommonConfig;
import com.zdjizhi.common.CustomFile;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import java.util.Map;
@@ -86,20 +89,126 @@ public class IpUtils {
}
}
public static void updateIpLook(Map<String, byte[]> knowledgeFileCache){
public static void updateIpLook(Map<String, String> knowledgeFileCache){
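// knowledgeFileCache maps mmdb file name -> expected sha256; rebuild the IP lookup from files whose local copy matches that hash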
try{
IpLookupV2.Builder builder = new IpLookupV2.Builder(false);
ipLookup= builder.loadDataFileV4(new ByteArrayInputStream(knowledgeFileCache.get("ip_v4_built_in.mmdb")))
.loadDataFileV6(new ByteArrayInputStream(knowledgeFileCache.get("ip_v6_built_in.mmdb")))
.loadDataFilePrivateV4(new ByteArrayInputStream(knowledgeFileCache.get("ip_v4_user_defined.mmdb")))
.loadDataFilePrivateV6(new ByteArrayInputStream(knowledgeFileCache.get("ip_v6_user_defined.mmdb")))
.build();
if ("CLUSTER".equals(CommonConfig.CLUSTER_OR_SINGLE)) {
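// cluster mode: read each mmdb from HDFS and load it only when its sha256 matches the broadcast record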
byte[] ipv4BuiltBytes = HdfsUtils.getFileBytes(CommonConfig.HDFS_PATH + "ip_v4_built_in.mmdb");
if (ipv4BuiltBytes!=null){
if (knowledgeFileCache.containsKey("ip_v4_built_in.mmdb")){
String sha256 = knowledgeFileCache.get("ip_v4_built_in.mmdb");
byte[] localFileByte = getLocalFile("ip_v4_built_in.mmdb");
String localFileSha256Hex = DigestUtil.sha256Hex(localFileByte);
if (sha256.equals(localFileSha256Hex)){
builder.loadDataFileV4(new ByteArrayInputStream(ipv4BuiltBytes));
}
}
}
byte[] ipv6BuiltBytes = HdfsUtils.getFileBytes(CommonConfig.HDFS_PATH + "ip_v6_built_in.mmdb");
if (ipv6BuiltBytes!=null){
if (knowledgeFileCache.containsKey("ip_v6_built_in.mmdb")) {
String sha256 = knowledgeFileCache.get("ip_v6_built_in.mmdb");
byte[] localFileByte = getLocalFile("ip_v6_built_in.mmdb");
String localFileSha256Hex = DigestUtil.sha256Hex(localFileByte);
if (sha256.equals(localFileSha256Hex)) {
builder.loadDataFileV6(new ByteArrayInputStream(ipv6BuiltBytes));
}
}
}
byte[] ipv4UserBytes = HdfsUtils.getFileBytes(CommonConfig.HDFS_PATH + "ip_v4_user_defined.mmdb");
if (ipv4UserBytes!=null){
if (knowledgeFileCache.containsKey("ip_v4_user_defined.mmdb")) {
String sha256 = knowledgeFileCache.get("ip_v4_user_defined.mmdb");
byte[] localFileByte = getLocalFile("ip_v4_user_defined.mmdb");
String localFileSha256Hex = DigestUtil.sha256Hex(localFileByte);
if (sha256.equals(localFileSha256Hex)) {
builder.loadDataFilePrivateV4(new ByteArrayInputStream(ipv4UserBytes));
}
}
}
byte[] ipv6UserBytes = HdfsUtils.getFileBytes(CommonConfig.HDFS_PATH + "ip_v6_user_defined.mmdb");
if (ipv6UserBytes!=null){
if (knowledgeFileCache.containsKey("ip_v6_user_defined.mmdb")) {
String sha256 = knowledgeFileCache.get("ip_v6_user_defined.mmdb");
byte[] localFileByte = getLocalFile("ip_v6_user_defined.mmdb");
String localFileSha256Hex = DigestUtil.sha256Hex(localFileByte);
if (sha256.equals(localFileSha256Hex)) {
builder.loadDataFilePrivateV6(new ByteArrayInputStream(ipv6UserBytes));
}
}
}
}else if ("SINGLE".equals(CommonConfig.CLUSTER_OR_SINGLE)){
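// single-node mode: read each mmdb from the local download directory instead of HDFS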
byte[] ipv4BuiltBytes = FileByteUtils.getFileBytes(CommonConfig.DOWNLOAD_PATH + "ip_v4_built_in.mmdb");
if (ipv4BuiltBytes!=null){
if (knowledgeFileCache.containsKey("ip_v4_built_in.mmdb")){
String sha256 = knowledgeFileCache.get("ip_v4_built_in.mmdb");
byte[] localFileByte = getLocalFile("ip_v4_built_in.mmdb");
String localFileSha256Hex = DigestUtil.sha256Hex(localFileByte);
if (sha256.equals(localFileSha256Hex)){
builder.loadDataFileV4(new ByteArrayInputStream(ipv4BuiltBytes));
}
}
}
byte[] ipv6BuiltBytes = FileByteUtils.getFileBytes(CommonConfig.DOWNLOAD_PATH + "ip_v6_built_in.mmdb");
if (ipv6BuiltBytes!=null){
if (knowledgeFileCache.containsKey("ip_v6_built_in.mmdb")) {
String sha256 = knowledgeFileCache.get("ip_v6_built_in.mmdb");
byte[] localFileByte = getLocalFile("ip_v6_built_in.mmdb");
String localFileSha256Hex = DigestUtil.sha256Hex(localFileByte);
if (sha256.equals(localFileSha256Hex)) {
builder.loadDataFileV6(new ByteArrayInputStream(ipv6BuiltBytes));
}
}
}
byte[] ipv4UserBytes = FileByteUtils.getFileBytes(CommonConfig.DOWNLOAD_PATH + "ip_v4_user_defined.mmdb");
if (ipv4UserBytes!=null){
if (knowledgeFileCache.containsKey("ip_v4_user_defined.mmdb")) {
String sha256 = knowledgeFileCache.get("ip_v4_user_defined.mmdb");
byte[] localFileByte = getLocalFile("ip_v4_user_defined.mmdb");
String localFileSha256Hex = DigestUtil.sha256Hex(localFileByte);
if (sha256.equals(localFileSha256Hex)) {
builder.loadDataFilePrivateV4(new ByteArrayInputStream(ipv4UserBytes));
}
}
}
byte[] ipv6UserBytes = FileByteUtils.getFileBytes(CommonConfig.DOWNLOAD_PATH + "ip_v6_user_defined.mmdb");
if (ipv6UserBytes!=null){
if (knowledgeFileCache.containsKey("ip_v6_user_defined.mmdb")) {
String sha256 = knowledgeFileCache.get("ip_v6_user_defined.mmdb");
byte[] localFileByte = getLocalFile("ip_v6_user_defined.mmdb");
String localFileSha256Hex = DigestUtil.sha256Hex(localFileByte);
if (sha256.equals(localFileSha256Hex)) {
builder.loadDataFilePrivateV6(new ByteArrayInputStream(ipv6UserBytes));
}
}
}
}
ipLookup = builder.build();
}catch (Exception e){
LOG.error("Failed to load the IP lookup databases", e);
}
}
private static byte[] getLocalFile(String name) {
byte[] fileBytes = null;
try {
fileBytes = "CLUSTER".equals(CommonConfig.CLUSTER_OR_SINGLE) ?
HdfsUtils.getFileBytes(CommonConfig.HDFS_PATH + name) :
new FileReader(CommonConfig.DOWNLOAD_PATH + name).readBytes();
} catch (RuntimeException | IOException e) {
e.printStackTrace();
}
return fileBytes;
}
public static void main(String[] args) {
System.out.println(ipLookup.countryLookup("49.7.115.37"));