Add a VSYS switch to the GTP-C/RADIUS correlation. Data remains compatible with earlier versions: records without a vsys_id default to 1. (TSG-11939)
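For context, a minimal sketch of the cache-key convention this commit introduces (illustrative only; the helper class below is hypothetical and not part of the diff, it simply mirrors how RadiusRelation/GtpCRelation append the vsys id when the relationship model switch is enabled):

// Illustrative sketch: how cache keys are composed when the VSYS switch is on.
// Records without a vsys_id fall back to "1", so older data keeps working.
final class VsysKeySketch {
    static String cacheKey(String base, Object vsysId, boolean vsysModeEnabled) {
        if (!vsysModeEnabled) {
            return base;                                         // legacy behaviour: key by IP/TEID only
        }
        String id = (vsysId == null) ? "1" : vsysId.toString();  // default vsys_id = 1
        return base + id;                                        // e.g. framed_ip + vsys_id
    }
}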

This commit is contained in:
qidaijie
2022-09-21 18:15:03 +08:00
parent b7eddb0e8c
commit 60d12d3f8c
28 changed files with 184 additions and 216 deletions


@@ -0,0 +1,18 @@
package com.zdjizhi.tools.exception;
/**
* @author qidaijie
* @Package com.zdjizhi.tools.exception
* @Description:
* @date 2021/3/25 9:42
*/
public class FlowWriteException extends RuntimeException {
public FlowWriteException() {
}
public FlowWriteException(String message) {
super(message);
}
}


@@ -0,0 +1,17 @@
package com.zdjizhi.tools.functions;
import com.zdjizhi.utils.StringUtil;
import org.apache.flink.api.common.functions.FilterFunction;
/**
* @author qidaijie
* @Package com.zdjizhi.tools.functions
* @Description:
* @date 2021/5/27 15:01
*/
public class FilterNullFunction implements FilterFunction<String> {
@Override
public boolean filter(String message) {
return StringUtil.isNotBlank(message);
}
}


@@ -0,0 +1,23 @@
package com.zdjizhi.tools.functions;
import com.zdjizhi.tools.general.TransFormMap;
import org.apache.flink.api.common.functions.MapFunction;
import java.util.Map;
/**
* @author qidaijie
* @Package com.zdjizhi.tools.functions
* @Description:
* @date 2021/5/27 15:01
*/
public class MapCompletedFunction implements MapFunction<Map<String, Object>, String> {
@Override
@SuppressWarnings("unchecked")
public String map(Map<String, Object> logs) {
return TransFormMap.dealCommonMessage(logs);
}
}


@@ -0,0 +1,23 @@
package com.zdjizhi.tools.functions;
import com.zdjizhi.tools.general.TransFormTypeMap;
import org.apache.flink.api.common.functions.MapFunction;
import java.util.Map;
/**
* @author qidaijie
* @Package com.zdjizhi.tools.functions
* @Description:
* @date 2021/5/27 15:01
*/
public class TypeMapCompletedFunction implements MapFunction<Map<String, Object>, String> {
@Override
@SuppressWarnings("unchecked")
public String map(Map<String, Object> logs) {
return TransFormTypeMap.dealCommonMessage(logs);
}
}


@@ -0,0 +1,213 @@
package com.zdjizhi.tools.general;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.tools.zookeeper.DistributedLock;
import com.zdjizhi.tools.zookeeper.ZookeeperUtils;
/**
* Snowflake id generator
*
* @author qidaijie
*/
public class SnowflakeId {
private static final Log logger = LogFactory.get();
/**
* 64 bits in total; the first bit is the sign bit and is always 0.
* Timestamp: 39 bits (~17 years). dataCenterId (one per environment/job): 5 bits (0-31).
* workerId (one per process): 8 bits (0-255). Sequence: 11 bits (2047 ids/ms).
*
* sequence ids per ms = (-1L ^ (-1L << 11))
* max usable years = (1L << 39) / (1000L * 60 * 60 * 24 * 365)
*/
/**
* Epoch start (2020-11-14 00:00:00); usable for about 17 years
*/
private final long twepoch = 1605283200000L;
/**
* Number of bits used by the worker id
*/
private final long workerIdBits = 8L;
/**
* Number of bits used by the data center id
*/
private final long dataCenterIdBits = 5L;
/**
* Maximum supported worker id, 255 here (this shift trick quickly yields the largest value an n-bit number can hold)
* M << n = M * 2^n
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* Maximum supported data center id, 31 here
*/
private final long maxDataCenterId = -1L ^ (-1L << dataCenterIdBits);
/**
* Number of bits used by the per-millisecond sequence
*/
private final long sequenceBits = 11L;
/**
* Worker id is shifted left by 11 bits (sequenceBits)
*/
private final long workerIdShift = sequenceBits;
/**
* Data center id is shifted left by 19 bits (11 + 8)
*/
private final long dataCenterIdShift = sequenceBits + workerIdBits;
/**
* Timestamp is shifted left by 24 bits (11 + 8 + 5)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + dataCenterIdBits;
/**
* Mask for the sequence, 2047 here (11 bits)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* Worker id (0~255)
*/
private long workerId;
/**
* Data center id (0~31)
*/
private long dataCenterId;
/**
* Sequence within the current millisecond (0~2047)
*/
private long sequence = 0L;
/**
* Timestamp of the last generated id
*/
private long lastTimestamp = -1L;
/**
* Maximum tolerated clock rollback: 10 seconds
*/
private static final long rollBackTime = 10000L;
private static SnowflakeId idWorker;
private static ZookeeperUtils zookeeperUtils = new ZookeeperUtils();
static {
idWorker = new SnowflakeId(FlowWriteConfig.ZOOKEEPER_SERVERS, FlowWriteConfig.DATA_CENTER_ID_NUM);
}
//==============================Constructors=====================================
/**
* Constructor
*/
private SnowflakeId(String zookeeperIp, long dataCenterIdNum) {
DistributedLock lock = new DistributedLock(FlowWriteConfig.ZOOKEEPER_SERVERS, "disLocks1");
try {
lock.lock();
int tmpWorkerId = zookeeperUtils.modifyNode("/Snowflake/" + "worker" + dataCenterIdNum, zookeeperIp);
if (tmpWorkerId > maxWorkerId || tmpWorkerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (dataCenterIdNum > maxDataCenterId || dataCenterIdNum < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than ", maxDataCenterId));
}
this.workerId = tmpWorkerId;
this.dataCenterId = dataCenterIdNum;
} catch (RuntimeException e) {
logger.error("This is not usual error!!!===>>>" + e + "<<<===");
}finally {
lock.unlock();
}
}
// ==============================Methods==========================================
/**
* Get the next id (this method is thread-safe)
*
* @return the next 64-bit id
*/
private synchronized long nextId() {
long timestamp = timeGen();
//If the clock rolled back by less than rollBackTime, wait until it catches up
if (lastTimestamp - timestamp > 0 && lastTimestamp - timestamp < rollBackTime) {
timestamp = tilNextMillis(lastTimestamp);
}
//If the current time is still earlier than the last timestamp, the clock rolled back beyond the tolerance; refuse to generate ids
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
//Same millisecond as the last id: increment the sequence
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
//Sequence overflow within this millisecond
if (sequence == 0) {
//Block until the next millisecond and take its timestamp
timestamp = tilNextMillis(lastTimestamp);
}
}
//New millisecond: reset the sequence
else {
sequence = 0L;
}
//Remember the timestamp of this id
lastTimestamp = timestamp;
//Shift the parts and OR them together into a 64-bit id
return ((timestamp - twepoch) << timestampLeftShift)
| (dataCenterId << dataCenterIdShift)
| (workerId << workerIdShift)
| sequence;
}
/**
* Block until the next millisecond, i.e. until a newer timestamp is obtained
*
* @param lastTimestamp timestamp of the last generated id
* @return the current timestamp
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* Return the current time in milliseconds
*
* @return current time (ms)
*/
protected long timeGen() {
return System.currentTimeMillis();
}
/**
* Static entry point for generating ids
*
* @return the next generated id
*/
public static Long generateId() {
return idWorker.nextId();
}
}
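A minimal sketch of how an id produced by SnowflakeId can be decomposed back into its parts (illustrative only; the class below is hypothetical and simply mirrors the bit layout documented above):

// Illustrative decomposition of a SnowflakeId value (assumes the 39/5/8/11 bit layout above).
import com.zdjizhi.tools.general.SnowflakeId;

public final class SnowflakeIdParts {
    private static final long TWEPOCH = 1605283200000L;
    public static void print(long id) {
        long sequence = id & 2047L;            // low 11 bits
        long workerId = (id >> 11) & 255L;     // next 8 bits
        long dataCenterId = (id >> 19) & 31L;  // next 5 bits
        long timestamp = (id >> 24) + TWEPOCH; // remaining 39 bits plus the epoch
        System.out.printf("ts=%d worker=%d dc=%d seq=%d%n", timestamp, workerId, dataCenterId, sequence);
    }
    public static void main(String[] args) {
        print(SnowflakeId.generateId());
    }
}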


@@ -0,0 +1,132 @@
package com.zdjizhi.tools.general;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.utils.JsonMapper;
import com.zdjizhi.tools.json.JsonParseUtil;
import java.util.Map;
/**
* Description: utility class that transforms and completes log fields
*
* @author qidaijie
*/
public class TransFormMap {
private static final Log logger = LogFactory.get();
/**
* Parse a log record and complete its fields
*
* @param jsonMap raw log consumed from the Kafka topic, already parsed into a map
* @return the completed log as a JSON string
*/
@SuppressWarnings("unchecked")
public static String dealCommonMessage(Map<String, Object> jsonMap) {
try {
JsonParseUtil.dropJsonField(jsonMap);
for (String[] strings : JsonParseUtil.getJobList()) {
//value of the source log field
Object logValue = JsonParseUtil.getValue(jsonMap, strings[0]);
//key of the log field the result is written to
String appendToKey = strings[1];
//name of the function to apply
String function = strings[2];
//extra parameter value
String param = strings[3];
//current value of the target log field
Object appendToValue = JsonParseUtil.getValue(jsonMap, appendToKey);
functionSet(function, jsonMap, appendToKey, appendToValue, logValue, param);
}
return JsonMapper.toJsonString(jsonMap);
} catch (RuntimeException e) {
logger.error("TransForm logs failed,The exception is :" + e);
return null;
}
}
/**
* Set of functions applied to fields according to the schema description
*
* @param function name of the function to apply
* @param jsonMap parsed raw log map
* @param appendToKey key of the field to complete
* @param appendToValue current value of the field to complete
* @param logValue value of the source field
* @param param extra parameter value
*/
private static void functionSet(String function, Map<String, Object> jsonMap, String appendToKey, Object appendToValue, Object logValue, String param) {
switch (function) {
case "current_timestamp":
if (!(appendToValue instanceof Long)) {
JsonParseUtil.setValue(jsonMap, appendToKey, TransFunction.getCurrentTime());
}
break;
case "snowflake_id":
JsonParseUtil.setValue(jsonMap, appendToKey, SnowflakeId.generateId());
break;
case "geo_ip_detail":
if (logValue != null && appendToValue == null) {
JsonParseUtil.setValue(jsonMap, appendToKey, TransFunction.getGeoIpDetail(logValue.toString()));
}
break;
case "geo_asn":
if (logValue != null && appendToValue == null) {
JsonParseUtil.setValue(jsonMap, appendToKey, TransFunction.getGeoAsn(logValue.toString()));
}
break;
case "geo_ip_country":
if (logValue != null && appendToValue == null) {
JsonParseUtil.setValue(jsonMap, appendToKey, TransFunction.getGeoIpCountry(logValue.toString()));
}
break;
case "flattenSpec":
if (logValue != null && param != null) {
JsonParseUtil.setValue(jsonMap, appendToKey, TransFunction.flattenSpec(logValue.toString(), param));
}
break;
case "if":
if (param != null) {
JsonParseUtil.setValue(jsonMap, appendToKey, TransFunction.condition(jsonMap, param));
}
break;
case "decode_of_base64":
if (logValue != null) {
JsonParseUtil.setValue(jsonMap, appendToKey, TransFunction.decodeBase64(logValue.toString(), TransFunction.isJsonValue(jsonMap, param)));
}
break;
case "sub_domain":
if (appendToValue == null && logValue != null) {
JsonParseUtil.setValue(jsonMap, appendToKey, TransFunction.getTopDomain(logValue.toString()));
}
break;
case "radius_match":
if (logValue != null) {
JsonParseUtil.setValue(jsonMap, appendToKey, TransFunction.radiusMatch(jsonMap, logValue.toString()));
}
break;
case "gtpc_match":
if (logValue != null) {
TransFunction.gtpcMatch(jsonMap, logValue.toString(), appendToKey, param);
}
break;
case "set_value":
if (param != null) {
JsonParseUtil.setValue(jsonMap, appendToKey, param);
}
break;
case "get_value":
if (logValue != null) {
JsonParseUtil.setValue(jsonMap, appendToKey, logValue);
}
break;
default:
}
}
}


@@ -0,0 +1,134 @@
package com.zdjizhi.tools.general;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.utils.JsonMapper;
import com.zdjizhi.tools.json.JsonParseUtil;
import java.util.Map;
/**
* Description: utility class that transforms and completes log fields
*
* @author qidaijie
*/
public class TransFormTypeMap {
private static final Log logger = LogFactory.get();
/**
* Parse a log record and complete its fields
*
* @param message raw log from the Kafka topic
* @return the completed log as a JSON string
*/
@SuppressWarnings("unchecked")
public static String dealCommonMessage(Map<String, Object> message) {
try {
Map<String, Object> jsonMap = JsonParseUtil.typeTransform(message);
for (String[] strings : JsonParseUtil.getJobList()) {
//value of the source log field
Object logValue = JsonParseUtil.getValue(jsonMap, strings[0]);
//key of the log field the result is written to
String appendToKey = strings[1];
//name of the function to apply
String function = strings[2];
//extra parameter value
String param = strings[3];
//current value of the target log field
Object appendToValue = JsonParseUtil.getValue(jsonMap, appendToKey);
functionSet(function, jsonMap, appendToKey, appendToValue, logValue, param);
}
return JsonMapper.toJsonString(jsonMap);
} catch (RuntimeException e) {
logger.error("TransForm logs failed,The exception is :" + e);
e.printStackTrace();
return null;
}
}
/**
* Set of functions applied to fields according to the schema description
*
* @param function name of the function to apply
* @param jsonMap parsed raw log map
* @param appendToKey key of the field to complete
* @param appendToValue current value of the field to complete
* @param logValue value of the source field
* @param param extra parameter value
*/
private static void functionSet(String function, Map<String, Object> jsonMap, String appendToKey, Object appendToValue, Object logValue, String param) {
switch (function) {
case "current_timestamp":
if (!(appendToValue instanceof Long)) {
JsonParseUtil.setValue(jsonMap, appendToKey, TransFunction.getCurrentTime());
}
break;
case "snowflake_id":
JsonParseUtil.setValue(jsonMap, appendToKey, SnowflakeId.generateId());
break;
case "geo_ip_detail":
if (logValue != null && appendToValue == null) {
JsonParseUtil.setValue(jsonMap, appendToKey, TransFunction.getGeoIpDetail(logValue.toString()));
}
break;
case "geo_asn":
if (logValue != null && appendToValue == null) {
JsonParseUtil.setValue(jsonMap, appendToKey, TransFunction.getGeoAsn(logValue.toString()));
}
break;
case "geo_ip_country":
if (logValue != null && appendToValue == null) {
JsonParseUtil.setValue(jsonMap, appendToKey, TransFunction.getGeoIpCountry(logValue.toString()));
}
break;
case "flattenSpec":
if (logValue != null && param != null) {
JsonParseUtil.setValue(jsonMap, appendToKey, TransFunction.flattenSpec(logValue.toString(), param));
}
break;
case "if":
if (param != null) {
JsonParseUtil.setValue(jsonMap, appendToKey, TransFunction.condition(jsonMap, param));
}
break;
case "decode_of_base64":
if (logValue != null) {
JsonParseUtil.setValue(jsonMap, appendToKey, TransFunction.decodeBase64(logValue.toString(), TransFunction.isJsonValue(jsonMap, param)));
}
break;
case "sub_domain":
if (appendToValue == null && logValue != null) {
JsonParseUtil.setValue(jsonMap, appendToKey, TransFunction.getTopDomain(logValue.toString()));
}
break;
case "radius_match":
if (logValue != null) {
JsonParseUtil.setValue(jsonMap, appendToKey, TransFunction.radiusMatch(jsonMap, logValue.toString()));
}
break;
case "gtpc_match":
if (logValue != null) {
TransFunction.gtpcMatch(jsonMap, logValue.toString(), appendToKey, param);
}
break;
case "set_value":
if (param != null) {
JsonParseUtil.setValue(jsonMap, appendToKey, param);
}
break;
case "get_value":
if (logValue != null) {
JsonParseUtil.setValue(jsonMap, appendToKey, logValue);
}
break;
default:
}
}
}


@@ -0,0 +1,260 @@
package com.zdjizhi.tools.general;
import cn.hutool.core.codec.Base64;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.jayway.jsonpath.InvalidPathException;
import com.jayway.jsonpath.JsonPath;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.tools.hbase.HBaseUtils;
import com.zdjizhi.utils.FormatUtils;
import com.zdjizhi.utils.IpLookupV2;
import com.zdjizhi.utils.StringUtil;
import com.zdjizhi.tools.json.JsonParseUtil;
import com.zdjizhi.tools.json.JsonPathUtil;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
/**
* @author qidaijie
*/
class TransFunction {
private static final Log logger = LogFactory.get();
/**
* IP geolocation lookup helper (MMDB based)
*/
private static IpLookupV2 ipLookup = new IpLookupV2.Builder(false)
.loadDataFileV4(FlowWriteConfig.TOOLS_LIBRARY + "ip_v4_built_in.mmdb")
.loadDataFileV6(FlowWriteConfig.TOOLS_LIBRARY + "ip_v6_built_in.mmdb")
.loadDataFilePrivateV4(FlowWriteConfig.TOOLS_LIBRARY + "ip_v4_user_defined.mmdb")
.loadDataFilePrivateV6(FlowWriteConfig.TOOLS_LIBRARY + "ip_v6_user_defined.mmdb")
.loadAsnDataFile(FlowWriteConfig.TOOLS_LIBRARY + "asn_v4.mmdb")
.loadAsnDataFileV6(FlowWriteConfig.TOOLS_LIBRARY + "asn_v6.mmdb")
.build();
/**
* Generate the current timestamp (in seconds)
*/
static long getCurrentTime() {
return System.currentTimeMillis() / 1000;
}
/**
* Look up location information for a client IP
*
* @param ip client IP
* @return detailed location information for the IP
*/
static String getGeoIpDetail(String ip) {
String detail = "";
try {
detail = ipLookup.cityLookupDetail(ip);
} catch (NullPointerException npe) {
logger.error("The MMDB file is not loaded or IP is null! " + npe);
} catch (RuntimeException e) {
logger.error("Get clientIP location error! " + e);
}
return detail;
}
/**
* Look up ASN information for an IP
*
* @param ip client/server IP
* @return ASN
*/
static String getGeoAsn(String ip) {
String asn = "";
try {
asn = ipLookup.asnLookup(ip);
} catch (NullPointerException npe) {
logger.error("The MMDB file is not loaded or IP is null! " + npe);
} catch (RuntimeException e) {
logger.error("Get IP ASN error! " + e);
}
return asn;
}
/**
* Look up country information for an IP
*
* @param ip server IP
* @return country
*/
static String getGeoIpCountry(String ip) {
String country = "";
try {
country = ipLookup.countryLookup(ip);
} catch (NullPointerException npe) {
logger.error("The MMDB file is not loaded or IP is null! " + npe);
} catch (RuntimeException e) {
logger.error("Get ServerIP location error! " + e);
}
return country;
}
/**
* Complete the RADIUS account using the HBase-backed cache
*
* @param ip client IP
* @return account
*/
static String radiusMatch(Map<String, Object> jsonMap, String ip) {
if (FlowWriteConfig.DEFAULT_RELATIONSHIP_MODULE.equals(FlowWriteConfig.DATA_RELATIONSHIP_MODEL)) {
String vsysId = jsonMap.getOrDefault("common_vsys_id", "1").toString();
return HBaseUtils.getAccount(ip + vsysId);
} else {
return HBaseUtils.getAccount(ip);
}
}
/**
* Complete GTP-C information using the HBase-backed cache. When parsing the tunnels info, gtp_uplink_teid is preferred and gtp_downlink_teid is used as a fallback.
* <p>
* "common_tunnels":[{"tunnels_schema_type":"GTP","gtp_uplink_teid":235261261,"gtp_downlink_teid":665547833,"gtp_sgw_ip":"192.56.5.2","gtp_pgw_ip":"192.56.10.20","gtp_sgw_port":2152,"gtp_pgw_port":2152}]
*
* @param jsonMap raw log JSON map
* @param logValue tunnels JSON carrying the uplink/downlink TEIDs
* @param appendToKey key(s) of the log fields the result is written to
* @param param JsonPath expressions used to locate the GTP info inside the JSON array
*/
static void gtpcMatch(Map<String, Object> jsonMap, String logValue, String appendToKey, String param) {
try {
String teid = null;
String[] exprs = param.split(FlowWriteConfig.FORMAT_SPLITTER);
for (String expr : exprs) {
String value = JsonPathUtil.getTeidValue(logValue, expr);
if (value != null) {
teid = value;
break;
}
}
if (teid != null) {
if (FlowWriteConfig.DEFAULT_RELATIONSHIP_MODULE.equals(FlowWriteConfig.DATA_RELATIONSHIP_MODEL)) {
String vsysId = jsonMap.getOrDefault("common_vsys_id", "1").toString();
teid = teid + vsysId;
}
String[] appendToKeys = appendToKey.split(FlowWriteConfig.FORMAT_SPLITTER);
HashMap<String, Object> userData = HBaseUtils.getGtpData(teid);
if (userData != null) {
for (String key : appendToKeys) {
JsonParseUtil.setValue(jsonMap, key, userData.get(key).toString());
}
} else {
logger.warn("Description The user whose TEID is " + teid + " was not matched!");
}
}
} catch (RuntimeException re) {
logger.error("An exception occurred in teid type conversion or parsing of user information!" + re);
}
}
/**
* Extract the top-level (registrable) domain
*
* @param domain original domain
* @return top-level domain
*/
static String getTopDomain(String domain) {
try {
return FormatUtils.getTopPrivateDomain(domain);
} catch (StringIndexOutOfBoundsException outException) {
logger.error("Parse top-level domain exceptions, exception domain names:" + domain);
return "";
}
}
/**
* Decode a Base64 string using the given charset
*
* @param message Base64 string
* @param charset charset name
* @return decoded string
*/
static String decodeBase64(String message, Object charset) {
String result = "";
try {
if (StringUtil.isNotBlank(message)) {
if (charset == null) {
result = Base64.decodeStr(message, FlowWriteConfig.MAIL_DEFAULT_CHARSET);
} else {
result = Base64.decodeStr(message, charset.toString());
}
}
} catch (RuntimeException e) {
logger.error("Resolve Base64 exception, exception information:" + e);
}
return result;
}
/**
* Parse JSON with a JsonPath expression
*
* @param message JSON string
* @param expr JsonPath expression
* @return first matched result
*/
static String flattenSpec(String message, String expr) {
String flattenResult = "";
try {
if (StringUtil.isNotBlank(expr)) {
ArrayList<String> read = JsonPath.parse(message).read(expr);
if (read.size() >= 1) {
flattenResult = read.get(0);
}
}
} catch (ClassCastException | InvalidPathException | ArrayIndexOutOfBoundsException e) {
logger.error("The label resolution exception or [expr] analytic expression error" + e);
}
return flattenResult;
}
/**
* Check whether the parameter refers to a log field; if so return that field's value, otherwise return the original string
*
* @param jsonMap in-memory log map
* @param param field name or plain string
* @return JSON.Value or String
*/
static Object isJsonValue(Map<String, Object> jsonMap, String param) {
if (param.contains(FlowWriteConfig.IS_JSON_KEY_TAG)) {
return JsonParseUtil.getValue(jsonMap, param.substring(2));
} else {
return param;
}
}
/**
* IF function: parses the log and builds a ternary expression; if the compared value is numeric it is compared as an integer.
*
* @param jsonMap in-memory log map
* @param ifParam condition and result expressions
* @return resultA or resultB or null
*/
static Object condition(Map<String, Object> jsonMap, String ifParam) {
Object result = null;
try {
String[] split = ifParam.split(FlowWriteConfig.FORMAT_SPLITTER);
if (split.length == FlowWriteConfig.IF_PARAM_LENGTH) {
String[] norms = split[0].split(FlowWriteConfig.IF_CONDITION_SPLITTER);
Object direction = isJsonValue(jsonMap, norms[0]);
Object resultA = isJsonValue(jsonMap, split[1]);
Object resultB = isJsonValue(jsonMap, split[2]);
if (direction instanceof Number) {
result = (Integer.parseInt(direction.toString()) == Integer.parseInt(norms[1])) ? resultA : resultB;
} else if (direction instanceof String) {
result = direction.equals(norms[1]) ? resultA : resultB;
}
}
} catch (RuntimeException e) {
logger.error("IF 函数执行异常,异常信息:" + e);
}
return result;
}
}
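To make the condition() contract above concrete, a hedged sketch of the parameter layout it expects. The actual splitter values come from FlowWriteConfig (FORMAT_SPLITTER, IF_CONDITION_SPLITTER, IS_JSON_KEY_TAG) and are not shown in this diff; the "," and ":" below are assumptions purely for illustration:

// Hedged illustration of the expected ifParam layout; the splitter values are assumptions,
// the real ones are configuration-defined in FlowWriteConfig.
public final class IfParamExample {
    public static void main(String[] args) {
        String formatSplitter = ",";     // assumed value of FlowWriteConfig.FORMAT_SPLITTER
        String conditionSplitter = ":";  // assumed value of FlowWriteConfig.IF_CONDITION_SPLITTER
        String ifParam = "direction:0,upValue,downValue";
        String[] split = ifParam.split(formatSplitter);      // [condition, resultA, resultB]
        String[] norms = split[0].split(conditionSplitter);  // [field or literal, value to compare]
        System.out.println(norms[0] + " == " + norms[1] + " ? " + split[1] + " : " + split[2]);
    }
}

In the real code, each of the three operands is first passed through isJsonValue(), which treats a value carrying the two-character IS_JSON_KEY_TAG prefix as a reference to a log field.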


@@ -0,0 +1,196 @@
package com.zdjizhi.tools.hbase;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.utils.StringUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
/**
* @author qidaijie
* @Package com.zdjizhi.tools.hbase
* @Description:
* @date 2022/7/15 10:12
*/
class GtpCRelation {
private static final Log logger = LogFactory.get();
/**
* Load the full set of GTP-C relationships
*/
static void getAllGtpCRelation(Connection connection, Map<String, HashMap<String, Object>> gtpcMap) {
long begin = System.currentTimeMillis();
ResultScanner scanner = null;
try {
Table table = connection.getTable(TableName.valueOf(FlowWriteConfig.HBASE_GTPC_TABLE_NAME));
Scan scan = new Scan();
if (FlowWriteConfig.HBASE_GTPC_SCAN_MAX_ROWS > 0) {
scan.setLimit(FlowWriteConfig.HBASE_GTPC_SCAN_MAX_ROWS);
}
scanner = table.getScanner(scan);
for (Result result : scanner) {
int acctStatusType = GtpCRelation.getMsgType(result);
if (acctStatusType == 1) {
String upLinkTeid = HBaseUtils.getTeid(result, "uplink_teid");
String downLinkTeid = HBaseUtils.getTeid(result, "downlink_teid");
String phoneNumber = HBaseUtils.getString(result, FlowWriteConfig.GTPC_FAMILY_NAME, "phone_number").trim();
String imsi = HBaseUtils.getString(result, FlowWriteConfig.GTPC_FAMILY_NAME, "imsi").trim();
String imei = HBaseUtils.getString(result, FlowWriteConfig.GTPC_FAMILY_NAME, "imei").trim();
Long lastUpdateTime = HBaseUtils.getLong(result, FlowWriteConfig.GTPC_FAMILY_NAME, "last_update_time");
HashMap<String, Object> buildUserData = buildUserData(phoneNumber, imsi, imei, lastUpdateTime);
if (FlowWriteConfig.DEFAULT_RELATIONSHIP_MODULE.equals(FlowWriteConfig.DATA_RELATIONSHIP_MODEL)) {
String vsysId = HBaseUtils.getVsysId(result).trim();
updateCache(gtpcMap, upLinkTeid+vsysId, buildUserData, lastUpdateTime);
updateCache(gtpcMap, downLinkTeid+vsysId, buildUserData, lastUpdateTime);
} else {
updateCache(gtpcMap, upLinkTeid, buildUserData, lastUpdateTime);
updateCache(gtpcMap, downLinkTeid, buildUserData, lastUpdateTime);
}
}
}
logger.debug("GTP-C cache after full load: {}", gtpcMap);
logger.warn("The obtain the number of GTP-C relationships : " + gtpcMap.size());
logger.warn("The time spent to obtain GTP-C relationships : " + (System.currentTimeMillis() - begin) + "ms");
} catch (IOException | RuntimeException e) {
logger.error("The relationship between USER and TEID obtained from HBase is abnormal! message is :" + e);
} finally {
if (scanner != null) {
scanner.close();
}
}
}
/**
* Incrementally update the GTP-C relationships
*
* @param connection HBase connection
* @param gtpcMap GTP-C relationship cache
* @param startTime start time
* @param endTime end time
*/
static void upgradeGtpCRelation(Connection connection, Map<String, HashMap<String, Object>> gtpcMap, Long startTime, Long endTime) {
Long begin = System.currentTimeMillis();
Table table = null;
ResultScanner scanner = null;
Scan scan = new Scan();
try {
table = connection.getTable(TableName.valueOf(FlowWriteConfig.HBASE_GTPC_TABLE_NAME));
scan.setTimeRange(startTime, endTime);
if (FlowWriteConfig.HBASE_GTPC_SCAN_MAX_ROWS > 0) {
scan.setLimit(FlowWriteConfig.HBASE_GTPC_SCAN_MAX_ROWS);
}
scanner = table.getScanner(scan);
for (Result result : scanner) {
int acctStatusType = GtpCRelation.getMsgType(result);
String upLinkTeid = HBaseUtils.getTeid(result, "uplink_teid");
String downLinkTeid = HBaseUtils.getTeid(result, "downlink_teid");
if (acctStatusType == 1) {
String phoneNumber = HBaseUtils.getString(result, FlowWriteConfig.GTPC_FAMILY_NAME, "phone_number").trim();
String imsi = HBaseUtils.getString(result, FlowWriteConfig.GTPC_FAMILY_NAME, "imsi").trim();
String imei = HBaseUtils.getString(result, FlowWriteConfig.GTPC_FAMILY_NAME, "imei").trim();
Long lastUpdateTime = HBaseUtils.getLong(result, FlowWriteConfig.GTPC_FAMILY_NAME, "last_update_time");
HashMap<String, Object> buildUserData = buildUserData(phoneNumber, imsi, imei, lastUpdateTime);
if (FlowWriteConfig.DEFAULT_RELATIONSHIP_MODULE.equals(FlowWriteConfig.DATA_RELATIONSHIP_MODEL)) {
String vsysId = HBaseUtils.getVsysId(result).trim();
updateCache(gtpcMap, upLinkTeid+vsysId, buildUserData, lastUpdateTime);
updateCache(gtpcMap, downLinkTeid+vsysId, buildUserData, lastUpdateTime);
} else {
updateCache(gtpcMap, upLinkTeid, buildUserData, lastUpdateTime);
updateCache(gtpcMap, downLinkTeid, buildUserData, lastUpdateTime);
}
} else if (acctStatusType == 2) {
if (FlowWriteConfig.DEFAULT_RELATIONSHIP_MODULE.equals(FlowWriteConfig.DATA_RELATIONSHIP_MODEL)) {
String vsysId = HBaseUtils.getVsysId(result).trim();
gtpcMap.remove(upLinkTeid+vsysId);
gtpcMap.remove(downLinkTeid+vsysId);
} else {
gtpcMap.remove(upLinkTeid);
gtpcMap.remove(downLinkTeid);
}
}
}
Long end = System.currentTimeMillis();
logger.warn("The current number of GTPC relationships is: " + gtpcMap.keySet().size());
logger.warn("The time used to update the GTPC relationship is: " + (end - begin) + "ms");
} catch (IOException | RuntimeException e) {
logger.error("GTPC relationship update exception, the content is:" + e);
} finally {
if (scanner != null) {
scanner.close();
}
if (table != null) {
try {
table.close();
} catch (IOException e) {
logger.error("HBase Table Close ERROR! Exception message is:" + e);
}
}
}
}
/**
* Get the online/offline status of the user record
*
* @param result row fetched from HBase
* @return msg_type status: 1 = online, 2 = offline
*/
private static int getMsgType(Result result) {
boolean hasType = result.containsColumn(Bytes.toBytes(FlowWriteConfig.GTPC_FAMILY_NAME), Bytes.toBytes("msg_type"));
if (hasType) {
return Bytes.toInt(result.getValue(Bytes.toBytes(FlowWriteConfig.GTPC_FAMILY_NAME), Bytes.toBytes("msg_type")));
} else {
return 0;
}
}
/**
* Build the user information map
*
* @param phoneNumber phone number
* @param imsi subscriber identity
* @param imei device identity
* @param lastUpdateTime last update time of the record
* @return user information
*/
private static HashMap<String, Object> buildUserData(String phoneNumber, String imsi, String imei, Long lastUpdateTime) {
HashMap<String, Object> tmpMap = new HashMap<>(4);
tmpMap.put("common_phone_number", phoneNumber);
tmpMap.put("common_imsi", imsi);
tmpMap.put("common_imei", imei);
tmpMap.put("last_update_time", lastUpdateTime);
return tmpMap;
}
/**
* Compare the timestamp of the cached entry with the newly fetched one; if the new one is later, update the cache
*
* @param gtpcMap cache map
* @param key uplink/downlink TEID (optionally suffixed with the vsys id)
* @param userData user information fetched from HBase
* @param lastUpdateTime last update time of this user information
*/
private static void updateCache(Map<String, HashMap<String, Object>> gtpcMap, String key, HashMap<String, Object> userData, Long lastUpdateTime) {
if (StringUtil.isNotBlank(key)){
if (gtpcMap.containsKey(key)) {
Long oldUpdateTime = Long.parseLong(gtpcMap.get(key).get("last_update_time").toString());
if (lastUpdateTime > oldUpdateTime) {
gtpcMap.put(key, userData);
}
} else {
gtpcMap.put(key, userData);
}
}
}
}


@@ -0,0 +1,218 @@
package com.zdjizhi.tools.hbase;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.utils.StringUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
/**
* HBase utility class
*
* @author qidaijie
*/
public class HBaseUtils {
private static final Log logger = LogFactory.get();
private static Map<String, String> radiusMap = new ConcurrentHashMap<>(16);
private static Map<String, HashMap<String, Object>> gtpcMap = new ConcurrentHashMap<>(16);
private static Connection connection;
private static Long time;
private static HBaseUtils hBaseUtils;
private static void getInstance() {
hBaseUtils = new HBaseUtils();
}
/**
* Constructor
*/
private HBaseUtils() {
//open the connection
getConnection();
//load everything once
RadiusRelation.getAllRadiusRelation(connection, radiusMap);
GtpCRelation.getAllGtpCRelation(connection, gtpcMap);
//schedule periodic updates
updateCache();
}
private static void getConnection() {
try {
Configuration configuration = HBaseConfiguration.create();
configuration.set("hbase.zookeeper.quorum", FlowWriteConfig.HBASE_ZOOKEEPER_SERVERS);
configuration.set("hbase.client.retries.number", "1");
configuration.set("hbase.client.pause", "50");
configuration.set("hbase.rpc.timeout", "3000");
configuration.set("zookeeper.recovery.retry", "1");
configuration.set("zookeeper.recovery.retry.intervalmill", "200");
connection = ConnectionFactory.createConnection(configuration);
time = System.currentTimeMillis();
logger.warn("HBaseUtils get HBase connection,now to getAll().");
} catch (IOException ioe) {
logger.error("HBaseUtils getHbaseConn() IOException===>{" + ioe + "}<===");
} catch (RuntimeException e) {
logger.error("HBaseUtils getHbaseConn() Exception===>{" + e + "}<===");
}
}
/**
* Get a String value from an HBase result
*
* @param result result row
* @param familyName column family name
* @param columnName column name
* @return the value, or "" if absent or blank
*/
static String getString(Result result, String familyName, String columnName) {
byte[] familyBytes = Bytes.toBytes(familyName);
byte[] columnBytes = Bytes.toBytes(columnName);
boolean contains = result.containsColumn(familyBytes, columnBytes);
if (contains) {
String data = Bytes.toString(result.getValue(familyBytes, columnBytes)).trim();
if (StringUtil.isNotBlank(data)) {
return data;
}
}
return "";
}
/**
* Get a Long value from an HBase result
*
* @param result result row
* @param familyName column family name
* @param columnName column name
* @return the value, or 0L if absent
*/
static Long getLong(Result result, String familyName, String columnName) {
byte[] familyBytes = Bytes.toBytes(familyName);
byte[] columnBytes = Bytes.toBytes(columnName);
boolean contains = result.containsColumn(familyBytes, columnBytes);
if (contains) {
return Bytes.toLong(result.getValue(familyBytes, columnBytes));
}
return 0L;
}
/**
* Get a TEID column from an HBase result as a String
*
* @param result result row
* @param columnName column name
* @return the TEID, or "0" if absent or blank
*/
static String getTeid(Result result, String columnName) {
byte[] familyBytes = Bytes.toBytes(FlowWriteConfig.GTPC_FAMILY_NAME);
byte[] columnBytes = Bytes.toBytes(columnName);
boolean contains = result.containsColumn(familyBytes, columnBytes);
if (contains) {
String data = String.valueOf(Bytes.toLong(result.getValue(familyBytes, columnBytes))).trim();
if (StringUtil.isNotBlank(data)) {
return data;
}
}
return "0";
}
/**
* Get the vsys_id column from an HBase result as a String
*
* @param result result row
* @return the vsys id, or "1" if absent or blank
*/
static String getVsysId(Result result) {
byte[] familyBytes = Bytes.toBytes(FlowWriteConfig.COMMON_FAMILY_NAME);
byte[] columnBytes = Bytes.toBytes("vsys_id");
boolean contains = result.containsColumn(familyBytes, columnBytes);
if (contains) {
String data = String.valueOf(Bytes.toInt(result.getValue(familyBytes, columnBytes))).trim();
if (StringUtil.isNotBlank(data)) {
return data;
}
}
return "1";
}
/**
* Refresh the caches incrementally
*/
private static void change() {
if (hBaseUtils == null) {
getInstance();
}
long nowTime = System.currentTimeMillis();
RadiusRelation.upgradeRadiusRelation(connection, radiusMap, time - 1000, nowTime + 500);
GtpCRelation.upgradeGtpCRelation(connection, gtpcMap, time - 1000, nowTime + 500);
logger.debug("GTP-C cache after update: {}", gtpcMap);
logger.debug("Radius cache after update: {}", radiusMap);
time = nowTime;
}
/**
* Scheduled refresher: periodically pulls incremental updates from HBase into the caches
*/
private void updateCache() {
ScheduledExecutorService executorService = new ScheduledThreadPoolExecutor(1);
executorService.scheduleAtFixedRate(new Runnable() {
@Override
public void run() {
try {
if (FlowWriteConfig.HBASE_TICK_TUPLE_FREQ_SECS != 0) {
change();
}
} catch (RuntimeException e) {
logger.error("HBaseUtils update hbaseCache is error===>{" + e + "}<===");
}
}
}, 1, FlowWriteConfig.HBASE_TICK_TUPLE_FREQ_SECS, TimeUnit.SECONDS);
}
/**
* Get the Radius account
*
* @param clientIp client_ip
* @return account
*/
public static String getAccount(String clientIp) {
if (FlowWriteConfig.HBASE_TICK_TUPLE_FREQ_SECS != 0) {
if (hBaseUtils == null) {
getInstance();
}
return radiusMap.get(clientIp);
}
return "";
}
/**
* Get the GTP-C user information
*
* @param teid TEID
* @return account
*/
public static HashMap<String, Object> getGtpData(String teid) {
if (FlowWriteConfig.HBASE_TICK_TUPLE_FREQ_SECS != 0) {
if (hBaseUtils == null) {
getInstance();
}
return gtpcMap.get(teid);
}
return null;
}
}


@@ -0,0 +1,134 @@
package com.zdjizhi.tools.hbase;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import java.io.IOException;
import java.util.Map;
/**
* @author qidaijie
* @Package com.zdjizhi.tools.hbase
* @Description:
* @date 2022/7/15 10:12
*/
class RadiusRelation {
private static final Log logger = LogFactory.get();
/**
* Load the full set of Radius relationships
*/
static void getAllRadiusRelation(Connection connection, Map<String, String> radiusMap) {
long begin = System.currentTimeMillis();
ResultScanner scanner = null;
try {
Table table = connection.getTable(TableName.valueOf(FlowWriteConfig.HBASE_RADIUS_TABLE_NAME));
Scan scan = new Scan();
if (FlowWriteConfig.HBASE_RADIUS_SCAN_MAX_ROWS > 0) {
scan.setLimit(FlowWriteConfig.HBASE_RADIUS_SCAN_MAX_ROWS);
}
scanner = table.getScanner(scan);
for (Result result : scanner) {
int acctStatusType = RadiusRelation.getAcctStatusType(result);
String framedIp = HBaseUtils.getString(result, FlowWriteConfig.RADIUS_FAMILY_NAME, "framed_ip").trim();
String account = HBaseUtils.getString(result, FlowWriteConfig.RADIUS_FAMILY_NAME, "account").trim();
if (acctStatusType == 1) {
if (FlowWriteConfig.DEFAULT_RELATIONSHIP_MODULE.equals(FlowWriteConfig.DATA_RELATIONSHIP_MODEL)) {
String vsysId = HBaseUtils.getVsysId(result).trim();
radiusMap.put(framedIp + vsysId, account);
} else {
radiusMap.put(framedIp, account);
}
}
}
logger.debug("Radius cache after full load: {}", radiusMap);
logger.warn("The obtain the number of RADIUS relationships : " + radiusMap.size());
logger.warn("The time spent to obtain radius relationships : " + (System.currentTimeMillis() - begin) + "ms");
} catch (IOException | RuntimeException e) {
logger.error("The relationship between framedIP and account obtained from HBase is abnormal! message is :" + e);
} finally {
if (scanner != null) {
scanner.close();
}
}
}
/**
* Incrementally update the Radius relationships
*
* @param connection HBase connection
* @param radiusMap radius relationship cache
* @param startTime start time
* @param endTime end time
*/
static void upgradeRadiusRelation(Connection connection, Map<String, String> radiusMap, Long startTime, Long endTime) {
Long begin = System.currentTimeMillis();
Table table = null;
ResultScanner scanner = null;
Scan scan = new Scan();
try {
table = connection.getTable(TableName.valueOf(FlowWriteConfig.HBASE_RADIUS_TABLE_NAME));
scan.setTimeRange(startTime, endTime);
if (FlowWriteConfig.HBASE_RADIUS_SCAN_MAX_ROWS > 0) {
scan.setLimit(FlowWriteConfig.HBASE_RADIUS_SCAN_MAX_ROWS);
}
scanner = table.getScanner(scan);
for (Result result : scanner) {
int acctStatusType = RadiusRelation.getAcctStatusType(result);
String framedIp = HBaseUtils.getString(result, FlowWriteConfig.RADIUS_FAMILY_NAME, "framed_ip").trim();
String account = HBaseUtils.getString(result, FlowWriteConfig.RADIUS_FAMILY_NAME, "account").trim();
if (acctStatusType == 1) {
if (FlowWriteConfig.DEFAULT_RELATIONSHIP_MODULE.equals(FlowWriteConfig.DATA_RELATIONSHIP_MODEL)) {
String vsysId = HBaseUtils.getVsysId(result).trim();
radiusMap.put(framedIp + vsysId, account);
} else {
radiusMap.put(framedIp, account);
}
} else if (acctStatusType == 2) {
if (FlowWriteConfig.DEFAULT_RELATIONSHIP_MODULE.equals(FlowWriteConfig.DATA_RELATIONSHIP_MODEL)) {
String vsysId = HBaseUtils.getVsysId(result).trim();
radiusMap.remove(framedIp+vsysId);
} else {
radiusMap.remove(framedIp);
}
}
}
Long end = System.currentTimeMillis();
logger.warn("The current number of Radius relationships is: " + radiusMap.keySet().size());
logger.warn("The time used to update the Radius relationship is: " + (end - begin) + "ms");
} catch (IOException | RuntimeException e) {
logger.error("Radius relationship update exception, the content is:" + e);
} finally {
if (scanner != null) {
scanner.close();
}
if (table != null) {
try {
table.close();
} catch (IOException e) {
logger.error("HBase Table Close ERROR! Exception message is:" + e);
}
}
}
}
/**
* Get the online/offline status of the user record
*
* @param result row fetched from HBase
* @return status: 1 = online, 2 = offline
*/
private static int getAcctStatusType(Result result) {
boolean hasType = result.containsColumn(Bytes.toBytes(FlowWriteConfig.RADIUS_FAMILY_NAME), Bytes.toBytes("acct_status_type"));
if (hasType) {
return Bytes.toInt(result.getValue(Bytes.toBytes(FlowWriteConfig.RADIUS_FAMILY_NAME), Bytes.toBytes("acct_status_type")));
} else {
return 1;
}
}
}


@@ -0,0 +1,341 @@
package com.zdjizhi.tools.json;
import cn.hutool.json.JSONArray;
import cn.hutool.json.JSONObject;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.nacos.api.NacosFactory;
import com.alibaba.nacos.api.PropertyKeyConst;
import com.alibaba.nacos.api.config.ConfigService;
import com.alibaba.nacos.api.config.listener.Listener;
import com.alibaba.nacos.api.exception.NacosException;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.utils.StringUtil;
import java.util.*;
import java.util.concurrent.Executor;
/**
* Utility class for parsing the log schema and log JSON (hutool JSON based)
*
* @author qidaijie
*/
public class JsonParseUtil {
private static final Log logger = LogFactory.get();
private static Properties propNacos = new Properties();
/**
* Fields that should be dropped from each log
*/
private static ArrayList<String> dropList = new ArrayList<>();
/**
* Valid fields and their types as specified by the schema
*/
private static HashMap<String, Class> jsonFieldsMap;
/**
* Fields that carry a default value
*/
private static HashMap<String, Object> defaultFieldsMap = new HashMap<>(16);
/**
* Job list.
* Each element is a four-element string array (source field, field to complete, function to apply, extra parameter), e.g.:
* (mail_subject mail_subject decode_of_base64 mail_subject_charset)
*/
private static ArrayList<String[]> jobList;
static {
propNacos.setProperty(PropertyKeyConst.SERVER_ADDR, FlowWriteConfig.NACOS_SERVER);
propNacos.setProperty(PropertyKeyConst.NAMESPACE, FlowWriteConfig.NACOS_SCHEMA_NAMESPACE);
propNacos.setProperty(PropertyKeyConst.USERNAME, FlowWriteConfig.NACOS_USERNAME);
propNacos.setProperty(PropertyKeyConst.PASSWORD, FlowWriteConfig.NACOS_PIN);
try {
ConfigService configService = NacosFactory.createConfigService(propNacos);
String dataId = FlowWriteConfig.NACOS_DATA_ID;
String group = FlowWriteConfig.NACOS_GROUP;
String schema = configService.getConfig(dataId, group, 5000);
if (StringUtil.isNotBlank(schema)) {
jsonFieldsMap = getFieldsFromSchema(schema);
jobList = getJobListFromHttp(schema);
}
configService.addListener(dataId, group, new Listener() {
@Override
public Executor getExecutor() {
return null;
}
@Override
public void receiveConfigInfo(String configMsg) {
if (StringUtil.isNotBlank(configMsg)) {
clearCache();
jsonFieldsMap = getFieldsFromSchema(configMsg);
jobList = getJobListFromHttp(configMsg);
}
}
});
} catch (NacosException e) {
logger.error("Get Schema config from Nacos error,The exception message is :" + e.getMessage());
}
}
/**
* Pattern match: given a type string, return the corresponding class
*
* @param type type name
* @return class
*/
private static Class getClassName(String type) {
Class clazz;
switch (type) {
case "int":
clazz = Integer.class;
break;
case "string":
clazz = String.class;
break;
case "long":
clazz = long.class;
break;
case "array":
clazz = List.class;
break;
case "double":
clazz = double.class;
break;
case "float":
clazz = float.class;
break;
case "char":
clazz = char.class;
break;
case "byte":
clazz = byte.class;
break;
case "boolean":
clazz = boolean.class;
break;
case "short":
clazz = short.class;
break;
default:
clazz = String.class;
}
return clazz;
}
/**
* Get a property value
*
* @param jsonMap raw log map
* @param property key
* @return the property value, or null
*/
public static Object getValue(Map<String, Object> jsonMap, String property) {
try {
return jsonMap.getOrDefault(property, null);
} catch (RuntimeException e) {
logger.error("获取json-value异常异常key" + property + "异常信息为:" + e);
return null;
}
}
/**
* Set a property value
*
* @param jsonMap raw log JSON map
* @param property key to update
* @param value new value
*/
public static void setValue(Map<String, Object> jsonMap, String property, Object value) {
try {
jsonMap.put(property, value);
} catch (RuntimeException e) {
logger.error("赋予实体类错误类型数据", e);
}
}
/**
* Type conversion
*
* @param jsonMap raw log map
*/
public static Map<String, Object> typeTransform(Map<String, Object> jsonMap) throws RuntimeException {
JsonParseUtil.dropJsonField(jsonMap);
JsonParseUtil.setFieldDefault(jsonMap);
HashMap<String, Object> tmpMap = new HashMap<>(256);
for (String key : jsonMap.keySet()) {
if (jsonFieldsMap.containsKey(key)) {
String simpleName = jsonFieldsMap.get(key).getSimpleName();
switch (simpleName) {
case "String":
tmpMap.put(key, JsonTypeUtil.checkString(jsonMap.get(key)));
break;
case "Integer":
tmpMap.put(key, JsonTypeUtil.getIntValue(jsonMap.get(key)));
break;
case "long":
tmpMap.put(key, JsonTypeUtil.checkLongValue(jsonMap.get(key)));
break;
case "List":
tmpMap.put(key, JsonTypeUtil.checkArray(jsonMap.get(key)));
break;
case "Map":
tmpMap.put(key, JsonTypeUtil.checkObject(jsonMap.get(key)));
break;
case "double":
tmpMap.put(key, JsonTypeUtil.checkDouble(jsonMap.get(key)));
break;
default:
tmpMap.put(key, JsonTypeUtil.checkString(jsonMap.get(key)));
}
}
}
return tmpMap;
}
public static ArrayList<String[]> getJobList() {
return jobList;
}
/**
* Drop the fields marked as invalid in the schema
*
* @param jsonMap raw log
*/
public static void dropJsonField(Map<String, Object> jsonMap) {
for (String field : dropList) {
jsonMap.remove(field);
}
}
/**
* Fill fields with the default values specified in the schema.
*
* @param jsonMap raw log
*/
private static void setFieldDefault(Map<String, Object> jsonMap) {
if (defaultFieldsMap.keySet().size() >= 1) {
for (String fieldName : defaultFieldsMap.keySet()) {
Object logValue = JsonParseUtil.getValue(jsonMap, fieldName);
if (logValue == null) {
jsonMap.put(fieldName, defaultFieldsMap.get(fieldName));
}
}
}
}
/**
* Extract the required fields and their types from the schema.
*
* @return a map of field name to class, used to build the field-type model
*/
private static HashMap<String, Class> getFieldsFromSchema(String schema) {
HashMap<String, Class> map = new HashMap<>(256);
//Get the fields array; each element carries a name, doc and type
JSONObject schemaJson = new JSONObject(schema, false, true);
JSONArray fields = schemaJson.getJSONArray("fields");
for (Object field : fields) {
String filedStr = field.toString();
if (checkKeepField(filedStr)) {
JSONObject fieldJson = new JSONObject(filedStr, false, true);
String name = fieldJson.getStr("name");
String type = fieldJson.getStr("type");
if (fieldJson.containsKey("default")) {
defaultFieldsMap.put(name, fieldJson.get("default"));
}
//Assemble the map used to build the field-type model
map.put(name, getClassName(type));
} else {
dropList.add(filedStr);
}
}
return map;
}
/**
* Decide whether a field should be kept
*
* @param message a single field as JSON
* @return true or false
*/
private static boolean checkKeepField(String message) {
boolean isKeepField = true;
JSONObject fieldJson = new JSONObject(message, false, true);
boolean hasDoc = fieldJson.containsKey("doc");
if (hasDoc) {
JSONObject doc = new JSONObject(fieldJson.getStr("doc"), false, true);
if (doc.containsKey("visibility")) {
String visibility = doc.getStr("visibility");
if (FlowWriteConfig.VISIBILITY.equals(visibility)) {
isKeepField = false;
}
}
}
return isKeepField;
}
/**
* Parse the schema and return a job list of (source field, target field, function, parameter) entries
*
* @param schema log schema
* @return job list
*/
private static ArrayList<String[]> getJobListFromHttp(String schema) {
ArrayList<String[]> list = new ArrayList<>();
JSONObject schemaJson = new JSONObject(schema, false, true);
JSONArray fields = schemaJson.getJSONArray("fields");
for (Object field : fields) {
JSONObject fieldJson = new JSONObject(field, false, true);
boolean hasDoc = fieldJson.containsKey("doc");
if (hasDoc) {
JSONObject docJson = fieldJson.getJSONObject("doc");
boolean hasFormat = docJson.containsKey("format");
if (hasFormat) {
String name = fieldJson.getStr("name");
JSONArray formatList = docJson.getJSONArray("format");
for (Object format : formatList) {
JSONObject formatJson = new JSONObject(format, false, true);
String function = formatJson.getStr("function");
String appendTo;
String params = null;
if (formatJson.containsKey("appendTo")) {
appendTo = formatJson.getStr("appendTo");
} else {
appendTo = name;
}
if (formatJson.containsKey("param")) {
params = formatJson.getStr("param");
}
list.add(new String[]{name, appendTo, function, params});
}
}
}
}
return list;
}
/**
* Clear the caches when the config changes so they are rebuilt from the new schema
*/
private static void clearCache() {
jobList.clear();
jsonFieldsMap.clear();
dropList.clear();
defaultFieldsMap.clear();
}
}
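For reference, a hedged sketch of the schema shape that getFieldsFromSchema and getJobListFromHttp expect. Only the structural keys (fields, name, type, default, doc, visibility, format, function, appendTo, param) come from the code above; the concrete field names and the "hidden" visibility literal (the real value is FlowWriteConfig.VISIBILITY) are illustrative assumptions:

// Hedged example schema fragment, kept as a plain String constant; field names and the
// "hidden" visibility value are assumptions for illustration only.
public final class SchemaExample {
    public static final String SCHEMA =
        "{\"fields\":["
        + "{\"name\":\"mail_subject\",\"type\":\"string\","
        + "\"doc\":{\"format\":[{\"function\":\"decode_of_base64\",\"param\":\"mail_subject_charset\"}]}},"
        + "{\"name\":\"common_vsys_id\",\"type\":\"int\",\"default\":1},"
        + "{\"name\":\"debug_only\",\"type\":\"string\",\"doc\":{\"visibility\":\"hidden\"}}"
        + "]}";
}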


@@ -0,0 +1,42 @@
package com.zdjizhi.tools.json;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.jayway.jsonpath.InvalidPathException;
import com.jayway.jsonpath.JsonPath;
import com.zdjizhi.utils.StringUtil;
import java.util.ArrayList;
/**
* @author qidaijie
* @Package com.zdjizhi.tools.json
* @Description:
* @date 2022/7/18 17:19
*/
public class JsonPathUtil {
private static final Log logger = LogFactory.get();
/**
* Parse the TEID out of a JSON string via JsonPath
*
* @param message JSON string
* @param expr JsonPath expression
* @return the first matched value, or null
*/
public static String getTeidValue(String message, String expr) {
String result = null;
try {
if (StringUtil.isNotBlank(message) && StringUtil.isNotBlank(expr)) {
ArrayList<Object> read = JsonPath.parse(message).read(expr);
if (read.size() >= 1) {
result = read.get(0).toString();
}
}
} catch (RuntimeException e) {
logger.error("JSONPath parsing json returns Long data exception: " + e);
}
return result;
}
}
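A small usage sketch of getTeidValue against the tunnels JSON shown in TransFunction.gtpcMatch's javadoc; the JsonPath expression below is an assumed example, the real expressions are supplied by the schema's param field:

// Hedged usage sketch; the expression is an assumption, real ones are configuration-driven.
import com.zdjizhi.tools.json.JsonPathUtil;

public final class JsonPathUtilExample {
    public static void main(String[] args) {
        String tunnels = "[{\"tunnels_schema_type\":\"GTP\",\"gtp_uplink_teid\":235261261,"
            + "\"gtp_downlink_teid\":665547833,\"gtp_sgw_ip\":\"192.56.5.2\",\"gtp_pgw_ip\":\"192.56.10.20\"}]";
        String uplinkTeid = JsonPathUtil.getTeidValue(tunnels, "$[*].gtp_uplink_teid");
        System.out.println(uplinkTeid); // 235261261
    }
}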


@@ -0,0 +1,121 @@
package com.zdjizhi.tools.json;
import com.zdjizhi.tools.exception.FlowWriteException;
import com.zdjizhi.utils.JsonMapper;
import java.util.List;
import java.util.Map;
/**
* @author qidaijie
* @Package com.zdjizhi.tools.json
* @Description:
* @date 2021/7/12 17:34
*/
public class JsonTypeUtil {
/**
* String type check/convert method
*
* @param value json value
* @return String value
*/
static String checkString(Object value) {
if (value == null) {
return null;
}
if (value instanceof Map) {
return JsonMapper.toJsonString(value);
}
if (value instanceof List) {
return JsonMapper.toJsonString(value);
}
return value.toString();
}
/**
* Map (object) type check/convert method
*
* @param value json value
* @return Map value
*/
static Map checkObject(Object value) {
if (value == null) {
return null;
}
if (value instanceof Map) {
return (Map) value;
}
throw new FlowWriteException("can not cast to map, value : " + value);
}
/**
* Array (list) type check/convert method
*
* @param value json value
* @return List value
*/
static List checkArray(Object value) {
if (value == null) {
return null;
}
if (value instanceof List) {
return (List) value;
}
throw new FlowWriteException("can not cast to List, value : " + value);
}
/**
* long type check/convert method; returns 0L if the value is null
*
* @param value json value
* @return Long value
*/
static long checkLongValue(Object value) {
Long longVal = TypeUtils.castToLong(value);
if (longVal == null) {
return 0L;
}
return longVal;
}
/**
* Double type check/convert method
*
* @param value json value
* @return Double value
*/
static Double checkDouble(Object value) {
if (value == null) {
return null;
}
return TypeUtils.castToDouble(value);
}
/**
* int type check/convert method; returns 0 if the value is null
*
* @param value json value
* @return int value
*/
static int getIntValue(Object value) {
Integer intVal = TypeUtils.castToInt(value);
if (intVal == null) {
return 0;
}
return intVal;
}
}


@@ -0,0 +1,171 @@
package com.zdjizhi.tools.json;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.tools.exception.FlowWriteException;
import com.zdjizhi.utils.StringUtil;
/**
* @author qidaijie
* @Package com.zdjizhi.tools.json
* @Description:
* @date 2021/7/12 18:20
*/
public class TypeUtils {
private static final Log logger = LogFactory.get();
/**
* Value normalization for the IF function: passes Strings and numbers through and maps Booleans to 1/0
*
* @param value json value
* @return normalized value or null
*/
public static Object castToIfFunction(Object value) {
if (value == null) {
return null;
}
if (value instanceof String) {
return value.toString();
}
if (value instanceof Integer) {
return ((Number) value).intValue();
}
if (value instanceof Long) {
return ((Number) value).longValue();
}
// if (value instanceof Map) {
// return (Map) value;
// }
//
// if (value instanceof List) {
// return Collections.singletonList(value.toString());
// }
if (value instanceof Boolean) {
return (Boolean) value ? 1 : 0;
}
throw new FlowWriteException("can not cast to int, value : " + value);
}
/**
* Integer type conversion method
*
* @param value json value
* @return Integer value or null
*/
static Integer castToInt(Object value) {
if (value == null) {
return null;
}
if (value instanceof Integer) {
return (Integer) value;
}
if (value instanceof Number) {
return ((Number) value).intValue();
}
if (value instanceof String) {
String strVal = (String) value;
if (StringUtil.isBlank(strVal)) {
return null;
}
//Convert values like "10,20" to 10 (keep the first element)
if (strVal.contains(FlowWriteConfig.FORMAT_SPLITTER)) {
strVal = strVal.split(FlowWriteConfig.FORMAT_SPLITTER)[0];
}
try {
return Integer.parseInt(strVal);
} catch (NumberFormatException ex) {
logger.error("String change Integer Error,The error Str is:" + strVal);
}
}
if (value instanceof Boolean) {
return (Boolean) value ? 1 : 0;
}
throw new FlowWriteException("can not cast to int, value : " + value);
}
/**
* Double type conversion method
*
* @param value json value
* @return double value or null
*/
static Double castToDouble(Object value) {
if (value instanceof Number) {
return ((Number) value).doubleValue();
}
if (value instanceof String) {
String strVal = (String) value;
if (StringUtil.isBlank(strVal)) {
return null;
}
//Convert values like "10,20" to 10 (keep the first element)
if (strVal.contains(FlowWriteConfig.FORMAT_SPLITTER)) {
strVal = strVal.split(FlowWriteConfig.FORMAT_SPLITTER)[0];
}
try {
return Double.parseDouble(strVal);
} catch (NumberFormatException ex) {
logger.error("String change Double Error,The error Str is:" + strVal);
}
}
throw new FlowWriteException("can not cast to double, value : " + value);
}
/**
* Long type conversion method
*
* @param value json value
* @return (Long)value or null
*/
static Long castToLong(Object value) {
if (value == null) {
return null;
}
if (value instanceof Number) {
return ((Number) value).longValue();
}
if (value instanceof String) {
String strVal = (String) value;
if (StringUtil.isBlank(strVal)) {
return null;
}
//Convert values like "10,20" to 10 (keep the first element)
if (strVal.contains(FlowWriteConfig.FORMAT_SPLITTER)) {
strVal = strVal.split(FlowWriteConfig.FORMAT_SPLITTER)[0];
}
try {
return Long.parseLong(strVal);
} catch (NumberFormatException ex) {
logger.error("String change Long Error,The error Str is:" + strVal);
}
}
throw new FlowWriteException("can not cast to long, value : " + value);
}
}


@@ -0,0 +1,48 @@
package com.zdjizhi.tools.kafka;
import com.zdjizhi.common.FlowWriteConfig;
import org.apache.kafka.common.config.SslConfigs;
import java.util.Properties;
/**
* @author qidaijie
* @Package com.zdjizhi.tools.kafka
* @Description:
* @date 2021/9/6 10:37
*/
class CertUtils {
/**
* Kafka SASL authentication port
*/
private static final String SASL_PORT = "9094";
/**
* Kafka SSL authentication port
*/
private static final String SSL_PORT = "9095";
/**
* Choose the authentication mechanism based on the port in the connection string.
*
* @param servers Kafka connection string
* @param properties Kafka connection properties
*/
static void chooseCert(String servers, Properties properties) {
if (servers.contains(SASL_PORT)) {
properties.put("security.protocol", "SASL_PLAINTEXT");
properties.put("sasl.mechanism", "PLAIN");
properties.put("sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required username="
+ FlowWriteConfig.KAFKA_SASL_JAAS_USER + " password=" + FlowWriteConfig.KAFKA_SASL_JAAS_PIN + ";");
} else if (servers.contains(SSL_PORT)) {
properties.put("security.protocol", "SSL");
properties.put(SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG, "");
properties.put("ssl.keystore.location", FlowWriteConfig.TOOLS_LIBRARY + "keystore.jks");
properties.put("ssl.keystore.password", FlowWriteConfig.KAFKA_SASL_JAAS_PIN);
properties.put("ssl.truststore.location", FlowWriteConfig.TOOLS_LIBRARY + "truststore.jks");
properties.put("ssl.truststore.password", FlowWriteConfig.KAFKA_SASL_JAAS_PIN);
properties.put("ssl.key.password", FlowWriteConfig.KAFKA_SASL_JAAS_PIN);
}
}
}


@@ -0,0 +1,67 @@
package com.zdjizhi.tools.kafka;
import com.zdjizhi.common.FlowWriteConfig;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import java.util.Map;
import java.util.Properties;
/**
* @author qidaijie
* @Package com.zdjizhi.tools.kafka
* @Description:
* @date 2021/6/8 13:54
*/
public class KafkaConsumer {
private static Properties createConsumerConfig() {
Properties properties = new Properties();
properties.put("bootstrap.servers", FlowWriteConfig.SOURCE_KAFKA_SERVERS);
properties.put("group.id", FlowWriteConfig.GROUP_ID);
properties.put("session.timeout.ms", FlowWriteConfig.SESSION_TIMEOUT_MS);
properties.put("max.poll.records", FlowWriteConfig.MAX_POLL_RECORDS);
properties.put("max.partition.fetch.bytes", FlowWriteConfig.MAX_PARTITION_FETCH_BYTES);
properties.put("partition.discovery.interval.ms", "10000");
CertUtils.chooseCert(FlowWriteConfig.SOURCE_KAFKA_SERVERS, properties);
return properties;
}
/**
* Custom deserialization of Kafka records that also injects the Kafka record timestamp.
*
* @return kafka logs -> map
*/
@SuppressWarnings("unchecked")
public static FlinkKafkaConsumer<Map<String, Object>> myDeserializationConsumer() {
FlinkKafkaConsumer<Map<String, Object>> kafkaConsumer = new FlinkKafkaConsumer<>(FlowWriteConfig.SOURCE_KAFKA_TOPIC,
new TimestampDeserializationSchema(), createConsumerConfig());
//commit offsets back to Kafka as checkpoints complete
kafkaConsumer.setCommitOffsetsOnCheckpoints(true);
//start consuming from the consumer group's current offsets
kafkaConsumer.setStartFromGroupOffsets();
return kafkaConsumer;
}
/**
* Deserialize Kafka records with the stock SimpleStringSchema
*
* @return kafka logs
*/
public static FlinkKafkaConsumer<String> flinkConsumer() {
FlinkKafkaConsumer<String> kafkaConsumer = new FlinkKafkaConsumer<>(FlowWriteConfig.SOURCE_KAFKA_TOPIC,
new SimpleStringSchema(), createConsumerConfig());
//commit offsets back to Kafka as checkpoints complete
kafkaConsumer.setCommitOffsetsOnCheckpoints(true);
//start consuming from the consumer group's current offsets
kafkaConsumer.setStartFromGroupOffsets();
return kafkaConsumer;
}
}


@@ -0,0 +1,50 @@
package com.zdjizhi.tools.kafka;
import com.zdjizhi.common.FlowWriteConfig;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import java.util.Optional;
import java.util.Properties;
/**
* @author qidaijie
* @Package com.zdjizhi.tools.kafka
* @Description:
* @date 2021/6/8 14:04
*/
public class KafkaProducer {
private static Properties createProducerConfig() {
Properties properties = new Properties();
properties.put("bootstrap.servers", FlowWriteConfig.SINK_KAFKA_SERVERS);
properties.put("acks", FlowWriteConfig.PRODUCER_ACK);
properties.put("retries", FlowWriteConfig.RETRIES);
properties.put("linger.ms", FlowWriteConfig.LINGER_MS);
properties.put("request.timeout.ms", FlowWriteConfig.REQUEST_TIMEOUT_MS);
properties.put("batch.size", FlowWriteConfig.BATCH_SIZE);
properties.put("buffer.memory", FlowWriteConfig.BUFFER_MEMORY);
properties.put("max.request.size", FlowWriteConfig.MAX_REQUEST_SIZE);
properties.put("compression.type", FlowWriteConfig.PRODUCER_KAFKA_COMPRESSION_TYPE);
CertUtils.chooseCert(FlowWriteConfig.SINK_KAFKA_SERVERS, properties);
return properties;
}
public static FlinkKafkaProducer<String> getKafkaProducer() {
FlinkKafkaProducer<String> kafkaProducer = new FlinkKafkaProducer<String>(
FlowWriteConfig.SINK_KAFKA_TOPIC,
new SimpleStringSchema(),
createProducerConfig(),
//Optional.empty(): the sink connects to all partitions and writes round-robin
Optional.empty());
//let the producer log failures instead of catching and rethrowing them
kafkaProducer.setLogFailuresOnly(true);
return kafkaProducer;
}
}


@@ -0,0 +1,48 @@
package com.zdjizhi.tools.kafka;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.utils.JsonMapper;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.streaming.connectors.kafka.KafkaDeserializationSchema;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import java.util.Map;
/**
* @author qidaijie
* @Package com.zdjizhi.tools.kafka
* @Description:
* @date 2022/3/8 9:42
*/
public class TimestampDeserializationSchema implements KafkaDeserializationSchema<Map<String, Object>> {
    private static final Log logger = LogFactory.get();
    @Override
    public TypeInformation<Map<String, Object>> getProducedType() {
        return TypeInformation.of(new TypeHint<Map<String, Object>>() {
        });
    }
    @Override
    public boolean isEndOfStream(Map<String, Object> nextElement) {
        return false;
    }
    @Override
    @SuppressWarnings("unchecked")
    public Map<String, Object> deserialize(ConsumerRecord<byte[], byte[]> record) throws Exception {
        if (record != null && record.value() != null) {
            try {
                // Kafka record timestamps are in milliseconds; the log keeps seconds
                long timestamp = record.timestamp() / 1000;
                String value = new String(record.value(), FlowWriteConfig.ENCODING);
                Map<String, Object> json = (Map<String, Object>) JsonMapper.fromJsonString(value, Map.class);
                json.put("common_ingestion_time", timestamp);
                return json;
            } catch (RuntimeException e) {
                logger.error("KafkaConsumer deserialize failed, the exception is: " + e.getMessage());
            }
        }
return null;
}
}

View File

@@ -0,0 +1,69 @@
package com.zdjizhi.tools.system;
import com.zdjizhi.utils.StringUtil;
import java.io.IOException;
import java.util.Locale;
import java.util.Properties;
/**
* @author Administrator
*/
public final class FlowWriteConfigurations {
private static Properties propDefault = new Properties();
private static Properties propService = new Properties();
public static String getStringProperty(Integer type, String key) {
if (type == 0) {
return propService.getProperty(key).trim();
} else if (type == 1) {
return propDefault.getProperty(key).trim();
} else {
return null;
}
}
public static Integer getIntProperty(Integer type, String key) {
if (type == 0) {
return Integer.parseInt(propService.getProperty(key).trim());
} else if (type == 1) {
return Integer.parseInt(propDefault.getProperty(key).trim());
} else {
return null;
}
}
public static Long getLongProperty(Integer type, String key) {
if (type == 0) {
return Long.parseLong(propService.getProperty(key).trim());
} else if (type == 1) {
return Long.parseLong(propDefault.getProperty(key).trim());
} else {
return null;
}
}
public static Boolean getBooleanProperty(Integer type, String key) {
if (type == 0) {
            return StringUtil.equals(propService.getProperty(key).trim().toLowerCase(Locale.ENGLISH), "true");
} else if (type == 1) {
            return StringUtil.equals(propDefault.getProperty(key).trim().toLowerCase(Locale.ENGLISH), "true");
} else {
return null;
}
}
static {
try {
propService.load(FlowWriteConfigurations.class.getClassLoader().getResourceAsStream("service_flow_config.properties"));
propDefault.load(FlowWriteConfigurations.class.getClassLoader().getResourceAsStream("default_config.properties"));
} catch (IOException | RuntimeException e) {
propDefault = null;
propService = null;
}
}
}
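A hedged sketch of how these typed getters might be used; type 0 reads service_flow_config.properties and type 1 reads default_config.properties, while the property keys shown are purely illustrative.
public final class FlowWriteConfigSketch {
    // Hypothetical keys, shown only to illustrate the lookup pattern
    public static final Integer RETRIES = FlowWriteConfigurations.getIntProperty(0, "producer.retries");
    public static final Long LINGER_MS = FlowWriteConfigurations.getLongProperty(1, "producer.linger.ms");
    public static final Boolean TLS_ENABLED = FlowWriteConfigurations.getBooleanProperty(1, "kafka.tls.enabled");
}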

View File

@@ -0,0 +1,190 @@
package com.zdjizhi.tools.zookeeper;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import org.apache.zookeeper.*;
import org.apache.zookeeper.data.Stat;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
/**
* @author qidaijie
*/
public class DistributedLock implements Lock, Watcher {
private static final Log logger = LogFactory.get();
private ZooKeeper zk = null;
/**
     * Root node
*/
private final String ROOT_LOCK = "/locks";
/**
     * Resource being competed for
*/
private String lockName;
/**
     * The previous lock node to wait on
*/
private String waitLock;
/**
     * Current lock node
*/
private String currentLock;
/**
     * Latch used to wait for the previous node to be removed
*/
private CountDownLatch countDownLatch;
private int sessionTimeout = 2000;
private List<Exception> exceptionList = new ArrayList<Exception>();
/**
     * Configure the distributed lock
     *
     * @param config   ZooKeeper connection string
     * @param lockName resource to compete for
     */
public DistributedLock(String config, String lockName) {
this.lockName = lockName;
try {
            // Connect to ZooKeeper
zk = new ZooKeeper(config, sessionTimeout, this);
Stat stat = zk.exists(ROOT_LOCK, false);
if (stat == null) {
                // Create the root node if it does not exist
zk.create(ROOT_LOCK, new byte[0], ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
}
} catch (IOException | InterruptedException | KeeperException e) {
logger.error("Node already exists!");
}
}
    // Node watcher: releases the waiting latch when the watched previous node changes
@Override
public void process(WatchedEvent event) {
if (this.countDownLatch != null) {
this.countDownLatch.countDown();
}
}
@Override
public void lock() {
if (exceptionList.size() > 0) {
throw new LockException(exceptionList.get(0));
}
try {
if (this.tryLock()) {
                logger.info(Thread.currentThread().getName() + " " + lockName + " acquired the lock");
} else {
                // Wait for the lock held by the previous node
waitForLock(waitLock, sessionTimeout);
}
} catch (InterruptedException | KeeperException e) {
logger.error("获取锁异常" + e);
}
}
@Override
public boolean tryLock() {
try {
String splitStr = "_lock_";
if (lockName.contains(splitStr)) {
throw new LockException("锁名有误");
}
            // Create an ephemeral sequential node
currentLock = zk.create(ROOT_LOCK + "/" + lockName + splitStr, new byte[0],
ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL_SEQUENTIAL);
            // Fetch all child nodes under the lock root
List<String> subNodes = zk.getChildren(ROOT_LOCK, false);
            // Keep only the nodes competing for this lockName
List<String> lockObjects = new ArrayList<String>();
for (String node : subNodes) {
String tmpNode = node.split(splitStr)[0];
if (tmpNode.equals(lockName)) {
lockObjects.add(node);
}
}
Collections.sort(lockObjects);
            // If the current node is the smallest one, the lock has been acquired
if (currentLock.equals(ROOT_LOCK + "/" + lockObjects.get(0))) {
return true;
}
            // Otherwise find the node immediately in front of the current one and wait on it
            String currentNode = currentLock.substring(currentLock.lastIndexOf("/") + 1);
            waitLock = lockObjects.get(Collections.binarySearch(lockObjects, currentNode) - 1);
} catch (InterruptedException | KeeperException e) {
logger.error("获取锁过程异常" + e);
}
return false;
}
@Override
public boolean tryLock(long timeout, TimeUnit unit) {
try {
if (this.tryLock()) {
return true;
}
            // waitForLock expects milliseconds, so convert the caller's time unit
            return waitForLock(waitLock, unit.toMillis(timeout));
} catch (KeeperException | InterruptedException | RuntimeException e) {
logger.error("判断是否锁定异常" + e);
}
return false;
}
    // Wait for the lock held by the previous node
private boolean waitForLock(String prev, long waitTime) throws KeeperException, InterruptedException {
Stat stat = zk.exists(ROOT_LOCK + "/" + prev, true);
if (stat != null) {
this.countDownLatch = new CountDownLatch(1);
            // Block on the latch; when the previous node is removed, process() counts it down and the wait ends
this.countDownLatch.await(waitTime, TimeUnit.MILLISECONDS);
this.countDownLatch = null;
}
return true;
}
@Override
public void unlock() {
try {
zk.delete(currentLock, -1);
currentLock = null;
zk.close();
} catch (InterruptedException | KeeperException e) {
logger.error("关闭锁异常" + e);
}
}
@Override
public Condition newCondition() {
return null;
}
@Override
public void lockInterruptibly() throws InterruptedException {
this.lock();
}
public class LockException extends RuntimeException {
private static final long serialVersionUID = 1L;
public LockException(String e) {
super(e);
}
public LockException(Exception e) {
super(e);
}
}
}
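A minimal usage sketch, assuming a reachable ZooKeeper ensemble; the connect string and lock name are illustrative (the lock name must not contain the internal "_lock_" separator).
DistributedLock lock = new DistributedLock("zk-host:2181", "worker_id");
lock.lock();
try {
    // critical section, e.g. a read-modify-write of shared state in ZooKeeper
} finally {
    // unlock() deletes the ephemeral node and closes the ZooKeeper session,
    // so a new DistributedLock instance is needed for the next acquisition
    lock.unlock();
}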

View File

@@ -0,0 +1,140 @@
package com.zdjizhi.tools.zookeeper;
import cn.hutool.core.util.StrUtil;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import org.apache.zookeeper.*;
import org.apache.zookeeper.data.ACL;
import org.apache.zookeeper.data.Stat;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.CountDownLatch;
/**
* @author qidaijie
* @Package cn.ac.iie.utils.zookeeper
* @Description:
* @date 2020/11/1411:28
*/
public class ZookeeperUtils implements Watcher {
private static final Log logger = LogFactory.get();
private static final int ID_MAX = 255;
private ZooKeeper zookeeper;
private static final int SESSION_TIME_OUT = 20000;
private CountDownLatch countDownLatch = new CountDownLatch(1);
@Override
public void process(WatchedEvent event) {
if (event.getState() == Event.KeeperState.SyncConnected) {
countDownLatch.countDown();
}
}
/**
     * Modify node data: read the current worker id and store the incremented value
     *
     * @param path        node path
     * @param zookeeperIp ZooKeeper address
     * @return the worker id read before the increment
     */
public int modifyNode(String path, String zookeeperIp) {
createNode(path, "0".getBytes(), ZooDefs.Ids.OPEN_ACL_UNSAFE, zookeeperIp);
int workerId = 0;
try {
connectZookeeper(zookeeperIp);
Stat stat = zookeeper.exists(path, true);
workerId = Integer.parseInt(getNodeDate(path));
if (workerId > ID_MAX) {
workerId = 0;
zookeeper.setData(path, "1".getBytes(), stat.getVersion());
} else {
String result = String.valueOf(workerId + 1);
if (stat != null) {
zookeeper.setData(path, result.getBytes(), stat.getVersion());
} else {
logger.error("Node does not exist!,Can't modify");
}
}
} catch (KeeperException | InterruptedException e) {
logger.error("modify error Can't modify," + e);
} finally {
closeConn();
}
logger.warn("workerID is" + workerId);
return workerId;
}
/**
     * Connect to ZooKeeper
     *
     * @param host address
*/
public void connectZookeeper(String host) {
try {
zookeeper = new ZooKeeper(host, SESSION_TIME_OUT, this);
countDownLatch.await();
} catch (IOException | InterruptedException e) {
logger.error("Connection to the Zookeeper Exception! message:" + e);
}
}
/**
     * Close the ZooKeeper connection
*/
public void closeConn() {
try {
if (zookeeper != null) {
zookeeper.close();
}
} catch (InterruptedException e) {
logger.error("Close the Zookeeper connection Exception! message:" + e);
}
}
/**
     * Get node content
     *
     * @param path node path
     * @return the node content, or null on exception
*/
public String getNodeDate(String path) {
String result = null;
Stat stat = new Stat();
try {
byte[] resByte = zookeeper.getData(path, true, stat);
result = StrUtil.str(resByte, "UTF-8");
} catch (KeeperException | InterruptedException e) {
logger.error("Get node information exception" + e);
}
return result;
}
/**
     * @param path        path of the node to create
     * @param date        data to store on the node as byte[]
     * @param acls        access-control policy
     * @param zookeeperIp ZooKeeper address
*/
public void createNode(String path, byte[] date, List<ACL> acls, String zookeeperIp) {
try {
connectZookeeper(zookeeperIp);
Stat exists = zookeeper.exists(path, true);
if (exists == null) {
                Stat existsSnowflakeId = zookeeper.exists("/Snowflake", true);
                if (existsSnowflakeId == null) {
zookeeper.create("/Snowflake", null, acls, CreateMode.PERSISTENT);
}
zookeeper.create(path, date, acls, CreateMode.PERSISTENT);
} else {
logger.warn("Node already exists ! Don't need to create");
}
} catch (KeeperException | InterruptedException e) {
logger.error(e);
} finally {
closeConn();
}
}
}
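A hedged sketch of combining the two ZooKeeper helpers to hand out a worker id in the 0-255 range; the counter path, lock name and address below are illustrative.
String zkAddress = "zk-host:2181";
// Serialize concurrent increments of the shared counter across task managers
DistributedLock lock = new DistributedLock(zkAddress, "workerid");
lock.lock();
int workerId;
try {
    // modifyNode creates /Snowflake and the counter node on first use,
    // then returns the stored value and writes back value + 1
    workerId = new ZookeeperUtils().modifyNode("/Snowflake/flow-write", zkAddress);
} finally {
    lock.unlock();
}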