getJobList() {
+ return jobList;
}
+
/**
* Builds the map from the gateway schema (String); the map is used to generate Object instances via reflection
+ *
+ * @param schema gateway schema JSON string
*
- * @param http gateway schema URL
* @return a map used to reflectively generate objects of the schema's field types
*/
- public static HashMap getMapFromHttp(String http) {
+ public static HashMap getMapFromHttp(String schema) {
HashMap map = new HashMap<>(16);
- String schema = HttpClientUtil.requestByGetMethod(http);
- Object data = JSON.parseObject(schema).get("data");
-
// get the fields and convert them to an array; each element holds a name, doc and type
- JSONObject schemaJson = JSON.parseObject(data.toString());
+ JSONObject schemaJson = JSON.parseObject(schema);
JSONArray fields = (JSONArray) schemaJson.get("fields");
for (Object field : fields) {
@@ -239,6 +288,7 @@ public class JsonParseUtil {
/**
* Removes the specified invalid fields from the schema (Jackson)
+ *
* @param jsonMap
*/
public static void dropJsonField(Map jsonMap) {
@@ -247,31 +297,17 @@ public class JsonParseUtil {
}
}
- /**
- * Removes the specified invalid fields from the schema (fastjson)
- * @param jsonMap
- */
- public static void dropJsonField(JSONObject jsonMap) {
- for (String field : dropList) {
- jsonMap.remove(field);
- }
- }
-
/**
* Parses the gateway schema and returns a job list (useList toList funcList paramlist)
*
- * @param http gateway URL
+ * @param schema gateway schema JSON string
* @return the job list
*/
- public static ArrayList getJobListFromHttp(String http) {
+ public static ArrayList getJobListFromHttp(String schema) {
ArrayList list = new ArrayList<>();
- String schema = HttpClientUtil.requestByGetMethod(http);
- // parse the data field
- Object data = JSON.parseObject(schema).get("data");
-
// get the fields and convert them to an array; each element holds a name, doc and type
- JSONObject schemaJson = JSON.parseObject(data.toString());
+ JSONObject schemaJson = JSON.parseObject(schema);
JSONArray fields = (JSONArray) schemaJson.get("fields");
for (Object field : fields) {
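Since both helpers now take the schema JSON itself instead of a gateway URL, fetching and unwrapping the response becomes the caller's job. A minimal caller sketch, assuming the gateway still wraps the schema in a "data" field as the removed lines did (variable names are illustrative):

// Hypothetical caller; HttpClientUtil, FlowWriteConfig and the fastjson JSON class are the ones already used in this class.
String response = HttpClientUtil.requestByGetMethod(FlowWriteConfig.SCHEMA_HTTP);
String schema = JSON.parseObject(response).getString("data");
HashMap typeMap = JsonParseUtil.getMapFromHttp(schema);     // field name -> Class, used for reflection
ArrayList jobs = JsonParseUtil.getJobListFromHttp(schema);  // useList / toList / funcList / paramList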
diff --git a/src/main/java/com/zdjizhi/utils/json/JsonTypeUtils.java b/src/main/java/com/zdjizhi/utils/json/JsonTypeUtils.java
index 0b6bc1e..63af9d5 100644
--- a/src/main/java/com/zdjizhi/utils/json/JsonTypeUtils.java
+++ b/src/main/java/com/zdjizhi/utils/json/JsonTypeUtils.java
@@ -2,6 +2,11 @@ package com.zdjizhi.utils.json;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
+import com.alibaba.nacos.api.NacosFactory;
+import com.alibaba.nacos.api.PropertyKeyConst;
+import com.alibaba.nacos.api.config.ConfigService;
+import com.alibaba.nacos.api.config.listener.Listener;
+import com.alibaba.nacos.api.exception.NacosException;
import com.zdjizhi.common.FlowWriteConfig;
import com.zdjizhi.utils.JsonMapper;
import com.zdjizhi.utils.exception.FlowWriteException;
@@ -9,6 +14,11 @@ import com.zdjizhi.utils.exception.FlowWriteException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.Properties;
+import java.util.concurrent.Executor;
+
+import static com.zdjizhi.utils.json.JsonParseUtil.getJobListFromHttp;
+import static com.zdjizhi.utils.json.JsonParseUtil.getMapFromHttp;
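The new Nacos imports suggest the schema is now read from configuration and refreshed at runtime rather than fetched once over HTTP. A minimal sketch of that wiring, assuming a schema data-id and group (both hypothetical, as is the server address):

// Hypothetical wiring; dataId, group and server address are assumptions, not taken from this diff.
private static volatile HashMap typeMap;

static void initSchemaFromNacos() throws NacosException {
    Properties props = new Properties();
    props.put(PropertyKeyConst.SERVER_ADDR, "127.0.0.1:8848");
    ConfigService configService = NacosFactory.createConfigService(props);

    // Initial load: the config value is the schema JSON consumed by getMapFromHttp.
    typeMap = getMapFromHttp(configService.getConfig("log-schema", "DEFAULT_GROUP", 5000));

    // Rebuild the reflection map whenever the schema config changes.
    configService.addListener("log-schema", "DEFAULT_GROUP", new Listener() {
        @Override
        public Executor getExecutor() {
            return null; // callbacks run on Nacos' notify thread
        }

        @Override
        public void receiveConfigInfo(String configInfo) {
            typeMap = getMapFromHttp(configInfo);
        }
    });
}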
/**
* @author qidaijie
@@ -17,70 +27,26 @@ import java.util.Map;
* @date 2021/7/12 17:34
*/
public class JsonTypeUtils {
- private static final Log logger = LogFactory.get();
- /**
- * The map of reflection classes kept in memory
- */
- private static HashMap map = JsonParseUtil.getMapFromHttp(FlowWriteConfig.SCHEMA_HTTP);
-
- /**
- * Type conversion
- *
- * @param jsonMap the raw log map
- */
- public static Map typeTransform(Map jsonMap) throws RuntimeException {
- JsonParseUtil.dropJsonField(jsonMap);
- HashMap tmpMap = new HashMap<>(192);
- for (String key : jsonMap.keySet()) {
- if (map.containsKey(key)) {
- String simpleName = map.get(key).getSimpleName();
- switch (simpleName) {
- case "String":
- tmpMap.put(key, checkString(jsonMap.get(key)));
- break;
- case "Integer":
- tmpMap.put(key, getIntValue(jsonMap.get(key)));
- break;
- case "long":
- tmpMap.put(key, checkLongValue(jsonMap.get(key)));
- break;
- case "List":
- tmpMap.put(key, checkArray(jsonMap.get(key)));
- break;
- case "Map":
- tmpMap.put(key, checkObject(jsonMap.get(key)));
- break;
- case "double":
- tmpMap.put(key, checkDouble(jsonMap.get(key)));
- break;
- default:
- tmpMap.put(key, checkString(jsonMap.get(key)));
- }
- }
- }
- return tmpMap;
- }
-
/**
* String type validation and conversion method
*
* @param value json value
* @return String value
*/
- private static String checkString(Object value) {
+ static String checkString(Object value) {
if (value == null) {
return null;
}
- if (value instanceof Map){
+ if (value instanceof Map) {
return JsonMapper.toJsonString(value);
}
- if (value instanceof List){
+ if (value instanceof List) {
return JsonMapper.toJsonString(value);
}
- return value.toString();
+ return value.toString();
}
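Widening checkString to package-private does not change its behaviour: nested Map/List values are re-serialized through JsonMapper while everything else falls back to toString(). Illustrative calls (inputs made up):

checkString(null);                                        // null
checkString(42L);                                         // "42"
checkString(java.util.Collections.singletonMap("a", 1));  // JSON text such as {"a":1}
checkString(java.util.Arrays.asList(1, 2));               // JSON text such as [1,2]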
/**
@@ -89,7 +55,7 @@ public class JsonTypeUtils {
* @param value json value
* @return List value
*/
- private static Map checkObject(Object value) {
+ static Map checkObject(Object value) {
if (value == null) {
return null;
}
@@ -107,7 +73,7 @@ public class JsonTypeUtils {
* @param value json value
* @return List value
*/
- private static List checkArray(Object value) {
+ static List checkArray(Object value) {
if (value == null) {
return null;
}
@@ -119,27 +85,19 @@ public class JsonTypeUtils {
throw new FlowWriteException("can not cast to List, value : " + value);
}
- private static Long checkLong(Object value) {
- if (value == null) {
- return null;
- }
-
- return TypeUtils.castToLong(value);
- }
-
/**
* long type validation and conversion method; returns the primitive default if the value is null
*
* @param value json value
* @return Long value
*/
- private static long checkLongValue(Object value) {
+ static long checkLongValue(Object value) {
Long longVal = TypeUtils.castToLong(value);
+
if (longVal == null) {
return 0L;
}
-// return longVal.longValue();
return longVal;
}
@@ -149,7 +107,7 @@ public class JsonTypeUtils {
* @param value json value
* @return Double value
*/
- private static Double checkDouble(Object value) {
+ static Double checkDouble(Object value) {
if (value == null) {
return null;
}
@@ -158,29 +116,18 @@ public class JsonTypeUtils {
}
- private static Integer checkInt(Object value) {
- if (value == null) {
- return null;
- }
-
- return TypeUtils.castToInt(value);
- }
-
-
/**
* int type validation and conversion method; returns the primitive default if the value is null
*
* @param value json value
* @return int value
*/
- private static int getIntValue(Object value) {
+ static int getIntValue(Object value) {
Integer intVal = TypeUtils.castToInt(value);
if (intVal == null) {
return 0;
}
-
-// return intVal.intValue();
return intVal;
}
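Dropping `private` on these helpers only makes sense if same-package code (for example the relocated typeTransform logic or tests) now calls them directly. A sketch of such a caller, reproducing the switch from the removed typeTransform under that assumption:

// Hypothetical same-package helper; the dispatch mirrors the deleted typeTransform switch above.
static Object convert(Class<?> fieldType, Object raw) {
    switch (fieldType.getSimpleName()) {
        case "Integer": return JsonTypeUtils.getIntValue(raw);
        case "long":    return JsonTypeUtils.checkLongValue(raw);
        case "List":    return JsonTypeUtils.checkArray(raw);
        case "Map":     return JsonTypeUtils.checkObject(raw);
        case "double":  return JsonTypeUtils.checkDouble(raw);
        default:        return JsonTypeUtils.checkString(raw); // "String" and unknown types both fall back here
    }
}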
diff --git a/src/main/java/com/zdjizhi/utils/kafka/KafkaConsumer.java b/src/main/java/com/zdjizhi/utils/kafka/KafkaConsumer.java
index 078c2fe..f3d979b 100644
--- a/src/main/java/com/zdjizhi/utils/kafka/KafkaConsumer.java
+++ b/src/main/java/com/zdjizhi/utils/kafka/KafkaConsumer.java
@@ -2,8 +2,12 @@ package com.zdjizhi.utils.kafka;
import com.zdjizhi.common.FlowWriteConfig;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
+import org.apache.flink.api.common.serialization.TypeInformationSerializationSchema;
+import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
+import org.apache.flink.api.common.typeutils.base.StringSerializer;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
+import org.apache.flink.connector.kafka.source.reader.deserializer.KafkaRecordDeserializationSchema;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumerBase;
@@ -25,9 +29,7 @@ public class KafkaConsumer {
properties.put("session.timeout.ms", FlowWriteConfig.SESSION_TIMEOUT_MS);
properties.put("max.poll.records", FlowWriteConfig.MAX_POLL_RECORDS);
properties.put("max.partition.fetch.bytes", FlowWriteConfig.MAX_PARTITION_FETCH_BYTES);
- properties.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
- properties.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
- properties.setProperty(FlinkKafkaConsumerBase.KEY_PARTITION_DISCOVERY_INTERVAL_MILLIS, "10");
+ properties.put("partition.discovery.interval.ms", "10000");
CertUtils.chooseCert(FlowWriteConfig.SOURCE_KAFKA_SERVERS, properties);
return properties;
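Dropping the deserializer properties and replacing the FlinkKafkaConsumer discovery key with `partition.discovery.interval.ms` lines up with the newer KafkaSource API that the added imports point at. A sketch of how these properties could feed a KafkaSource (topic and group id are placeholders, not from this diff):

// Hypothetical usage; `properties` is the Properties object assembled above.
KafkaSource<String> source = KafkaSource.<String>builder()
        .setBootstrapServers(FlowWriteConfig.SOURCE_KAFKA_SERVERS)
        .setTopics("log-topic")        // placeholder topic name
        .setGroupId("flow-write")      // placeholder group id
        .setStartingOffsets(OffsetsInitializer.latest())
        .setDeserializer(KafkaRecordDeserializationSchema.valueOnly(new SimpleStringSchema()))
        .setProperties(properties)     // carries partition.discovery.interval.ms=10000
        .build();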
@@ -42,7 +44,10 @@ public class KafkaConsumer {
FlinkKafkaConsumer