Optimize log queries to use ClickHouse
@@ -1,58 +0,0 @@
-package com.nis.util;
-
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.Statement;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.jolbox.bonecp.BoneCPDataSource;
-import com.nis.web.service.SpringContextHolder;
-
-public class HiveDataSource {
-    private final static Logger logger = LoggerFactory.getLogger(HiveDataSource.class);
-    static BoneCPDataSource datasource = null;
-    static Connection conn = null;
-    static ResultSet rs = null;
-    static Statement st = null;
-
-    public static ResultSet query(String sql) throws Exception {
-        if (datasource == null) {
-            datasource = (BoneCPDataSource) SpringContextHolder.getBean("HiveDataSource");
-        }
-        conn = datasource.getConnection();
-        logger.info("连接数据中心日志库成功--------------------------");
-        st = conn.createStatement();
-        // logger.info("开始选择{}数据库--------------------------", Constants.HIVEDBNAME);
-        // String hiveAName = "use " + Constants.HIVEDBNAME;
-        // st.execute(hiveAName);
-        // logger.info("选择数据库{}成功,开始执行查询", Constants.HIVEDBNAME);
-        // logger.info("选择数据库{}成功,开始执行查询", Constants.HIVEDBNAME);
-        rs = st.executeQuery(sql);
-        logger.info("执行查询语句成功sql={}", sql);
-        return rs;
-    }
-
-    public static void closeConn() {
-        try {
-            if (rs != null) {
-                rs.close();
-                rs = null;
-            }
-            if (st != null) {
-                st.close();
-                st = null;
-            }
-            if (conn != null) {
-                conn.close();
-                conn = null;
-            }
-            logger.info("关闭数据中心连接成功");
-        } catch (Exception e) {
-            e.printStackTrace();
-            logger.error("关闭数据中心连接失败,失败原因" + e);
-        }
-    }
-}
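The deleted HiveDataSource above kept its Connection, Statement, and ResultSet in static fields, so concurrent requests could overwrite or close each other's handles mid-query. For contrast, a minimal thread-safe sketch of the same helper (assuming the same "HiveDataSource" Spring bean; the RowMapper callback is hypothetical, added because a ResultSet cannot be read after its connection closes):

    import java.sql.Connection;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.sql.Statement;
    import java.util.ArrayList;
    import java.util.List;

    import javax.sql.DataSource;

    import com.nis.web.service.SpringContextHolder;

    public class SafeLogQuery {
        /** Hypothetical callback: rows must be mapped before the handles close. */
        public interface RowMapper<T> {
            T map(ResultSet rs) throws SQLException;
        }

        public static <T> List<T> query(String sql, RowMapper<T> mapper) throws SQLException {
            DataSource ds = (DataSource) SpringContextHolder.getBean("HiveDataSource");
            List<T> rows = new ArrayList<T>();
            // Per-call resources: try-with-resources closes them even on error,
            // so pooled connections are always returned instead of leaking.
            try (Connection conn = ds.getConnection();
                    Statement st = conn.createStatement();
                    ResultSet rs = st.executeQuery(sql)) {
                while (rs.next()) {
                    rows.add(mapper.map(rs));
                }
            }
            return rows;
        }
    }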
@@ -161,8 +161,8 @@ public class HiveJDBC {
                 listString.add(JsonMapper.toJsonString(map2Obj(map, entityClass)));
                 listObject.add(map2Obj(map, entityClass));
             }
-            logger.info("开始关闭数据中心连接");
-            HiveDataSource.closeConn();
+            // logger.info("开始关闭数据中心连接");
+            // HiveDataSource.closeConn();
             if (null == listString || listString.size() == 0 || null == listObject || listObject.size() == 0) {
                 return null;
             } else {
@@ -22,6 +22,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.alibaba.druid.pool.DruidDataSource;
+import com.jolbox.bonecp.BoneCPDataSource;
 import com.nis.domain.Page;
 import com.nis.web.service.SpringContextHolder;
 
@@ -103,8 +104,6 @@ public class LogJDBCByDruid {
                 listString.add(JsonMapper.toJsonString(map2Obj(map, entityClass)));
                 listObject.add(map2Obj(map, entityClass));
             }
-            logger.info("开始关闭数据中心连接");
-            HiveDataSource.closeConn();
             if (null == listString || listString.size() == 0 || null == listObject || listObject.size() == 0) {
                 return null;
             } else {
@@ -240,5 +239,19 @@ public class LogJDBCByDruid {
         return map;
 
     }
+
+    public ResultSet query(String sql) throws Exception {
+        conn = getConnection();
+        logger.info("连接数据中心日志库成功--------------------------");
+        st = conn.createStatement();
+        // logger.info("开始选择{}数据库--------------------------", Constants.HIVEDBNAME);
+        // String hiveAName = "use " + Constants.HIVEDBNAME;
+        // st.execute(hiveAName);
+        // logger.info("选择数据库{}成功,开始执行查询", Constants.HIVEDBNAME);
+        // logger.info("选择数据库{}成功,开始执行查询", Constants.HIVEDBNAME);
+        rs = st.executeQuery(sql);
+        logger.info("执行查询语句成功sql={}", sql);
+        return rs;
+    }
 
 }
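Like the old HiveDataSource.query, the new method above returns the open ResultSet and leaves conn, st, and rs in instance fields, so the caller must release them or each page view leaks a pooled Druid connection. A usage sketch that cleans up through the standard JDBC back-references (no extra close method assumed; t_log is a hypothetical table, and the fragment belongs in a method that propagates Exception):

    LogJDBCByDruid jdbc = new LogJDBCByDruid();
    ResultSet rs = jdbc.query("select count(1) from t_log where 1=1");
    try {
        while (rs.next()) {
            System.out.println(rs.getObject(1));
        }
    } finally {
        Statement st = rs.getStatement();     // standard JDBC accessors
        Connection conn = st.getConnection();
        rs.close();
        st.close();
        conn.close();                         // returns the Druid connection to the pool
    }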
@@ -23,7 +23,7 @@ import com.alibaba.druid.pool.DruidDataSource;
 import com.nis.domain.Page;
 import com.nis.util.Configurations;
 import com.nis.util.Constants;
-import com.nis.util.HiveDataSource;
+import com.nis.util.LogJDBCByDruid;
 import com.nis.util.StringUtil;
 import com.nis.util.redis.SaveRedisThread;
 
@@ -36,8 +36,8 @@ public class HiveSqlService {
     private static SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
     private static SimpleDateFormat sdf2 = new SimpleDateFormat("yyyyMMdd");
 
-    public static String getSql(Page page, Object bean, String tableName,
-            Map<String, Map<String, String>> col2col, String orderBy, String searchActiveSys) throws Exception {
+    public static String getSql(Page page, Object bean, String tableName, Map<String, Map<String, String>> col2col,
+            String orderBy, String searchActiveSys) throws Exception {
         tableName = tableName.toLowerCase();
         String showColmun = getFiledsSql(bean.getClass().getSimpleName(), page.getFields());
         StringBuffer sql = new StringBuffer();
@@ -123,6 +123,7 @@ public class HiveSqlService {
 
                 }
             }
+            if (!Constants.ISUSECLICKHOUSE) {//hive需要加这个字段
             if (null != foundTimePartStart) {
                 // sql.append(" and found_time_partition>=" + foundTimePartStart + "L");
                 sql.append(" and found_time_partition>=" + foundTimePartStart);
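The new flag above skips the found_time_partition predicate for ClickHouse because only the Hive tables carry that day-granularity partition column; the bound comes from the epoch-second search time via the / 3600L / 24L division used in this file. A worked example of that arithmetic (the timestamp value is hypothetical):

    // found_time_partition holds whole days since the Unix epoch.
    long epochSeconds = 1683201600L;                      // 2023-05-04 12:00:00 UTC
    long foundTimePartStart = epochSeconds / 3600L / 24L; // = 19481 days
    String hiveOnlyCond = " and found_time_partition>=" + foundTimePartStart;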
@@ -131,26 +132,26 @@ public class HiveSqlService {
                 // sql.append(" and found_time_partition<" + foundTimePartEnd + "L");
                 sql.append(" and found_time_partition<=" + foundTimePartEnd);
                 }
+            }
             }
         }
 
-        if(Constants.ISUSECLICKHOUSE) {
-            //Integer startNum = (page.getPageNo() - 1) * page.getPageSize() + 1;
+        if (Constants.ISUSECLICKHOUSE) {
+            // Integer startNum = (page.getPageNo() - 1) * page.getPageSize() + 1;
             Integer startNum = (page.getPageNo() - 1) * page.getPageSize();
-            //Integer endNum = startNum - 1 + page.getPageSize();
-            sql.append(" order by " + orderBy );
-            sql.append(" limit " + startNum+","+page.getPageSize());//clickhouse的分页与mysql相同
+            // Integer endNum = startNum - 1 + page.getPageSize();
+            sql.append(" order by " + orderBy);
+            sql.append(" limit " + startNum + "," + page.getPageSize());// clickhouse的分页与mysql相同
 
-        }else {
+        } else {
 
             // sql.append(" order by " + orderBy + " limit 10000) t1) t2 where
             // row_Num between " + startNum + " and " + endNum);
             sql.append(" limit " + Constants.EVERY_GETHIVEDATANUM);
             logger.info("获取数据中心日志sql==================={}", sql);
         }
-        return sql.toString().toUpperCase();
+        return sql.toString().toLowerCase();
     }
 
     public static Long getHivePageCount(Object bean, String countKey, String tableName,
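The ClickHouse branch of getSql above leans on ClickHouse accepting the MySQL-style "LIMIT offset, size" form, which is why startNum dropped the old "+ 1": the offset is zero-based. A worked sketch of the page arithmetic (page numbers, table, and sort column are hypothetical):

    int pageNo = 3, pageSize = 50;          // requesting the third page
    int startNum = (pageNo - 1) * pageSize; // 100: zero-based row offset
    String sql = "select * from t_log order by found_time desc"
            + " limit " + startNum + "," + pageSize; // -> "limit 100,50", rows 101-150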
@@ -180,10 +181,10 @@ public class HiveSqlService {
                         if (col2col.containsKey(key)) {
                             value = sdf.parse(value.toString().trim()).getTime() / 1000;
                             if (key.toLowerCase().equals("searchfoundstarttime")) {
-                                foundTimePartStart = Long.parseLong(value.toString()) / 3600L/ 24L;
+                                foundTimePartStart = Long.parseLong(value.toString()) / 3600L / 24L;
                             }
                             if (key.toLowerCase().equals("searchfoundendtime")) {
-                                foundTimePartEnd = Long.parseLong(value.toString()) / 3600L/ 24L;
+                                foundTimePartEnd = Long.parseLong(value.toString()) / 3600L / 24L;
                             }
                             if (col2col.get(key).get("start") != null) {
                                 // sql.append(" and " +
@@ -237,7 +238,7 @@ public class HiveSqlService {
         }
         logger.info("获取数据中心日志总条数sql==================" + sql.toString());
         // ResultSet countRs = HiveJDBC.query(countSql.toString());
-        ResultSet countRs = HiveDataSource.query(sql.toString());
+        ResultSet countRs = new LogJDBCByDruid().query(sql.toString());
         String countStr = null;
         while (countRs.next()) {
             countStr = countRs.getObject(1).toString();
@@ -256,207 +257,6 @@ public class HiveSqlService {
         return count;
     }
 
-    public static ResultSet getResultSet2(Page page, Object bean, String tableName,
-            Map<String, Map<String, String>> col2col, String orderBy, String searchActiveSys) throws Exception {
-        tableName = tableName.toLowerCase();
-        String showColmun = getFiledsSql(bean.getClass().getSimpleName(), page.getFields());
-        StringBuffer sql = new StringBuffer();
-        Map<String, String> filedAndColumnMap = getFiledAndColumnMap(bean.getClass());
-        if (null == showColmun || showColmun.equals("")) {
-            for (String key : filedAndColumnMap.keySet()) {
-                if (!filedAndColumnMap.get(key).toLowerCase().equals("id")) {
-                    sql.append(filedAndColumnMap.get(key) + ",");
-                }
-            }
-        } else {
-            sql.append(showColmun);
-        }
-        String sqlTrim = sql.toString().trim();
-        if (sqlTrim.endsWith(",")) {
-            sqlTrim = sqlTrim.substring(0, sqlTrim.length() - 1);
-        }
-        sql.setLength(0);
-        sql.append("select " + sqlTrim + " from(select " + sqlTrim + ",row_Num from(select " + sqlTrim
-                + ",row_number() over() as row_Num from " + tableName + " t where 1=1 ");
-        if (bean != null) {
-            Class<?> clazz = bean.getClass();
-            for (; clazz != Object.class; clazz = clazz.getSuperclass()) {
-                // 获取所有的字段包括public,private,protected,private
-                // Field[] fields = bean.getClass().getDeclaredFields();
-                Field[] fields = clazz.getDeclaredFields();
-                Long foundTimePartStart = null;
-                Long foundTimePartEnd = null;
-                for (int i = 0; i < fields.length; i++) {
-                    // 现在gwall日志表结构中只有数值和字符串两种类型,数值都是int类型没有bigint所以不需要加L,
-                    Field f = fields[i];
-                    String key = f.getName();// 获取字段名
-                    String typeName = f.getType().getName();
-                    if (f.getType().getName().equals("java.lang.String") && key.startsWith("search")) {
-                        Object value = getFieldValue(bean, key);
-                        if (value != null) {
-                            setFieldValue(bean, key, value.toString().trim());
-                            if (key.endsWith("Time")) {// 日期开始或结束的字段
-                                if (col2col.containsKey(key)) {
-                                    value = sdf.parse(value.toString().trim()).getTime() / 1000;
-                                    if (key.toLowerCase().equals("searchfoundstarttime")) {
-                                        foundTimePartStart = Long.parseLong(value.toString()) / 3600L/ 24L;
-                                    }
-                                    if (key.toLowerCase().equals("searchfoundendtime")) {
-                                        foundTimePartEnd = Long.parseLong(value.toString()) / 3600L/ 24L;
-                                    }
-                                    if (col2col.get(key).get("start") != null) {
-                                        // sql.append(" and " +
-                                        // filedAndColumnMap.get(col2col.get(key).get("start"))
-                                        // + ">=to_date('" +
-                                        // value.toString().trim()
-                                        // + "','yyyy-mm-dd HH24:mi:ss')");
-                                        sql.append(" and " + filedAndColumnMap.get(col2col.get(key).get("start")) + ">="
-                                                + value);
-                                    } else {
-                                        // sql.append(" and " +
-                                        // filedAndColumnMap.get(col2col.get(key).get("end"))
-                                        // + "<=to_date('" +
-                                        // value.toString().trim()
-                                        // + "','yyyy-mm-dd HH24:mi:ss')");
-                                        sql.append(" and " + filedAndColumnMap.get(col2col.get(key).get("end")) + "<"
-                                                + value);
-                                    }
-                                }
-                            } else {
-                                if (key.toLowerCase().startsWith("search")) {
-                                    key = key.replace("search", "");
-                                    key = key.substring(0, 1).toLowerCase() + key.substring(1);
-                                }
-
-                                if (typeName.equals("java.lang.String")) {
-                                    sql.append(" and " + filedAndColumnMap.get(key) + "='" + value.toString().trim()
-                                            + "'");
-                                } else if (typeName.equals("java.lang.Integer") || typeName.equals("int")) {
-                                    sql.append(" and " + filedAndColumnMap.get(key) + "=" + value.toString().trim());
-
-                                } else if (typeName.equals("java.lang.Long") || typeName.equals("long")) {
-                                    sql.append(
-                                            " and " + filedAndColumnMap.get(key) + "=" + value.toString().trim() + "L");
-                                }
-                            }
-                        }
-
-                    }
-                }
-                if (null != foundTimePartStart) {
-                    // sql.append(" and found_time_partition>=" + foundTimePartStart + "L");
-                    sql.append(" and found_time_partition>=" + foundTimePartStart);
-                }
-                if (null != foundTimePartEnd) {
-                    // sql.append(" and found_time_partition<" + foundTimePartEnd + "L");
-                    sql.append(" and found_time_partition<" + foundTimePartEnd);
-                }
-
-            }
-        }
-        Integer startNum = (page.getPageNo() - 1) * page.getPageSize() + 1;
-        Integer endNum = startNum - 1 + page.getPageSize();
-        //sql.append(" order by " + orderBy + " limit 10000) t1) t2 where row_Num between " + startNum + " and " + endNum);
-        sql.append(" order by " + orderBy + " ) t1) t2 where row_Num between " + startNum + " and " + endNum);
-        logger.info("获取数据中心日志sql===================" + sql);
-        ResultSet query = HiveDataSource.query(sql.toString());
-        logger.info("获取数据中心日志成功");
-        return query;
-    }
-
-    public static Long getHivePageCount2(Object bean, String tableName, Map<String, Map<String, String>> col2col,
-            String searchActiveSys) throws Exception {
-        tableName = tableName.toLowerCase();
-        tableName = Configurations.getStringProperty(tableName, "t_" + tableName).trim();
-        StringBuffer countSql = new StringBuffer();
-        Map<String, String> filedAndColumnMap = getFiledAndColumnMap(bean.getClass());
-        countSql.append("select count(1) from " + tableName + " where 1=1 ");
-        if (bean != null) {
-            Class<?> clazz = bean.getClass();
-            for (; clazz != Object.class; clazz = clazz.getSuperclass()) {
-                // 获取所有的字段包括public,private,protected,private
-                // Field[] fields = bean.getClass().getDeclaredFields();
-                Field[] fields = clazz.getDeclaredFields();
-                Long foundTimePartStart = null;
-                Long foundTimePartEnd = null;
-                for (int i = 0; i < fields.length; i++) {
-                    Field f = fields[i];
-                    String key = f.getName();// 获取字段名
-                    if (f.getType().getName().equals("java.lang.String") && key.startsWith("search")) {
-                        Object value = getFieldValue(bean, key);
-                        if (value != null) {
-                            setFieldValue(bean, key, value.toString().trim());
-                            if (key.endsWith("Time")) {// 日期开始或结束的字段
-                                if (col2col.containsKey(key)) {
-                                    value = sdf.parse(value.toString().trim()).getTime() / 1000;
-                                    if (key.toLowerCase().equals("searchfoundstarttime")) {
-                                        foundTimePartStart = Long.parseLong(value.toString()) / 3600;
-                                    }
-                                    if (key.toLowerCase().equals("searchfoundendtime")) {
-                                        foundTimePartEnd = Long.parseLong(value.toString()) / 3600;
-                                    }
-                                    if (col2col.get(key).get("start") != null) {
-                                        countSql.append(" and " + filedAndColumnMap.get(col2col.get(key).get("start"))
-                                                + ">=" + value + "L");
-                                    } else {
-                                        countSql.append(" and " + filedAndColumnMap.get(col2col.get(key).get("end"))
-                                                + "<" + value + "L");
-                                    }
-                                }
-                            } else {
-                                if (key.toLowerCase().startsWith("search")) {
-                                    key = key.replace("search", "");
-                                    key = key.substring(0, 1).toLowerCase() + key.substring(1);
-                                }
-
-                                if (!value.toString().trim().equals("") && filedAndColumnMap.containsKey(key)
-                                        && (key.toLowerCase().equals("cfgid")
-                                                || key.toLowerCase().equals("entranceid"))) {
-                                    countSql.append(
-                                            " and " + filedAndColumnMap.get(key) + "=" + value.toString().trim() + "L");
-                                } else if (!value.toString().trim().equals("") && filedAndColumnMap.containsKey(key)
-                                        && (key.toLowerCase().equals("protocol") || key.toLowerCase().equals("serverip")
-                                                || key.toLowerCase().equals("clientip")
-                                                || key.toLowerCase().equals("cljip"))) {
-                                    countSql.append(" and " + filedAndColumnMap.get(key) + "='"
-                                            + value.toString().trim() + "'");
-                                } else if (!value.toString().trim().equals("") && filedAndColumnMap.containsKey(key)
-                                        && key.toLowerCase().equals("servicetype")) {
-                                    countSql.append(
-                                            " and " + filedAndColumnMap.get(key) + "=" + value.toString().trim());
-                                }
-                            }
-                        }
-
-                    }
-                }
-                if (null != searchActiveSys && !searchActiveSys.equals(Constants.ACTIVESYS_A)) {// B版数据库才有found_time_partition字段,A版没有found_time_partition分区字段
-                    if (null != foundTimePartStart) {
-                        countSql.append(" and found_time_partition>=" + foundTimePartStart + "L");
-                    }
-                    if (null != foundTimePartEnd) {
-                        countSql.append(" and found_time_partition<" + foundTimePartEnd + "L");
-                    }
-                }
-            }
-        }
-        logger.info("获取数据中心日志总条数sql==================" + countSql.toString());
-        ResultSet countRs = HiveDataSource.query(countSql.toString());
-        String countStr = null;
-        while (countRs.next()) {
-            countStr = countRs.getObject(1).toString();
-            break;
-        }
-        if (countStr == null || countStr.equals("")) {
-            logger.info("获取数据中心日志总条数成功总共===================0条配置");
-            return 0l;
-        }
-        Long count = Long.valueOf(countStr);
-        logger.info("获取数据中心日志总条数成功总共===================" + count + "条配置");
-        HiveDataSource.closeConn();
-        return count;
-    }
-
     public static String getFiledsSql(String mapName, String fileds) throws Exception {
         String[] fieldsColoumn = null;
         // 所有字段名
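The removed getResultSet2 paged Hive results by numbering rows with row_number() over() inside nested subqueries, working around the lack of an offset-style LIMIT in that Hive setup; against ClickHouse the same page collapses to the flat ORDER BY ... LIMIT now built in getSql. A sketch of the two query shapes (table and column names hypothetical):

    // Hive shape used by the removed getResultSet2: number rows, then window them.
    String hiveSql = "select c1,c2 from(select c1,c2,row_Num from(select c1,c2,"
            + "row_number() over() as row_Num from t_log t where 1=1"
            + " order by found_time) t1) t2 where row_Num between 101 and 150";
    // ClickHouse shape kept in getSql: one flat query with a MySQL-style limit.
    String ckSql = "select c1,c2 from t_log where 1=1 order by found_time limit 100,50";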
@@ -301,7 +301,7 @@ unMaatService=3:32;4:96;5:32;25:32;28:32;64;65;261:16;262:16;263:16;264:16;265:1
 #0x19 IPSec丢弃 25:7;
 #0x340 IP复用地址池配置(回调)832:2,4,5,7
 ##########
-serviceDBIndex=1:2,3,4,6,7;2:2,3,4,6,7;16:2;17:2;18:2;19:2;20:2;21:2;22:2;23:2;24:2;26:2;27:2;30:2;31:2;32:2;33:2,6,7;34:2;35:2,6,7;36:2,6,7;37:2;128:2;129:2;130:2;131:2;132:2;133:2;134:2;135:2;136:2;137:2;138:2;139:2;140:2;141:2;142:2;143:2;144:2;145:2,6,7;147:2,6,7;148:2,6,7;149:2;256:3;257:3;258:3;259:3;260:3;267:3;384:3;385:3;386:3;387:3;388:3;395:3;512:4,7;513:4,2,7;514:4;515:4;517:4;520:4;521:4;528:4;544:4;560:4;576:4;592:4;608:4;750:4;768:5;1024:2,6,7;1025:2;1026:2,6,7;1027:2,6,7;1028:2,6,7;1029:2,6,7;1030:2,6,7;1031:2,6,7;1040:2,6,7;1041:2,6,7;1042:2,6,7;1056:2,6,7;1059:2,6,7;1057:6,7;1058:6,2,7;1060:2,6,7;1152:2,6;3:7;4:7;5:2;25:7;28:7;64:2;65:2;261:8;262:9;263:10;264:11;265:3;266:3;389:8;390:9;391:10;392:11;393:3;394:3;832:2,4,5,7
+serviceDBIndex=1:2,3,4,6,7;2:2,3,4,6,7;16:2;17:2;18:2;19:2;20:2;21:2;22:2;23:2;24:2;26:2;27:2;30:2;31:2;32:2;33:2,6,7;34:2;35:2,6,7;36:2,6,7;37:2;128:2;129:2;130:2;131:2;132:2;133:2;134:2;135:2;136:2;137:2;138:2;139:2;140:2;141:2;142:2;143:2;144:2;145:2,6,7;147:2,6,7;148:2,6,7;149:2;256:3;257:3;258:3;259:3;260:3;267:3;384:3;385:3;386:3;387:3;388:3;395:3;512:4,7;513:4,2,7;514:4;515:4;517:4;520:4;521:4;528:4;544:4;560:4;576:4;592:4;608:4;750:4;768:5;1024:2,6,7;1025:2;1026:2,6,7;1027:2,6,7;1028:2,6,7;1029:2,6,7;1030:2,6,7;1031:2,6,7;1040:2,6,7;1041:2,6,7;1042:2,6,7;1056:2,6,7;1059:2,6,7;1057:6,7;1058:6,2,7;1060:2,6,7;1152:2,6;3:7;4:7;5:2;25:7;28:7;64:2;65:2;261:8,12;262:9,12;263:10;264:11;265:3;266:3;389:8;390:9;391:10;392:11;393:3;394:3;832:2,4,5,7
 
 
 ##maat配置入阀门需要将编译中的部分参数写到域配置中,目前最多包含ACTION,SERVICE,USER_REGION三个属性
@@ -128,7 +128,7 @@ redis.host=10.0.6.249
 #redis.host=192.168.10.205
 #亦庄
 #redis.host=192.168.10.215
-redis.port=6381
+redis.port=6379
 redis.pass=
 redis.maxIdle=5
 redis.maxTotal=250