Optimize log queries to use ClickHouse

RenKaiGe-Office
2018-09-04 10:20:04 +08:00
parent 7c8e32de8b
commit 7090b8694a
6 changed files with 44 additions and 289 deletions

View File

@@ -1,58 +0,0 @@
-package com.nis.util;
-
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.Statement;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.jolbox.bonecp.BoneCPDataSource;
-import com.nis.web.service.SpringContextHolder;
-
-public class HiveDataSource {
-    private final static Logger logger = LoggerFactory.getLogger(HiveDataSource.class);
-    static BoneCPDataSource datasource = null;
-    static Connection conn = null;
-    static ResultSet rs = null;
-    static Statement st = null;
-
-    public static ResultSet query(String sql) throws Exception {
-        if (datasource == null) {
-            datasource = (BoneCPDataSource) SpringContextHolder.getBean("HiveDataSource");
-        }
-        conn = datasource.getConnection();
-        logger.info("Connected to the data-center log store--------------------------");
-        st = conn.createStatement();
-        // logger.info("Selecting database {}--------------------------", Constants.HIVEDBNAME);
-        // String hiveAName = "use " + Constants.HIVEDBNAME;
-        // st.execute(hiveAName);
-        // logger.info("Database {} selected; running the query", Constants.HIVEDBNAME);
-        rs = st.executeQuery(sql);
-        logger.info("Query executed, sql={}", sql);
-        return rs;
-    }
-
-    public static void closeConn() {
-        try {
-            if (rs != null) {
-                rs.close();
-                rs = null;
-            }
-            if (st != null) {
-                st.close();
-                st = null;
-            }
-            if (conn != null) {
-                conn.close();
-                conn = null;
-            }
-            logger.info("Data-center connection closed");
-        } catch (Exception e) {
-            e.printStackTrace();
-            logger.error("Failed to close the data-center connection: " + e);
-        }
-    }
-}
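Note on the deleted class: every caller shared the static conn, st, and rs fields, so two concurrent queries could stomp on each other's connection. A minimal sketch of the same lookup with per-call resources, assuming only that a pooled javax.sql.DataSource is available (the class and method names below are illustrative, not from this repository):

    import java.sql.Connection;
    import java.sql.ResultSet;
    import java.sql.Statement;
    import javax.sql.DataSource;

    public class LogStoreQuery {
        private final DataSource dataSource; // e.g. a Druid or BoneCP pool

        public LogStoreQuery(DataSource dataSource) {
            this.dataSource = dataSource;
        }

        // try-with-resources closes rs/st/conn in reverse order even when the
        // query throws, so no separate closeConn() step is needed.
        public long countRows(String sql) throws Exception {
            try (Connection conn = dataSource.getConnection();
                    Statement st = conn.createStatement();
                    ResultSet rs = st.executeQuery(sql)) {
                return rs.next() ? rs.getLong(1) : 0L;
            }
        }
    }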

View File

@@ -161,8 +161,8 @@ public class HiveJDBC {
             listString.add(JsonMapper.toJsonString(map2Obj(map, entityClass)));
             listObject.add(map2Obj(map, entityClass));
         }
-        logger.info("Closing the data-center connection");
-        HiveDataSource.closeConn();
+        // logger.info("Closing the data-center connection");
+        // HiveDataSource.closeConn();
         if (null == listString || listString.size() == 0 || null == listObject || listObject.size() == 0) {
             return null;
         } else {

View File

@@ -22,6 +22,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.alibaba.druid.pool.DruidDataSource;
 import com.jolbox.bonecp.BoneCPDataSource;
+import com.nis.domain.Page;
 import com.nis.web.service.SpringContextHolder;
@@ -103,8 +104,6 @@ public class LogJDBCByDruid {
             listString.add(JsonMapper.toJsonString(map2Obj(map, entityClass)));
             listObject.add(map2Obj(map, entityClass));
         }
-        logger.info("Closing the data-center connection");
-        HiveDataSource.closeConn();
         if (null == listString || listString.size() == 0 || null == listObject || listObject.size() == 0) {
             return null;
         } else {
@@ -240,5 +239,19 @@ public class LogJDBCByDruid {
         return map;
     }
+
+    public ResultSet query(String sql) throws Exception {
+        conn = getConnection();
+        logger.info("Connected to the data-center log store--------------------------");
+        st = conn.createStatement();
+        // logger.info("Selecting database {}--------------------------", Constants.HIVEDBNAME);
+        // String hiveAName = "use " + Constants.HIVEDBNAME;
+        // st.execute(hiveAName);
+        // logger.info("Database {} selected; running the query", Constants.HIVEDBNAME);
+        rs = st.executeQuery(sql);
+        logger.info("Query executed, sql={}", sql);
+        return rs;
+    }
 }
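The new instance-level query(String) keeps conn, st, and rs as fields of LogJDBCByDruid instead of process-wide statics. A hedged usage sketch, assuming getConnection() hands out a pooled Druid connection as the class name suggests (the table name is a placeholder):

    import java.sql.ResultSet;
    import com.nis.util.LogJDBCByDruid;

    public class LogCountExample {
        public static void main(String[] args) throws Exception {
            // illustrative only; t_example_log is not a table from this repository
            ResultSet rs = new LogJDBCByDruid().query("select count(1) from t_example_log where 1=1");
            while (rs.next()) {
                System.out.println("rows: " + rs.getObject(1));
            }
        }
    }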

View File

@@ -23,7 +23,7 @@ import com.alibaba.druid.pool.DruidDataSource;
 import com.nis.domain.Page;
 import com.nis.util.Configurations;
 import com.nis.util.Constants;
-import com.nis.util.HiveDataSource;
+import com.nis.util.LogJDBCByDruid;
 import com.nis.util.StringUtil;
 import com.nis.util.redis.SaveRedisThread;
@@ -35,9 +35,9 @@ public class HiveSqlService {
     Statement st = null;
     private static SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
     private static SimpleDateFormat sdf2 = new SimpleDateFormat("yyyyMMdd");
 
-    public static String getSql(Page page, Object bean, String tableName,
-            Map<String, Map<String, String>> col2col, String orderBy, String searchActiveSys) throws Exception {
+    public static String getSql(Page page, Object bean, String tableName, Map<String, Map<String, String>> col2col,
+            String orderBy, String searchActiveSys) throws Exception {
         tableName = tableName.toLowerCase();
         String showColmun = getFiledsSql(bean.getClass().getSimpleName(), page.getFields());
         StringBuffer sql = new StringBuffer();
@@ -123,34 +123,35 @@ public class HiveSqlService {
                     }
                 }
-                if (null != foundTimePartStart) {
-                    // sql.append(" and found_time_partition>=" + foundTimePartStart + "L");
-                    sql.append(" and found_time_partition>=" + foundTimePartStart);
-                }
-                if (null != foundTimePartEnd) {
-                    // sql.append(" and found_time_partition<" + foundTimePartEnd + "L");
-                    sql.append(" and found_time_partition<=" + foundTimePartEnd);
+                if (!Constants.ISUSECLICKHOUSE) { // only Hive needs the partition predicate
+                    if (null != foundTimePartStart) {
+                        // sql.append(" and found_time_partition>=" + foundTimePartStart + "L");
+                        sql.append(" and found_time_partition>=" + foundTimePartStart);
+                    }
+                    if (null != foundTimePartEnd) {
+                        // sql.append(" and found_time_partition<" + foundTimePartEnd + "L");
+                        sql.append(" and found_time_partition<=" + foundTimePartEnd);
+                    }
                 }
             }
         }
-        if(Constants.ISUSECLICKHOUSE) {
-            //Integer startNum = (page.getPageNo() - 1) * page.getPageSize() + 1;
+        if (Constants.ISUSECLICKHOUSE) {
+            // Integer startNum = (page.getPageNo() - 1) * page.getPageSize() + 1;
             Integer startNum = (page.getPageNo() - 1) * page.getPageSize();
-            //Integer endNum = startNum - 1 + page.getPageSize();
-            sql.append(" order by " + orderBy );
-            sql.append(" limit " + startNum+","+page.getPageSize());//ClickHouse paginates like MySQL
-        }else {
+            // Integer endNum = startNum - 1 + page.getPageSize();
+            sql.append(" order by " + orderBy);
+            sql.append(" limit " + startNum + "," + page.getPageSize()); // ClickHouse paginates like MySQL
+        } else {
             // sql.append(" order by " + orderBy + " limit 10000) t1) t2 where
             // row_Num between " + startNum + " and " + endNum);
             sql.append(" limit " + Constants.EVERY_GETHIVEDATANUM);
             logger.info("Data-center log sql==================={}", sql);
         }
-        return sql.toString().toUpperCase();
+        return sql.toString().toLowerCase();
     }
 
     public static Long getHivePageCount(Object bean, String countKey, String tableName,
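The comment in the ClickHouse branch above relies on ClickHouse accepting MySQL-style limit offset,count pagination, which lets getSql skip the row_number() windowing that the Hive path needed. A small sketch of the offset arithmetic (the values and the order-by column are illustrative):

    public class ClickHousePagingSketch {
        public static void main(String[] args) {
            int pageNo = 3, pageSize = 20;          // 1-based page number, rows per page
            int startNum = (pageNo - 1) * pageSize; // 0-based row offset: 40
            String clause = " order by found_time desc limit " + startNum + "," + pageSize;
            System.out.println(clause);             // "... limit 40,20" is valid in both ClickHouse and MySQL
        }
    }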
@@ -180,10 +181,10 @@ public class HiveSqlService {
                     if (col2col.containsKey(key)) {
                         value = sdf.parse(value.toString().trim()).getTime() / 1000;
                         if (key.toLowerCase().equals("searchfoundstarttime")) {
-                            foundTimePartStart = Long.parseLong(value.toString()) / 3600L/ 24L;
+                            foundTimePartStart = Long.parseLong(value.toString()) / 3600L / 24L;
                         }
                         if (key.toLowerCase().equals("searchfoundendtime")) {
-                            foundTimePartEnd = Long.parseLong(value.toString()) / 3600L/ 24L;
+                            foundTimePartEnd = Long.parseLong(value.toString()) / 3600L / 24L;
                         }
                         if (col2col.get(key).get("start") != null) {
                             // sql.append(" and " +
@@ -237,7 +238,7 @@ public class HiveSqlService {
         }
         logger.info("Data-center log count sql==================" + sql.toString());
         // ResultSet countRs = HiveJDBC.query(countSql.toString());
-        ResultSet countRs = HiveDataSource.query(sql.toString());
+        ResultSet countRs = new LogJDBCByDruid().query(sql.toString());
         String countStr = null;
         while (countRs.next()) {
             countStr = countRs.getObject(1).toString();
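The /3600L/24L divisions above turn an epoch-seconds timestamp into a whole-day index, which is what the found_time_partition predicates compare against (an assumption implied by the arithmetic, not stated in the commit). A standalone sketch with an illustrative date:

    import java.text.SimpleDateFormat;

    public class PartitionSketch {
        public static void main(String[] args) throws Exception {
            SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
            long epochSeconds = sdf.parse("2018-09-04 00:00:00").getTime() / 1000L;
            long dayPartition = epochSeconds / 3600L / 24L; // seconds -> hours -> days since the epoch
            // feeds predicates such as " and found_time_partition>=" + dayPartition
            System.out.println(dayPartition);
        }
    }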
@@ -256,207 +257,6 @@ public class HiveSqlService {
         return count;
     }
-
-    public static ResultSet getResultSet2(Page page, Object bean, String tableName,
-            Map<String, Map<String, String>> col2col, String orderBy, String searchActiveSys) throws Exception {
-        tableName = tableName.toLowerCase();
-        String showColmun = getFiledsSql(bean.getClass().getSimpleName(), page.getFields());
-        StringBuffer sql = new StringBuffer();
-        Map<String, String> filedAndColumnMap = getFiledAndColumnMap(bean.getClass());
-        if (null == showColmun || showColmun.equals("")) {
-            for (String key : filedAndColumnMap.keySet()) {
-                if (!filedAndColumnMap.get(key).toLowerCase().equals("id")) {
-                    sql.append(filedAndColumnMap.get(key) + ",");
-                }
-            }
-        } else {
-            sql.append(showColmun);
-        }
-        String sqlTrim = sql.toString().trim();
-        if (sqlTrim.endsWith(",")) {
-            sqlTrim = sqlTrim.substring(0, sqlTrim.length() - 1);
-        }
-        sql.setLength(0);
-        sql.append("select " + sqlTrim + " from(select " + sqlTrim + ",row_Num from(select " + sqlTrim
-                + ",row_number() over() as row_Num from " + tableName + " t where 1=1 ");
-        if (bean != null) {
-            Class<?> clazz = bean.getClass();
-            for (; clazz != Object.class; clazz = clazz.getSuperclass()) {
-                // fetch all declared fields, whatever their visibility (public, protected, private)
-                // Field[] fields = bean.getClass().getDeclaredFields();
-                Field[] fields = clazz.getDeclaredFields();
-                Long foundTimePartStart = null;
-                Long foundTimePartEnd = null;
-                for (int i = 0; i < fields.length; i++) {
-                    // The gwall log tables currently hold only numeric and string columns; the
-                    // numerics are all int (no bigint), so no "L" suffix is needed.
-                    Field f = fields[i];
-                    String key = f.getName(); // field name
-                    String typeName = f.getType().getName();
-                    if (f.getType().getName().equals("java.lang.String") && key.startsWith("search")) {
-                        Object value = getFieldValue(bean, key);
-                        if (value != null) {
-                            setFieldValue(bean, key, value.toString().trim());
-                            if (key.endsWith("Time")) { // range start/end time field
-                                if (col2col.containsKey(key)) {
-                                    value = sdf.parse(value.toString().trim()).getTime() / 1000;
-                                    if (key.toLowerCase().equals("searchfoundstarttime")) {
-                                        foundTimePartStart = Long.parseLong(value.toString()) / 3600L / 24L;
-                                    }
-                                    if (key.toLowerCase().equals("searchfoundendtime")) {
-                                        foundTimePartEnd = Long.parseLong(value.toString()) / 3600L / 24L;
-                                    }
-                                    if (col2col.get(key).get("start") != null) {
-                                        // sql.append(" and " + filedAndColumnMap.get(col2col.get(key).get("start"))
-                                        // + ">=to_date('" + value.toString().trim()
-                                        // + "','yyyy-mm-dd HH24:mi:ss')");
-                                        sql.append(" and " + filedAndColumnMap.get(col2col.get(key).get("start")) + ">="
-                                                + value);
-                                    } else {
-                                        // sql.append(" and " + filedAndColumnMap.get(col2col.get(key).get("end"))
-                                        // + "<=to_date('" + value.toString().trim()
-                                        // + "','yyyy-mm-dd HH24:mi:ss')");
-                                        sql.append(" and " + filedAndColumnMap.get(col2col.get(key).get("end")) + "<"
-                                                + value);
-                                    }
-                                }
-                            } else {
-                                if (key.toLowerCase().startsWith("search")) {
-                                    key = key.replace("search", "");
-                                    key = key.substring(0, 1).toLowerCase() + key.substring(1);
-                                }
-                                if (typeName.equals("java.lang.String")) {
-                                    sql.append(" and " + filedAndColumnMap.get(key) + "='" + value.toString().trim()
-                                            + "'");
-                                } else if (typeName.equals("java.lang.Integer") || typeName.equals("int")) {
-                                    sql.append(" and " + filedAndColumnMap.get(key) + "=" + value.toString().trim());
-                                } else if (typeName.equals("java.lang.Long") || typeName.equals("long")) {
-                                    sql.append(
-                                            " and " + filedAndColumnMap.get(key) + "=" + value.toString().trim() + "L");
-                                }
-                            }
-                        }
-                    }
-                }
-                if (null != foundTimePartStart) {
-                    // sql.append(" and found_time_partition>=" + foundTimePartStart + "L");
-                    sql.append(" and found_time_partition>=" + foundTimePartStart);
-                }
-                if (null != foundTimePartEnd) {
-                    // sql.append(" and found_time_partition<" + foundTimePartEnd + "L");
-                    sql.append(" and found_time_partition<" + foundTimePartEnd);
-                }
-            }
-        }
-        Integer startNum = (page.getPageNo() - 1) * page.getPageSize() + 1;
-        Integer endNum = startNum - 1 + page.getPageSize();
-        // sql.append(" order by " + orderBy + " limit 10000) t1) t2 where row_Num between " + startNum + " and " + endNum);
-        sql.append(" order by " + orderBy + " ) t1) t2 where row_Num between " + startNum + " and " + endNum);
-        logger.info("Data-center log sql===================" + sql);
-        ResultSet query = HiveDataSource.query(sql.toString());
-        logger.info("Data-center log query succeeded");
-        return query;
-    }
-
-    public static Long getHivePageCount2(Object bean, String tableName, Map<String, Map<String, String>> col2col,
-            String searchActiveSys) throws Exception {
-        tableName = tableName.toLowerCase();
-        tableName = Configurations.getStringProperty(tableName, "t_" + tableName).trim();
-        StringBuffer countSql = new StringBuffer();
-        Map<String, String> filedAndColumnMap = getFiledAndColumnMap(bean.getClass());
-        countSql.append("select count(1) from " + tableName + " where 1=1 ");
-        if (bean != null) {
-            Class<?> clazz = bean.getClass();
-            for (; clazz != Object.class; clazz = clazz.getSuperclass()) {
-                // fetch all declared fields, whatever their visibility (public, protected, private)
-                // Field[] fields = bean.getClass().getDeclaredFields();
-                Field[] fields = clazz.getDeclaredFields();
-                Long foundTimePartStart = null;
-                Long foundTimePartEnd = null;
-                for (int i = 0; i < fields.length; i++) {
-                    Field f = fields[i];
-                    String key = f.getName(); // field name
-                    if (f.getType().getName().equals("java.lang.String") && key.startsWith("search")) {
-                        Object value = getFieldValue(bean, key);
-                        if (value != null) {
-                            setFieldValue(bean, key, value.toString().trim());
-                            if (key.endsWith("Time")) { // range start/end time field
-                                if (col2col.containsKey(key)) {
-                                    value = sdf.parse(value.toString().trim()).getTime() / 1000;
-                                    if (key.toLowerCase().equals("searchfoundstarttime")) {
-                                        foundTimePartStart = Long.parseLong(value.toString()) / 3600;
-                                    }
-                                    if (key.toLowerCase().equals("searchfoundendtime")) {
-                                        foundTimePartEnd = Long.parseLong(value.toString()) / 3600;
-                                    }
-                                    if (col2col.get(key).get("start") != null) {
-                                        countSql.append(" and " + filedAndColumnMap.get(col2col.get(key).get("start"))
-                                                + ">=" + value + "L");
-                                    } else {
-                                        countSql.append(" and " + filedAndColumnMap.get(col2col.get(key).get("end"))
-                                                + "<" + value + "L");
-                                    }
-                                }
-                            } else {
-                                if (key.toLowerCase().startsWith("search")) {
-                                    key = key.replace("search", "");
-                                    key = key.substring(0, 1).toLowerCase() + key.substring(1);
-                                }
-                                if (!value.toString().trim().equals("") && filedAndColumnMap.containsKey(key)
-                                        && (key.toLowerCase().equals("cfgid")
-                                                || key.toLowerCase().equals("entranceid"))) {
-                                    countSql.append(
-                                            " and " + filedAndColumnMap.get(key) + "=" + value.toString().trim() + "L");
-                                } else if (!value.toString().trim().equals("") && filedAndColumnMap.containsKey(key)
-                                        && (key.toLowerCase().equals("protocol") || key.toLowerCase().equals("serverip")
-                                                || key.toLowerCase().equals("clientip")
-                                                || key.toLowerCase().equals("cljip"))) {
-                                    countSql.append(" and " + filedAndColumnMap.get(key) + "='"
-                                            + value.toString().trim() + "'");
-                                } else if (!value.toString().trim().equals("") && filedAndColumnMap.containsKey(key)
-                                        && key.toLowerCase().equals("servicetype")) {
-                                    countSql.append(
-                                            " and " + filedAndColumnMap.get(key) + "=" + value.toString().trim());
-                                }
-                            }
-                        }
-                    }
-                }
-                // only the B-side database has the found_time_partition column; the A-side
-                // has no found_time_partition partition column
-                if (null != searchActiveSys && !searchActiveSys.equals(Constants.ACTIVESYS_A)) {
-                    if (null != foundTimePartStart) {
-                        countSql.append(" and found_time_partition>=" + foundTimePartStart + "L");
-                    }
-                    if (null != foundTimePartEnd) {
-                        countSql.append(" and found_time_partition<" + foundTimePartEnd + "L");
-                    }
-                }
-            }
-        }
-        logger.info("Data-center log count sql==================" + countSql.toString());
-        ResultSet countRs = HiveDataSource.query(countSql.toString());
-        String countStr = null;
-        while (countRs.next()) {
-            countStr = countRs.getObject(1).toString();
-            break;
-        }
-        if (countStr == null || countStr.equals("")) {
-            logger.info("Data-center log count===================0 records");
-            return 0l;
-        }
-        Long count = Long.valueOf(countStr);
-        logger.info("Data-center log count===================" + count + " records");
-        HiveDataSource.closeConn();
-        return count;
-    }
 
     public static String getFiledsSql(String mapName, String fileds) throws Exception {
         String[] fieldsColoumn = null;
         // all field names