diff --git a/src/main/java/com/nis/util/Configurations.java b/src/main/java/com/nis/util/Configurations.java
index 1338387..440aade 100644
--- a/src/main/java/com/nis/util/Configurations.java
+++ b/src/main/java/com/nis/util/Configurations.java
@@ -1,11 +1,9 @@
 package com.nis.util;
+
 import java.io.File;
 import java.util.Map;
 import java.util.Properties;
-
-
-
 public final class Configurations {
 
     private static Properties prop = new Properties();
@@ -13,10 +11,11 @@ public final class Configurations {
         try {
            prop.load(Configurations.class.getResourceAsStream("/nis.properties"));
            prop.load(Configurations.class.getResourceAsStream("/applicationLog-hive.properties"));
+           prop.load(Configurations.class.getResourceAsStream("/applicationLog-clickhouse.properties"));
            prop.load(Configurations.class.getResourceAsStream("/applicationConfig-maatOracle.properties"));
            prop.load(Configurations.class.getResourceAsStream("/applicationConfig-rule.properties"));
            prop.load(Configurations.class.getResourceAsStream("/fastdfs-client.properties"));
-
+
        } catch (Exception e) {
            prop = null;
            System.err.println("Failed to load nis.properties; please verify the file exists!");
@@ -24,36 +23,35 @@ public final class Configurations {
    }

    public static String getStringProperty(String key, String defaultValue) {
-       if (prop==null||StringUtil.isBlank(prop.getProperty(key))) {
+       if (prop == null || StringUtil.isBlank(prop.getProperty(key))) {
            return defaultValue;
        }
        return prop.getProperty(key).trim();
    }

    public static int getIntProperty(String key, int defaultValue) {
-       if (prop==null||StringUtil.isBlank(prop.getProperty(key))) {
+       if (prop == null || StringUtil.isBlank(prop.getProperty(key))) {
            return defaultValue;
        }
        return Integer.parseInt(prop.getProperty(key).trim());
    }

    public static long getLongProperty(String key, long defaultValue) {
-       if (prop==null||StringUtil.isBlank(prop.getProperty(key))) {
+       if (prop == null || StringUtil.isBlank(prop.getProperty(key))) {
            return defaultValue;
        }
        return Long.parseLong(prop.getProperty(key).trim());
    }

    public static boolean getBooleanProperty(String key, boolean defaultValue) {
-       if (prop==null||StringUtil.isBlank(prop.getProperty(key))) {
+       if (prop == null || StringUtil.isBlank(prop.getProperty(key))) {
            return defaultValue;
        }
        return prop.getProperty(key).toLowerCase().trim().equals("true");
    }

-   public static String getFileDirPathProperty(String key,
-           String defaultValue) {
-       if (prop==null||StringUtil.isBlank(prop.getProperty(key))) {
+   public static String getFileDirPathProperty(String key, String defaultValue) {
+       if (prop == null || StringUtil.isBlank(prop.getProperty(key))) {
            return defaultValue;
        }
        String path = prop.getProperty(key).trim();
@@ -70,16 +68,9 @@ public final class Configurations {
        }
        return true;
    }
-
-
+
    public static Map getProp() {
        return prop;
    }
-
-
-
-
-
-
 }
diff --git a/src/main/java/com/nis/util/Constants.java b/src/main/java/com/nis/util/Constants.java
index 3688777..0b3f499 100644
--- a/src/main/java/com/nis/util/Constants.java
+++ b/src/main/java/com/nis/util/Constants.java
@@ -1,6 +1,5 @@
 package com.nis.util;
 
-import java.text.SimpleDateFormat;
 import java.util.Date;
 
 public final class Constants {
@@ -187,7 +186,15 @@ public final class Constants {
    /**
     * Data-center database name, used by every query in this program: use HIVEDBNAME
     */
-   public static final String HIVEDBNAME = Configurations.getStringProperty("jdbc.hive.Name", "xa_dfbhit_hive");
+   public static final String HIVEDBNAME = Configurations.getStringProperty("jdbc.hive.DBName", "maat");
+   /**
+    * ClickHouse database name
+    */
+   public static final String CLICKHOUSEDBNAME = Configurations.getStringProperty("jdbc.clickHouse.DBName", "k18_ods");
+   /**
+    * Whether log queries use ClickHouse; otherwise Hive is used
+    */
+   public static final Boolean ISUSECLICKHOUSE = Configurations.getBooleanProperty("isUseClickHouse", true);
 
    public static final String DIGEST_GEN_TOOL_PATH = Configurations.getStringProperty("digest.gen.tool.path",
            "maat-redis/digest_gen");
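A note on the stacked prop.load(...) calls above: java.util.Properties merges successive load() calls, and a later file silently overwrites any key it shares with an earlier one. That is why the new applicationLog-clickhouse.properties keys carry a distinct "...ClickHouseTable" suffix instead of reusing the "...HiveTable" keys. A minimal sketch (hypothetical demo class, not part of this change):

    // Minimal sketch, assuming nothing beyond java.util.Properties: a later load()
    // overwrites keys shared with an earlier load(), so distinct suffixes keep both
    // backends' table names available side by side.
    import java.io.StringReader;
    import java.util.Properties;

    public class PropertiesMergeDemo {
        public static void main(String[] args) throws Exception {
            Properties prop = new Properties();
            prop.load(new StringReader("NtcIpLogHiveTable=ntc_ip_log\n"));
            prop.load(new StringReader("NtcIpLogClickHouseTable=TBS_ODS_NTC_IP_LOG\n"));
            System.out.println(prop.getProperty("NtcIpLogHiveTable"));       // ntc_ip_log
            System.out.println(prop.getProperty("NtcIpLogClickHouseTable")); // TBS_ODS_NTC_IP_LOG
        }
    }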
diff --git a/src/main/java/com/nis/util/LogJDBCByDruid.java b/src/main/java/com/nis/util/LogJDBCByDruid.java
index b78add6..ae59511 100644
--- a/src/main/java/com/nis/util/LogJDBCByDruid.java
+++ b/src/main/java/com/nis/util/LogJDBCByDruid.java
@@ -24,6 +24,7 @@ import org.slf4j.LoggerFactory;
 import com.alibaba.druid.pool.DruidDataSource;
 import com.nis.domain.Page;
 import com.nis.web.service.SpringContextHolder;
+
 /**
  *
  * Title: HiveJDBCByDruid
  *
@@ -42,8 +43,11 @@ public class LogJDBCByDruid {
 
    public static Connection getConnection() throws SQLException {
        if (datasource == null) {
-           datasource = (DruidDataSource) SpringContextHolder.getBean("HiveDataSourceByDruid");
-//          datasource = (DruidDataSource) SpringContextHolder.getBean("ClickHouseDataSourceByDruid");
+           if (Constants.ISUSECLICKHOUSE) {
+               datasource = (DruidDataSource) SpringContextHolder.getBean("ClickHouseDataSourceByDruid");
+           } else {
+               datasource = (DruidDataSource) SpringContextHolder.getBean("HiveDataSourceByDruid");
+           }
        }
        return datasource.getConnection();
    }
@@ -61,8 +65,8 @@ public class LogJDBCByDruid {
     * @return
     * @throws Exception
     */
-   public Map> tableMapping(Page page, String redisKey, String sql, Class entityClass, Object... obj)
-           throws Exception {
+   public Map> tableMapping(Page page, String redisKey, String sql, Class entityClass,
+           Object... obj) throws Exception {
        Map> mapList = new HashMap>();
        try {
            Map filedAndColumnMap = getColumn2FiledMap(entityClass);
@@ -73,11 +77,11 @@ public class LogJDBCByDruid {
            conn = getConnection();
            logger.info("Connected to data-center log database successfully--------------------------");
            st = conn.createStatement();
+           logger.info("Executing log query statement sql={}", sql);
            rs = st.executeQuery(sql);
-           logger.info("Query statement executed successfully sql={}", sql);
            ResultSetMetaData metaData = rs.getMetaData();
            while (rs.next()) {
-               Map map = new HashMap();
+               Map map = new HashMap();
                for (int i = 1; i <= metaData.getColumnCount(); i++) {
                    Object value = rs.getObject(i);
                    String filedName = filedAndColumnMap.get(metaData.getColumnName(i).toString().toLowerCase());
@@ -104,34 +108,38 @@ public class LogJDBCByDruid {
            if (null == listString || listString.size() == 0 || null == listObject || listObject.size() == 0) {
                return null;
            } else {
-               // No cache write for now
-//              if (Constants.IS_OPEN_REDIS && Constants.DATACENTER_OPEN_REDIS) {
-//                  new SaveRedisListThread(redisKey, listString, Constants.HIVE_EXPIRE).start();
-//              }
+               // No cache write for now
+               // if (Constants.IS_OPEN_REDIS && Constants.DATACENTER_OPEN_REDIS) {
+               // new SaveRedisListThread(redisKey, listString, Constants.HIVE_EXPIRE).start();
+               // }
            }
-           // subList includes the start index but not the end; 0-30 actually fetches items 0-29
-           Integer startNum = (page.getPageNo() - 1) * page.getPageSize();
-           Integer endNum = startNum - 1 + page.getPageSize() + 1;
-           if (listString.size() >= startNum) {
-               if (listString.size() >= endNum) {
-                   mapList.put("str", listString.subList(startNum, endNum));
-               } else {
-                   mapList.put("str", listString.subList(startNum, listString.size()));
-               }
-
+           if (Constants.ISUSECLICKHOUSE) {// the SQL query is already paginated
+               mapList.put("str", listString);
+               mapList.put("obj", listObject);
            } else {
-               mapList.put("str", new ArrayList());
-           }
-           if (listObject.size() >= startNum) {
-               if (listObject.size() >= endNum) {
-                   mapList.put("obj", listObject.subList(startNum, endNum));
-               } else {
-                   mapList.put("obj", listObject.subList(startNum, listObject.size()));
-               }
-           } else {
-               mapList.put("obj", new ArrayList());
-           }
+               // subList includes the start index but not the end; 0-30 actually fetches items 0-29
+               Integer startNum = (page.getPageNo() - 1) * page.getPageSize();
+               Integer endNum = startNum - 1 + page.getPageSize() + 1;
+               if (listString.size() >= startNum) {
+                   if (listString.size() >= endNum) {
+                       mapList.put("str", listString.subList(startNum, endNum));
+                   } else {
+                       mapList.put("str", listString.subList(startNum, listString.size()));
+                   }
+               } else {
+                   mapList.put("str", new ArrayList());
+               }
+               if (listObject.size() >= startNum) {
+                   if (listObject.size() >= endNum) {
+                       mapList.put("obj", listObject.subList(startNum, endNum));
+                   } else {
+                       mapList.put("obj", listObject.subList(startNum, listObject.size()));
+                   }
+               } else {
+                   mapList.put("obj", new ArrayList());
+               }
+           }
        } finally {
            closeConn();
        }
@@ -221,8 +229,6 @@ public class LogJDBCByDruid {
        return obj;
    }
 
-
-
    public static Map getColumn2FiledMap(Class clazz) {
        Map map = new HashMap();
        SqlSessionFactory sqlSessionFactory = SpringContextHolder.getBean(SqlSessionFactory.class);
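The tableMapping change above splits result handling by backend: the ClickHouse branch returns the lists untouched because the SQL already carried a LIMIT offset,size clause, while the Hive branch still cuts the requested page out of a fetched block. The retained Hive logic is equivalent to this condensed sketch (PageSlicer is a hypothetical illustration, not project code):

    // Condensed, hypothetical rewrite of the in-memory paging kept for the Hive path.
    // pageNo is 1-based; List.subList includes 'from' and excludes 'to'.
    import java.util.Collections;
    import java.util.List;

    public final class PageSlicer {
        static <T> List<T> slice(List<T> rows, int pageNo, int pageSize) {
            int from = (pageNo - 1) * pageSize;
            if (rows.size() < from) {
                return Collections.emptyList(); // requested page starts past the fetched block
            }
            int to = Math.min(from + pageSize, rows.size());
            return rows.subList(from, to);
        }
    }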
diff --git a/src/main/java/com/nis/web/controller/BaseRestController.java b/src/main/java/com/nis/web/controller/BaseRestController.java
index 455b899..71d1a02 100644
--- a/src/main/java/com/nis/web/controller/BaseRestController.java
+++ b/src/main/java/com/nis/web/controller/BaseRestController.java
@@ -17,7 +17,6 @@ import java.util.Map;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
-import com.nis.web.service.AuditLogThread;
 import org.apache.commons.lang3.StringEscapeUtils;
 import org.apache.log4j.Logger;
 import org.springframework.http.HttpStatus;
@@ -30,8 +29,10 @@ import com.nis.restful.RestBusinessCode;
 import com.nis.restful.RestConstants;
 import com.nis.restful.RestResult;
 import com.nis.restful.RestServiceException;
+import com.nis.util.Configurations;
 import com.nis.util.Constants;
 import com.nis.util.DateUtils;
+import com.nis.web.service.AuditLogThread;
 import com.nis.web.service.ServicesRequestLogService;
 
 /**
@@ -44,6 +45,7 @@ import com.nis.web.service.ServicesRequestLogService;
 public class BaseRestController {
    protected final Logger logger = Logger.getLogger(this.getClass());
    private SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmssSSS");
+
    /**
     *
     * @Title: serviceResponse
@@ -64,7 +66,7 @@ public class BaseRestController {
     */
    public Map serviceResponse(AuditLogThread thread, long time, HttpServletRequest request,
-           HttpServletResponse response, String msg) {
+           HttpServletResponse response, String msg) {
        RestResult restResult = new RestResult();
        String requestMethod = request.getMethod();
        if (requestMethod.equals(RequestMethod.GET.name())) {
@@ -110,7 +112,7 @@ public class BaseRestController {
     * @version V1.0
     */
    public Map serviceResponse(AuditLogThread thread, long time, HttpServletRequest request,
-           HttpServletResponse response, String msg, Object data) {
+           HttpServletResponse response, String msg, Object data) {
        RestResult restResult = new RestResult();
        String requestMethod = request.getMethod();
        if (requestMethod.equals(RequestMethod.GET.name())) {
@@ -135,10 +137,7 @@ public class BaseRestController {
        new Thread(thread).start();
        return convert(restResult);
    }
-
-
-
-
+
    /**
     * Does not insert the log into the database; only returns it to the front end
     * @param time
@@ -148,8 +147,8 @@ public class BaseRestController {
     * @param data
     * @return
     */
-   public Map testServiceResponse(long time, HttpServletRequest request,
-           HttpServletResponse response, String msg, Object data) {
+   public Map testServiceResponse(long time, HttpServletRequest request, HttpServletResponse response, String msg,
+           Object data) {
        RestResult restResult = new RestResult();
        String requestMethod = request.getMethod();
        if (requestMethod.equals(RequestMethod.GET.name())) {
@@ -192,7 +191,7 @@ public class BaseRestController {
     * @version V1.0
     */
    public Map compileServiceResponse(AuditLogThread thread, long time, HttpServletRequest request,
-           HttpServletResponse response, String msg, Object data) {
+           HttpServletResponse response, String msg, Object data) {
        RestResult restResult = new RestResult();
        String requestMethod = request.getMethod();
        if (requestMethod.equals(RequestMethod.GET.name())) {
@@ -219,18 +218,18 @@ public class BaseRestController {
        return convert(restResult);
    }
 
-   /**
-    * Response format convention for log results
-    * @param auditLogThread
-    * @param executedTime
-    * @param request
-    * @param msg
-    * @param data
-    * @param logSource
-    * @return
-    */
+   /**
+    * Response format convention for log results
+    * @param auditLogThread
+    * @param executedTime
+    * @param request
+    * @param msg
+    * @param data
+    * @param logSource
+    * @return
+    */
    public Map serviceLogResponse(AuditLogThread auditLogThread, long executedTime, HttpServletRequest request,
-           String msg, Object data, Integer logSource) {
+           String msg, Object data, Integer logSource) {
        RestResult restResult = new RestResult();
        String requestMethod = request.getMethod();
        if (requestMethod.equals(RequestMethod.GET.name())) {
@@ -251,8 +250,8 @@ public class BaseRestController {
        restResult.setMsg(msg);
        restResult.setLogSource(logSource);
        restResult.setTraceCode(auditLogThread.getTraceCode());
-       auditLogThread.setConsumerTime(executedTime);
-       auditLogThread.setBusinessCode(restResult.getBusinessCode().getValue());
+       auditLogThread.setConsumerTime(executedTime);
+       auditLogThread.setBusinessCode(restResult.getBusinessCode().getValue());
        new Thread(auditLogThread).start();
        return convert(restResult);
    }
@@ -359,19 +358,19 @@ public class BaseRestController {
        }
    }
 
-   protected AuditLogThread saveRequestLog(ServicesRequestLogService service, int opAction,
-           HttpServletRequest request, Object data) {
+   protected AuditLogThread saveRequestLog(ServicesRequestLogService service, int opAction, HttpServletRequest request,
+           Object data) {
        logger.debug("SaveRequestLogThread initialization started----" + System.currentTimeMillis());
        AuditLogThread thread = AuditLogThread.getNewSaveRequestLogThread(request);
        thread.setService(service);
        thread.setOpAction(opAction);
-       thread.setTraceCode(sdf.format(new Date())+(Math.round((Math.random()*9+1)*10000)+""));
+       thread.setTraceCode(sdf.format(new Date()) + (Math.round((Math.random() * 9 + 1) * 10000) + ""));
        if (data != null && ConfigCommonSource.class.isAssignableFrom(data.getClass())) {
            ConfigCommonSource source = (ConfigCommonSource) data;
            thread.setOperator(source.getOperator());
            thread.setVersion(source.getVersion());
            thread.setOpTime(source.getOpTime());
-       }else{
+       } else {
            thread.setOperator("admin");
            thread.setOpTime(new Date());
        }
@@ -381,13 +380,13 @@ public class BaseRestController {
    }
 
    // For file uploads
-   protected AuditLogThread saveRequestLog(ServicesRequestLogService service, int opAction,
-           HttpServletRequest request, Object data, Object fileInfo) {
+   protected AuditLogThread saveRequestLog(ServicesRequestLogService service, int opAction, HttpServletRequest request,
+           Object data, Object fileInfo) {
        AuditLogThread thread = AuditLogThread.getNewSaveRequestLogThread(request);
        thread.setService(service);
-       // thread.setContent(fileInfo);
+       // thread.setContent(fileInfo);
        thread.setOpAction(opAction);
-       thread.setTraceCode(sdf.format(new Date())+(Math.round((Math.random()*9+1)*10000)+""));
+       thread.setTraceCode(sdf.format(new Date()) + (Math.round((Math.random() * 9 + 1) * 10000) + ""));
        if (data != null && ConfigCommonSource.class.isAssignableFrom(data.getClass())) {
            System.out.println(data.getClass().getSimpleName());
            ConfigCommonSource source = (ConfigCommonSource) data;
@@ -398,4 +397,12 @@ public class BaseRestController {
        return thread;
    }
 
+
+   protected String getTableName(String key, String defaultTableName) {
+       if (Constants.ISUSECLICKHOUSE) {
+           key = key.replace("HiveTable", "ClickHouseTable");
+       }
+       return Configurations.getStringProperty(key, defaultTableName);
+   }
+
 }
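Every controller below swaps Configurations.getStringProperty(...) for the new getTableName(...) helper while still passing the old "HiveTable" key. A caller-side sketch of how one lookup resolves (a fragment that assumes the project's Constants and Configurations classes plus the property files in this diff):

    // Hypothetical walkthrough of a single lookup through BaseRestController.getTableName.
    String key = "NtcIpLogHiveTable"; // built as NtcIpLog.class.getSimpleName() + "HiveTable"
    if (Constants.ISUSECLICKHOUSE) {
        // key becomes "NtcIpLogClickHouseTable" and resolves to TBS_ODS_NTC_IP_LOG
        // via applicationLog-clickhouse.properties.
        key = key.replace("HiveTable", "ClickHouseTable");
    }
    // With the toggle off, the original key resolves through applicationLog-hive.properties;
    // if neither file defines the key, the literal default ("ntc_ip_log" here) is returned.
    String table = Configurations.getStringProperty(key, "ntc_ip_log");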
diff --git a/src/main/java/com/nis/web/controller/restful/LogController.java b/src/main/java/com/nis/web/controller/restful/LogController.java
index ce47e70..40e6283 100644
--- a/src/main/java/com/nis/web/controller/restful/LogController.java
+++ b/src/main/java/com/nis/web/controller/restful/LogController.java
@@ -74,7 +74,7 @@ public class LogController extends BaseRestController {
                orderBy = "found_Time";
            }
            String sql = HiveSqlService.getSql(page, dkBehaviorLog,
-                   Configurations.getStringProperty(DkBehaviorLog.class.getSimpleName() + "HiveTable", "DK_BEHAVIOR_LOG"),
+                   getTableName(DkBehaviorLog.class.getSimpleName() + "HiveTable", "DK_BEHAVIOR_LOG"),
                    getCol2Col(), orderBy, null);
            Map> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, DkBehaviorLog.class,
                    "foundTime", "recvTime");
@@ -127,7 +127,7 @@ public class LogController extends BaseRestController {
                orderBy = "found_Time";
            }
            String sql = HiveSqlService.getSql(page, pxyHttpLog,
-                   Configurations.getStringProperty(PxyHttpLog.class.getSimpleName() + "HiveTable", "PXY_HTTP_LOG"),
+                   getTableName(PxyHttpLog.class.getSimpleName() + "HiveTable", "PXY_HTTP_LOG"),
                    getCol2Col(), orderBy, null);
            Map> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, PxyHttpLog.class,
                    "foundTime", "recvTime");
diff --git a/src/main/java/com/nis/web/controller/restful/MmLogSearchController.java b/src/main/java/com/nis/web/controller/restful/MmLogSearchController.java
index 5ea92b5..dd63296 100644
--- a/src/main/java/com/nis/web/controller/restful/MmLogSearchController.java
+++ b/src/main/java/com/nis/web/controller/restful/MmLogSearchController.java
@@ -83,7 +83,7 @@ public class MmLogSearchController extends BaseRestController {
                orderBy = "found_Time";
            }
            String sql = HiveSqlService.getSql(page, mmAvIpLog,
-                   Configurations.getStringProperty(MmAvIpLog.class.getSimpleName() + "HiveTable", "MM_AV_IP_LOG"),
+                   getTableName(MmAvIpLog.class.getSimpleName() + "HiveTable", "MM_AV_IP_LOG"),
                    getCol2Col(), orderBy, null);
            Map> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmAvIpLog.class,
                    "foundTime", "recvTime");
@@ -136,7 +136,7 @@ public class MmLogSearchController extends BaseRestController {
                orderBy = "found_Time";
            }
            String sql = HiveSqlService.getSql(page, mmAvUrlLog,
-                   Configurations.getStringProperty(MmAvUrlLog.class.getSimpleName() + "HiveTable", "MM_AV_URL_LOG"),
+                   getTableName(MmAvUrlLog.class.getSimpleName() + "HiveTable", "MM_AV_URL_LOG"),
                    getCol2Col(), orderBy, null);
            Map> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmAvUrlLog.class,
                    "foundTime", "recvTime");
@@ -189,7 +189,7 @@ public class MmLogSearchController extends BaseRestController {
                orderBy = "found_Time";
            }
            String sql = HiveSqlService.getSql(page, mmPicIpLog,
-                   Configurations.getStringProperty(MmPicIpLog.class.getSimpleName() + "HiveTable", "MM_PIC_IP_LOG"),
+                   getTableName(MmPicIpLog.class.getSimpleName() + "HiveTable", "MM_PIC_IP_LOG"),
                    getCol2Col(), orderBy, null);
            Map> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmPicIpLog.class,
                    "foundTime", "recvTime");
@@ -242,7 +242,7 @@ public class MmLogSearchController extends BaseRestController {
                orderBy = "found_Time";
            }
            String sql = HiveSqlService.getSql(page, mmPicUrlLog,
-                   Configurations.getStringProperty(MmPicUrlLog.class.getSimpleName() + "HiveTable", "MM_PIC_URL_LOG"),
+                   getTableName(MmPicUrlLog.class.getSimpleName() + "HiveTable", "MM_PIC_URL_LOG"),
                    getCol2Col(), orderBy, null);
            Map> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmPicUrlLog.class,
                    "foundTime", "recvTime");
@@ -296,7 +296,7 @@ public class MmLogSearchController extends BaseRestController {
                orderBy = "found_Time";
            }
            String sql = HiveSqlService.getSql(page, mmVoipIpLog,
-                   Configurations.getStringProperty(MmVoipIpLog.class.getSimpleName() + "HiveTable", "MM_VOIP_IP_LOG"),
+                   getTableName(MmVoipIpLog.class.getSimpleName() + "HiveTable", "MM_VOIP_IP_LOG"),
                    getCol2Col(), orderBy, null);
            Map> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmVoipIpLog.class,
                    "foundTime", "recvTime");
@@ -349,7 +349,7 @@ public class MmLogSearchController extends BaseRestController {
                orderBy = "found_Time";
            }
            String sql = HiveSqlService.getSql(page, mmVoipLog,
-                   Configurations.getStringProperty(MmVoipAccountLog.class.getSimpleName() + "HiveTable", "MM_VOIP_ACCOUNT_LOG"),
+                   getTableName(MmVoipAccountLog.class.getSimpleName() + "HiveTable", "MM_VOIP_ACCOUNT_LOG"),
                    getCol2Col(), orderBy, null);
            Map> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmVoipAccountLog.class,
                    "foundTime", "recvTime");
@@ -403,7 +403,7 @@ public class MmLogSearchController extends BaseRestController {
                orderBy = "found_Time";
            }
            String sql = HiveSqlService.getSql(page, mmSampleAudioLog,
-                   Configurations.getStringProperty(MmSampleAudioLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_AUDIO_LOG"),
+                   getTableName(MmSampleAudioLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_AUDIO_LOG"),
                    getCol2Col(), orderBy, null);
            Map> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmSampleAudioLog.class,
                    "foundTime", "recvTime");
@@ -456,7 +456,7 @@ public class MmLogSearchController extends BaseRestController {
                orderBy = "found_Time";
            }
            String sql = HiveSqlService.getSql(page, mmSampleVideoLog,
-                   Configurations.getStringProperty(MmSampleVideoLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_VIDEO_LOG"),
+                   getTableName(MmSampleVideoLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_VIDEO_LOG"),
                    getCol2Col(), orderBy, null);
            Map> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmSampleVideoLog.class,
                    "foundTime", "recvTime");
@@ -509,7 +509,7 @@ public class MmLogSearchController extends BaseRestController {
                orderBy = "found_Time";
            }
            String sql = HiveSqlService.getSql(page, mmPornAudioLevelLog,
-                   Configurations.getStringProperty(MmPornAudioLevelLog.class.getSimpleName() + "HiveTable", "MM_PORN_AUDIO_LEVEL_LOG"),
+                   getTableName(MmPornAudioLevelLog.class.getSimpleName() + "HiveTable", "MM_PORN_AUDIO_LEVEL_LOG"),
                    getCol2Col(), orderBy, null);
            Map> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmPornAudioLevelLog.class,
                    "foundTime", "recvTime");
@@ -562,7 +562,7 @@ public class MmLogSearchController extends BaseRestController {
                orderBy = "found_Time";
            }
            String sql = HiveSqlService.getSql(page, mmPornVideoLevelLog,
-                   Configurations.getStringProperty(MmPornVideoLevelLog.class.getSimpleName() + "HiveTable", "MM_PRON_VIDEO_LOG"),
+                   getTableName(MmPornVideoLevelLog.class.getSimpleName() + "HiveTable", "MM_PRON_VIDEO_LOG"),
                    getCol2Col(), orderBy, null);
            Map> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmPornVideoLevelLog.class,
                    "foundTime", "recvTime");
@@ -616,7 +616,7 @@ public class MmLogSearchController extends BaseRestController {
                orderBy = "found_Time";
            }
            String sql = HiveSqlService.getSql(page, mmSamplePicLog,
-                   Configurations.getStringProperty(MmSamplePicLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_PIC_LOG"),
+                   getTableName(MmSamplePicLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_PIC_LOG"),
                    getCol2Col(), orderBy, null);
            Map> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmSamplePicLog.class,
                    "foundTime", "recvTime");
@@ -669,7 +669,7 @@ public class MmLogSearchController extends BaseRestController {
                orderBy = "found_Time";
            }
            String sql = HiveSqlService.getSql(page, mmSampleVoipLog,
-                   Configurations.getStringProperty(MmSampleVoipLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_VOIP_LOG"),
+                   getTableName(MmSampleVoipLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_VOIP_LOG"),
                    getCol2Col(), orderBy, null);
            Map> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmSampleVoipLog.class,
                    "foundTime", "recvTime");
diff --git a/src/main/java/com/nis/web/controller/restful/NtcLogSearchController.java b/src/main/java/com/nis/web/controller/restful/NtcLogSearchController.java
index de16246..7033157 100644
--- a/src/main/java/com/nis/web/controller/restful/NtcLogSearchController.java
+++ b/src/main/java/com/nis/web/controller/restful/NtcLogSearchController.java
@@ -84,12 +84,12 @@ public class NtcLogSearchController extends BaseRestController {
                orderBy = "found_Time";
            }
            // ResultSet rs = HiveSqlService.getResultSet(page, ntcIpLog,
-           // Configurations.getStringProperty(NtcIpLog.class.getSimpleName() + "HiveTable", "ntc_ip_log"),
+           // getTableName(NtcIpLog.class.getSimpleName() + "HiveTable", "ntc_ip_log"),
            // getCol2Col(), orderBy, null);
            // Map tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcIpLog.class, "foundTime",
            // "recvTime");
            String sql = HiveSqlService.getSql(page, ntcIpLog,
-                   Configurations.getStringProperty(NtcIpLog.class.getSimpleName() + "HiveTable", "ntc_ip_log"),
+                   getTableName(NtcIpLog.class.getSimpleName() + "HiveTable", "ntc_ip_log"),
                    getCol2Col(), orderBy, null);
            Map> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcIpLog.class,
                    "foundTime", "recvTime");
@@ -112,7 +112,7 @@ public class NtcLogSearchController extends BaseRestController {
            // ntcIpLogPage
            // .setCount(
            // HiveSqlService.getHivePageCount(ntcIpLog, null,
-           // Configurations.getStringProperty(
+           // getTableName(
            // NtcIpLog.class.getSimpleName() + "HiveTable", "ntc_ip_log"),
            // getCol2Col(), null));
        } else {
@@ -155,7 +155,7 @@ public class NtcLogSearchController extends BaseRestController {
                orderBy = "found_Time";
            }
            String sql = HiveSqlService.getSql(page, ntcHttpLog,
-                   Configurations.getStringProperty(NtcHttpLog.class.getSimpleName() + "HiveTable", "ntc_http_log"),
+                   getTableName(NtcHttpLog.class.getSimpleName() + "HiveTable", "ntc_http_log"),
                    getCol2Col(), orderBy, null);
            Map> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcHttpLog.class,
                    "foundTime", "recvTime");
@@ -215,7 +215,7 @@ public class NtcLogSearchController extends BaseRestController {
                orderBy = "found_Time";
            }
            String sql = HiveSqlService.getSql(page, ntcDnsLog,
-                   Configurations.getStringProperty(NtcDnsLog.class.getSimpleName() + "HiveTable", "ntc_dns_log"),
+                   getTableName(NtcDnsLog.class.getSimpleName() + "HiveTable", "ntc_dns_log"),
                    getCol2Col(), orderBy, null);
            Map> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcDnsLog.class,
                    "foundTime", "recvTime");
@@ -275,7 +275,7 @@ public class NtcLogSearchController extends BaseRestController {
                orderBy = "found_Time";
            }
            String sql = HiveSqlService.getSql(page, ntcMailLog,
-                   Configurations.getStringProperty(NtcMailLog.class.getSimpleName() + "HiveTable", "ntc_mail_log"),
+                   getTableName(NtcMailLog.class.getSimpleName() + "HiveTable", "ntc_mail_log"),
                    getCol2Col(), orderBy, null);
            Map> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcMailLog.class,
                    "foundTime", "recvTime");
@@ -335,7 +335,7 @@ public class NtcLogSearchController extends BaseRestController {
                orderBy = "found_Time";
            }
            String sql = HiveSqlService.getSql(page, ntcSslLog,
-                   Configurations.getStringProperty(NtcSslLog.class.getSimpleName() + "HiveTable", "ntc_ssl_log"),
+                   getTableName(NtcSslLog.class.getSimpleName() + "HiveTable", "ntc_ssl_log"),
                    getCol2Col(), orderBy, null);
            Map> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcSslLog.class,
                    "foundTime", "recvTime");
@@ -395,7 +395,7 @@ public class NtcLogSearchController extends BaseRestController {
                orderBy = "found_Time";
            }
            String sql = HiveSqlService.getSql(page, ntcPptpLog,
-                   Configurations.getStringProperty(NtcPptpLog.class.getSimpleName() + "HiveTable", "ntc_pptp_log"),
+                   getTableName(NtcPptpLog.class.getSimpleName() + "HiveTable", "ntc_pptp_log"),
                    getCol2Col(), orderBy, null);
            Map> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcPptpLog.class,
                    "foundTime", "recvTime");
@@ -454,7 +454,7 @@ public class NtcLogSearchController extends BaseRestController {
                orderBy = "found_Time";
            }
            String sql = HiveSqlService.getSql(page, ntcL2tpLog,
-                   Configurations.getStringProperty(NtcL2tpLog.class.getSimpleName() + "HiveTable", "ntc_l2tp_log"),
+                   getTableName(NtcL2tpLog.class.getSimpleName() + "HiveTable", "ntc_l2tp_log"),
                    getCol2Col(), orderBy, null);
            Map> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcL2tpLog.class,
                    "foundTime", "recvTime");
@@ -514,7 +514,7 @@ public class NtcLogSearchController extends BaseRestController {
                orderBy = "found_Time";
            }
            String sql = HiveSqlService.getSql(page, ntcOpenvpnLog,
-                   Configurations.getStringProperty(NtcOpenvpnLog.class.getSimpleName() + "HiveTable", "ntc_openvpn_log"),
+                   getTableName(NtcOpenvpnLog.class.getSimpleName() + "HiveTable", "ntc_openvpn_log"),
                    getCol2Col(), orderBy, null);
            Map> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcOpenvpnLog.class,
                    "foundTime", "recvTime");
@@ -574,7 +574,7 @@ public class NtcLogSearchController extends BaseRestController {
                orderBy = "found_Time";
            }
            String sql = HiveSqlService.getSql(page, ntcIpsecLog,
-                   Configurations.getStringProperty(NtcIpsecLog.class.getSimpleName() + "HiveTable", "ntc_ipsec_log"),
+                   getTableName(NtcIpsecLog.class.getSimpleName() + "HiveTable", "ntc_ipsec_log"),
                    getCol2Col(), orderBy, null);
            Map> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcIpsecLog.class,
                    "foundTime", "recvTime");
@@ -634,7 +634,7 @@ public class NtcLogSearchController extends BaseRestController {
                orderBy = "found_Time";
            }
            String sql = HiveSqlService.getSql(page, ntcSshLog,
-                   Configurations.getStringProperty(NtcSshLog.class.getSimpleName() + "HiveTable", "ntc_ssh_log"),
+                   getTableName(NtcSshLog.class.getSimpleName() + "HiveTable", "ntc_ssh_log"),
                    getCol2Col(), orderBy, null);
            Map> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcSshLog.class,
                    "foundTime", "recvTime");
@@ -694,7 +694,7 @@ public class NtcLogSearchController extends BaseRestController {
                orderBy = "found_Time";
            }
            String sql = HiveSqlService.getSql(page, ntcFtpLog,
-                   Configurations.getStringProperty(NtcFtpLog.class.getSimpleName() + "HiveTable", "ntc_ftp_log"),
+                   getTableName(NtcFtpLog.class.getSimpleName() + "HiveTable", "ntc_ftp_log"),
                    getCol2Col(), orderBy, null);
            Map> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcFtpLog.class,
                    "foundTime", "recvTime");
@@ -752,7 +752,7 @@ public class NtcLogSearchController extends BaseRestController {
                orderBy = "found_Time";
            }
            String sql = HiveSqlService.getSql(page, ntcAppLog,
-                   Configurations.getStringProperty(NtcAppLog.class.getSimpleName() + "HiveTable", "ntc_app_log"),
+                   getTableName(NtcAppLog.class.getSimpleName() + "HiveTable", "ntc_app_log"),
                    getCol2Col(), orderBy, null);
            Map> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcAppLog.class,
                    "foundTime", "recvTime");
@@ -810,7 +810,7 @@ public class NtcLogSearchController extends BaseRestController {
                orderBy = "found_Time";
            }
            String sql = HiveSqlService.getSql(page, ntcDdosLog,
-                   Configurations.getStringProperty(NtcDdosLog.class.getSimpleName() + "HiveTable", "ntc_ddos_log"),
+                   getTableName(NtcDdosLog.class.getSimpleName() + "HiveTable", "ntc_ddos_log"),
                    getCol2Col(), orderBy, null);
            Map> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcDdosLog.class,
                    "foundTime", "recvTime");
@@ -870,7 +870,7 @@ public class NtcLogSearchController extends BaseRestController {
            }
 
            String sql = HiveSqlService.getSql(page, ntcP2pLog,
-                   Configurations.getStringProperty(NtcP2pLog.class.getSimpleName() + "HiveTable", "ntc_p2p_log"),
+                   getTableName(NtcP2pLog.class.getSimpleName() + "HiveTable", "ntc_p2p_log"),
                    getCol2Col(), orderBy, null);
            Map> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcP2pLog.class, "foundTime",
@@ -931,7 +931,7 @@ public class NtcLogSearchController extends BaseRestController {
            }
 
            String sql = HiveSqlService.getSql(page, ntcBgpLog,
-                   Configurations.getStringProperty(NtcBgpLog.class.getSimpleName() + "HiveTable", "ntc_bgp_log"),
+                   getTableName(NtcBgpLog.class.getSimpleName() + "HiveTable", "ntc_bgp_log"),
                    getCol2Col(), orderBy, null);
            Map> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcBgpLog.class, "foundTime",
diff --git a/src/main/java/com/nis/web/service/HiveSqlService.java b/src/main/java/com/nis/web/service/HiveSqlService.java
index 24b4fdf..227085a 100644
--- a/src/main/java/com/nis/web/service/HiveSqlService.java
+++ b/src/main/java/com/nis/web/service/HiveSqlService.java
@@ -4,7 +4,6 @@ import java.lang.reflect.Field;
 import java.lang.reflect.Method;
 import java.sql.Connection;
 import java.sql.ResultSet;
-import java.sql.SQLException;
 import java.sql.Statement;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
@@ -34,13 +33,6 @@ public class HiveSqlService {
    Connection conn = null;
    ResultSet rs = null;
    Statement st = null;
-
-   public static Connection getConnection() throws SQLException {
-       if (datasource == null) {
-           datasource = (DruidDataSource) SpringContextHolder.getBean("HiveDataSourceByDruid");
-       }
-       return datasource.getConnection();
-   }
 
    private static SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    private static SimpleDateFormat sdf2 = new SimpleDateFormat("yyyyMMdd");
@@ -142,13 +134,23 @@ public class HiveSqlService {
            }
        }
-       Integer startNum = (page.getPageNo() - 1) * page.getPageSize() + 1;
-       Integer endNum = startNum - 1 + page.getPageSize();
-       // sql.append(" order by " + orderBy + " limit 10000) t1) t2 where
-       // row_Num between " + startNum + " and " + endNum);
-       sql.append(" limit " + Constants.EVERY_GETHIVEDATANUM);
-       logger.info("Fetching data-center log sql==================={}", sql);
-       return sql.toString();
+
+       if (Constants.ISUSECLICKHOUSE) {
+           // Integer startNum = (page.getPageNo() - 1) * page.getPageSize() + 1;
+           Integer startNum = (page.getPageNo() - 1) * page.getPageSize();
+           // Integer endNum = startNum - 1 + page.getPageSize();
+           sql.append(" order by " + orderBy);
+           sql.append(" limit " + startNum + "," + page.getPageSize());// ClickHouse pagination works like MySQL
+
+       } else {
+
+           // sql.append(" order by " + orderBy + " limit 10000) t1) t2 where
+           // row_Num between " + startNum + " and " + endNum);
+           sql.append(" limit " + Constants.EVERY_GETHIVEDATANUM);
+           logger.info("Fetching data-center log sql==================={}", sql);
+       }
+       return sql.toString().toUpperCase();
    }
 
    public static Long getHivePageCount(Object bean, String countKey, String tableName,
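getSql above now emits two different SQL tails. ClickHouse accepts MySQL-style "LIMIT offset,size", so the page is cut in the database; the Hive tail still fetches a fixed block (Constants.EVERY_GETHIVEDATANUM) for the in-memory slicing shown earlier. A runnable sketch with hypothetical values (10000 stands in for the configured block size):

    // Hypothetical demo of the two SQL tails getSql can now emit.
    public class SqlTailDemo {
        public static void main(String[] args) {
            int pageNo = 3, pageSize = 50;
            int offset = (pageNo - 1) * pageSize; // 100 -> rows 101..150 of the result
            // ClickHouse pages in the database, MySQL-style:
            System.out.println(" order by found_time limit " + offset + "," + pageSize);
            // Hive still fetches a fixed block; paging happens later in tableMapping:
            System.out.println(" limit " + 10000);
        }
    }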
order by " + orderBy ); + sql.append(" limit " + startNum+","+page.getPageSize());//clickhouse的分页与mysql相同 + + + }else { + + // sql.append(" order by " + orderBy + " limit 10000) t1) t2 where + // row_Num between " + startNum + " and " + endNum); + sql.append(" limit " + Constants.EVERY_GETHIVEDATANUM); + logger.info("获取数据中心日志sql==================={}", sql); + } + return sql.toString().toUpperCase(); } public static Long getHivePageCount(Object bean, String countKey, String tableName, diff --git a/src/main/resources/applicationContext-mybatis.xml b/src/main/resources/applicationContext-mybatis.xml index fe74bbc..5eead10 100644 --- a/src/main/resources/applicationContext-mybatis.xml +++ b/src/main/resources/applicationContext-mybatis.xml @@ -104,7 +104,7 @@ - + @@ -141,7 +141,7 @@ - + diff --git a/src/main/resources/applicationLog-clickhouse.properties b/src/main/resources/applicationLog-clickhouse.properties new file mode 100644 index 0000000..413af59 --- /dev/null +++ b/src/main/resources/applicationLog-clickhouse.properties @@ -0,0 +1,43 @@ +#由于数据中心ClickHouse日志表的表名会变动,所以本系统中将日志的表名提取到配置文件中方便后期修改,key是bean的名称+ClickHouseTable,value是ClickHouseTable的实际表名 +NtcIpLogClickHouseTable=TBS_ODS_NTC_IP_LOG +NtcHttpLogClickHouseTable=TBS_ODS_NTC_HTTP_LOG +NtcDnsLogClickHouseTable=TBS_ODS_NTC_DNS_LOG +NtcMailLogClickHouseTable=TBS_ODS_NTC_MAIL_LOG +NtcSslLogClickHouseTable=TBS_ODS_NTC_SSL_LOG +NtcPptpLogClickHouseTable=TBS_ODS_NTC_PPTP_LOG +NtcL2tpLogClickHouseTable=TBS_ODS_NTC_L2TP_LOG +NtcOpenvpnLogClickHouseTable=TBS_ODS_NTC_OPENVPN_LOG +NtcIpsecLogClickHouseTable=TBS_ODS_NTC_IPSEC_LOG +NtcSshLogClickHouseTable=TBS_ODS_NTC_SSH_LOG +NtcFtpLogClickHouseTable=TBS_ODS_NTC_FTP_LOG +NtcAppLogClickHouseTable=TBS_ODS_NTC_APP_LOG +NtcDdosLogClickHouseTable=TBS_ODS_NTC_DDOS_LOG +NtcP2pLogClickHouseTable=TBS_ODS_NTC_P2P_LOG +NtcBgpLogClickHouseTable=TBS_ODS_NTC_BGP_LOG +DkBehaviorLogClickHouseTable=TBS_ODS_DK_BEHAVIOR_LOG +MmAvIpLogClickHouseTable=TBS_ODS_MM_AV_IP_LOG +MmAvUrlLogClickHouseTable=TBS_ODS_MM_AV_URL_LOG +MmPicIpLogClickHouseTable=TBS_ODS_MM_PIC_IP_LOG +MmPicUrlLogClickHouseTable=TBS_ODS_MM_PIC_URL_LOG +MmVoipIpLogClickHouseTable=TBS_ODS_MM_VOIP_IP_LOG +MmVoipAccountLogClickHouseTable=TBS_ODS_MM_VOIP_ACCOUNT_LOG +MmSampleAudioLogClickHouseTable=TBS_ODS_MM_SAMPLE_AUDIO_LOG +MmSampleVideoLogLogClickHouseTable=TBS_ODS_MM_SAMPLE_VIDEO_LOG +MmPornAudioLevelLogClickHouseTable=TBS_ODS_MM_PORN_AUDIO_LEVEL_LOG +MmPornVideoLevelLogClickHouseTable=TBS_ODS_MM_PORN_VIDEO_LEVEL_LOG +MmSamplePicLogClickHouseTable=TBS_ODS_MM_SAMPLE_PIC_LOG +MmSampleVoipLogClickHouseTable=TBS_ODS_MM_SAMPLE_VOIP_LOG +PxyHttpLogClickHouseTable=TBS_ODS_PXY_HTTP_LOG + + + + + + + + + + + + + diff --git a/src/main/resources/jdbc.properties b/src/main/resources/jdbc.properties index 3c6c3c0..a11951b 100644 --- a/src/main/resources/jdbc.properties +++ b/src/main/resources/jdbc.properties @@ -94,9 +94,9 @@ druid.log.timeBetweenEvictionRunsMillis=60000 #配置一个连接在池中最小生存的时间,单位是毫秒 druid.log.minEvictableIdleTimeMillis=300000 #用来检测连接是否有效的sql,要求是一个查询语句,下面是hive的 -druid.log.validationQuery=select unix_timestamp() +druid.log.hive.validationQuery=select unix_timestamp() #用来检测连接是否有效的sql,要求是一个查询语句,下面是clickhouse的 -#druid.log.validationQuery=show tables +druid.log.clickHouse.validationQuery=show tables #申请连接的时候检测,如果空闲时间大于timeBetweenEvictionRunsMillis,执行validationQuery检测连接是否有效 druid.log.testWhileIdle=true #申请连接时执行validationQuery检测连接是否有效 @@ -128,7 +128,7 @@ redis.host=10.0.6.249 #redis.host=192.168.10.205 #亦庄 #redis.host=192.168.10.215 -redis.port=6379 
diff --git a/src/main/resources/jdbc.properties b/src/main/resources/jdbc.properties
index 3c6c3c0..a11951b 100644
--- a/src/main/resources/jdbc.properties
+++ b/src/main/resources/jdbc.properties
@@ -94,9 +94,9 @@ druid.log.timeBetweenEvictionRunsMillis=60000
 #Minimum time a connection may live in the pool, in milliseconds
 druid.log.minEvictableIdleTimeMillis=300000
 #SQL used to check whether a connection is valid; must be a query statement; this one is for hive
-druid.log.validationQuery=select unix_timestamp()
+druid.log.hive.validationQuery=select unix_timestamp()
 #SQL used to check whether a connection is valid; must be a query statement; this one is for clickhouse
-#druid.log.validationQuery=show tables
+druid.log.clickHouse.validationQuery=show tables
 #Check on checkout: if idle time exceeds timeBetweenEvictionRunsMillis, run validationQuery to verify the connection
 druid.log.testWhileIdle=true
 #Run validationQuery on checkout to verify the connection
@@ -128,7 +128,7 @@ redis.host=10.0.6.249
 #redis.host=192.168.10.205
 #Yizhuang
 #redis.host=192.168.10.215
-redis.port=6379
+redis.port=6381
 redis.pass=
 redis.maxIdle=5
 redis.maxTotal=250
diff --git a/src/main/resources/nis.properties b/src/main/resources/nis.properties
index 42a7f73..491e62b 100644
--- a/src/main/resources/nis.properties
+++ b/src/main/resources/nis.properties
@@ -171,11 +171,15 @@ isUseES=false
 
 
 
+#Whether log queries use clickhouse; otherwise hive is used
+isUseClickHouse=true
+##clickhouse database name
+jdbc.clickHouse.DBName=k18_ods
 #Data-center hive log database name, used by the program on every query: use dbName
-jdbc.hive.Name=maat
+jdbc.hive.DBName=maat
 
 
 maxPageSize=100000
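The split of druid.log.validationQuery into per-backend keys pairs with the two Druid datasource beans (HiveDataSourceByDruid / ClickHouseDataSourceByDruid) that LogJDBCByDruid selects between. The real wiring lives in applicationContext-mybatis.xml, whose bean bodies were lost above; a rough, hypothetical programmatic equivalent, assuming the project's Configurations class:

    // Rough sketch only; the actual beans are defined in Spring XML.
    import com.alibaba.druid.pool.DruidDataSource;
    import com.nis.util.Configurations;

    public final class LogDataSourceSketch {
        static DruidDataSource build(boolean useClickHouse) {
            DruidDataSource ds = new DruidDataSource();
            // Each backend validates its pooled connections with its own probe query.
            ds.setValidationQuery(useClickHouse
                    ? Configurations.getStringProperty("druid.log.clickHouse.validationQuery", "show tables")
                    : Configurations.getStringProperty("druid.log.hive.validationQuery", "select unix_timestamp()"));
            ds.setTestWhileIdle(true); // mirrors druid.log.testWhileIdle=true
            return ds;
        }
    }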