diff --git a/src/main/java/com/nis/util/Constants.java b/src/main/java/com/nis/util/Constants.java
index 5d5f45c..74620ed 100644
--- a/src/main/java/com/nis/util/Constants.java
+++ b/src/main/java/com/nis/util/Constants.java
@@ -196,6 +196,10 @@ public final class Constants {
	 * Whether log queries go to ClickHouse; otherwise Hive is used
	 */
	public static final Boolean ISUSECLICKHOUSE = Configurations.getBooleanProperty("isUseClickHouse", true);
+	/**
+	 * Whether the count and last-page features of log queries are enabled
+	 */
+	public static final Boolean ISOPENLOGCOUNTANDLAST = Configurations.getBooleanProperty("isOpenLogCountAndLast", true);
 
	public static final String DIGEST_GEN_TOOL_PATH = Configurations.getStringProperty("digest.gen.tool.path",
			"maat-redis/digest_gen");
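Note: the new ISOPENLOGCOUNTANDLAST flag is read once at class-load time, exactly like ISUSECLICKHOUSE above it. The Configurations helper is not part of this diff, so the sketch below is only an assumption about the lookup it performs (the class name ConfigurationsSketch and loading nis.properties from the classpath are hypothetical):

    // Hypothetical sketch; the real Configurations class is not shown in this diff.
    import java.io.InputStream;
    import java.util.Properties;

    final class ConfigurationsSketch {
        private static final Properties PROPS = new Properties();

        static {
            // nis.properties is assumed to sit on the classpath, as in this repository
            try (InputStream in = ConfigurationsSketch.class.getResourceAsStream("/nis.properties")) {
                if (in != null) {
                    PROPS.load(in);
                }
            } catch (Exception e) {
                // fall through and serve defaults if the file cannot be read
            }
        }

        // Returns the parsed boolean, or defaultValue when the key is absent or blank.
        static Boolean getBooleanProperty(String key, boolean defaultValue) {
            String raw = PROPS.getProperty(key);
            return (raw == null || raw.trim().isEmpty()) ? defaultValue : Boolean.parseBoolean(raw.trim());
        }
    }

Under that assumption, getBooleanProperty("isOpenLogCountAndLast", true) keeps the feature on unless the property is explicitly set to false.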

diff --git a/src/main/java/com/nis/util/LogJDBCByDruid.java b/src/main/java/com/nis/util/LogJDBCByDruid.java
index b6b3e77..e45a39d 100644
--- a/src/main/java/com/nis/util/LogJDBCByDruid.java
+++ b/src/main/java/com/nis/util/LogJDBCByDruid.java
@@ -29,10 +29,10 @@ import com.zdjizhi.utils.StringUtil;
 /**
  * 
  * 
- * Title: HiveJDBCByDruid
+ * Title: LogJDBCByDruid
  * 
  * 
- * Description: queries Hive through the Druid connection pool and parses the results
+ * Description: queries Hive or ClickHouse through the Druid connection pool and parses the results
  * 
  * 
  * Company: IIE
@@ -49,7 +49,7 @@ public class LogJDBCByDruid {
	ResultSet rs = null;
	Statement st = null;
 
-	public static Connection getConnection() throws SQLException {
+	private static Connection getConnection() throws SQLException {
		if (datasource == null) {
			if (Constants.ISUSECLICKHOUSE) {
				datasource = (DruidDataSource) SpringContextHolder.getBean("ClickHouseDataSourceByDruid");
@@ -105,12 +105,16 @@ public class LogJDBCByDruid {
			} else {
				page.setList(listObject);
			}
+			logger.info("Log query executed successfully, sql={}", sql);
		} finally {
			closeConn();
		}
	}
 
-	public void closeConn() {
+	/**
+	 * Close the database connection
+	 */
+	private void closeConn() {
		try {
			if (rs != null) {
				rs.close();
@@ -138,7 +142,7 @@ public class LogJDBCByDruid {
	 * @return
	 * @throws Exception
	 */
-	public static List getDateColumn(Class type) throws Exception {
+	private static List getDateColumn(Class type) throws Exception {
		List columnList = new ArrayList();
		BeanInfo beanInfo = Introspector.getBeanInfo(type);
		PropertyDescriptor[] propertyDescriptors = beanInfo.getPropertyDescriptors();
@@ -153,7 +157,15 @@ public class LogJDBCByDruid {
		return columnList;
	}
 
-	public static Object map2Obj(Map map, Class beanClass) throws Exception {
+	/**
+	 * Use reflection to set the values from the map onto the class and return the populated object
+	 *
+	 * @param map
+	 * @param beanClass
+	 * @return
+	 * @throws Exception
+	 */
+	private static Object map2Obj(Map map, Class beanClass) throws Exception {
		BeanInfo beanInfo = Introspector.getBeanInfo(beanClass);
		Object obj = beanClass.newInstance();
		PropertyDescriptor[] propertyDescriptors = beanInfo.getPropertyDescriptors();
@@ -194,7 +206,13 @@ public class LogJDBCByDruid {
		return obj;
	}
 
-	public static Map getColumn2FiledMap(Class clazz) {
+	/**
+	 * Look up the column-to-property relations from the class's resultMap in DfLogSearchDao.xml; the key is the column
+	 *
+	 * @param clazz
+	 * @return
+	 */
+	private static Map getColumn2FiledMap(Class clazz) {
		Map map = new HashMap();
		SqlSessionFactory sqlSessionFactory = SpringContextHolder.getBean(SqlSessionFactory.class);
		ResultMap resultMap = sqlSessionFactory.getConfiguration().getResultMap(clazz.getSimpleName() + "Map");
@@ -206,10 +224,19 @@ public class LogJDBCByDruid {
	}
 
+	/**
+	 * Execute the count sql and set its result, plus the computed last page number, on the page object
+	 *
+	 * @param page
+	 * @param sql
+	 * @throws Exception
+	 */
	public void getCount(Page page, String sql) throws Exception {
		try {
			conn = getConnection();
+			logger.info("Connected to the data center log store successfully--------------------------");
			st = conn.createStatement();
+			logger.info("Executing the sql for the total log count, sql={}", sql);
			rs = st.executeQuery(sql);
			String countStr = null;
			while (rs.next()) {
@@ -217,11 +244,12 @@ public class LogJDBCByDruid {
				break;
			}
			if (countStr == null || countStr.trim().equals("")) {
-				logger.info("Fetched the total data center log count successfully, ===================0 rows in total");
+				logger.info("Fetched the total data center log count successfully, 0 rows in total, sql={}", sql);
				page.setCount(0l);
				page.setLast(1);
			} else {
				Long count = Long.valueOf(countStr);
+				logger.info("Fetched the total data center log count successfully, {} rows in total, sql={}", count, sql);
				page.setCount(count);
				page.setLast(getLastPageNum(count.intValue(), page.getPageSize()));
			}
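Note: getCount derives page.last through getLastPageNum, which lies outside this diff. Presumably it is a ceiling division of the row count by the page size; a minimal sketch under that assumption:

    // Sketch of the last-page computation assumed behind getLastPageNum (the real method is not shown here).
    static int getLastPageNum(int count, int pageSize) {
        if (pageSize <= 0) {
            throw new IllegalArgumentException("pageSize must be positive");
        }
        // ceiling division, clamped so an empty result still yields one (empty) page,
        // matching the page.setLast(1) branch above
        return Math.max(1, (count + pageSize - 1) / pageSize);
    }

For example, getLastPageNum(101, 20) is 6 and getLastPageNum(0, 20) is 1.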
diff --git a/src/main/java/com/nis/web/service/LogDataService.java b/src/main/java/com/nis/web/service/LogDataService.java
index 9c77520..d7e706c 100644
--- a/src/main/java/com/nis/web/service/LogDataService.java
+++ b/src/main/java/com/nis/web/service/LogDataService.java
@@ -5,10 +5,8 @@ import java.lang.reflect.Method;
 import java.sql.Connection;
 import java.sql.ResultSet;
 import java.sql.Statement;
-import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
-import java.util.Date;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -16,8 +14,6 @@ import java.util.Map;
 import org.apache.ibatis.mapping.ResultMap;
 import org.apache.ibatis.mapping.ResultMapping;
 import org.apache.ibatis.session.SqlSessionFactory;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.springframework.stereotype.Service;
 
 import com.alibaba.druid.pool.DruidDataSource;
@@ -27,16 +23,22 @@ import com.nis.util.Constants;
 import com.nis.util.LogJDBCByDruid;
 import com.zdjizhi.utils.StringUtil;
 
+/**
+ * Query data from ClickHouse or Hive and set it on page.list for the UI
+ *
+ * @author rkg
+ *
+ */
 @Service
 public class LogDataService {
-	private final static Logger logger = LoggerFactory.getLogger(LogDataService.class);
+	// private final static Logger logger =
+	// LoggerFactory.getLogger(LogDataService.class);
	static DruidDataSource datasource = null;
	Connection conn = null;
	ResultSet rs = null;
	Statement st = null;
	private static SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
	private static SimpleDateFormat sdf2 = new SimpleDateFormat("yyyyMMdd");
-	private static Map<String, Map<String, String>> col2col = new HashMap<String, Map<String, String>>();
 
	static {
		Map startMap = new HashMap();
@@ -47,6 +49,13 @@ public class LogDataService {
		col2col.put("searchFoundEndTime", endMap);
	}
 
+	/**
+	 * Resolve the matching Hive or ClickHouse table name from the class name plus its key suffix
+	 *
+	 * @param key
+	 * @param defaultTableName
+	 * @return
+	 */
	private String getTableName(String key, String defaultTableName) {
		if (Constants.ISUSECLICKHOUSE) {
			key = key.replace("HiveTable", "ClickHouseTable");
@@ -54,28 +63,51 @@
		return Configurations.getStringProperty(key, defaultTableName);
	}
 
+	/**
+	 * Build the query sql (with paging and sorting) and the count sql via reflection from the
+	 * property values of page and obj, run them against the matching database, and set the
+	 * results on the page object for the UI
+	 *
+	 * @param page
+	 * @param obj
+	 * @throws Exception
+	 */
	public void getData(Page page, Object obj) throws Exception {
		String className = obj.getClass().getSimpleName();
		String tableName = getTableName(className + "HiveTable", "");
		if (tableName == null || tableName.trim().equals("")) {
			throw new RuntimeException("Table name for log class " + className + " is empty; please check the configuration file");
		}
-		String orderBy = "";
+		String orderBy = " order by ";
		if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
-			orderBy = Page.getOrderBySql(className, page.getOrderBy());
+			orderBy = orderBy + Page.getOrderBySql(className, page.getOrderBy());
		} else {
-			orderBy = " found_Time desc";
+			orderBy = orderBy + "found_Time desc ";
		}
		if (Constants.ISUSECLICKHOUSE) {
-			getDataFromClickHouse(page, obj, tableName, className, orderBy);
+			getDataFromClickHouse(page, obj, tableName, className, orderBy.toLowerCase());
		} else {
-			getDataFromHive(page, obj, tableName, className, orderBy);
+			getDataFromHive(page, obj, tableName, className, orderBy.toLowerCase());
		}
	}
 
-	public void getDataFromClickHouse(Page page, Object bean, String tableName, String className, String orderBy)
-			throws Exception {
+	/**
+	 * Query data from ClickHouse. Note that ClickHouse is case sensitive; as agreed with 百分点,
+	 * lowercase is used throughout
+	 *
+	 * @param page
+	 *            carries pageSize, pageNo and the order by
+	 * @param bean
+	 *            log bean (its resultMap, className + "Map", comes from DfLogSearchDao.xml), used
+	 *            to resolve the database column behind each property
+	 * @param tableName
+	 *            table name
+	 * @param className
+	 *            class name
+	 * @param orderBy
+	 *            order by clause
+	 * @throws Exception
+	 */
+	private void getDataFromClickHouse(Page page, Object bean, String tableName, String className,
+			String orderBy) throws Exception {
		tableName = tableName.toLowerCase();
		String showColmun = getFiledsSql(className, page.getFields());
		StringBuffer sql = new StringBuffer();
@@ -162,47 +194,43 @@
		if (whereFoundTime.length() > 0) {
			int indexOf = whereFoundTime.indexOf("and") + "and".length();
			countSql.append(whereFoundTime.substring(indexOf));
-
-				foundTimeSql.append(whereFoundTime.substring(indexOf));
-				if (orderBy.toLowerCase().contains("asc") || orderBy.toLowerCase().contains("desc")) {
-					foundTimeSql.append(" order by " + orderBy.toLowerCase());
-				} else {
-					foundTimeSql.append(" order by " + orderBy.toLowerCase() + " desc");
-				}
-
-				foundTimeSql.append(" limit " + startNum + "," + page.getPageSize());
+
+				foundTimeSql.append(whereFoundTime.substring(indexOf) + orderBy.toLowerCase() + " limit " + startNum
+						+ "," + page.getPageSize());
				sql.append(" found_time in(" + foundTimeSql + ") ");
-			}else {
-				throw new RuntimeException("A query against the ClickHouse table "+tableName+" must carry at least one where condition");
+			} else {
+				throw new RuntimeException("A query against the ClickHouse table " + tableName + " must carry at least one where condition");
			}
		} else {
			int foundIndexOf = whereFoundTime.append(whereSB).indexOf("and") + "and".length();
			countSql.append(whereFoundTime.substring(foundIndexOf));
-
-
-			foundTimeSql.append(whereFoundTime.substring(foundIndexOf));
-			if (orderBy.toLowerCase().contains("asc") || orderBy.toLowerCase().contains("desc")) {
-				foundTimeSql.append(" order by " + orderBy.toLowerCase());
-			} else {
-				foundTimeSql.append(" order by " + orderBy.toLowerCase() + " desc");
-			}
-			foundTimeSql.append(" limit " + startNum + "," + page.getPageSize());
-
-
+
+			foundTimeSql.append(whereFoundTime.substring(foundIndexOf) + orderBy.toLowerCase() + " limit " + startNum
+					+ "," + page.getPageSize());
+
			int indexOf = whereSB.indexOf("and") + "and".length();
			sql.append(whereSB.substring(indexOf) + " and found_time in(" + foundTimeSql + ") ");
		}
-
-		if (orderBy.toLowerCase().contains("asc") || orderBy.toLowerCase().contains("desc")) {
-			sql.append(" order by " + orderBy.toLowerCase());
-		} else {
-			sql.append(" order by " + orderBy.toLowerCase() + " desc");
-		}
-		sql.append(" limit " + startNum + "," + page.getPageSize());// ClickHouse paging is the same as MySQL
+		sql.append(orderBy.toLowerCase() + " limit " + startNum + "," + page.getPageSize());// ClickHouse paging is the same as MySQL
		searchFromDataCenter(page, bean, sql, countSql);
	}
 
-	public void getDataFromHive(Page page, Object bean, String tableName, String className, String orderBy)
+	/**
+	 * Query data from Hive
+	 *
+	 * @param page
+	 *            carries pageSize, pageNo and the order by
+	 * @param bean
+	 *            log bean (its resultMap, className + "Map", comes from DfLogSearchDao.xml), used
+	 *            to resolve the database column behind each property
+	 * @param tableName
+	 *            table name
+	 * @param className
+	 *            class name
+	 * @param orderBy
+	 *            order by clause
+	 * @throws Exception
+	 */
+	private void getDataFromHive(Page page, Object bean, String tableName, String className, String orderBy)
			throws Exception {
		tableName = tableName.toLowerCase();
		String showColmun = getFiledsSql(className, page.getFields());
@@ -221,11 +249,6 @@
		if (sqlTrim.endsWith(",")) {
			sqlTrim = sqlTrim.substring(0, sqlTrim.length() - 1);
		}
-		if (orderBy.toLowerCase().contains("asc") || orderBy.toLowerCase().contains("desc")) {
-			orderBy = " order by " + orderBy;
-		} else {
-			orderBy = " order by " + orderBy + " desc ";
-		}
		sql.setLength(0);
		sql.append(" select " + sqlTrim.toLowerCase() + " from (select " + sqlTrim.toLowerCase()
				+ ",row_number() over(partition by found_time_partition " + orderBy + ") as row_num from "
@@ -331,18 +354,35 @@
		searchFromDataCenter(page, bean, sql, countSql);
	}
 
+	/**
+	 * Execute the sql
+	 *
+	 * @param page
+	 * @param bean
+	 * @param selSql
+	 * @param countSql
+	 * @throws Exception
+	 */
	private void searchFromDataCenter(Page page, Object bean, StringBuffer selSql, StringBuffer countSql)
			throws Exception {
-		new LogJDBCByDruid().getTableData(page, selSql.toString(), bean.getClass());
-		if (page.getList() != null && page.getList().size() > 0) {
-			new LogJDBCByDruid().getCount(page, countSql.toString());
+		new LogJDBCByDruid().getTableData(page, selSql.toString().toLowerCase(), bean.getClass());
+		if (Constants.ISOPENLOGCOUNTANDLAST) {
+			if (page.getList() != null && page.getList().size() > 0) {
+				new LogJDBCByDruid().getCount(page, countSql.toString().toLowerCase());
+			}
		}
	}
 
-	private static Map getFiledsType(Object o) {
-		Field[] fields = o.getClass().getSuperclass().getDeclaredFields();
-		Field[] superfields = o.getClass().getDeclaredFields();
+	/**
+	 * Use reflection to get the data type of each property in the class; the key is the property
+	 * name, the value is the data type
+	 *
+	 * @param obj
+	 * @return
+	 */
+	private static Map getFiledsType(Object obj) {
+		Field[] fields = obj.getClass().getSuperclass().getDeclaredFields();
+		Field[] superfields = obj.getClass().getDeclaredFields();
		Map infoMap = new HashMap();
		for (int i = 0; i < fields.length; i++) {
			infoMap.put(fields[i].getName(), fields[i].getType().toString().replace("class", ""));
@@ -353,16 +393,23 @@
		return infoMap;
	}
 
-	public static String getFiledsSql(String mapName, String fileds) throws Exception {
-		String[] fieldsColoumn = null;
-		// all column names
-		List columnList = new ArrayList();
-		// all property names
-		List propertyList = new ArrayList();
-		// property name as key, column name as value
-		Map columnMap = new HashMap();
-
+	/**
+	 * Convert the entries in fileds to database columns according to the matching resultMap in
+	 * DfLogSearchDao.xml
+	 *
+	 * @param mapName
+	 * @param fileds as agreed with the UI these are property names of the log bean (the UI has no
+	 *            table schema and cannot know the column names behind the properties), not database column names
+	 * @return
+	 * @throws Exception
+	 */
+	private static String getFiledsSql(String mapName, String fileds) throws Exception {
		if (!StringUtil.isBlank(fileds)) {
+			String[] fieldsColoumn = null;
+			// all column names
+			List columnList = new ArrayList();
+			// all property names
+			List propertyList = new ArrayList();
+			// property name as key, column name as value
+			Map columnMap = new HashMap();
			// parse the field/property names passed in fileds
			fieldsColoumn = fileds.split(",");
 
@@ -399,7 +446,13 @@
		return fileds;
	}
 
-	public static Map getFiledAndColumnMap(Class clazz) {
+	/**
+	 * Look up the column-to-property relations from the class's resultMap in DfLogSearchDao.xml;
+	 * the key is the property
+	 *
+	 * @param clazz
+	 * @return
+	 */
+	private static Map getFiledAndColumnMap(Class clazz) {
		Map map = new HashMap();
		SqlSessionFactory sqlSessionFactory = SpringContextHolder.getBean(SqlSessionFactory.class);
		ResultMap resultMap = sqlSessionFactory.getConfiguration().getResultMap(clazz.getSimpleName() + "Map");
@@ -436,7 +489,7 @@
	}
 
	/**
-	 * Use refection to call the bean.set method to write value into the field
+	 * Use reflection to call the bean.set method to write value into the field
	 *
	 * @param bean
	 * @param fieldName
@@ -456,63 +509,4 @@
		method.invoke(bean, value);
	}
 
-	/**
-	 * Check whether the start time or the end time is more than 48 hours earlier than the current system time
-	 *
-	 * @param startTime
-	 *            start time
-	 * @param endTime
-	 *            end time
-	 * @return
-	 * @throws ParseException
-	 */
-	public static boolean ifTimeGreaterThan48(String startTime, String endTime) throws Exception {
-		logger.info("ifTimeGreaterThan48 method start " + System.currentTimeMillis());
-		if (null != startTime && !startTime.equals("") && null != endTime && !endTime.equals("")) {// both start and end time are present
-			Date startDate = sdf.parse(startTime);
-			Date endDate = sdf.parse(endTime);
-			if (startDate.getTime() < endDate.getTime()) {// start time is earlier than end time
-				logger.info("ifTimeGreaterThan48 method end " + System.currentTimeMillis());
-				return gt48(endTime);
-			} else {// start time is later than end time, not the normal case
-				logger.info("ifTimeGreaterThan48 method end " + System.currentTimeMillis());
-				return false;
-			}
-		} else if (null != endTime && !endTime.equals("")) {// start time empty, end time present
-			logger.info("ifTimeGreaterThan48 method end " + System.currentTimeMillis());
-			return gt48(endTime);
-		} else if (null != startTime && !startTime.equals("")) {// end time empty, start time present
-			logger.info("ifTimeGreaterThan48 method end " + System.currentTimeMillis());
-			return gt48(startTime);
-		} else {// both start and end time are empty
-			logger.info("ifTimeGreaterThan48 method end " + System.currentTimeMillis());
-			return false;
-		}
-
-	}
-
-	public static boolean gt48(String eqTime) throws ParseException {
-		logger.info("gt48 method start " + System.currentTimeMillis());
-		Date eqDate = sdf.parse(eqTime);
-		Long dateNum = eqDate.getTime();
-		Long currentDate = new Date().getTime();
-		Long time = 0l;
-		if (dateNum < currentDate) {
-			time = currentDate - dateNum;// milliseconds between the end time and the current system time
-		} else {
-			logger.info("gt48 method end " + System.currentTimeMillis());
-			return false;// the end time is later than the current system time, which fails the data center requirement (only data older than 48 hours is queried from the data center)
-		}
-		double hours = time.doubleValue() / (1000 * 60 * 60);
-		Long datacenterTime = Constants.DATACENTER_TIME;
-		double doubleValue = datacenterTime.doubleValue();
-		if (hours > doubleValue) {
-			logger.info("gt48 method end " + System.currentTimeMillis());
-			return true;// meets the requirement
-		} else {
-			logger.info("gt48 method end " + System.currentTimeMillis());
-			return false;
-		}
-
-	}
 }
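Note: with isOpenLogCountAndLast switched off, searchFromDataCenter above now skips the count query entirely, so only page.list is filled and page.count/page.last stay untouched. The ClickHouse branch pages with MySQL-style "limit startNum,pageSize"; startNum is computed before the excerpt shown here, presumably as the usual (pageNo - 1) * pageSize offset. A self-contained sketch under those assumptions:

    // Standalone sketch; Page and the startNum computation are stand-ins for code not shown in this diff.
    class PagingSketch {
        static final boolean IS_OPEN_LOG_COUNT_AND_LAST = true; // mirrors Constants.ISOPENLOGCOUNTANDLAST

        // offset of the first row of a 1-based pageNo, as used in "limit startNum,pageSize"
        static int startNum(int pageNo, int pageSize) {
            return (Math.max(pageNo, 1) - 1) * pageSize;
        }

        public static void main(String[] args) {
            int pageNo = 3, pageSize = 20;
            System.out.println(" limit " + startNum(pageNo, pageSize) + "," + pageSize); // " limit 40,20"
            if (IS_OPEN_LOG_COUNT_AND_LAST) {
                // only in this case would the count sql run and page.count / page.last be set
                System.out.println("count query enabled");
            }
        }
    }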
diff --git a/src/main/resources/applicationLog-hive.properties b/src/main/resources/applicationLog-hive.properties
index 824f387..5ffeebc 100644
--- a/src/main/resources/applicationLog-hive.properties
+++ b/src/main/resources/applicationLog-hive.properties
@@ -22,7 +22,7 @@ MmPicUrlLogHiveTable=MM_PIC_URL_LOG
 MmVoipIpLogHiveTable=MM_VOIP_IP_LOG
 MmVoipAccountLogHiveTable=MM_VOIP_ACCOUNT_LOG
 MmSampleAudioLogHiveTable=MM_SAMPLE_AUDIO_LOG
-MmSampleVideoLogLogHiveTable=MM_SAMPLE_VIDEO_LOG
+MmSampleVideoLogHiveTable=MM_SAMPLE_VIDEO_LOG
 MmPornAudioLevelLogHiveTable=MM_PORN_AUDIO_LEVEL_LOG
 MmPornVideoLevelLogHiveTable=MM_PORN_VIDEO_LEVEL_LOG
 MmSamplePicLogHiveTable=MM_SAMPLE_PIC_LOG
diff --git a/src/main/resources/nis.properties b/src/main/resources/nis.properties
index 75728a9..bc848d8 100644
--- a/src/main/resources/nis.properties
+++ b/src/main/resources/nis.properties
@@ -209,3 +209,7 @@ isUseMinio=true
 
 ## file type stored in redis (the concatenated prefix)
 fileProtocol=redis://
+
+# whether the count and last-page features of log queries are enabled
+isOpenLogCountAndLast=true
+
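Note: the applicationLog-hive.properties rename matters because LogDataService.getTableName builds its lookup key as className + "HiveTable", so the doubled "LogLog" in the old MmSampleVideoLogLogHiveTable entry could never match a class presumably named MmSampleVideoLog. A sketch of the resolution path with the property source stubbed out (the stub values are assumptions standing in for Configurations and the properties files):

    // Sketch of LogDataService.getTableName; the property source is stubbed, not the real Configurations.
    class TableNameSketch {
        static boolean isUseClickHouse = false; // mirrors Constants.ISUSECLICKHOUSE; Hive branch for this example

        static String getTableName(String key, String defaultTableName) {
            if (isUseClickHouse) {
                key = key.replace("HiveTable", "ClickHouseTable"); // same key space, different suffix
            }
            return getStringProperty(key, defaultTableName);
        }

        // stand-in for Configurations.getStringProperty backed by applicationLog-hive.properties
        static String getStringProperty(String key, String def) {
            return key.equals("MmSampleVideoLogHiveTable") ? "MM_SAMPLE_VIDEO_LOG" : def;
        }

        public static void main(String[] args) {
            // callers pass className + "HiveTable"; before the fix the file only held the unmatchable doubled key
            System.out.println(getTableName("MmSampleVideoLog" + "HiveTable", "")); // MM_SAMPLE_VIDEO_LOG
        }
    }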