package com.nis.web.service;

import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.ibatis.mapping.ResultMap;
import org.apache.ibatis.mapping.ResultMapping;
import org.apache.ibatis.session.SqlSessionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import com.nis.domain.Page;
import com.nis.domain.restful.NtcAsnRecord;
import com.nis.domain.restful.NtcConnRecordPercent;
import com.nis.restful.RestBusinessCode;
import com.nis.restful.RestServiceException;
import com.nis.util.Configurations;
import com.nis.util.Constants;
import com.nis.web.dao.impl.LocalLogJDBCByDruid;
import com.nis.web.dao.impl.LogJDBCByDruid;
import com.zdjizhi.utils.StringUtil;

/**
 * Queries data from ClickHouse or Hive and sets the result into page.list for
 * the UI to display.
 *
 * @author rkg
 */
@Service
public class LogDataService {

	private final static Logger logger = LoggerFactory.getLogger(LogDataService.class);

	@Autowired
	private LogJDBCByDruid logJDBCByDruid;
	@Autowired
	private LocalLogJDBCByDruid localLogJDBCByDruid;

	private static SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
	private static SimpleDateFormat sdf2 = new SimpleDateFormat("yyyyMMdd");

	private static Map<String, Map<String, String>> col2col = new HashMap<String, Map<String, String>>();
	static {
		Map<String, String> startMap = new HashMap<String, String>();
		startMap.put("start", "foundTime");
		col2col.put("searchFoundStartTime", startMap);
		Map<String, String> endMap = new HashMap<String, String>();
		endMap.put("end", "foundTime");
		col2col.put("searchFoundEndTime", endMap);
	}
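
	// Note: col2col is consumed by getDataFromClickHouse/getDataFromHive below. When a
	// search-bean property ends with "Time" and appears in col2col, its value is parsed
	// with sdf and emitted as "found_time>=<epoch>" (key "start") or "found_time<<epoch>"
	// (key "end") in the generated where clause.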
	public NtcConnRecordPercent getNtcConnRecordPercent(NtcConnRecordPercent ntcConnRecordPercent) throws Exception {
		long startTime = sdf.parse(ntcConnRecordPercent.getSearchFoundStartTime().toString().trim()).getTime() / 1000;
		long endTime = sdf.parse(ntcConnRecordPercent.getSearchFoundEndTime().toString().trim()).getTime() / 1000;
		if (endTime - startTime < 0) {
			throw new RestServiceException("searchFoundStartTime can not exceed searchFoundEndTime",
					RestBusinessCode.param_formate_error.getValue());
		}
		long second = endTime - startTime;
		StringBuffer sql = new StringBuffer();
		sql.append("SELECT SUM(c2s_pkt_num + s2c_pkt_num)*8/");
		sql.append(second);
		sql.append(" AS pps, SUM(c2s_byte_num + s2c_byte_num)*8/");
		sql.append(second);
		sql.append(
				" AS bps FROM tbs_ods_ntc_conn_record_log_local t WHERE found_time IN (SELECT DISTINCT found_time FROM tbs_ods_ntc_conn_record_log_local WHERE found_time >= ");
		sql.append(startTime);
		sql.append(" and found_time<");
		sql.append(endTime);
		StringBuffer totalSql = new StringBuffer();
		totalSql.append(sql);
		totalSql.append(" and stream_dir in(1,2,3)) and stream_dir in(1,2,3)");
		sql.append(" and stream_dir in(1,2)) and stream_dir in(1,2)");
		localLogJDBCByDruid.getNtcConnRecordPercentData(ntcConnRecordPercent, sql.toString(), false);
		localLogJDBCByDruid.getNtcConnRecordPercentData(ntcConnRecordPercent, totalSql.toString(), true);
		return ntcConnRecordPercent;
	}
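
	// Illustrative shape of the SQL built above (the epoch values are examples):
	//   SELECT SUM(c2s_pkt_num + s2c_pkt_num)*8/1200 AS pps, SUM(c2s_byte_num + s2c_byte_num)*8/1200
	//   AS bps FROM tbs_ods_ntc_conn_record_log_local t WHERE found_time IN (SELECT DISTINCT
	//   found_time FROM tbs_ods_ntc_conn_record_log_local WHERE found_time >= 1545053400
	//   and found_time<1545054600 and stream_dir in(1,2)) and stream_dir in(1,2)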
	public static void main(String[] args) throws ParseException {
		// Ad-hoc test harness that prints the SQL built the same way as getNtcAsnRecord below.
		String asn = "s_asn";
		long startTime = sdf.parse("2018-12-17 21:30:00").getTime() / 1000;
		long endTime = sdf.parse("2018-12-17 21:50:00").getTime() / 1000;
		long second = 200L;
		StringBuffer sql = new StringBuffer();
		sql.append("SELECT SUM(c2s_pkt_num + s2c_pkt_num)*8/");
		sql.append(second);
		sql.append(" AS pps, SUM(c2s_byte_num + s2c_byte_num)*8/");
		sql.append(second);
		sql.append(" AS bps, ");
		sql.append(asn);
		sql.append(
				" asn FROM tbs_ods_ntc_conn_record_log_local t WHERE found_time IN (SELECT DISTINCT found_time FROM tbs_ods_ntc_conn_record_log_local WHERE found_time >= ");
		sql.append(startTime);
		sql.append(" and found_time<");
		sql.append(endTime);
		StringBuffer countSql = new StringBuffer();
		countSql.append("select count(1) from (");
		countSql.append(sql);
		countSql.append(") group by ");
		countSql.append(asn + ")");
		Integer startNum = 0;
		Integer limitCount = startNum + 20;
		sql.append(" limit " + limitCount + ") group by " + asn + " limit " + startNum + "," + 20);
		System.out.println(sql);
		System.out.println(countSql);
	}
	public void getNtcAsnRecord(Page page, NtcAsnRecord ntcAsnRecord) throws Exception {
		long startTime = sdf.parse(ntcAsnRecord.getSearchFoundStartTime().toString().trim()).getTime() / 1000;
		long endTime = sdf.parse(ntcAsnRecord.getSearchFoundEndTime().toString().trim()).getTime() / 1000;
		if (endTime - startTime < 0) {
			throw new RestServiceException("searchFoundStartTime can not exceed searchFoundEndTime",
					RestBusinessCode.param_formate_error.getValue());
		}
		String asn = "s_asn";
		if (ntcAsnRecord.getSearchAsnType().trim().equals("1")) {
			asn = "d_asn";
		}
		long second = endTime - startTime;
		StringBuffer sql = new StringBuffer();
		sql.append("SELECT SUM(c2s_pkt_num + s2c_pkt_num)*8/");
		sql.append(second);
		sql.append(" AS pps, SUM(c2s_byte_num + s2c_byte_num)*8/");
		sql.append(second);
		sql.append(" AS bps, ");
		sql.append(asn);
		sql.append(
				" asn FROM tbs_ods_ntc_conn_record_log_local t WHERE found_time IN (SELECT DISTINCT found_time FROM tbs_ods_ntc_conn_record_log_local WHERE found_time >= ");
		sql.append(startTime);
		sql.append(" and found_time<");
		sql.append(endTime);
		StringBuffer countSql = new StringBuffer();
		countSql.append("select count(1) from (");
		countSql.append(sql);
		countSql.append(") group by ");
		countSql.append(asn + ")");
		Integer startNum = (page.getPageNo() - 1) * page.getPageSize();
		Integer limitCount = startNum + page.getPageSize();
		sql.append(" limit " + limitCount + ") group by " + asn + " limit " + startNum + "," + page.getPageSize());
		localLogJDBCByDruid.getCount(page, countSql.toString());
		page.setList(localLogJDBCByDruid.getNtcAsnRecordData(sql.toString()));
	}
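
	// Paging note: the inner DISTINCT found_time subquery is capped at limitCount
	// (startNum + pageSize) rows, and the outer grouped query then pages with the
	// MySQL-style "limit startNum,pageSize" that ClickHouse also accepts.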
	/**
	 * Resolves the Hive or ClickHouse table name for a log class from its class
	 * name plus the corresponding suffix.
	 *
	 * @param key
	 * @param defaultTableName
	 * @return
	 */
	private String getTableName(String key, String defaultTableName) {
		if (Constants.ISUSECLICKHOUSE) {
			key = key.replace("HiveTable", "ClickHouseTable");
		}
		return Configurations.getStringProperty(key, defaultTableName);
	}
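
	// e.g. with Constants.ISUSECLICKHOUSE set, getTableName("NtcAsnRecordHiveTable", "")
	// reads the property "NtcAsnRecordClickHouseTable" from the configuration instead.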
	/**
	 * Uses reflection over the property values of page and obj to build the query
	 * SQL (with paging and ordering) plus the corresponding count SQL, runs both
	 * against the appropriate database, and sets the results into the page object
	 * for the UI to display.
	 *
	 * @param page
	 * @param obj
	 * @throws Exception
	 */
	public <T> void getData(Page<T> page, Object obj) throws Exception {
		String className = obj.getClass().getSimpleName();
		String tableName = getTableName(className + "HiveTable", "");
		if (tableName == null || tableName.trim().equals("")) {
			throw new RuntimeException(
					"Table name for log class " + className + " is empty, please check the configuration file");
		}
		String orderBy = " order by ";
		if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
			orderBy = orderBy + Page.getOrderBySql(className, page.getOrderBy());
		} else {
			orderBy = orderBy + "found_Time desc";
		}
		if (Constants.ISUSECLICKHOUSE) {
			getDataFromClickHouse(page, obj, tableName, className, orderBy.toLowerCase());
		} else {
			getDataFromHive(page, obj, tableName, className, orderBy.toLowerCase());
		}
	}
	/**
	 * Queries data from ClickHouse. Note that ClickHouse is case sensitive; by
	 * current agreement with 百分点 only lower case is used.
	 *
	 * @param page      carries pageSize, pageNo and the order-by clause
	 * @param bean      the log bean (its resultMap, className + "Map", comes from
	 *                  DfLogSearchDao.xml) used to map each property to its column
	 * @param tableName the table name
	 * @param className the class name
	 * @param orderBy   the order-by clause
	 * @throws Exception
	 */
	private <T> void getDataFromClickHouse(Page<T> page, Object bean, String tableName, String className,
			String orderBy) throws Exception {
		tableName = tableName.toLowerCase();
		String showColumn = getFieldsSql(className, page.getFields());
		StringBuffer sql = new StringBuffer();
		Map<String, String> fieldAndColumnMap = getFieldAndColumnMap(bean.getClass());
		if (null == showColumn || showColumn.equals("")) {
			for (String key : fieldAndColumnMap.keySet()) {
				if (!fieldAndColumnMap.get(key).toLowerCase().equals("id")) {
					sql.append(fieldAndColumnMap.get(key) + ",");
				}
			}
		} else {
			sql.append(showColumn);
		}
		String sqlTrim = sql.toString().trim();
		if (sqlTrim.endsWith(",")) {
			sqlTrim = sqlTrim.substring(0, sqlTrim.length() - 1);
		}
		sql.setLength(0);
		sql.append("select " + sqlTrim.toLowerCase() + " from " + tableName.toLowerCase() + " t where ");
		StringBuffer whereFoundTime = new StringBuffer();
		StringBuffer countSql = new StringBuffer();
		countSql.append("select count(1) from " + tableName + " where ");
		StringBuffer whereSB = new StringBuffer();
		if (!StringUtil.isEmpty(bean)) {
			Class<?> clazz = bean.getClass();
			Map<String, String> fieldsType = getFieldsType(bean);
			for (; clazz != Object.class; clazz = clazz.getSuperclass()) {
				Field[] fields = clazz.getDeclaredFields();
				for (int i = 0; i < fields.length; i++) {
					// The gwall log tables currently hold only numeric and string columns, and the
					// numeric ones are int (no bigint), so no L suffix is needed.
					Field f = fields[i];
					String key = f.getName(); // field name
					if (f.getType().getName().equals("java.lang.String") && key.startsWith("search")) {
						Object value = getFieldValue(bean, key);
						if (!StringUtil.isEmpty(value)) {
							setFieldValue(bean, key, value.toString().trim());
							if (key.endsWith("Time")) { // start or end of the date range
								if (col2col.containsKey(key)) {
									value = sdf.parse(value.toString().trim()).getTime() / 1000;
									if (col2col.get(key).get("start") != null) {
										whereFoundTime.append(" and "
												+ fieldAndColumnMap.get(col2col.get(key).get("start")).toLowerCase()
												+ ">=" + value);
									} else {
										whereFoundTime.append(" and "
												+ fieldAndColumnMap.get(col2col.get(key).get("end")).toLowerCase()
												+ "<" + value);
									}
								}
							} else {
								if (key.toLowerCase().startsWith("search")) {
									key = key.replace("search", "");
									key = key.substring(0, 1).toLowerCase() + key.substring(1);
								}
								// ClickHouse syntax
								String type = fieldsType.get(key).trim();
								if (type.equals("java.lang.String")) {
									String field = fieldAndColumnMap.get(key).toLowerCase();
									if (field.equals("url") || field.equals("website")) {
										whereSB.append(" and " + field + " like '" + value.toString().trim() + "%'");
									} else {
										whereSB.append(" and " + field + "='"
												+ StringEscapeUtils.unescapeHtml4(value.toString().trim()) + "'");
									}
								} else if (type.equals("java.lang.Integer") || type.equals("int")
										|| type.equals("java.lang.Long") || type.equals("long")) {
									whereSB.append(" and " + fieldAndColumnMap.get(key).toLowerCase() + "="
											+ value.toString().trim());
								}
							}
						}
					}
				}
			}
		}
		Integer startNum = (page.getPageNo() - 1) * page.getPageSize();
		StringBuffer foundTimeSql = new StringBuffer();
		foundTimeSql.append("select found_time from " + tableName + " where ");
		Integer limitCount = startNum + page.getPageSize();
		if (whereSB.length() == 0) { // no other filters, only the default found_time condition
			if (whereFoundTime.length() > 0) {
				int indexOf = whereFoundTime.indexOf(" and ") + " and ".length();
				countSql.append(whereFoundTime.substring(indexOf));
				foundTimeSql
						.append(whereFoundTime.substring(indexOf) + orderBy.toLowerCase() + " limit " + limitCount);
				sql.append("found_time in(" + foundTimeSql + ")");
			} else {
				throw new RuntimeException(
						"A query against ClickHouse table " + tableName + " must have at least one where condition");
			}
		} else {
			int foundIndexOf = whereFoundTime.append(whereSB).indexOf(" and ") + " and ".length();
			countSql.append(whereFoundTime.substring(foundIndexOf));
			foundTimeSql
					.append(whereFoundTime.substring(foundIndexOf) + orderBy.toLowerCase() + " limit " + limitCount);
			int indexOf = whereSB.indexOf(" and ") + " and ".length();
			sql.append(whereSB.substring(indexOf) + " and found_time in(" + foundTimeSql + ")");
		}
		sql.append(orderBy.toLowerCase() + " limit " + startNum + "," + page.getPageSize()); // ClickHouse paging is the same as in MySQL
		if (tableName.toUpperCase().equals("TBS_ODS_NTC_CONN_RECORD_LOG_LOCAL")) {
			searchFromLocalCK(page, bean, sql, countSql);
		} else {
			searchFromDataCenter(page, bean, sql, countSql);
		}
	}
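
	// Illustrative shape of the final ClickHouse query (column names and values are examples):
	//   select app_id,found_time,... from <table> t where app_id=1 and found_time in(
	//     select found_time from <table> where found_time>=1545053400 and found_time<1545054600
	//     and app_id=1 order by found_time desc limit 40) order by found_time desc limit 20,20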
	/**
	 * Queries data from Hive.
	 *
	 * @param page      carries pageSize, pageNo and the order-by clause
	 * @param bean      the log bean (its resultMap, className + "Map", comes from
	 *                  DfLogSearchDao.xml) used to map each property to its column
	 * @param tableName the table name
	 * @param className the class name
	 * @param orderBy   the order-by clause
	 * @throws Exception
	 */
	private <T> void getDataFromHive(Page<T> page, Object bean, String tableName, String className, String orderBy)
			throws Exception {
		tableName = tableName.toLowerCase();
		String showColumn = getFieldsSql(className, page.getFields());
		StringBuffer sql = new StringBuffer();
		Map<String, String> fieldAndColumnMap = getFieldAndColumnMap(bean.getClass());
		if (null == showColumn || showColumn.equals("")) {
			for (String key : fieldAndColumnMap.keySet()) {
				if (!fieldAndColumnMap.get(key).toLowerCase().equals("id")) {
					sql.append(fieldAndColumnMap.get(key) + ",");
				}
			}
		} else {
			sql.append(showColumn);
		}
		String sqlTrim = sql.toString().trim();
		if (sqlTrim.endsWith(",")) {
			sqlTrim = sqlTrim.substring(0, sqlTrim.length() - 1);
		}
		sql.setLength(0);
		sql.append("select " + sqlTrim.toLowerCase() + " from (select " + sqlTrim.toLowerCase()
				+ ",row_number() over(partition by found_time_partition " + orderBy + ") as row_num from "
				+ tableName.toLowerCase() + " ");
		StringBuffer countSql = new StringBuffer();
		countSql.append("select count(1) from " + tableName + " ");
		StringBuffer whereSB = new StringBuffer();
		if (!StringUtil.isEmpty(bean)) {
			Class<?> clazz = bean.getClass();
			for (; clazz != Object.class; clazz = clazz.getSuperclass()) {
				// get all declared fields, whether public, protected or private
				Field[] fields = clazz.getDeclaredFields();
				Long foundTimePartStart = null;
				Long foundTimePartEnd = null;
				for (int i = 0; i < fields.length; i++) {
					// The gwall log tables currently hold only numeric and string columns, and the
					// numeric ones are int (no bigint), so no L suffix is needed.
					Field f = fields[i];
					String key = f.getName(); // field name
					String typeName = f.getType().getName();
					if (f.getType().getName().equals("java.lang.String") && key.startsWith("search")) {
						Object value = getFieldValue(bean, key);
						if (!StringUtil.isEmpty(value)) {
							setFieldValue(bean, key, value.toString().trim());
							if (key.endsWith("Time")) { // start or end of the date range
								if (col2col.containsKey(key)) {
									Long partition = Long.parseLong(sdf2.format(sdf.parse(value.toString().trim())));
									value = sdf.parse(value.toString().trim()).getTime() / 1000;
									if (key.toLowerCase().equals("searchfoundstarttime")) {
										foundTimePartStart = partition;
									}
									if (key.toLowerCase().equals("searchfoundendtime")) {
										foundTimePartEnd = partition;
									}
									if (col2col.get(key).get("start") != null) {
										whereSB.append(" and "
												+ fieldAndColumnMap.get(col2col.get(key).get("start")).toLowerCase()
												+ ">=" + value);
									} else {
										whereSB.append(" and "
												+ fieldAndColumnMap.get(col2col.get(key).get("end")).toLowerCase()
												+ "<" + value);
									}
								}
							} else {
								if (key.toLowerCase().startsWith("search")) {
									key = key.replace("search", "");
									key = key.substring(0, 1).toLowerCase() + key.substring(1);
								}
								if (typeName.equals("java.lang.String")) {
									String field = fieldAndColumnMap.get(key);
									if (field.equals("url")) {
										whereSB.append(" and " + field + " like '" + value.toString().trim() + "%'");
									} else {
										whereSB.append(" and " + field + "='" + value.toString().trim() + "'");
									}
								} else if (typeName.equals("java.lang.Integer") || typeName.equals("int")) {
									whereSB.append(
											" and " + fieldAndColumnMap.get(key) + "=" + value.toString().trim());
								} else if (typeName.equals("java.lang.Long") || typeName.equals("long")) {
									whereSB.append(
											" and " + fieldAndColumnMap.get(key) + "=" + value.toString().trim() + "L");
								}
							}
						}
					}
				}
				if (null != foundTimePartStart) {
					whereSB.append(" and found_time_partition>=" + foundTimePartStart);
				}
				if (null != foundTimePartEnd) {
					whereSB.append(" and found_time_partition<=" + foundTimePartEnd);
				}
			}
		}
		if (whereSB.length() > 0) {
			int indexOf = whereSB.indexOf(" and ") + " and ".length();
			sql.append(" where " + whereSB.substring(indexOf));
			countSql.append(" where " + whereSB.substring(indexOf));
		}
		Integer startNum = (page.getPageNo() - 1) * page.getPageSize() + 1;
		Integer endNum = startNum - 1 + page.getPageSize();
		sql.append(") t where row_Num between " + startNum + " and " + endNum);
		searchFromDataCenter(page, bean, sql, countSql);
	}
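
	// Hive paging works differently from ClickHouse: every row gets
	// row_number() over(partition by found_time_partition <orderBy>) as row_num in the
	// inner select, and the outer select keeps the rows with row_num between startNum
	// and endNum for the requested page.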
	/**
	 * Executes the query SQL and the count SQL against the data center.
	 *
	 * @param page
	 * @param bean
	 * @param selSql
	 * @param countSql
	 * @throws Exception
	 */
	private <T> void searchFromDataCenter(Page<T> page, Object bean, StringBuffer selSql, StringBuffer countSql)
			throws Exception {
		// previously gated on Constants.ISOPENLOGCOUNTANDLAST
		logJDBCByDruid.getCount(page, countSql.toString());
		if (page.getCount() > 0) {
			logJDBCByDruid.getTableData(page, selSql.toString(), bean.getClass());
			if (page.getLast() > 100) {
				page.setLast(100);
			}
		} else {
			logger.info("no data found, sql={}", countSql.toString());
		}
	}
	private <T> void searchFromLocalCK(Page<T> page, Object bean, StringBuffer selSql, StringBuffer countSql)
			throws Exception {
		// previously gated on Constants.ISOPENLOGCOUNTANDLAST
		localLogJDBCByDruid.getCount(page, countSql.toString());
		if (page.getCount() > 0) {
			localLogJDBCByDruid.getTableData(page, selSql.toString(), bean.getClass());
			if (page.getLast() > 100) {
				page.setLast(100);
			}
		} else {
			logger.info("no data found, sql={}", countSql.toString());
		}
	}
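
	// Both execution paths cap page.last at 100, so the UI is never offered more
	// than 100 pages no matter how large the real count is.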
	/**
	 * Uses reflection to collect the data type of each property of the object's
	 * class and its superclass; the key is the property name, the value its type.
	 *
	 * @param obj
	 * @return
	 */
	private static Map<String, String> getFieldsType(Object obj) {
		Field[] superFields = obj.getClass().getSuperclass().getDeclaredFields();
		Field[] fields = obj.getClass().getDeclaredFields();
		Map<String, String> infoMap = new HashMap<String, String>();
		for (int i = 0; i < superFields.length; i++) {
			infoMap.put(superFields[i].getName(), superFields[i].getType().toString().replace("class ", ""));
		}
		for (int i = 0; i < fields.length; i++) {
			infoMap.put(fields[i].getName(), fields[i].getType().toString().replace("class ", ""));
		}
		return infoMap;
	}
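
	// e.g. the resulting entries look like "searchFoundStartTime" -> "java.lang.String";
	// the "class " prefix is stripped because Class.toString() returns e.g.
	// "class java.lang.String" (primitives such as int carry no prefix).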
	/**
	 * Converts the names in fields to database column names using the
	 * corresponding resultMap from DfLogSearchDao.xml.
	 *
	 * @param mapName
	 * @param fields as agreed with the UI these are the property names of the log
	 *               bean (the UI has no table schema and cannot know which column
	 *               backs each property), not the database column names
	 * @return
	 * @throws Exception
	 */
	private static String getFieldsSql(String mapName, String fields) throws Exception {
		if (!StringUtil.isBlank(fields)) {
			String[] fieldsColumn = null;
			// all column names
			List<String> columnList = new ArrayList<String>();
			// all property names
			List<String> propertyList = new ArrayList<String>();
			// property name as key, column name as value
			Map<String, String> columnMap = new HashMap<String, String>();
			// split the requested field/property names
			fieldsColumn = fields.split(",");
			// read the column and property names from the resultMap
			if (fieldsColumn != null) {
				SqlSessionFactory sqlSessionFactory = SpringContextHolder.getBean(SqlSessionFactory.class);
				ResultMap map = sqlSessionFactory.getConfiguration().getResultMap(mapName + "Map");
				List<ResultMapping> mapping = map.getResultMappings();
				for (ResultMapping mapp : mapping) {
					columnList.add(mapp.getColumn().toLowerCase());
					propertyList.add(mapp.getProperty());
					columnMap.put(mapp.getProperty(), mapp.getColumn());
				}
			}
			if (fieldsColumn != null) {
				fields = "";
				for (String column : fieldsColumn) {
					if (!StringUtil.isBlank(column)) {
						column = column.trim();
						if (columnList.contains(column)) {
							fields += "," + column;
						} else if (propertyList.contains(column)) {
							fields += "," + columnMap.get(column).toString();
						}
					}
				}
				if (!StringUtil.isBlank(fields)) {
					fields = fields.substring(1);
				}
			}
		}
		return fields;
	}
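
	// e.g. getFieldsSql("NtcAsnRecord", "foundTime,c2s_byte_num") would return
	// "found_time,c2s_byte_num" (assuming the usual mapping): property names are
	// translated through the resultMap, names already given as columns pass through,
	// and unknown names are silently dropped.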
	/**
	 * Reads the column-to-property relationship of the class's resultMap from
	 * DfLogSearchDao.xml; the key is the property.
	 *
	 * @param clazz
	 * @return
	 */
	private static Map<String, String> getFieldAndColumnMap(Class<?> clazz) {
		Map<String, String> map = new HashMap<String, String>();
		SqlSessionFactory sqlSessionFactory = SpringContextHolder.getBean(SqlSessionFactory.class);
		ResultMap resultMap = sqlSessionFactory.getConfiguration().getResultMap(clazz.getSimpleName() + "Map");
		List<ResultMapping> mapping = resultMap.getResultMappings();
		for (ResultMapping mapp : mapping) {
			map.put(mapp.getProperty(), mapp.getColumn().toLowerCase());
		}
		return map;
	}
	/**
	 * Uses reflection to read the value of field fieldName from bean through its
	 * getter.
	 *
	 * @param bean
	 * @param fieldName
	 * @return
	 * @throws Exception
	 */
	private static Object getFieldValue(Object bean, String fieldName) throws Exception {
		StringBuffer result = new StringBuffer();
		String methodName = result.append("get").append(fieldName.substring(0, 1).toUpperCase())
				.append(fieldName.substring(1)).toString();
		Object rObject = null;
		Method method = null;
		@SuppressWarnings("rawtypes")
		Class[] classArr = new Class[0];
		method = bean.getClass().getMethod(methodName, classArr);
		rObject = method.invoke(bean, new Object[0]);
		return rObject;
	}
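
	// e.g. getFieldValue(bean, "searchFoundStartTime") invokes bean.getSearchFoundStartTime().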
	/**
	 * Uses reflection to set value on field fieldName through bean's setter.
	 *
	 * @param bean
	 * @param fieldName
	 * @param value
	 * @throws Exception
	 */
	private static void setFieldValue(Object bean, String fieldName, Object value) throws Exception {
		StringBuffer result = new StringBuffer();
		String methodName = result.append("set").append(fieldName.substring(0, 1).toUpperCase())
				.append(fieldName.substring(1)).toString();
		@SuppressWarnings("rawtypes")
		Class[] classArr = new Class[1];
		classArr[0] = String.class;
		Method method = bean.getClass().getMethod(methodName, classArr);
		method.invoke(bean, value);
	}
}