fix(config files): add development environment, remove full-width (Chinese) space issue
@@ -19,7 +19,6 @@ import org.springframework.beans.factory.annotation.Autowired;

import com.nis.domain.LogEntity;
import com.nis.domain.Page;
import com.nis.domain.restful.NtcRadiusReport;
import com.nis.domain.restful.NtcReportEntity;
import com.nis.restful.RestBusinessCode;
import com.nis.restful.RestServiceException;
@@ -240,6 +239,62 @@ public abstract class BaseLogService {
        }
        logger.info("请求参数校验结束----" + System.currentTimeMillis());

    }

    /**
     * Validates whether the format of the log query conditions is correct.
     *
     * @param thread
     * @param start
     * @param entity
     * @param clazz
     * @param page
     */
    public void collectConditionCheck(AuditLogThread thread, long start, NtcCollectVoipLog entity, Class clazz,
            Page page) {
        logger.info("请求参数校验开始----" + System.currentTimeMillis());
        try {
            if (!StringUtil.isBlank(entity.getSearchFoundEndTime())) {
                sdf.setLenient(false);
                sdf.parse(entity.getSearchFoundEndTime());
            }
        } catch (ParseException e) {
            thread.setExceptionInfo(e.getMessage() + " " + e.getCause());
            logger.error(e);
            throw new RestServiceException(thread, System.currentTimeMillis() - start, "searchFoundEndTime参数格式错误",
                    RestBusinessCode.param_formate_error.getValue());
        } catch (Exception e) {
            thread.setExceptionInfo(e.getMessage() + " " + e.getCause());
            logger.error(e);
            throw new RestServiceException(thread, System.currentTimeMillis() - start, "searchFoundEndTime参数错误");
        }

        try {
            if (!StringUtil.isBlank(entity.getSearchFoundStartTime())) {
                sdf.setLenient(false);
                sdf.parse(entity.getSearchFoundStartTime());
            }
        } catch (ParseException e) {
            thread.setExceptionInfo(e.getMessage() + " " + e.getCause());
            logger.error(e);
            throw new RestServiceException(thread, System.currentTimeMillis() - start, "searchFoundStartTime参数格式错误",
                    RestBusinessCode.param_formate_error.getValue());
        } catch (Exception e) {
            thread.setExceptionInfo(e.getMessage() + " " + e.getCause());
            logger.error(e);
            throw new RestServiceException(thread, System.currentTimeMillis() - start, "searchFoundStartTime参数错误");
        }

        try {
            checkCloumnIsExist(thread, start, clazz, page);
        } catch (RestServiceException e) {
            logger.error(e);
            throw e;
        } catch (Exception e) {
            thread.setExceptionInfo(e.getMessage() + " " + e.getCause());
            logger.error(e);
            throw new RestServiceException(thread, System.currentTimeMillis() - start, "请求参数错误");
        }
        logger.info("请求参数校验结束----" + System.currentTimeMillis());

    }
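For reference, a minimal standalone sketch of the strict-format check performed by collectConditionCheck above, assuming the same "yyyy-MM-dd HH:mm:ss" pattern that sdf uses in this class (the sketch is an illustration, not part of the commit):

// Illustration only: strict date parsing with SimpleDateFormat.
import java.text.ParseException;
import java.text.SimpleDateFormat;

class StrictDateCheckSketch {
    private static final SimpleDateFormat SDF = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

    static boolean isValid(String time) {
        if (time == null || time.trim().isEmpty()) {
            return true; // blank values are skipped, as in collectConditionCheck
        }
        try {
            SDF.setLenient(false);      // reject impossible dates such as 2023-02-30
            SDF.parse(time.trim());
            return true;
        } catch (ParseException e) {
            return false;               // the caller raises RestServiceException with param_formate_error
        }
    }
}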
    /**
     *
@@ -295,6 +350,14 @@ public abstract class BaseLogService {
            logger.error(e);
            throw new RestServiceException(thread, System.currentTimeMillis() - start, "searchReportEndTime参数错误");
        }
        if (!StringUtil.isBlank(entity.getSearchBusinessType()) && !StringUtil.isNumeric(entity.getSearchBusinessType())) {
            logger.error(RestBusinessCode.param_formate_error.getErrorReason() + ",searchBusinessType参数格式错误");
            thread.setExceptionInfo("searchBusinessType参数格式错误");
            throw new RestServiceException(thread,
                    System.currentTimeMillis() - start,
                    "searchBusinessType参数格式错误",
                    RestBusinessCode.param_formate_error.getValue());
        }

        logger.info("实时报表统计查询参数校验结束----" + System.currentTimeMillis());

@@ -367,7 +430,7 @@ public abstract class BaseLogService {
            logger.error(e);
            throw new RestServiceException(thread, System.currentTimeMillis() - start, "searchReportStartTime参数错误");
        }

        try {
            if (!StringUtil.isBlank(entity.getSearchReportEndTime())) {
                sdf.parse(entity.getSearchReportEndTime());
@@ -391,8 +454,8 @@ public abstract class BaseLogService {
                    "searchReportStartTime和searchReportEndTime参数必须同时填写",
                    RestBusinessCode.config_integrity_error.getValue());
        }

        // When viewing the IP trend by account (searchBusinessType=2), searchAccount must be provided
        if ("2".equals(entity.getSearchBusinessType()) && StringUtil.isEmpty(entity.getSearchAccount())) {
            logger.error(RestBusinessCode.config_integrity_error.getErrorReason() + ",searchBusinessType=2时searchAccount参数必须填写");
@@ -402,7 +465,7 @@ public abstract class BaseLogService {
                    "searchBusinessType=2时searchAccount参数必须填写",
                    RestBusinessCode.config_integrity_error.getValue());
        }

        // When viewing the IP trend by NAS IP (searchBusinessType=3), searchNasIp must be provided
        if ("3".equals(entity.getSearchBusinessType()) && StringUtil.isEmpty(entity.getSearchNasIp())) {
            logger.error(RestBusinessCode.config_integrity_error.getErrorReason() + ",searchBusinessType=3时searchNasIp参数必须填写");
@@ -412,11 +475,11 @@ public abstract class BaseLogService {
                    "searchBusinessType=3时searchNasIp参数必须填写",
                    RestBusinessCode.config_integrity_error.getValue());
        }

        logger.info("用户行为日志统计参数校验结束----" + System.currentTimeMillis());

    }

    }

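As a compact illustration of the conditional-requirement rules enforced in the hunks above (the class and method names here are hypothetical, and empty strings are treated as missing, mirroring StringUtil.isEmpty):

// Illustration only: the same rules expressed as a standalone validator.
// searchBusinessType=2 requires an account, =3 requires a NAS IP, and the
// report start/end times must be supplied together.
class ReportQueryRulesSketch {
    static String firstViolation(String businessType, String account, String nasIp,
                                 String reportStart, String reportEnd) {
        boolean startMissing = reportStart == null || reportStart.isEmpty();
        boolean endMissing = reportEnd == null || reportEnd.isEmpty();
        if (startMissing != endMissing) {
            return "searchReportStartTime和searchReportEndTime参数必须同时填写";
        }
        if ("2".equals(businessType) && (account == null || account.isEmpty())) {
            return "searchBusinessType=2时searchAccount参数必须填写";
        }
        if ("3".equals(businessType) && (nasIp == null || nasIp.isEmpty())) {
            return "searchBusinessType=3时searchNasIp参数必须填写";
        }
        return null; // no violation
    }
}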
    /**
     *
     * @Title: getJedisKey
||||
@@ -1,625 +0,0 @@
package com.nis.web.service;

import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.ibatis.mapping.ResultMap;
import org.apache.ibatis.mapping.ResultMapping;
import org.apache.ibatis.session.SqlSessionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.alibaba.druid.pool.DruidDataSource;
import com.nis.domain.Page;
import com.nis.util.Constants;
import com.nis.util.LogJDBCByDruid;
import com.zdjizhi.utils.StringUtil;
import com.nis.util.redis.SaveRedisThread;

public class HiveSqlService {
    private final static Logger logger = LoggerFactory.getLogger(HiveSqlService.class);
    static DruidDataSource datasource = null;
    Connection conn = null;
    ResultSet rs = null;
    Statement st = null;
    private static SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    private static SimpleDateFormat sdf2 = new SimpleDateFormat("yyyyMMdd");

    private static Map<String, String> getFiledsType(Object o) {
        Field[] fields = o.getClass().getSuperclass().getDeclaredFields();
        Field[] superfields = o.getClass().getDeclaredFields();
        Map<String, String> infoMap = new HashMap<String, String>();
        for (int i = 0; i < fields.length; i++) {
            infoMap.put(fields[i].getName(), fields[i].getType().toString().replace("class", ""));
        }
        for (int i = 0; i < superfields.length; i++) {
            infoMap.put(superfields[i].getName(), superfields[i].getType().toString().replace("class", ""));
        }
        return infoMap;
    }

    public static String getSql(Page page, Object bean, String tableName, Map<String, Map<String, String>> col2col,
            String orderBy, String searchActiveSys) throws Exception {
        if (Constants.ISUSECLICKHOUSE) {
            return getSqlByClickHouse(page, bean, tableName, col2col, orderBy, searchActiveSys);
        } else {
            return getSqlByHive(page, bean, tableName, col2col, orderBy, searchActiveSys);
        }
    }

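A hedged sketch of how this dispatcher was typically driven; the bean type, table name, setter names and the Page constructor/setters are assumptions inferred from the getters used elsewhere in this file, not confirmed API:

// Illustration only.
class HiveSqlServiceUsageSketch {
    static void buildVoipLogSql() throws Exception {
        Page page = new Page();
        page.setPageNo(1);
        page.setPageSize(20);

        Map<String, Map<String, String>> col2col = new HashMap<String, Map<String, String>>();
        Map<String, String> startMap = new HashMap<String, String>();
        startMap.put("start", "foundTime");
        col2col.put("searchFoundStartTime", startMap);
        Map<String, String> endMap = new HashMap<String, String>();
        endMap.put("end", "foundTime");
        col2col.put("searchFoundEndTime", endMap);

        NtcCollectVoipLog query = new NtcCollectVoipLog();
        query.setSearchFoundStartTime("2019-01-01 00:00:00");
        query.setSearchFoundEndTime("2019-01-02 00:00:00");

        // Dispatches to getSqlByClickHouse or getSqlByHive based on Constants.ISUSECLICKHOUSE.
        String listSql = HiveSqlService.getSql(page, query, "ntc_collect_voip_log", col2col, "found_time desc", null);
        Long total = HiveSqlService.getLogCount(query, "ntc_collect_voip_log", col2col);
    }
}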
public static String getSqlByClickHouse(Page page, Object bean, String tableName,
|
||||
Map<String, Map<String, String>> col2col, String orderBy, String searchActiveSys) throws Exception {
|
||||
tableName = tableName.toLowerCase();
|
||||
String showColmun = getFiledsSql(bean.getClass().getSimpleName(), page.getFields());
|
||||
StringBuffer sql = new StringBuffer();
|
||||
Map<String, String> filedAndColumnMap = getFiledAndColumnMap(bean.getClass());
|
||||
if (null == showColmun || showColmun.equals("")) {
|
||||
for (String key : filedAndColumnMap.keySet()) {
|
||||
if (!filedAndColumnMap.get(key).toLowerCase().equals("id")) {
|
||||
sql.append(filedAndColumnMap.get(key) + ",");
|
||||
}
|
||||
}
|
||||
} else {
|
||||
sql.append(showColmun);
|
||||
}
|
||||
String sqlTrim = sql.toString().trim();
|
||||
if (sqlTrim.endsWith(",")) {
|
||||
sqlTrim = sqlTrim.substring(0, sqlTrim.length() - 1);
|
||||
}
|
||||
sql.setLength(0);
|
||||
sql.append(" select " + sqlTrim.toLowerCase() + " from " + tableName.toLowerCase() + " t ");
|
||||
StringBuffer whereSB = new StringBuffer();
|
||||
if (!StringUtil.isEmpty(bean)) {
|
||||
Class<?> clazz = bean.getClass();
|
||||
Map<String, String> filedsType = null;
|
||||
filedsType = getFiledsType(bean);
|
||||
for (; clazz != Object.class; clazz = clazz.getSuperclass()) {
|
||||
Field[] fields = clazz.getDeclaredFields();
|
||||
for (int i = 0; i < fields.length; i++) {
|
||||
// 现在gwall日志表结构中只有数值和字符串两种类型,数值都是int类型没有bigint所以不需要加L,
|
||||
Field f = fields[i];
|
||||
String key = f.getName();// 获取字段名
|
||||
if (f.getType().getName().equals("java.lang.String") && key.startsWith("search")) {
|
||||
Object value = getFieldValue(bean, key);
|
||||
if (!StringUtil.isEmpty(value)) {
|
||||
setFieldValue(bean, key, value.toString().trim());
|
||||
if (key.endsWith("Time")) {// 日期开始或结束的字段
|
||||
if (col2col.containsKey(key)) {
|
||||
Long partition = Long.parseLong(sdf2.format(sdf.parse(value.toString().trim())));
|
||||
value = sdf.parse(value.toString().trim()).getTime() / 1000;
|
||||
if (col2col.get(key).get("start") != null) {
|
||||
whereSB.append(" and "
|
||||
+ filedAndColumnMap.get(col2col.get(key).get("start")).toLowerCase()
|
||||
+ ">=" + value);
|
||||
} else {
|
||||
whereSB.append(" and "
|
||||
+ filedAndColumnMap.get(col2col.get(key).get("end")).toLowerCase() + "<"
|
||||
+ value);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (key.toLowerCase().startsWith("search")) {
|
||||
key = key.replace("search", "");
|
||||
key = key.substring(0, 1).toLowerCase() + key.substring(1);
|
||||
}
|
||||
|
||||
// clickhouse写法
|
||||
String type = filedsType.get(key).trim();
|
||||
if (type.equals("java.lang.String")) {
|
||||
String field = filedAndColumnMap.get(key).toLowerCase();
|
||||
if (field.equals("url")) {
|
||||
whereSB.append(" and " + field + " like '" + value.toString().trim() + "%'");
|
||||
} else {
|
||||
whereSB.append(" and " + field + "='" + value.toString().trim() + "'");
|
||||
}
|
||||
} else if (type.equals("java.lang.Integer") || type.equals("int")
|
||||
|| type.equals("java.lang.Long") || type.equals("long")) {
|
||||
whereSB.append(" and " + filedAndColumnMap.get(key).toLowerCase() + "="
|
||||
+ value.toString().trim());
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
if (whereSB.length() > 0) {
|
||||
int indexOf = whereSB.indexOf("and") + "and".length();
|
||||
sql.append(" where " + whereSB.substring(indexOf));
|
||||
}
|
||||
Integer startNum = (page.getPageNo() - 1) * page.getPageSize();
|
||||
if (orderBy.toLowerCase().contains("asc") || orderBy.toLowerCase().contains("desc")) {
|
||||
sql.append(" order by " + orderBy.toLowerCase());
|
||||
} else {
|
||||
sql.append(" order by " + orderBy.toLowerCase() + " desc");
|
||||
}
|
||||
sql.append(" limit " + startNum + "," + page.getPageSize());// clickhouse的分页与mysql相同
|
||||
|
||||
return sql.toString();
|
||||
}
|
||||
|
||||
public static String getSqlByHive(Page page, Object bean, String tableName,
|
||||
Map<String, Map<String, String>> col2col, String orderBy, String searchActiveSys) throws Exception {
|
||||
tableName = tableName.toLowerCase();
|
||||
String showColmun = getFiledsSql(bean.getClass().getSimpleName(), page.getFields());
|
||||
StringBuffer sql = new StringBuffer();
|
||||
Map<String, String> filedAndColumnMap = getFiledAndColumnMap(bean.getClass());
|
||||
if (null == showColmun || showColmun.equals("")) {
|
||||
for (String key : filedAndColumnMap.keySet()) {
|
||||
if (!filedAndColumnMap.get(key).toLowerCase().equals("id")) {
|
||||
sql.append(filedAndColumnMap.get(key) + ",");
|
||||
}
|
||||
}
|
||||
} else {
|
||||
sql.append(showColmun);
|
||||
}
|
||||
String sqlTrim = sql.toString().trim();
|
||||
if (sqlTrim.endsWith(",")) {
|
||||
sqlTrim = sqlTrim.substring(0, sqlTrim.length() - 1);
|
||||
}
|
||||
if (orderBy.toLowerCase().contains("asc") || orderBy.toLowerCase().contains("desc")) {
|
||||
orderBy = " order by " + orderBy;
|
||||
} else {
|
||||
orderBy = " order by " + orderBy + " desc ";
|
||||
}
|
||||
sql.setLength(0);
|
||||
sql.append(" select " + sqlTrim.toLowerCase() + " from (select " + sqlTrim.toLowerCase()
|
||||
+ ",row_number() over(partition by found_time_partition " + orderBy + ") as row_num from "
|
||||
+ tableName.toLowerCase() + " ");
|
||||
StringBuffer whereSB = new StringBuffer();
|
||||
if (!StringUtil.isEmpty(bean)) {
|
||||
Class<?> clazz = bean.getClass();
|
||||
Map<String, String> filedsType = null;
|
||||
for (; clazz != Object.class; clazz = clazz.getSuperclass()) {
|
||||
// 获取所有的字段包括public,private,protected,private
|
||||
// Field[] fields = bean.getClass().getDeclaredFields();
|
||||
Field[] fields = clazz.getDeclaredFields();
|
||||
Long foundTimePartStart = null;
|
||||
Long foundTimePartEnd = null;
|
||||
for (int i = 0; i < fields.length; i++) {
|
||||
// 现在gwall日志表结构中只有数值和字符串两种类型,数值都是int类型没有bigint所以不需要加L,
|
||||
Field f = fields[i];
|
||||
String key = f.getName();// 获取字段名
|
||||
String typeName = f.getType().getName();
|
||||
if (f.getType().getName().equals("java.lang.String") && key.startsWith("search")) {
|
||||
Object value = getFieldValue(bean, key);
|
||||
if (!StringUtil.isEmpty(value)) {
|
||||
setFieldValue(bean, key, value.toString().trim());
|
||||
if (key.endsWith("Time")) {// 日期开始或结束的字段
|
||||
if (col2col.containsKey(key)) {
|
||||
Long partition = Long.parseLong(sdf2.format(sdf.parse(value.toString().trim())));
|
||||
value = sdf.parse(value.toString().trim()).getTime() / 1000;
|
||||
if (key.toLowerCase().equals("searchfoundstarttime")) {
|
||||
foundTimePartStart = partition;
|
||||
}
|
||||
if (key.toLowerCase().equals("searchfoundendtime")) {
|
||||
foundTimePartEnd = partition;
|
||||
}
|
||||
if (col2col.get(key).get("start") != null) {
|
||||
// sql.append(" and " +
|
||||
// filedAndColumnMap.get(col2col.get(key).get("start"))
|
||||
// + ">=to_date('" +
|
||||
// value.toString().trim()
|
||||
// + "','yyyy-mm-dd HH24:mi:ss')");
|
||||
whereSB.append(" and "
|
||||
+ filedAndColumnMap.get(col2col.get(key).get("start")).toLowerCase()
|
||||
+ ">=" + value);
|
||||
} else {
|
||||
// sql.append(" and " +
|
||||
// filedAndColumnMap.get(col2col.get(key).get("end"))
|
||||
// + "<=to_date('" +
|
||||
// value.toString().trim()
|
||||
// + "','yyyy-mm-dd HH24:mi:ss')");
|
||||
whereSB.append(" and "
|
||||
+ filedAndColumnMap.get(col2col.get(key).get("end")).toLowerCase() + "<"
|
||||
+ value);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (key.toLowerCase().startsWith("search")) {
|
||||
key = key.replace("search", "");
|
||||
key = key.substring(0, 1).toLowerCase() + key.substring(1);
|
||||
}
|
||||
|
||||
if (typeName.equals("java.lang.String")) {
|
||||
String field = filedAndColumnMap.get(key);
|
||||
if (field.equals("url")) {
|
||||
whereSB.append(" and " + field + " like '" + value.toString().trim() + "%'");
|
||||
} else {
|
||||
whereSB.append(" and " + field + "='" + value.toString().trim() + "'");
|
||||
}
|
||||
} else if (typeName.equals("java.lang.Integer") || typeName.equals("int")) {
|
||||
whereSB.append(
|
||||
" and " + filedAndColumnMap.get(key) + "=" + value.toString().trim());
|
||||
|
||||
} else if (typeName.equals("java.lang.Long") || typeName.equals("long")) {
|
||||
whereSB.append(
|
||||
" and " + filedAndColumnMap.get(key) + "=" + value.toString().trim() + "L");
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
if (null != foundTimePartStart) {
|
||||
// sql.append(" and found_time_partition>=" + foundTimePartStart + "L");
|
||||
whereSB.append(" and found_time_partition>=" + foundTimePartStart);
|
||||
}
|
||||
if (null != foundTimePartEnd) {
|
||||
// sql.append(" and found_time_partition<" + foundTimePartEnd + "L");
|
||||
whereSB.append(" and found_time_partition<=" + foundTimePartEnd);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
if (whereSB.length() > 0) {
|
||||
int indexOf = whereSB.indexOf("and") + "and".length();
|
||||
sql.append(" where " + whereSB.substring(indexOf));
|
||||
}
|
||||
Integer startNum = (page.getPageNo() - 1) * page.getPageSize() + 1;
|
||||
Integer endNum = startNum - 1 + page.getPageSize();
|
||||
sql.append(" ) t where row_Num between " + startNum + " and " + endNum);
|
||||
logger.info("获取数据中心日志sql==================={}", sql);
|
||||
return sql.toString();
|
||||
}
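The Hive branch above pages with a row_number() window over the partition column rather than LIMIT. A hedged sketch of the query shape it produces, written here as a comment; the table, column names, partition values and epoch seconds are examples only:

// Illustration only: approximate shape of the statement built above for pageNo=2,
// pageSize=10 on a hypothetical table (epoch values assume a UTC+8 server):
//
//   select found_time,src_ip,dst_ip from (
//     select found_time,src_ip,dst_ip,
//            row_number() over(partition by found_time_partition order by found_time desc) as row_num
//     from ntc_collect_voip_log
//     where found_time>=1546272000 and found_time<1546358400
//       and found_time_partition>=20190101 and found_time_partition<=20190102
//   ) t where row_Num between 11 and 20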
|
||||
|
||||
public static Long getLogCount(Object bean, String tableName, Map<String, Map<String, String>> col2col) throws Exception {
|
||||
if (Constants.ISUSECLICKHOUSE) {
|
||||
return getLogCountFromClickHouse(bean, tableName, col2col);
|
||||
} else {
|
||||
return getLogCountFromHive(bean, tableName, col2col);
|
||||
}
|
||||
}
|
||||
|
||||
public static Long getLogCountFromClickHouse(Object bean, String tableName,
|
||||
Map<String, Map<String, String>> col2col) throws Exception {
|
||||
tableName = tableName.toLowerCase();
|
||||
StringBuffer sql = new StringBuffer();
|
||||
Map<String, String> filedAndColumnMap = getFiledAndColumnMap(bean.getClass());
|
||||
sql.append("select count(1) from " + tableName + " ");
|
||||
StringBuffer whereSB = new StringBuffer();
|
||||
if (!StringUtil.isEmpty(bean)) {
|
||||
Class<?> clazz = bean.getClass();
|
||||
Map<String, String> filedsType = null;
|
||||
filedsType = getFiledsType(bean);
|
||||
for (; clazz != Object.class; clazz = clazz.getSuperclass()) {
|
||||
Field[] fields = clazz.getDeclaredFields();
|
||||
for (int i = 0; i < fields.length; i++) {
|
||||
// 现在gwall日志表结构中只有数值和字符串两种类型,数值都是int类型没有bigint所以不需要加L,
|
||||
Field f = fields[i];
|
||||
String key = f.getName();// 获取字段名
|
||||
if (f.getType().getName().equals("java.lang.String") && key.startsWith("search")) {
|
||||
Object value = getFieldValue(bean, key);
|
||||
if (!StringUtil.isEmpty(value)) {
|
||||
setFieldValue(bean, key, value.toString().trim());
|
||||
if (key.endsWith("Time")) {// 日期开始或结束的字段
|
||||
if (col2col.containsKey(key)) {
|
||||
Long partition = Long.parseLong(sdf2.format(sdf.parse(value.toString().trim())));
|
||||
value = sdf.parse(value.toString().trim()).getTime() / 1000;
|
||||
if (col2col.get(key).get("start") != null) {
|
||||
whereSB.append(" and "
|
||||
+ filedAndColumnMap.get(col2col.get(key).get("start")).toLowerCase()
|
||||
+ ">=" + value);
|
||||
} else {
|
||||
whereSB.append(" and "
|
||||
+ filedAndColumnMap.get(col2col.get(key).get("end")).toLowerCase() + "<"
|
||||
+ value);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (key.toLowerCase().startsWith("search")) {
|
||||
key = key.replace("search", "");
|
||||
key = key.substring(0, 1).toLowerCase() + key.substring(1);
|
||||
}
|
||||
|
||||
// clickhouse写法
|
||||
String type = filedsType.get(key).trim();
|
||||
if (type.equals("java.lang.String")) {
|
||||
String field = filedAndColumnMap.get(key).toLowerCase();
|
||||
if (field.equals("url")) {
|
||||
whereSB.append(" and " + field + " like '" + value.toString().trim() + "%'");
|
||||
} else {
|
||||
whereSB.append(" and " + field + "='" + value.toString().trim() + "'");
|
||||
}
|
||||
} else if (type.equals("java.lang.Integer") || type.equals("int")
|
||||
|| type.equals("java.lang.Long") || type.equals("long")) {
|
||||
whereSB.append(" and " + filedAndColumnMap.get(key).toLowerCase() + "="
|
||||
+ value.toString().trim());
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
if (whereSB.length() > 0) {
|
||||
int indexOf = whereSB.indexOf("and") + "and".length();
|
||||
sql.append(" where " + whereSB.substring(indexOf));
|
||||
}
|
||||
logger.info("获取数据中心日志总条数sql==================" + sql.toString());
|
||||
Long count = new LogJDBCByDruid().getCount(sql.toString());
|
||||
return count;
|
||||
}
|
||||
|
||||
public static Long getLogCountFromHive(Object bean, String tableName, Map<String, Map<String, String>> col2col)
|
||||
throws Exception {
|
||||
tableName = tableName.toLowerCase();
|
||||
StringBuffer sql = new StringBuffer();
|
||||
Map<String, String> filedAndColumnMap = getFiledAndColumnMap(bean.getClass());
|
||||
sql.append("select count(1) from " + tableName + " ");
|
||||
StringBuffer whereSB = new StringBuffer();
|
||||
if (bean != null) {
|
||||
Class<?> clazz = bean.getClass();
|
||||
for (; clazz != Object.class; clazz = clazz.getSuperclass()) {
|
||||
// 获取所有的字段包括public,private,protected,private
|
||||
// Field[] fields = bean.getClass().getDeclaredFields();
|
||||
Field[] fields = clazz.getDeclaredFields();
|
||||
Long foundTimePartStart = null;
|
||||
Long foundTimePartEnd = null;
|
||||
for (int i = 0; i < fields.length; i++) {
|
||||
// 现在gwall日志表结构中只有数值和字符串两种类型,数值都是int类型没有bigint所以不需要加L,
|
||||
Field f = fields[i];
|
||||
String key = f.getName();// 获取字段名
|
||||
String typeName = f.getType().getName();
|
||||
if (f.getType().getName().equals("java.lang.String") && key.startsWith("search")) {
|
||||
Object value = getFieldValue(bean, key);
|
||||
if (!StringUtil.isEmpty(value)) {
|
||||
setFieldValue(bean, key, value.toString().trim());
|
||||
if (key.endsWith("Time")) {// 日期开始或结束的字段
|
||||
if (col2col.containsKey(key)) {
|
||||
Long partition = Long.parseLong(sdf2.format(sdf.parse(value.toString().trim())));
|
||||
value = sdf.parse(value.toString().trim()).getTime() / 1000;
|
||||
if (key.toLowerCase().equals("searchfoundstarttime")) {
|
||||
foundTimePartStart = partition;
|
||||
}
|
||||
if (key.toLowerCase().equals("searchfoundendtime")) {
|
||||
foundTimePartEnd = partition;
|
||||
}
|
||||
if (col2col.get(key).get("start") != null) {
|
||||
// sql.append(" and " +
|
||||
// filedAndColumnMap.get(col2col.get(key).get("start"))
|
||||
// + ">=to_date('" +
|
||||
// value.toString().trim()
|
||||
// + "','yyyy-mm-dd HH24:mi:ss')");
|
||||
whereSB.append(" and "
|
||||
+ filedAndColumnMap.get(col2col.get(key).get("start")).toLowerCase()
|
||||
+ ">=" + value);
|
||||
} else {
|
||||
// sql.append(" and " +
|
||||
// filedAndColumnMap.get(col2col.get(key).get("end"))
|
||||
// + "<=to_date('" +
|
||||
// value.toString().trim()
|
||||
// + "','yyyy-mm-dd HH24:mi:ss')");
|
||||
whereSB.append(" and "
|
||||
+ filedAndColumnMap.get(col2col.get(key).get("end")).toLowerCase() + "<"
|
||||
+ value);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (key.toLowerCase().startsWith("search")) {
|
||||
key = key.replace("search", "");
|
||||
key = key.substring(0, 1).toLowerCase() + key.substring(1);
|
||||
}
|
||||
|
||||
if (typeName.equals("java.lang.String")) {
|
||||
String field = filedAndColumnMap.get(key);
|
||||
if (field.equals("url")) {
|
||||
whereSB.append(" and " + field + " like '" + value.toString().trim() + "%'");
|
||||
} else {
|
||||
whereSB.append(" and " + field + "='" + value.toString().trim() + "'");
|
||||
}
|
||||
} else if (typeName.equals("java.lang.Integer") || typeName.equals("int")) {
|
||||
whereSB.append(
|
||||
" and " + filedAndColumnMap.get(key) + "=" + value.toString().trim());
|
||||
|
||||
} else if (typeName.equals("java.lang.Long") || typeName.equals("long")) {
|
||||
whereSB.append(
|
||||
" and " + filedAndColumnMap.get(key) + "=" + value.toString().trim() + "L");
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
if (null != foundTimePartStart) {
|
||||
// sql.append(" and found_time_partition>=" + foundTimePartStart + "L");
|
||||
whereSB.append(" and found_time_partition>=" + foundTimePartStart);
|
||||
}
|
||||
if (null != foundTimePartEnd) {
|
||||
// sql.append(" and found_time_partition<" + foundTimePartEnd + "L");
|
||||
whereSB.append(" and found_time_partition<=" + foundTimePartEnd);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
if (whereSB.length() > 0) {
|
||||
int indexOf = whereSB.indexOf("and") + "and".length();
|
||||
sql.append(" where " + whereSB.substring(indexOf));
|
||||
}
|
||||
logger.info("获取数据中心日志总条数sql==================" + sql.toString());
|
||||
Long count = new LogJDBCByDruid().getCount(sql.toString());
|
||||
return count;
|
||||
}
|
||||
|
||||
public static String getFiledsSql(String mapName, String fileds) throws Exception {
|
||||
String[] fieldsColoumn = null;
|
||||
// 所有字段名
|
||||
List<String> columnList = new ArrayList<String>();
|
||||
// 所有属性名
|
||||
List<String> propertyList = new ArrayList<String>();
|
||||
// 属性名称为key,字段名称为value
|
||||
Map<String, String> columnMap = new HashMap<String, String>();
|
||||
|
||||
if (!StringUtil.isBlank(fileds)) {
|
||||
// 解析Fileds的字段/属性名称
|
||||
fieldsColoumn = fileds.split(",");
|
||||
|
||||
// 从resultMap中获取字段名称和属性名称
|
||||
if (fieldsColoumn != null) {
|
||||
SqlSessionFactory sqlSessionFactory = SpringContextHolder.getBean(SqlSessionFactory.class);
|
||||
ResultMap map = sqlSessionFactory.getConfiguration().getResultMap(mapName + "Map");
|
||||
List<ResultMapping> mapping = map.getResultMappings();
|
||||
for (ResultMapping mapp : mapping) {
|
||||
columnList.add(mapp.getColumn().toLowerCase());
|
||||
propertyList.add(mapp.getProperty());
|
||||
columnMap.put(mapp.getProperty(), mapp.getColumn());
|
||||
}
|
||||
}
|
||||
if (fieldsColoumn != null) {
|
||||
fileds = "";
|
||||
for (String column : fieldsColoumn) {
|
||||
if (!StringUtil.isBlank(column)) {
|
||||
column = column.trim();
|
||||
if (columnList.contains(column)) {
|
||||
fileds += "," + column;
|
||||
} else if (propertyList.contains(column)) {
|
||||
fileds += "," + columnMap.get(column).toString();
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!StringUtil.isBlank(fileds)) {
|
||||
fileds = fileds.substring(1);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
return fileds;
|
||||
}
|
||||
|
||||
public static Map<String, String> getFiledAndColumnMap(Class<?> clazz) {
|
||||
Map<String, String> map = new HashMap<String, String>();
|
||||
SqlSessionFactory sqlSessionFactory = SpringContextHolder.getBean(SqlSessionFactory.class);
|
||||
ResultMap resultMap = sqlSessionFactory.getConfiguration().getResultMap(clazz.getSimpleName() + "Map");
|
||||
List<ResultMapping> mapping = resultMap.getResultMappings();
|
||||
for (ResultMapping mapp : mapping) {
|
||||
map.put(mapp.getProperty(), mapp.getColumn().toLowerCase());
|
||||
}
|
||||
return map;
|
||||
|
||||
}
|
||||
|
||||
    /**
     * Uses reflection to read the value of field fieldName from bean via its getter.
     *
     * @param bean
     * @param fieldName
     * @return
     * @throws Exception
     */
    private static Object getFieldValue(Object bean, String fieldName) throws Exception {
        StringBuffer result = new StringBuffer();
        String methodName = result.append("get").append(fieldName.substring(0, 1).toUpperCase())
                .append(fieldName.substring(1)).toString();

        Object rObject = null;
        Method method = null;

        @SuppressWarnings("rawtypes")
        Class[] classArr = new Class[0];
        method = bean.getClass().getMethod(methodName, classArr);
        rObject = method.invoke(bean, new Object[0]);

        return rObject;
    }

    /**
     * Uses reflection to call bean's setter and write value into field fieldName.
     *
     * @param bean
     * @param fieldName
     * @param value
     * @throws Exception
     */
    private static void setFieldValue(Object bean, String fieldName, Object value) throws Exception {
        StringBuffer result = new StringBuffer();
        String methodName = result.append("set").append(fieldName.substring(0, 1).toUpperCase())
                .append(fieldName.substring(1)).toString();
        // The setter is looked up with a single String parameter, since only String search* fields reach this point.
        Class[] classArr = new Class[1];
        classArr[0] = String.class;
        Method method = bean.getClass().getMethod(methodName, classArr);
        method.invoke(bean, value);
    }
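This mirrors how the SQL builders above call the two helpers for each String property whose name starts with "search"; the property name below is an example, not a confirmed field of any bean:

// Illustration only.
Object value = getFieldValue(bean, "searchAccount");               // invokes bean.getSearchAccount()
if (!StringUtil.isEmpty(value)) {
    setFieldValue(bean, "searchAccount", value.toString().trim()); // invokes bean.setSearchAccount(trimmedValue)
}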
|
||||
|
||||
    /**
     * Checks whether the start time or the end time is more than 48 hours earlier than the current system time.
     *
     * @param startTime
     *            start time
     * @param endTime
     *            end time
     * @return
     * @throws ParseException
     */
    public static boolean ifTimeGreaterThan48(String startTime, String endTime) throws Exception {
        logger.info("ifTimeGreaterThan48方法开始" + System.currentTimeMillis());
        if (null != startTime && !startTime.equals("") && null != endTime && !endTime.equals("")) {// both start and end time are present
            Date startDate = sdf.parse(startTime);
            Date endDate = sdf.parse(endTime);
            if (startDate.getTime() < endDate.getTime()) {// start time is earlier than end time
                logger.info("ifTimeGreaterThan48方法结束" + System.currentTimeMillis());
                return gt48(endTime);
            } else {// start time is later than end time, which is not the normal case
                logger.info("ifTimeGreaterThan48方法结束" + System.currentTimeMillis());
                return false;
            }
        } else if (null != endTime && !endTime.equals("")) {// start time empty, end time present
            logger.info("ifTimeGreaterThan48方法结束" + System.currentTimeMillis());
            return gt48(endTime);
        } else if (null != startTime && !startTime.equals("")) {// end time empty, start time present
            logger.info("ifTimeGreaterThan48方法结束" + System.currentTimeMillis());
            return gt48(startTime);
        } else {// both start and end time are empty
            logger.info("ifTimeGreaterThan48方法结束" + System.currentTimeMillis());
            return false;
        }

    }

    public static boolean gt48(String eqTime) throws ParseException {
        logger.info("gt48方法开始" + System.currentTimeMillis());
        Date eqDate = sdf.parse(eqTime);
        Long dateNum = eqDate.getTime();
        Long currentDate = new Date().getTime();
        Long time = 0L;
        if (dateNum < currentDate) {
            time = currentDate - dateNum;// millisecond difference between the given time and now
        } else {
            logger.info("gt48方法结束" + System.currentTimeMillis());
            return false;// the given time is later than the current system time, so it does not qualify for a data-center query (only data older than 48 hours is fetched from the data center)
        }
        double hours = time.doubleValue() / (1000 * 60 * 60);
        Long datacenterTime = Constants.DATACENTER_TIME;
        double doubleValue = datacenterTime.doubleValue();
        if (hours > doubleValue) {
            logger.info("gt48方法结束" + System.currentTimeMillis());
            return true;// qualifies
        } else {
            logger.info("gt48方法结束" + System.currentTimeMillis());
            return false;
        }

    }
|
||||
}
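For comparison, the same 48-hour boundary check can be expressed with java.time; a minimal sketch, assuming the same "yyyy-MM-dd HH:mm:ss" format and the same hour threshold held in Constants.DATACENTER_TIME (toHours() truncates fractional hours, which the original did not):

// Illustration only, not part of the deleted class.
import java.time.Duration;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;

class DataCenterCutoffSketch {
    private static final DateTimeFormatter FMT = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

    // true when the given timestamp is older than thresholdHours (e.g. 48) before now
    static boolean olderThanThreshold(String time, long thresholdHours) {
        LocalDateTime parsed = LocalDateTime.parse(time, FMT);
        LocalDateTime now = LocalDateTime.now();
        if (!parsed.isBefore(now)) {
            return false; // future timestamps never qualify
        }
        return Duration.between(parsed, now).toHours() > thresholdHours;
    }
}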
|
||||
src/main/java/com/nis/web/service/LogDataService.java (new file, 512 lines)
@@ -0,0 +1,512 @@
package com.nis.web.service;

import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.ibatis.mapping.ResultMap;
import org.apache.ibatis.mapping.ResultMapping;
import org.apache.ibatis.session.SqlSessionFactory;
import org.springframework.stereotype.Service;

import com.alibaba.druid.pool.DruidDataSource;
import com.nis.domain.Page;
import com.nis.util.Configurations;
import com.nis.util.Constants;
import com.nis.util.LogJDBCByDruid;
import com.zdjizhi.utils.StringUtil;

/**
 * Queries data from ClickHouse or Hive, sets it into page.list and returns it to the UI.
 *
 * @author rkg
 *
 */
@Service
public class LogDataService {
    // private final static Logger logger =
    // LoggerFactory.getLogger(LogDataService.class);
    static DruidDataSource datasource = null;
    Connection conn = null;
    ResultSet rs = null;
    Statement st = null;
    private static SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    private static SimpleDateFormat sdf2 = new SimpleDateFormat("yyyyMMdd");
    private static Map<String, Map<String, String>> col2col = new HashMap<String, Map<String, String>>();
    static {
        Map<String, String> startMap = new HashMap<String, String>();
        startMap.put("start", "foundTime");
        col2col.put("searchFoundStartTime", startMap);
        Map<String, String> endMap = new HashMap<String, String>();
        endMap.put("end", "foundTime");
        col2col.put("searchFoundEndTime", endMap);
    }
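The static map above is what turns the two search*Time fields into bounds on the found_time column. A hedged sketch of the effect (column name comes from the resultMap; the epoch-second values are examples and assume a UTC+8 server):

// Illustration only: given searchFoundStartTime="2019-01-01 00:00:00" and
// searchFoundEndTime="2019-01-02 00:00:00", the builders below convert both values
// to epoch seconds via sdf and emit, roughly:
//
//   found_time >= 1546272000 and found_time < 1546358400
//
// because col2col marks the first field as the "start" bound and the second as the
// "end" bound of the foundTime property (database column found_time).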
|
||||
    /**
     * Resolves the Hive or ClickHouse table name from the class name plus the corresponding suffix.
     *
     * @param key
     * @param defaultTableName
     * @return
     */
    private String getTableName(String key, String defaultTableName) {
        if (Constants.ISUSECLICKHOUSE) {
            key = key.replace("HiveTable", "ClickHouseTable");
        }
        return Configurations.getStringProperty(key, defaultTableName);
    }
|
||||
|
||||
    /**
     * Uses reflection over the page settings and the properties of obj to build the paged, ordered
     * query SQL and the count SQL, runs them against the corresponding database, and sets the
     * results into the page object for display in the UI.
     *
     * @param page
     * @param obj
     * @throws Exception
     */
    public <T> void getData(Page<T> page, Object obj) throws Exception {
        String className = obj.getClass().getSimpleName();
        String tableName = getTableName(className + "HiveTable", "");
        if (tableName == null || tableName.trim().equals("")) {
            throw new RuntimeException("日志类" + className + "对应的表名为空,请检查配置文件");
        }
        String orderBy = " order by ";
        if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
            orderBy = orderBy + Page.getOrderBySql(className, page.getOrderBy());
        } else {
            orderBy = orderBy + "found_Time desc ";
        }
        if (Constants.ISUSECLICKHOUSE) {
            getDataFromClickHouse(page, obj, tableName, className, orderBy.toLowerCase());
        } else {
            getDataFromHive(page, obj, tableName, className, orderBy.toLowerCase());
        }

    }
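A minimal, hypothetical caller of getData; the log bean, its setters and the Page setters are assumptions inferred from the getters used in this class, not confirmed API:

// Illustration only.
@Service
class VoipLogQueryExample {

    @Autowired
    private LogDataService logDataService;

    public Page<NtcCollectVoipLog> listVoipLogs() throws Exception {
        Page<NtcCollectVoipLog> page = new Page<NtcCollectVoipLog>();
        page.setPageNo(1);
        page.setPageSize(20);
        page.setOrderBy("foundTime desc");

        NtcCollectVoipLog query = new NtcCollectVoipLog();
        query.setSearchFoundStartTime("2019-01-01 00:00:00");
        query.setSearchFoundEndTime("2019-01-02 00:00:00");

        // Resolves NtcCollectVoipLogHiveTable (or ...ClickHouseTable) from the configuration,
        // builds the paged SQL and the count SQL, and fills page.list and the total count.
        logDataService.getData(page, query);
        return page;
    }
}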
|
||||
|
||||
    /**
     * Queries data from ClickHouse. Note that ClickHouse is case-sensitive; lower case is used
     * throughout, as agreed with Baifendian (百分点).
     *
     * @param page
     *            carries pageSize, pageNo and the order-by clause
     * @param bean
     *            log entity object (its resultMap, className + "Map", comes from DfLogSearchDao.xml),
     *            used to resolve the database column for each property
     * @param tableName
     *            table name
     * @param className
     *            class name
     * @param orderBy
     *            order-by condition
     * @throws Exception
     */
|
||||
private <T> void getDataFromClickHouse(Page<T> page, Object bean, String tableName, String className,
|
||||
String orderBy) throws Exception {
|
||||
tableName = tableName.toLowerCase();
|
||||
String showColmun = getFiledsSql(className, page.getFields());
|
||||
StringBuffer sql = new StringBuffer();
|
||||
Map<String, String> filedAndColumnMap = getFiledAndColumnMap(bean.getClass());
|
||||
if (null == showColmun || showColmun.equals("")) {
|
||||
for (String key : filedAndColumnMap.keySet()) {
|
||||
if (!filedAndColumnMap.get(key).toLowerCase().equals("id")) {
|
||||
sql.append(filedAndColumnMap.get(key) + ",");
|
||||
}
|
||||
}
|
||||
} else {
|
||||
sql.append(showColmun);
|
||||
}
|
||||
String sqlTrim = sql.toString().trim();
|
||||
if (sqlTrim.endsWith(",")) {
|
||||
sqlTrim = sqlTrim.substring(0, sqlTrim.length() - 1);
|
||||
}
|
||||
sql.setLength(0);
|
||||
sql.append(" select " + sqlTrim.toLowerCase() + " from " + tableName.toLowerCase() + " t where ");
|
||||
StringBuffer whereFoundTime = new StringBuffer();
|
||||
StringBuffer countSql = new StringBuffer();
|
||||
countSql.append("select count(1) from " + tableName + " where ");
|
||||
|
||||
StringBuffer whereSB = new StringBuffer();
|
||||
if (!StringUtil.isEmpty(bean)) {
|
||||
Class<?> clazz = bean.getClass();
|
||||
Map<String, String> filedsType = null;
|
||||
filedsType = getFiledsType(bean);
|
||||
for (; clazz != Object.class; clazz = clazz.getSuperclass()) {
|
||||
Field[] fields = clazz.getDeclaredFields();
|
||||
for (int i = 0; i < fields.length; i++) {
|
||||
// 现在gwall日志表结构中只有数值和字符串两种类型,数值都是int类型没有bigint所以不需要加L,
|
||||
Field f = fields[i];
|
||||
String key = f.getName();// 获取字段名
|
||||
if (f.getType().getName().equals("java.lang.String") && key.startsWith("search")) {
|
||||
Object value = getFieldValue(bean, key);
|
||||
if (!StringUtil.isEmpty(value)) {
|
||||
setFieldValue(bean, key, value.toString().trim());
|
||||
if (key.endsWith("Time")) {// 日期开始或结束的字段
|
||||
if (col2col.containsKey(key)) {
|
||||
value = sdf.parse(value.toString().trim()).getTime() / 1000;
|
||||
if (col2col.get(key).get("start") != null) {
|
||||
whereFoundTime.append(" and "
|
||||
+ filedAndColumnMap.get(col2col.get(key).get("start")).toLowerCase()
|
||||
+ ">=" + value);
|
||||
} else {
|
||||
whereFoundTime.append(" and "
|
||||
+ filedAndColumnMap.get(col2col.get(key).get("end")).toLowerCase() + "<"
|
||||
+ value);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (key.toLowerCase().startsWith("search")) {
|
||||
key = key.replace("search", "");
|
||||
key = key.substring(0, 1).toLowerCase() + key.substring(1);
|
||||
}
|
||||
// clickhouse写法
|
||||
String type = filedsType.get(key).trim();
|
||||
if (type.equals("java.lang.String")) {
|
||||
String field = filedAndColumnMap.get(key).toLowerCase();
|
||||
if (field.equals("url")) {
|
||||
whereSB.append(" and " + field + " like '" + value.toString().trim() + "%'");
|
||||
} else {
|
||||
whereSB.append(" and " + field + "='" + value.toString().trim() + "'");
|
||||
}
|
||||
} else if (type.equals("java.lang.Integer") || type.equals("int")
|
||||
|| type.equals("java.lang.Long") || type.equals("long")) {
|
||||
whereSB.append(" and " + filedAndColumnMap.get(key).toLowerCase() + "="
|
||||
+ value.toString().trim());
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
Integer startNum = (page.getPageNo() - 1) * page.getPageSize();
|
||||
StringBuffer foundTimeSql = new StringBuffer();
|
||||
foundTimeSql.append("select distinct found_time from " + tableName + " where ");
|
||||
if (whereSB.length() == 0) {// 没有其他查询条件只有默认的found_time条件
|
||||
if (whereFoundTime.length() > 0) {
|
||||
int indexOf = whereFoundTime.indexOf("and") + "and".length();
|
||||
countSql.append(whereFoundTime.substring(indexOf));
|
||||
|
||||
foundTimeSql.append(whereFoundTime.substring(indexOf) + orderBy.toLowerCase() + " limit " + startNum
|
||||
+ "," + page.getPageSize());
|
||||
sql.append(" found_time in(" + foundTimeSql + ") ");
|
||||
} else {
|
||||
throw new RuntimeException("从clickhouse的" + tableName + "表查询时,必须要有一个where条件");
|
||||
}
|
||||
} else {
|
||||
int foundIndexOf = whereFoundTime.append(whereSB).indexOf("and") + "and".length();
|
||||
countSql.append(whereFoundTime.substring(foundIndexOf));
|
||||
|
||||
foundTimeSql.append(whereFoundTime.substring(foundIndexOf) + orderBy.toLowerCase() + " limit " + startNum
|
||||
+ "," + page.getPageSize());
|
||||
|
||||
int indexOf = whereSB.indexOf("and") + "and".length();
|
||||
sql.append(whereSB.substring(indexOf) + " and found_time in(" + foundTimeSql + ") ");
|
||||
}
|
||||
sql.append(orderBy.toLowerCase() + " limit " + startNum + "," + page.getPageSize());// clickhouse的分页与mysql相同
|
||||
searchFromDataCenter(page, bean, sql, countSql);
|
||||
}
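The ClickHouse branch above pages in two steps: a distinct found_time subquery selects the page of timestamps, and the outer query filters on found_time in(...); with no conditions at all it refuses to run. A hedged sketch of the statements it produces, as comments (table, columns and epoch values are examples, assuming a UTC+8 server):

// Illustration only: approximate shape of the two statements built above for pageNo=1, pageSize=20.
//
//   select found_time,src_ip,dst_ip from ntc_collect_voip_log t where
//     src_ip='1.2.3.4' and found_time in(
//       select distinct found_time from ntc_collect_voip_log where
//         found_time>=1546272000 and found_time<1546358400 and src_ip='1.2.3.4'
//       order by found_time desc limit 0,20)
//   order by found_time desc limit 0,20
//
//   select count(1) from ntc_collect_voip_log where
//     found_time>=1546272000 and found_time<1546358400 and src_ip='1.2.3.4'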
|
||||
|
||||
    /**
     * Queries data from Hive.
     *
     * @param page
     *            carries pageSize, pageNo and the order-by clause
     * @param bean
     *            log entity object (its resultMap, className + "Map", comes from DfLogSearchDao.xml),
     *            used to resolve the database column for each property
     * @param tableName
     *            table name
     * @param className
     *            class name
     * @param orderBy
     *            order-by condition
     * @throws Exception
     */
|
||||
private <T> void getDataFromHive(Page<T> page, Object bean, String tableName, String className, String orderBy)
|
||||
throws Exception {
|
||||
tableName = tableName.toLowerCase();
|
||||
String showColmun = getFiledsSql(className, page.getFields());
|
||||
StringBuffer sql = new StringBuffer();
|
||||
Map<String, String> filedAndColumnMap = getFiledAndColumnMap(bean.getClass());
|
||||
if (null == showColmun || showColmun.equals("")) {
|
||||
for (String key : filedAndColumnMap.keySet()) {
|
||||
if (!filedAndColumnMap.get(key).toLowerCase().equals("id")) {
|
||||
sql.append(filedAndColumnMap.get(key) + ",");
|
||||
}
|
||||
}
|
||||
} else {
|
||||
sql.append(showColmun);
|
||||
}
|
||||
String sqlTrim= sql.toString().trim();
|
||||
if (sqlTrim.endsWith(",")) {
|
||||
sqlTrim = sqlTrim.substring(0, sqlTrim.length() - 1);
|
||||
}
|
||||
sql.setLength(0);
|
||||
sql.append(" select " + sqlTrim.toLowerCase() + " from (select " + sqlTrim.toLowerCase()
|
||||
+ ",row_number() over(partition by found_time_partition " + orderBy + ") as row_num from "
|
||||
+ tableName.toLowerCase() + " ");
|
||||
StringBuffer countSql = new StringBuffer();
|
||||
countSql.append("select count(1) from " + tableName + " ");
|
||||
|
||||
StringBuffer whereSB = new StringBuffer();
|
||||
if (!StringUtil.isEmpty(bean)) {
|
||||
Class<?> clazz = bean.getClass();
|
||||
for (; clazz != Object.class; clazz = clazz.getSuperclass()) {
|
||||
// 获取所有的字段包括public,private,protected,private
|
||||
// Field[] fields = bean.getClass().getDeclaredFields();
|
||||
Field[] fields = clazz.getDeclaredFields();
|
||||
Long foundTimePartStart = null;
|
||||
Long foundTimePartEnd = null;
|
||||
for (int i = 0; i < fields.length; i++) {
|
||||
// 现在gwall日志表结构中只有数值和字符串两种类型,数值都是int类型没有bigint所以不需要加L,
|
||||
Field f = fields[i];
|
||||
String key = f.getName();// 获取字段名
|
||||
String typeName = f.getType().getName();
|
||||
if (f.getType().getName().equals("java.lang.String") && key.startsWith("search")) {
|
||||
Object value = getFieldValue(bean, key);
|
||||
if (!StringUtil.isEmpty(value)) {
|
||||
setFieldValue(bean, key, value.toString().trim());
|
||||
if (key.endsWith("Time")) {// 日期开始或结束的字段
|
||||
if (col2col.containsKey(key)) {
|
||||
Long partition = Long.parseLong(sdf2.format(sdf.parse(value.toString().trim())));
|
||||
value = sdf.parse(value.toString().trim()).getTime() / 1000;
|
||||
if (key.toLowerCase().equals("searchfoundstarttime")) {
|
||||
foundTimePartStart = partition;
|
||||
}
|
||||
if (key.toLowerCase().equals("searchfoundendtime")) {
|
||||
foundTimePartEnd = partition;
|
||||
}
|
||||
if (col2col.get(key).get("start") != null) {
|
||||
// sql.append(" and " +
|
||||
// filedAndColumnMap.get(col2col.get(key).get("start"))
|
||||
// + ">=to_date('" +
|
||||
// value.toString().trim()
|
||||
// + "','yyyy-mm-dd HH24:mi:ss')");
|
||||
whereSB.append(" and "
|
||||
+ filedAndColumnMap.get(col2col.get(key).get("start")).toLowerCase()
|
||||
+ ">=" + value);
|
||||
} else {
|
||||
// sql.append(" and " +
|
||||
// filedAndColumnMap.get(col2col.get(key).get("end"))
|
||||
// + "<=to_date('" +
|
||||
// value.toString().trim()
|
||||
// + "','yyyy-mm-dd HH24:mi:ss')");
|
||||
whereSB.append(" and "
|
||||
+ filedAndColumnMap.get(col2col.get(key).get("end")).toLowerCase() + "<"
|
||||
+ value);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (key.toLowerCase().startsWith("search")) {
|
||||
key = key.replace("search", "");
|
||||
key = key.substring(0, 1).toLowerCase() + key.substring(1);
|
||||
}
|
||||
|
||||
if (typeName.equals("java.lang.String")) {
|
||||
String field = filedAndColumnMap.get(key);
|
||||
if (field.equals("url")) {
|
||||
whereSB.append(" and " + field + " like '" + value.toString().trim() + "%'");
|
||||
} else {
|
||||
whereSB.append(" and " + field + "='" + value.toString().trim() + "'");
|
||||
}
|
||||
} else if (typeName.equals("java.lang.Integer") || typeName.equals("int")) {
|
||||
whereSB.append(
|
||||
" and " + filedAndColumnMap.get(key) + "=" + value.toString().trim());
|
||||
|
||||
} else if (typeName.equals("java.lang.Long") || typeName.equals("long")) {
|
||||
whereSB.append(
|
||||
" and " + filedAndColumnMap.get(key) + "=" + value.toString().trim() + "L");
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
if (null != foundTimePartStart) {
|
||||
// sql.append(" and found_time_partition>=" + foundTimePartStart + "L");
|
||||
whereSB.append(" and found_time_partition>=" + foundTimePartStart);
|
||||
}
|
||||
if (null != foundTimePartEnd) {
|
||||
// sql.append(" and found_time_partition<" + foundTimePartEnd + "L");
|
||||
whereSB.append(" and found_time_partition<=" + foundTimePartEnd);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
if (whereSB.length() > 0) {
|
||||
int indexOf = whereSB.indexOf("and") + "and".length();
|
||||
sql.append(" where " + whereSB.substring(indexOf));
|
||||
countSql.append(" where " + whereSB.substring(indexOf));
|
||||
|
||||
}
|
||||
Integer startNum = (page.getPageNo() - 1) * page.getPageSize() + 1;
|
||||
Integer endNum = startNum - 1 + page.getPageSize();
|
||||
sql.append(" ) t where row_Num between " + startNum + " and " + endNum);
|
||||
searchFromDataCenter(page, bean, sql, countSql);
|
||||
}
|
||||
|
||||
    /**
     * Executes the SQL statements.
     *
     * @param page
     * @param bean
     * @param selSql
     * @param countSql
     * @throws Exception
     */
|
||||
private <T> void searchFromDataCenter(Page<T> page, Object bean, StringBuffer selSql, StringBuffer countSql)
|
||||
throws Exception {
|
||||
new LogJDBCByDruid().getTableData(page, selSql.toString(), bean.getClass());
|
||||
if (Constants.ISOPENLOGCOUNTANDLAST) {
|
||||
if (page.getList() != null && page.getList().size() > 0) {
|
||||
new LogJDBCByDruid().getCount(page, countSql.toString().toLowerCase());
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
    /**
     * Uses reflection to collect the data type of each property of the class; the key is the
     * property name and the value is its type.
     *
     * @param obj
     * @return
     */
|
||||
private static Map<String, String> getFiledsType(Object obj) {
|
||||
Field[] fields = obj.getClass().getSuperclass().getDeclaredFields();
|
||||
Field[] superfields = obj.getClass().getDeclaredFields();
|
||||
Map<String, String> infoMap = new HashMap<String, String>();
|
||||
for (int i = 0; i < fields.length; i++) {
|
||||
infoMap.put(fields[i].getName(), fields[i].getType().toString().replace("class", ""));
|
||||
}
|
||||
for (int i = 0; i < superfields.length; i++) {
|
||||
infoMap.put(superfields[i].getName(), superfields[i].getType().toString().replace("class", ""));
|
||||
}
|
||||
return infoMap;
|
||||
}
|
||||
|
||||
    /**
     * Converts the names in fileds into database column names using the corresponding resultMap
     * in DfLogSearchDao.xml.
     *
     * @param mapName
     * @param fileds
     *            by agreement with the UI these are property names of the log class (the UI does
     *            not know the table structure or which column each property maps to), not database
     *            column names
     * @return
     * @throws Exception
     */
|
||||
private static String getFiledsSql(String mapName, String fileds) throws Exception {
|
||||
if (!StringUtil.isBlank(fileds)) {
|
||||
String[] fieldsColoumn = null;
|
||||
// 所有字段名
|
||||
List<String> columnList = new ArrayList<String>();
|
||||
// 所有属性名
|
||||
List<String> propertyList = new ArrayList<String>();
|
||||
// 属性名称为key,字段名称为value
|
||||
Map<String, String> columnMap = new HashMap<String, String>();
|
||||
// 解析Fileds的字段/属性名称
|
||||
fieldsColoumn = fileds.split(",");
|
||||
|
||||
// 从resultMap中获取字段名称和属性名称
|
||||
if (fieldsColoumn != null) {
|
||||
SqlSessionFactory sqlSessionFactory = SpringContextHolder.getBean(SqlSessionFactory.class);
|
||||
ResultMap map = sqlSessionFactory.getConfiguration().getResultMap(mapName + "Map");
|
||||
List<ResultMapping> mapping = map.getResultMappings();
|
||||
for (ResultMapping mapp : mapping) {
|
||||
columnList.add(mapp.getColumn().toLowerCase());
|
||||
propertyList.add(mapp.getProperty());
|
||||
columnMap.put(mapp.getProperty(), mapp.getColumn());
|
||||
}
|
||||
}
|
||||
if (fieldsColoumn != null) {
|
||||
fileds = "";
|
||||
for (String column : fieldsColoumn) {
|
||||
if (!StringUtil.isBlank(column)) {
|
||||
column = column.trim();
|
||||
if (columnList.contains(column)) {
|
||||
fileds += "," + column;
|
||||
} else if (propertyList.contains(column)) {
|
||||
fileds += "," + columnMap.get(column).toString();
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!StringUtil.isBlank(fileds)) {
|
||||
fileds = fileds.substring(1);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
return fileds;
|
||||
}
|
||||
|
||||
    /**
     * Retrieves the column/property mapping from the resultMap in DfLogSearchDao.xml for the given
     * class; the key is the property name.
     *
     * @param clazz
     * @return
     */
|
||||
private static Map<String, String> getFiledAndColumnMap(Class<?> clazz) {
|
||||
Map<String, String> map = new HashMap<String, String>();
|
||||
SqlSessionFactory sqlSessionFactory = SpringContextHolder.getBean(SqlSessionFactory.class);
|
||||
ResultMap resultMap = sqlSessionFactory.getConfiguration().getResultMap(clazz.getSimpleName() + "Map");
|
||||
List<ResultMapping> mapping = resultMap.getResultMappings();
|
||||
for (ResultMapping mapp : mapping) {
|
||||
map.put(mapp.getProperty(), mapp.getColumn().toLowerCase());
|
||||
}
|
||||
return map;
|
||||
|
||||
}
|
||||
|
||||
    /**
     * Uses reflection to read the value of field fieldName from bean via its getter.
     *
     * @param bean
     * @param fieldName
     * @return
     * @throws Exception
     */
|
||||
private static Object getFieldValue(Object bean, String fieldName) throws Exception {
|
||||
StringBuffer result = new StringBuffer();
|
||||
String methodName = result.append("get").append(fieldName.substring(0, 1).toUpperCase())
|
||||
.append(fieldName.substring(1)).toString();
|
||||
|
||||
Object rObject = null;
|
||||
Method method = null;
|
||||
|
||||
@SuppressWarnings("rawtypes")
|
||||
Class[] classArr = new Class[0];
|
||||
method = bean.getClass().getMethod(methodName, classArr);
|
||||
rObject = method.invoke(bean, new Object[0]);
|
||||
|
||||
return rObject;
|
||||
}
|
||||
|
||||
    /**
     * Uses reflection to call bean's setter and write value into field fieldName.
     *
     * @param bean
     * @param fieldName
     * @param value
     * @throws Exception
     */
|
||||
private static void setFieldValue(Object bean, String fieldName, Object value) throws Exception {
|
||||
StringBuffer result = new StringBuffer();
|
||||
String methodName = result.append("set").append(fieldName.substring(0, 1).toUpperCase())
|
||||
.append(fieldName.substring(1)).toString();
|
||||
        /**
         * Uses reflection to call bean's setter and write value into the field.
         */
|
||||
Class[] classArr = new Class[1];
|
||||
classArr[0] = "java.lang.String".getClass();
|
||||
Method method = bean.getClass().getMethod(methodName, classArr);
|
||||
method.invoke(bean, value);
|
||||
}
|
||||
|
||||
}
|
||||