Add a ClickHouse data source for log queries, with a switch to toggle between ClickHouse and Hive

RenKaiGe-Office
2018-09-04 09:24:22 +08:00
parent 4ec5282ebd
commit 7c8e32de8b
12 changed files with 197 additions and 137 deletions

View File

@@ -1,11 +1,9 @@
package com.nis.util;
import java.io.File;
import java.util.Map;
import java.util.Properties;
public final class Configurations {
private static Properties prop = new Properties();
@@ -13,10 +11,11 @@ public final class Configurations {
try {
prop.load(Configurations.class.getResourceAsStream("/nis.properties"));
prop.load(Configurations.class.getResourceAsStream("/applicationLog-hive.properties"));
prop.load(Configurations.class.getResourceAsStream("/applicationLog-clickhouse.properties"));
prop.load(Configurations.class.getResourceAsStream("/applicationConfig-maatOracle.properties"));
prop.load(Configurations.class.getResourceAsStream("/applicationConfig-rule.properties"));
prop.load(Configurations.class.getResourceAsStream("/fastdfs-client.properties"));
} catch (Exception e) {
prop = null;
System.err.println("未知nis.properties,请确定文件是否存在!");
@@ -24,36 +23,35 @@ public final class Configurations {
}
public static String getStringProperty(String key, String defaultValue) {
if (prop==null||StringUtil.isBlank(prop.getProperty(key))) {
if (prop == null || StringUtil.isBlank(prop.getProperty(key))) {
return defaultValue;
}
return prop.getProperty(key).trim();
}
public static int getIntProperty(String key, int defaultValue) {
if (prop==null||StringUtil.isBlank(prop.getProperty(key))) {
if (prop == null || StringUtil.isBlank(prop.getProperty(key))) {
return defaultValue;
}
return Integer.parseInt(prop.getProperty(key).trim());
}
public static long getLongProperty(String key, long defaultValue) {
if (prop==null||StringUtil.isBlank(prop.getProperty(key))) {
if (prop == null || StringUtil.isBlank(prop.getProperty(key))) {
return defaultValue;
}
return Long.parseLong(prop.getProperty(key).trim());
}
public static boolean getBooleanProperty(String key, boolean defaultValue) {
if (prop==null||StringUtil.isBlank(prop.getProperty(key))) {
if (prop == null || StringUtil.isBlank(prop.getProperty(key))) {
return defaultValue;
}
return prop.getProperty(key).toLowerCase().trim().equals("true");
}
public static String getFileDirPathProperty(String key,
String defaultValue) {
if (prop==null||StringUtil.isBlank(prop.getProperty(key))) {
public static String getFileDirPathProperty(String key, String defaultValue) {
if (prop == null || StringUtil.isBlank(prop.getProperty(key))) {
return defaultValue;
}
String path = prop.getProperty(key).trim();
@@ -70,16 +68,9 @@ public final class Configurations {
}
return true;
}
public static Map getProp() {
return prop;
}
}
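The newly loaded applicationLog-clickhouse.properties is not shown in this commit view. A minimal sketch of what it plausibly contains follows; only jdbc.clickHouse.DBName and isUseClickHouse are keys actually read elsewhere in this commit, while the driver and URL entries are assumptions:

# Hypothetical applicationLog-clickhouse.properties; driver and URL are assumed,
# the two keys below are the ones read by Constants in this commit
jdbc.clickHouse.driverClass=ru.yandex.clickhouse.ClickHouseDriver
jdbc.clickHouse.url=jdbc:clickhouse://127.0.0.1:8123/k18_ods
jdbc.clickHouse.DBName=k18_ods
isUseClickHouse=true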

View File

@@ -1,6 +1,5 @@
package com.nis.util;
import java.text.SimpleDateFormat;
import java.util.Date;
public final class Constants {
@@ -187,7 +186,15 @@ public final class Constants {
/**
* Data-center database name; the database used by every query in the program (use HIVEDBNAME)
*/
public static final String HIVEDBNAME = Configurations.getStringProperty("jdbc.hive.Name", "xa_dfbhit_hive");
public static final String HIVEDBNAME = Configurations.getStringProperty("jdbc.hive.DBName", "maat");
/**
* ClickHouse database name
*/
public static final String CLICKHOUSEDBNAME = Configurations.getStringProperty("jdbc.clickHouse.DBName", "k18_ods");
/**
* Whether log queries use ClickHouse; if false, Hive is used
*/
public static final Boolean ISUSECLICKHOUSE = Configurations.getBooleanProperty("isUseClickHouse", true);
public static final String DIGEST_GEN_TOOL_PATH = Configurations.getStringProperty("digest.gen.tool.path", "maat-redis/digest_gen");
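Taken together, the switch and the two database names let callers qualify a table per backend. A hypothetical fragment (not from this commit; tableName stands for any configured log table):

// Hypothetical illustration only
String db = Constants.ISUSECLICKHOUSE ? Constants.CLICKHOUSEDBNAME : Constants.HIVEDBNAME;
String qualifiedTable = db + "." + tableName; // e.g. "k18_ods.ntc_ip_log" or "maat.ntc_ip_log"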

View File

@@ -24,6 +24,7 @@ import org.slf4j.LoggerFactory;
import com.alibaba.druid.pool.DruidDataSource;
import com.nis.domain.Page;
import com.nis.web.service.SpringContextHolder;
/**
*
* <p>Title: HiveJDBCByDruid</p>
@@ -42,8 +43,11 @@ public class LogJDBCByDruid {
public static Connection getConnection() throws SQLException {
if (datasource == null) {
datasource = (DruidDataSource) SpringContextHolder.getBean("HiveDataSourceByDruid");
// datasource = (DruidDataSource) SpringContextHolder.getBean("ClickHouseDataSourceByDruid");
if (Constants.ISUSECLICKHOUSE) {
datasource = (DruidDataSource) SpringContextHolder.getBean("ClickHouseDataSourceByDruid");
} else {
datasource = (DruidDataSource) SpringContextHolder.getBean("HiveDataSourceByDruid");
}
}
return datasource.getConnection();
}
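Only the two bean names are pinned down by this method; the pool definitions themselves are not part of the commit view. A sketch of what the two Druid pools might look like if built programmatically, with assumed driver classes, hosts, and ports:

import com.alibaba.druid.pool.DruidDataSource;
import com.nis.util.Constants;

// Hypothetical pool setup; only the bean names "HiveDataSourceByDruid" and
// "ClickHouseDataSourceByDruid" are confirmed by the code above
public class LogDataSources {
    static DruidDataSource hive() {
        DruidDataSource ds = new DruidDataSource();
        ds.setDriverClassName("org.apache.hive.jdbc.HiveDriver");          // assumed HiveServer2 driver
        ds.setUrl("jdbc:hive2://127.0.0.1:10000/" + Constants.HIVEDBNAME); // assumed host/port
        return ds;
    }

    static DruidDataSource clickHouse() {
        DruidDataSource ds = new DruidDataSource();
        ds.setDriverClassName("ru.yandex.clickhouse.ClickHouseDriver");    // 2018-era ClickHouse JDBC driver
        ds.setUrl("jdbc:clickhouse://127.0.0.1:8123/" + Constants.CLICKHOUSEDBNAME); // assumed host/port
        return ds;
    }
}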
@@ -61,8 +65,8 @@ public class LogJDBCByDruid {
* @return
* @throws Exception
*/
public Map<String, List<Object>> tableMapping(Page<?> page, String redisKey, String sql, Class<?> entityClass, Object... obj)
throws Exception {
public Map<String, List<Object>> tableMapping(Page<?> page, String redisKey, String sql, Class<?> entityClass,
Object... obj) throws Exception {
Map<String, List<Object>> mapList = new HashMap<String, List<Object>>();
try {
Map<String, String> filedAndColumnMap = getColumn2FiledMap(entityClass);
@@ -73,11 +77,11 @@ public class LogJDBCByDruid {
conn = getConnection();
logger.info("连接数据中心日志库成功--------------------------");
st = conn.createStatement();
logger.info("开始执行日志查询语句sql={}", sql);
rs = st.executeQuery(sql);
logger.info("执行查询语句成功sql={}", sql);
ResultSetMetaData metaData = rs.getMetaData();
while (rs.next()) {
Map<String,Object> map = new HashMap<String,Object>();
Map<String, Object> map = new HashMap<String, Object>();
for (int i = 1; i <= metaData.getColumnCount(); i++) {
Object value = rs.getObject(i);
String filedName = filedAndColumnMap.get(metaData.getColumnName(i).toString().toLowerCase());
@@ -104,34 +108,38 @@ public class LogJDBCByDruid {
if (null == listString || listString.size() == 0 || null == listObject || listObject.size() == 0) {
return null;
} else {
// No write-through to the cache for now
// if (Constants.IS_OPEN_REDIS && Constants.DATACENTER_OPEN_REDIS) {
//     new SaveRedisListThread(redisKey, listString, Constants.HIVE_EXPIRE).start();
// }
}
// subList is end-exclusive: requesting 0-30 actually returns rows 0-29
Integer startNum = (page.getPageNo() - 1) * page.getPageSize();
Integer endNum = startNum - 1 + page.getPageSize() + 1;
if (listString.size() >= startNum) {
    if (listString.size() >= endNum) {
        mapList.put("str", listString.subList(startNum, endNum));
    } else {
        mapList.put("str", listString.subList(startNum, listString.size()));
    }
} else {
    mapList.put("str", new ArrayList<Object>());
}
if (listObject.size() >= startNum) {
    if (listObject.size() >= endNum) {
        mapList.put("obj", listObject.subList(startNum, endNum));
    } else {
        mapList.put("obj", listObject.subList(startNum, listObject.size()));
    }
} else {
    mapList.put("obj", new ArrayList<Object>());
}
if (Constants.ISUSECLICKHOUSE) { // the SQL query is already paginated
    mapList.put("str", listString);
    mapList.put("obj", listObject);
} else {
    // subList is end-exclusive: requesting 0-30 actually returns rows 0-29
    Integer startNum = (page.getPageNo() - 1) * page.getPageSize();
    Integer endNum = startNum - 1 + page.getPageSize() + 1;
    if (listString.size() >= startNum) {
        if (listString.size() >= endNum) {
            mapList.put("str", listString.subList(startNum, endNum));
        } else {
            mapList.put("str", listString.subList(startNum, listString.size()));
        }
    } else {
        mapList.put("str", new ArrayList<Object>());
    }
    if (listObject.size() >= startNum) {
        if (listObject.size() >= endNum) {
            mapList.put("obj", listObject.subList(startNum, endNum));
        } else {
            mapList.put("obj", listObject.subList(startNum, listObject.size()));
        }
    } else {
        mapList.put("obj", new ArrayList<Object>());
    }
}
} finally {
closeConn();
}
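The Hive branch still fetches up to Constants.EVERY_GETHIVEDATANUM rows and pages them in memory; only the ClickHouse branch pushes paging into SQL. Note the endNum arithmetic above (startNum - 1 + pageSize + 1) simplifies to startNum + pageSize, which matches subList's exclusive upper bound. A self-contained sketch of that in-memory paging (hypothetical helper, not from this commit):

// Condensed, hypothetical equivalent of the in-memory paging above
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class SubListPagingDemo {
    static <T> List<T> pageOf(List<T> all, int pageNo, int pageSize) {
        int start = (pageNo - 1) * pageSize;
        if (all.size() < start) {
            return new ArrayList<T>(); // page lies beyond the fetched rows
        }
        int end = Math.min(start + pageSize, all.size()); // subList end is exclusive
        return all.subList(start, end);
    }

    public static void main(String[] args) {
        List<Integer> rows = Arrays.asList(0, 1, 2, 3, 4, 5, 6);
        System.out.println(pageOf(rows, 1, 3)); // [0, 1, 2]
        System.out.println(pageOf(rows, 3, 3)); // [6]
    }
}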
@@ -221,8 +229,6 @@ public class LogJDBCByDruid {
return obj;
}
public static Map<String, String> getColumn2FiledMap(Class<?> clazz) {
Map<String, String> map = new HashMap<String, String>();
SqlSessionFactory sqlSessionFactory = SpringContextHolder.getBean(SqlSessionFactory.class);

View File

@@ -17,7 +17,6 @@ import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.nis.web.service.AuditLogThread;
import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.log4j.Logger;
import org.springframework.http.HttpStatus;
@@ -30,8 +29,10 @@ import com.nis.restful.RestBusinessCode;
import com.nis.restful.RestConstants;
import com.nis.restful.RestResult;
import com.nis.restful.RestServiceException;
import com.nis.util.Configurations;
import com.nis.util.Constants;
import com.nis.util.DateUtils;
import com.nis.web.service.AuditLogThread;
import com.nis.web.service.ServicesRequestLogService;
/**
@@ -44,6 +45,7 @@ import com.nis.web.service.ServicesRequestLogService;
public class BaseRestController {
protected final Logger logger = Logger.getLogger(this.getClass());
private SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmssSSS");
/**
*
* @Title: serviceResponse
@@ -64,7 +66,7 @@ public class BaseRestController {
*/
public Map serviceResponse(AuditLogThread thread, long time, HttpServletRequest request,
HttpServletResponse response, String msg) {
HttpServletResponse response, String msg) {
RestResult restResult = new RestResult();
String requestMethod = request.getMethod();
if (requestMethod.equals(RequestMethod.GET.name())) {
@@ -110,7 +112,7 @@ public class BaseRestController {
* @version V1.0
*/
public Map serviceResponse(AuditLogThread thread, long time, HttpServletRequest request,
HttpServletResponse response, String msg, Object data) {
HttpServletResponse response, String msg, Object data) {
RestResult restResult = new RestResult();
String requestMethod = request.getMethod();
if (requestMethod.equals(RequestMethod.GET.name())) {
@@ -135,10 +137,7 @@ public class BaseRestController {
new Thread(thread).start();
return convert(restResult);
}
/**
* Does not insert the log into the database; only returns it to the front end
* @param time
@@ -148,8 +147,8 @@ public class BaseRestController {
* @param data
* @return
*/
public Map testServiceResponse(long time, HttpServletRequest request,
HttpServletResponse response, String msg, Object data) {
public Map testServiceResponse(long time, HttpServletRequest request, HttpServletResponse response, String msg,
Object data) {
RestResult restResult = new RestResult();
String requestMethod = request.getMethod();
if (requestMethod.equals(RequestMethod.GET.name())) {
@@ -192,7 +191,7 @@ public class BaseRestController {
* @version V1.0
*/
public Map compileServiceResponse(AuditLogThread thread, long time, HttpServletRequest request,
HttpServletResponse response, String msg, Object data) {
HttpServletResponse response, String msg, Object data) {
RestResult restResult = new RestResult();
String requestMethod = request.getMethod();
if (requestMethod.equals(RequestMethod.GET.name())) {
@@ -219,18 +218,18 @@ public class BaseRestController {
return convert(restResult);
}
/**
 * Standard response format for log query results
 * @param auditLogThread
 * @param executedTime
 * @param request
 * @param msg
 * @param data
 * @param logSource
 * @return
 */
public Map serviceLogResponse(AuditLogThread auditLogThread, long executedTime, HttpServletRequest request,
String msg, Object data, Integer logSource) {
String msg, Object data, Integer logSource) {
RestResult restResult = new RestResult();
String requestMethod = request.getMethod();
if (requestMethod.equals(RequestMethod.GET.name())) {
@@ -251,8 +250,8 @@ public class BaseRestController {
restResult.setMsg(msg);
restResult.setLogSource(logSource);
restResult.setTraceCode(auditLogThread.getTraceCode());
auditLogThread.setConsumerTime(executedTime);
auditLogThread.setBusinessCode(restResult.getBusinessCode().getValue());
new Thread(auditLogThread).start();
return convert(restResult);
}
@@ -359,19 +358,19 @@ public class BaseRestController {
}
}
protected AuditLogThread saveRequestLog(ServicesRequestLogService service, int opAction,
HttpServletRequest request, Object data) {
protected AuditLogThread saveRequestLog(ServicesRequestLogService service, int opAction, HttpServletRequest request,
Object data) {
logger.debug("SaveRequestLogThread初始化开始----" + System.currentTimeMillis());
AuditLogThread thread = AuditLogThread.getNewSaveRequestLogThread(request);
thread.setService(service);
thread.setOpAction(opAction);
thread.setTraceCode(sdf.format(new Date())+(Math.round((Math.random()*9+1)*10000)+""));
thread.setTraceCode(sdf.format(new Date()) + (Math.round((Math.random() * 9 + 1) * 10000) + ""));
if (data != null && ConfigCommonSource.class.isAssignableFrom(data.getClass())) {
ConfigCommonSource source = (ConfigCommonSource) data;
thread.setOperator(source.getOperator());
thread.setVersion(source.getVersion());
thread.setOpTime(source.getOpTime());
}else{
} else {
thread.setOperator("admin");
thread.setOpTime(new Date());
}
@@ -381,13 +380,13 @@ public class BaseRestController {
}
// Used for file uploads
protected AuditLogThread saveRequestLog(ServicesRequestLogService service, int opAction,
HttpServletRequest request, Object data, Object fileInfo) {
protected AuditLogThread saveRequestLog(ServicesRequestLogService service, int opAction, HttpServletRequest request,
Object data, Object fileInfo) {
AuditLogThread thread = AuditLogThread.getNewSaveRequestLogThread(request);
thread.setService(service);
// thread.setContent(fileInfo);
thread.setOpAction(opAction);
thread.setTraceCode(sdf.format(new Date())+(Math.round((Math.random()*9+1)*10000)+""));
thread.setTraceCode(sdf.format(new Date()) + (Math.round((Math.random() * 9 + 1) * 10000) + ""));
if (data != null && ConfigCommonSource.class.isAssignableFrom(data.getClass())) {
System.out.println(data.getClass().getSimpleName());
ConfigCommonSource source = (ConfigCommonSource) data;
@@ -398,4 +397,12 @@ public class BaseRestController {
return thread;
}
protected String getTableName(String key, String defaultTableName) {
if (Constants.ISUSECLICKHOUSE) {
key = key.replace("HiveTable", "ClickHouseTable");
}
return Configurations.getStringProperty(key, defaultTableName);
}
}
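The new getTableName helper gives every controller a single call site for both backends: when the switch is on, the configured key is rewritten from ...HiveTable to ...ClickHouseTable before the property lookup, and the default table name is used if neither key is configured. For example (key and default as used by NtcLogSearchController below):

// Looks up "NtcIpLogClickHouseTable" when isUseClickHouse=true,
// "NtcIpLogHiveTable" otherwise; falls back to "ntc_ip_log"
String table = getTableName(NtcIpLog.class.getSimpleName() + "HiveTable", "ntc_ip_log");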

View File

@@ -74,7 +74,7 @@ public class LogController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, dkBehaviorLog,
Configurations.getStringProperty(DkBehaviorLog.class.getSimpleName() + "HiveTable", "DK_BEHAVIOR_LOG"),
getTableName(DkBehaviorLog.class.getSimpleName() + "HiveTable", "DK_BEHAVIOR_LOG"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, DkBehaviorLog.class, "foundTime",
"recvTime");
@@ -127,7 +127,7 @@ public class LogController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, pxyHttpLog,
Configurations.getStringProperty(PxyHttpLog.class.getSimpleName() + "HiveTable", "PXY_HTTP_LOG"),
getTableName(PxyHttpLog.class.getSimpleName() + "HiveTable", "PXY_HTTP_LOG"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, PxyHttpLog.class, "foundTime",
"recvTime");

View File

@@ -83,7 +83,7 @@ public class MmLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, mmAvIpLog,
Configurations.getStringProperty(MmAvIpLog.class.getSimpleName() + "HiveTable", "MM_AV_IP_LOG"),
getTableName(MmAvIpLog.class.getSimpleName() + "HiveTable", "MM_AV_IP_LOG"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmAvIpLog.class, "foundTime",
"recvTime");
@@ -136,7 +136,7 @@ public class MmLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, mmAvUrlLog,
Configurations.getStringProperty(MmAvUrlLog.class.getSimpleName() + "HiveTable", "MM_AV_URL_LOG"),
getTableName(MmAvUrlLog.class.getSimpleName() + "HiveTable", "MM_AV_URL_LOG"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmAvUrlLog.class, "foundTime",
"recvTime");
@@ -189,7 +189,7 @@ public class MmLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, mmPicIpLog,
Configurations.getStringProperty(MmPicIpLog.class.getSimpleName() + "HiveTable", "MM_PIC_IP_LOG"),
getTableName(MmPicIpLog.class.getSimpleName() + "HiveTable", "MM_PIC_IP_LOG"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmPicIpLog.class, "foundTime",
"recvTime");
@@ -242,7 +242,7 @@ public class MmLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, mmPicUrlLog,
Configurations.getStringProperty(MmPicUrlLog.class.getSimpleName() + "HiveTable", "MM_PIC_URL_LOG"),
getTableName(MmPicUrlLog.class.getSimpleName() + "HiveTable", "MM_PIC_URL_LOG"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmPicUrlLog.class, "foundTime",
"recvTime");
@@ -296,7 +296,7 @@ public class MmLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, mmVoipIpLog,
Configurations.getStringProperty(MmVoipIpLog.class.getSimpleName() + "HiveTable", "MM_VOIP_IP_LOG"),
getTableName(MmVoipIpLog.class.getSimpleName() + "HiveTable", "MM_VOIP_IP_LOG"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmVoipIpLog.class, "foundTime",
"recvTime");
@@ -349,7 +349,7 @@ public class MmLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, mmVoipLog,
Configurations.getStringProperty(MmVoipAccountLog.class.getSimpleName() + "HiveTable", "MM_VOIP_ACCOUNT_LOG"),
getTableName(MmVoipAccountLog.class.getSimpleName() + "HiveTable", "MM_VOIP_ACCOUNT_LOG"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmVoipAccountLog.class, "foundTime",
"recvTime");
@@ -403,7 +403,7 @@ public class MmLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, mmSampleAudioLog,
Configurations.getStringProperty(MmSampleAudioLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_AUDIO_LOG"),
getTableName(MmSampleAudioLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_AUDIO_LOG"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmSampleAudioLog.class, "foundTime",
"recvTime");
@@ -456,7 +456,7 @@ public class MmLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, mmSampleVideoLog,
Configurations.getStringProperty(MmSampleVideoLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_VIDEO_LOG"),
getTableName(MmSampleVideoLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_VIDEO_LOG"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmSampleVideoLog.class, "foundTime",
"recvTime");
@@ -509,7 +509,7 @@ public class MmLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, mmPornAudioLevelLog,
Configurations.getStringProperty(MmPornAudioLevelLog.class.getSimpleName() + "HiveTable", "MM_PORN_AUDIO_LEVEL_LOG"),
getTableName(MmPornAudioLevelLog.class.getSimpleName() + "HiveTable", "MM_PORN_AUDIO_LEVEL_LOG"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmPornAudioLevelLog.class, "foundTime",
"recvTime");
@@ -562,7 +562,7 @@ public class MmLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, mmPornVideoLevelLog,
Configurations.getStringProperty(MmPornVideoLevelLog.class.getSimpleName() + "HiveTable", "MM_PRON_VIDEO_LOG"),
getTableName(MmPornVideoLevelLog.class.getSimpleName() + "HiveTable", "MM_PRON_VIDEO_LOG"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmPornVideoLevelLog.class, "foundTime",
"recvTime");
@@ -616,7 +616,7 @@ public class MmLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, mmSamplePicLog,
Configurations.getStringProperty(MmSamplePicLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_PIC_LOG"),
getTableName(MmSamplePicLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_PIC_LOG"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmSamplePicLog.class, "foundTime",
"recvTime");
@@ -669,7 +669,7 @@ public class MmLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, mmSampleVoipLog,
Configurations.getStringProperty(MmSampleVoipLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_VOIP_LOG"),
getTableName(MmSampleVoipLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_VOIP_LOG"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmSampleVoipLog.class, "foundTime",
"recvTime");

View File

@@ -84,12 +84,12 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
// ResultSet rs = HiveSqlService.getResultSet(page, ntcIpLog,
// Configurations.getStringProperty(NtcIpLog.class.getSimpleName() + "HiveTable", "ntc_ip_log"),
// getTableName(NtcIpLog.class.getSimpleName() + "HiveTable", "ntc_ip_log"),
// getCol2Col(), orderBy, null);
// Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcIpLog.class, "foundTime",
// "recvTime");
String sql = HiveSqlService.getSql(page, ntcIpLog,
Configurations.getStringProperty(NtcIpLog.class.getSimpleName() + "HiveTable", "ntc_ip_log"),
getTableName(NtcIpLog.class.getSimpleName() + "HiveTable", "ntc_ip_log"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcIpLog.class, "foundTime",
"recvTime");
@@ -112,7 +112,7 @@ public class NtcLogSearchController extends BaseRestController {
// ntcIpLogPage
// .setCount(
// HiveSqlService.getHivePageCount(ntcIpLog, null,
// Configurations.getStringProperty(
// getTableName(
// NtcIpLog.class.getSimpleName() + "HiveTable", "ntc_ip_log"),
// getCol2Col(), null));
} else {
@@ -155,7 +155,7 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, ntcHttpLog,
Configurations.getStringProperty(NtcHttpLog.class.getSimpleName() + "HiveTable", "ntc_http_log"),
getTableName(NtcHttpLog.class.getSimpleName() + "HiveTable", "ntc_http_log"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcHttpLog.class, "foundTime",
"recvTime");
@@ -215,7 +215,7 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, ntcDnsLog,
Configurations.getStringProperty(NtcDnsLog.class.getSimpleName() + "HiveTable", "ntc_dns_log"),
getTableName(NtcDnsLog.class.getSimpleName() + "HiveTable", "ntc_dns_log"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcDnsLog.class, "foundTime",
"recvTime");
@@ -275,7 +275,7 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, ntcMailLog,
Configurations.getStringProperty(NtcMailLog.class.getSimpleName() + "HiveTable", "ntc_mail_log"),
getTableName(NtcMailLog.class.getSimpleName() + "HiveTable", "ntc_mail_log"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcMailLog.class, "foundTime",
"recvTime");
@@ -335,7 +335,7 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, ntcSslLog,
Configurations.getStringProperty(NtcSslLog.class.getSimpleName() + "HiveTable", "ntc_ssl_log"),
getTableName(NtcSslLog.class.getSimpleName() + "HiveTable", "ntc_ssl_log"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcSslLog.class, "foundTime",
"recvTime");
@@ -395,7 +395,7 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, ntcPptpLog,
Configurations.getStringProperty(NtcPptpLog.class.getSimpleName() + "HiveTable", "ntc_pptp_log"),
getTableName(NtcPptpLog.class.getSimpleName() + "HiveTable", "ntc_pptp_log"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcPptpLog.class, "foundTime",
"recvTime");
@@ -454,7 +454,7 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, ntcL2tpLog,
Configurations.getStringProperty(NtcL2tpLog.class.getSimpleName() + "HiveTable", "ntc_l2tp_log"),
getTableName(NtcL2tpLog.class.getSimpleName() + "HiveTable", "ntc_l2tp_log"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcL2tpLog.class, "foundTime",
"recvTime");
@@ -514,7 +514,7 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, ntcOpenvpnLog,
Configurations.getStringProperty(NtcOpenvpnLog.class.getSimpleName() + "HiveTable", "ntc_openvpn_log"),
getTableName(NtcOpenvpnLog.class.getSimpleName() + "HiveTable", "ntc_openvpn_log"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcOpenvpnLog.class, "foundTime",
"recvTime");
@@ -574,7 +574,7 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, ntcIpsecLog,
Configurations.getStringProperty(NtcIpsecLog.class.getSimpleName() + "HiveTable", "ntc_ipsec_log"),
getTableName(NtcIpsecLog.class.getSimpleName() + "HiveTable", "ntc_ipsec_log"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcIpsecLog.class, "foundTime",
"recvTime");
@@ -634,7 +634,7 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, ntcSshLog,
Configurations.getStringProperty(NtcSshLog.class.getSimpleName() + "HiveTable", "ntc_ssh_log"),
getTableName(NtcSshLog.class.getSimpleName() + "HiveTable", "ntc_ssh_log"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcSshLog.class, "foundTime",
"recvTime");
@@ -694,7 +694,7 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, ntcFtpLog,
Configurations.getStringProperty(NtcFtpLog.class.getSimpleName() + "HiveTable", "ntc_ftp_log"),
getTableName(NtcFtpLog.class.getSimpleName() + "HiveTable", "ntc_ftp_log"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcFtpLog.class, "foundTime",
"recvTime");
@@ -752,7 +752,7 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, ntcAppLog,
Configurations.getStringProperty(NtcAppLog.class.getSimpleName() + "HiveTable", "ntc_app_log"),
getTableName(NtcAppLog.class.getSimpleName() + "HiveTable", "ntc_app_log"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcAppLog.class, "foundTime",
"recvTime");
@@ -810,7 +810,7 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, ntcDdosLog,
Configurations.getStringProperty(NtcDdosLog.class.getSimpleName() + "HiveTable", "ntc_ddos_log"),
getTableName(NtcDdosLog.class.getSimpleName() + "HiveTable", "ntc_ddos_log"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcDdosLog.class, "foundTime",
"recvTime");
@@ -870,7 +870,7 @@ public class NtcLogSearchController extends BaseRestController {
}
String sql = HiveSqlService.getSql(page, ntcP2pLog,
Configurations.getStringProperty(NtcP2pLog.class.getSimpleName() + "HiveTable", "ntc_p2p_log"),
getTableName(NtcP2pLog.class.getSimpleName() + "HiveTable", "ntc_p2p_log"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcP2pLog.class, "foundTime",
@@ -931,7 +931,7 @@ public class NtcLogSearchController extends BaseRestController {
}
String sql = HiveSqlService.getSql(page, ntcBgpLog,
Configurations.getStringProperty(NtcBgpLog.class.getSimpleName() + "HiveTable", "ntc_bgp_log"),
getTableName(NtcBgpLog.class.getSimpleName() + "HiveTable", "ntc_bgp_log"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcBgpLog.class, "foundTime",

View File

@@ -4,7 +4,6 @@ import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.text.ParseException;
import java.text.SimpleDateFormat;
@@ -34,13 +33,6 @@ public class HiveSqlService {
Connection conn = null;
ResultSet rs = null;
Statement st = null;
public static Connection getConnection() throws SQLException {
if (datasource == null) {
datasource = (DruidDataSource) SpringContextHolder.getBean("HiveDataSourceByDruid");
}
return datasource.getConnection();
}
private static SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
private static SimpleDateFormat sdf2 = new SimpleDateFormat("yyyyMMdd");
@@ -142,13 +134,23 @@ public class HiveSqlService {
}
}
Integer startNum = (page.getPageNo() - 1) * page.getPageSize() + 1;
Integer endNum = startNum - 1 + page.getPageSize();
// sql.append(" order by " + orderBy + " limit 10000) t1) t2 where row_Num between " + startNum + " and " + endNum);
sql.append(" limit " + Constants.EVERY_GETHIVEDATANUM);
logger.info("Data-center log query sql==================={}", sql);
return sql.toString();
if (Constants.ISUSECLICKHOUSE) {
    // Integer startNum = (page.getPageNo() - 1) * page.getPageSize() + 1;
    Integer startNum = (page.getPageNo() - 1) * page.getPageSize();
    // Integer endNum = startNum - 1 + page.getPageSize();
    sql.append(" order by " + orderBy);
    sql.append(" limit " + startNum + "," + page.getPageSize()); // ClickHouse paginates like MySQL
} else {
    // sql.append(" order by " + orderBy + " limit 10000) t1) t2 where row_Num between " + startNum + " and " + endNum);
    sql.append(" limit " + Constants.EVERY_GETHIVEDATANUM);
    logger.info("Data-center log query sql==================={}", sql);
}
return sql.toString().toUpperCase();
}
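For concreteness, with pageNo=3, pageSize=50, and orderBy="found_Time", the two branches finish the statement roughly as follows (prefixes invented; EVERY_GETHIVEDATANUM is assumed to be 10000 here):

... order by found_Time limit 100,50    -- ClickHouse branch: MySQL-style offset,count; offset = (3-1)*50
... limit 10000                         -- Hive branch: fixed cap, paged later in memory by LogJDBCByDruid

Note that the new return path upper-cases the entire statement, so any string literals embedded in the WHERE clause are upper-cased as well.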
public static Long getHivePageCount(Object bean, String countKey, String tableName,