Add a ClickHouse data source for log queries, with a switch to toggle between ClickHouse and Hive

RenKaiGe-Office
2018-09-04 09:24:22 +08:00
parent 4ec5282ebd
commit 7c8e32de8b
12 changed files with 197 additions and 137 deletions

View File

@@ -1,11 +1,9 @@
 package com.nis.util;

 import java.io.File;
 import java.util.Map;
 import java.util.Properties;

 public final class Configurations {
 	private static Properties prop = new Properties();
@@ -13,6 +11,7 @@ public final class Configurations {
 		try {
 			prop.load(Configurations.class.getResourceAsStream("/nis.properties"));
 			prop.load(Configurations.class.getResourceAsStream("/applicationLog-hive.properties"));
+			prop.load(Configurations.class.getResourceAsStream("/applicationLog-clickhouse.properties"));
 			prop.load(Configurations.class.getResourceAsStream("/applicationConfig-maatOracle.properties"));
 			prop.load(Configurations.class.getResourceAsStream("/applicationConfig-rule.properties"));
 			prop.load(Configurations.class.getResourceAsStream("/fastdfs-client.properties"));
@@ -24,36 +23,35 @@ public final class Configurations {
 	}

 	public static String getStringProperty(String key, String defaultValue) {
-		if (prop==null||StringUtil.isBlank(prop.getProperty(key))) {
+		if (prop == null || StringUtil.isBlank(prop.getProperty(key))) {
 			return defaultValue;
 		}
 		return prop.getProperty(key).trim();
 	}

 	public static int getIntProperty(String key, int defaultValue) {
-		if (prop==null||StringUtil.isBlank(prop.getProperty(key))) {
+		if (prop == null || StringUtil.isBlank(prop.getProperty(key))) {
 			return defaultValue;
 		}
 		return Integer.parseInt(prop.getProperty(key).trim());
 	}

 	public static long getLongProperty(String key, long defaultValue) {
-		if (prop==null||StringUtil.isBlank(prop.getProperty(key))) {
+		if (prop == null || StringUtil.isBlank(prop.getProperty(key))) {
 			return defaultValue;
 		}
 		return Long.parseLong(prop.getProperty(key).trim());
 	}

 	public static boolean getBooleanProperty(String key, boolean defaultValue) {
-		if (prop==null||StringUtil.isBlank(prop.getProperty(key))) {
+		if (prop == null || StringUtil.isBlank(prop.getProperty(key))) {
 			return defaultValue;
 		}
 		return prop.getProperty(key).toLowerCase().trim().equals("true");
 	}

-	public static String getFileDirPathProperty(String key,
-			String defaultValue) {
-		if (prop==null||StringUtil.isBlank(prop.getProperty(key))) {
+	public static String getFileDirPathProperty(String key, String defaultValue) {
+		if (prop == null || StringUtil.isBlank(prop.getProperty(key))) {
 			return defaultValue;
 		}
 		String path = prop.getProperty(key).trim();
@@ -71,15 +69,8 @@ public final class Configurations {
 		return true;
 	}

 	public static Map getProp() {
 		return prop;
 	}
 }
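Note: all of these files load into one shared Properties instance, so the new applicationLog-clickhouse.properties keys are read through the same accessors as every other setting, and a missing or blank key silently falls back to the caller's default. A minimal, self-contained sketch of that lookup contract (the real class delegates the blank check to StringUtil.isBlank, assumed here to mean null or empty after trimming):

```java
import java.util.Properties;

public class ConfigurationsSketch {
    private static final Properties prop = new Properties();

    static {
        // Later loads overwrite earlier keys, so applicationLog-clickhouse.properties
        // can override anything defined before it in the load order.
        prop.setProperty("isUseClickHouse", "true");
    }

    public static boolean getBooleanProperty(String key, boolean defaultValue) {
        String value = prop.getProperty(key);
        if (value == null || value.trim().isEmpty()) {
            return defaultValue; // missing or blank key falls back to the default
        }
        return value.trim().toLowerCase().equals("true");
    }

    public static void main(String[] args) {
        System.out.println(getBooleanProperty("isUseClickHouse", false)); // true
        System.out.println(getBooleanProperty("noSuchKey", true));        // true (default)
    }
}
```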

View File

@@ -1,6 +1,5 @@
 package com.nis.util;

-import java.text.SimpleDateFormat;
 import java.util.Date;

 public final class Constants {
@@ -187,7 +186,15 @@ public final class Constants {
 	/**
	 * Data-center database name, used by every query in the application (use HIVEDBNAME)
 	 */
-	public static final String HIVEDBNAME = Configurations.getStringProperty("jdbc.hive.Name", "xa_dfbhit_hive");
+	public static final String HIVEDBNAME = Configurations.getStringProperty("jdbc.hive.DBName", "maat");
+	/**
+	 * ClickHouse database name
+	 */
+	public static final String CLICKHOUSEDBNAME = Configurations.getStringProperty("jdbc.clickHouse.DBName", "k18_ods");
+	/**
+	 * Whether log queries use ClickHouse; Hive is used otherwise
+	 */
+	public static final Boolean ISUSECLICKHOUSE = Configurations.getBooleanProperty("isUseClickHouse", true);

 	public static final String DIGEST_GEN_TOOL_PATH = Configurations.getStringProperty("digest.gen.tool.path", "maat-redis/digest_gen");
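Since ISUSECLICKHOUSE is resolved once when Constants loads, every consumer sees a single consistent backend per process; flipping the switch takes effect on restart. A hypothetical illustration of branching on it to pick the database name (not code from this commit):

```java
// Hypothetical helper, not part of the commit: selects the log database name
// the way callers of Constants.ISUSECLICKHOUSE would.
public final class LogDbSelector {
    static final boolean USE_CLICKHOUSE = true;    // stands in for Constants.ISUSECLICKHOUSE
    static final String CLICKHOUSE_DB = "k18_ods"; // Constants.CLICKHOUSEDBNAME default
    static final String HIVE_DB = "maat";          // Constants.HIVEDBNAME default

    static String currentLogDb() {
        return USE_CLICKHOUSE ? CLICKHOUSE_DB : HIVE_DB;
    }

    public static void main(String[] args) {
        System.out.println("use " + currentLogDb()); // prints: use k18_ods
    }
}
```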

View File

@@ -24,6 +24,7 @@ import org.slf4j.LoggerFactory;
 import com.alibaba.druid.pool.DruidDataSource;
 import com.nis.domain.Page;
 import com.nis.web.service.SpringContextHolder;

 /**
  *
  * <p>Title: HiveJDBCByDruid</p>
@@ -42,8 +43,11 @@ public class LogJDBCByDruid {
 	public static Connection getConnection() throws SQLException {
 		if (datasource == null) {
-			datasource = (DruidDataSource) SpringContextHolder.getBean("HiveDataSourceByDruid");
-//			datasource = (DruidDataSource) SpringContextHolder.getBean("ClickHouseDataSourceByDruid");
+			if (Constants.ISUSECLICKHOUSE) {
+				datasource = (DruidDataSource) SpringContextHolder.getBean("ClickHouseDataSourceByDruid");
+			} else {
+				datasource = (DruidDataSource) SpringContextHolder.getBean("HiveDataSourceByDruid");
+			}
 		}
 		return datasource.getConnection();
 	}
@@ -61,8 +65,8 @@
	 * @return
	 * @throws Exception
	 */
-	public Map<String, List<Object>> tableMapping(Page<?> page, String redisKey, String sql, Class<?> entityClass, Object... obj)
-			throws Exception {
+	public Map<String, List<Object>> tableMapping(Page<?> page, String redisKey, String sql, Class<?> entityClass,
+			Object... obj) throws Exception {
 		Map<String, List<Object>> mapList = new HashMap<String, List<Object>>();
 		try {
 			Map<String, String> filedAndColumnMap = getColumn2FiledMap(entityClass);
@@ -73,11 +77,11 @@ public class LogJDBCByDruid {
 			conn = getConnection();
 			logger.info("Connected to the data-center log database--------------------------");
 			st = conn.createStatement();
+			logger.info("Start executing log query sql={}", sql);
 			rs = st.executeQuery(sql);
+			logger.info("Query executed successfully sql={}", sql);
 			ResultSetMetaData metaData = rs.getMetaData();
 			while (rs.next()) {
-				Map<String,Object> map = new HashMap<String,Object>();
+				Map<String, Object> map = new HashMap<String, Object>();
 				for (int i = 1; i <= metaData.getColumnCount(); i++) {
 					Object value = rs.getObject(i);
 					String filedName = filedAndColumnMap.get(metaData.getColumnName(i).toString().toLowerCase());
@@ -104,11 +108,15 @@
 			if (null == listString || listString.size() == 0 || null == listObject || listObject.size() == 0) {
 				return null;
 			} else {
 				// no write to the cache for now
 //				if (Constants.IS_OPEN_REDIS && Constants.DATACENTER_OPEN_REDIS) {
 //					new SaveRedisListThread(redisKey, listString, Constants.HIVE_EXPIRE).start();
 //				}
 			}
+			if (Constants.ISUSECLICKHOUSE) { // the SQL query is already paginated
+				mapList.put("str", listString);
+				mapList.put("obj", listObject);
+			} else {
 				// subList includes the start index and excludes the end: 0-30 actually fetches items 0-29
 				Integer startNum = (page.getPageNo() - 1) * page.getPageSize();
 				Integer endNum = startNum - 1 + page.getPageSize() + 1;
@@ -131,7 +139,7 @@ public class LogJDBCByDruid {
 				} else {
 					mapList.put("obj", new ArrayList<Object>());
 				}
+			}
 		} finally {
 			closeConn();
 		}
@@ -221,8 +229,6 @@ public class LogJDBCByDruid {
 		return obj;
 	}

 	public static Map<String, String> getColumn2FiledMap(Class<?> clazz) {
 		Map<String, String> map = new HashMap<String, String>();
 		SqlSessionFactory sqlSessionFactory = SpringContextHolder.getBean(SqlSessionFactory.class);
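Two things change in this class: getConnection() now picks the Druid pool bean by the switch, and the post-processing differs per backend. Under ClickHouse the SQL is already paginated, so the result lists are used as-is; under Hive the query still over-fetches and the requested page is cut out with subList. A standalone sketch of that slicing arithmetic (the commit computes startNum/endNum inline):

```java
import java.util.ArrayList;
import java.util.List;

public class PageSliceSketch {
    // Cut one page out of an over-fetched result list, as the Hive branch does.
    // subList(from, to) includes 'from' and excludes 'to'.
    static <T> List<T> slice(List<T> rows, int pageNo, int pageSize) {
        int start = (pageNo - 1) * pageSize;
        if (start >= rows.size()) {
            return new ArrayList<T>(); // page beyond the data: empty result
        }
        int end = Math.min(start + pageSize, rows.size());
        return new ArrayList<T>(rows.subList(start, end));
    }

    public static void main(String[] args) {
        List<Integer> rows = new ArrayList<Integer>();
        for (int i = 0; i < 75; i++) rows.add(i);
        System.out.println(slice(rows, 3, 30)); // rows 60..74, the short last page
    }
}
```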

View File

@@ -17,7 +17,6 @@ import java.util.Map;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;

-import com.nis.web.service.AuditLogThread;
 import org.apache.commons.lang3.StringEscapeUtils;
 import org.apache.log4j.Logger;
 import org.springframework.http.HttpStatus;
@@ -30,8 +29,10 @@ import com.nis.restful.RestBusinessCode;
 import com.nis.restful.RestConstants;
 import com.nis.restful.RestResult;
 import com.nis.restful.RestServiceException;
+import com.nis.util.Configurations;
 import com.nis.util.Constants;
 import com.nis.util.DateUtils;
+import com.nis.web.service.AuditLogThread;
 import com.nis.web.service.ServicesRequestLogService;

 /**
@@ -44,6 +45,7 @@ import com.nis.web.service.ServicesRequestLogService;
 public class BaseRestController {
 	protected final Logger logger = Logger.getLogger(this.getClass());
 	private SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmssSSS");

 	/**
	 *
	 * @Title: serviceResponse
@@ -136,9 +138,6 @@ public class BaseRestController {
 		return convert(restResult);
 	}

 	/**
	 * Do not insert the log into the database; only return it to the front end
	 * @param time
@@ -148,8 +147,8 @@
	 * @param data
	 * @return
	 */
-	public Map testServiceResponse(long time, HttpServletRequest request,
-			HttpServletResponse response, String msg, Object data) {
+	public Map testServiceResponse(long time, HttpServletRequest request, HttpServletResponse response, String msg,
+			Object data) {
 		RestResult restResult = new RestResult();
 		String requestMethod = request.getMethod();
 		if (requestMethod.equals(RequestMethod.GET.name())) {
@@ -359,19 +358,19 @@
 		}
 	}

-	protected AuditLogThread saveRequestLog(ServicesRequestLogService service, int opAction,
-			HttpServletRequest request, Object data) {
+	protected AuditLogThread saveRequestLog(ServicesRequestLogService service, int opAction, HttpServletRequest request,
+			Object data) {
 		logger.debug("SaveRequestLogThread init started----" + System.currentTimeMillis());
 		AuditLogThread thread = AuditLogThread.getNewSaveRequestLogThread(request);
 		thread.setService(service);
 		thread.setOpAction(opAction);
-		thread.setTraceCode(sdf.format(new Date())+(Math.round((Math.random()*9+1)*10000)+""));
+		thread.setTraceCode(sdf.format(new Date()) + (Math.round((Math.random() * 9 + 1) * 10000) + ""));
 		if (data != null && ConfigCommonSource.class.isAssignableFrom(data.getClass())) {
 			ConfigCommonSource source = (ConfigCommonSource) data;
 			thread.setOperator(source.getOperator());
 			thread.setVersion(source.getVersion());
 			thread.setOpTime(source.getOpTime());
-		}else{
+		} else {
 			thread.setOperator("admin");
 			thread.setOpTime(new Date());
 		}
@@ -381,13 +380,13 @@
 	}

 	// used for file uploads
-	protected AuditLogThread saveRequestLog(ServicesRequestLogService service, int opAction,
-			HttpServletRequest request, Object data, Object fileInfo) {
+	protected AuditLogThread saveRequestLog(ServicesRequestLogService service, int opAction, HttpServletRequest request,
+			Object data, Object fileInfo) {
 		AuditLogThread thread = AuditLogThread.getNewSaveRequestLogThread(request);
 		thread.setService(service);
 //		thread.setContent(fileInfo);
 		thread.setOpAction(opAction);
-		thread.setTraceCode(sdf.format(new Date())+(Math.round((Math.random()*9+1)*10000)+""));
+		thread.setTraceCode(sdf.format(new Date()) + (Math.round((Math.random() * 9 + 1) * 10000) + ""));
 		if (data != null && ConfigCommonSource.class.isAssignableFrom(data.getClass())) {
 			System.out.println(data.getClass().getSimpleName());
 			ConfigCommonSource source = (ConfigCommonSource) data;
@@ -398,4 +397,12 @@
 		return thread;
 	}

+	protected String getTableName(String key, String defaultTableName) {
+		if (Constants.ISUSECLICKHOUSE) {
+			key = key.replace("HiveTable", "ClickHouseTable");
+		}
+		return Configurations.getStringProperty(key, defaultTableName);
+	}
 }
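getTableName is what keeps the controllers below nearly unchanged: they still build the key as beanSimpleName + "HiveTable", and the helper rewrites it to the ClickHouse key before the property lookup. A runnable sketch of that rewrite against one entry from the new properties file:

```java
import java.util.Properties;

public class TableNameSketch {
    static final boolean USE_CLICKHOUSE = true; // stands in for Constants.ISUSECLICKHOUSE
    static final Properties prop = new Properties();

    static {
        // One entry copied from applicationLog-clickhouse.properties
        prop.setProperty("NtcIpLogClickHouseTable", "TBS_ODS_NTC_IP_LOG");
    }

    // Mirrors BaseRestController.getTableName: rewrite the Hive key to the
    // ClickHouse key when the switch is on, then fall back to the default.
    static String getTableName(String key, String defaultTableName) {
        if (USE_CLICKHOUSE) {
            key = key.replace("HiveTable", "ClickHouseTable");
        }
        String value = prop.getProperty(key);
        return value != null ? value : defaultTableName;
    }

    public static void main(String[] args) {
        System.out.println(getTableName("NtcIpLogHiveTable", "ntc_ip_log")); // TBS_ODS_NTC_IP_LOG
    }
}
```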

View File

@@ -74,7 +74,7 @@ public class LogController extends BaseRestController {
 			orderBy = "found_Time";
 		}
 		String sql = HiveSqlService.getSql(page, dkBehaviorLog,
-				Configurations.getStringProperty(DkBehaviorLog.class.getSimpleName() + "HiveTable", "DK_BEHAVIOR_LOG"),
+				getTableName(DkBehaviorLog.class.getSimpleName() + "HiveTable", "DK_BEHAVIOR_LOG"),
 				getCol2Col(), orderBy, null);
 		Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, DkBehaviorLog.class, "foundTime",
 				"recvTime");
@@ -127,7 +127,7 @@ public class LogController extends BaseRestController {
 			orderBy = "found_Time";
 		}
 		String sql = HiveSqlService.getSql(page, pxyHttpLog,
-				Configurations.getStringProperty(PxyHttpLog.class.getSimpleName() + "HiveTable", "PXY_HTTP_LOG"),
+				getTableName(PxyHttpLog.class.getSimpleName() + "HiveTable", "PXY_HTTP_LOG"),
 				getCol2Col(), orderBy, null);
 		Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, PxyHttpLog.class, "foundTime",
 				"recvTime");

View File

@@ -83,7 +83,7 @@ public class MmLogSearchController extends BaseRestController {
 			orderBy = "found_Time";
 		}
 		String sql = HiveSqlService.getSql(page, mmAvIpLog,
-				Configurations.getStringProperty(MmAvIpLog.class.getSimpleName() + "HiveTable", "MM_AV_IP_LOG"),
+				getTableName(MmAvIpLog.class.getSimpleName() + "HiveTable", "MM_AV_IP_LOG"),
 				getCol2Col(), orderBy, null);
 		Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmAvIpLog.class, "foundTime",
 				"recvTime");
@@ -136,7 +136,7 @@ public class MmLogSearchController extends BaseRestController {
 			orderBy = "found_Time";
 		}
 		String sql = HiveSqlService.getSql(page, mmAvUrlLog,
-				Configurations.getStringProperty(MmAvUrlLog.class.getSimpleName() + "HiveTable", "MM_AV_URL_LOG"),
+				getTableName(MmAvUrlLog.class.getSimpleName() + "HiveTable", "MM_AV_URL_LOG"),
 				getCol2Col(), orderBy, null);
 		Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmAvUrlLog.class, "foundTime",
 				"recvTime");
@@ -189,7 +189,7 @@ public class MmLogSearchController extends BaseRestController {
 			orderBy = "found_Time";
 		}
 		String sql = HiveSqlService.getSql(page, mmPicIpLog,
-				Configurations.getStringProperty(MmPicIpLog.class.getSimpleName() + "HiveTable", "MM_PIC_IP_LOG"),
+				getTableName(MmPicIpLog.class.getSimpleName() + "HiveTable", "MM_PIC_IP_LOG"),
 				getCol2Col(), orderBy, null);
 		Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmPicIpLog.class, "foundTime",
 				"recvTime");
@@ -242,7 +242,7 @@ public class MmLogSearchController extends BaseRestController {
 			orderBy = "found_Time";
 		}
 		String sql = HiveSqlService.getSql(page, mmPicUrlLog,
-				Configurations.getStringProperty(MmPicUrlLog.class.getSimpleName() + "HiveTable", "MM_PIC_URL_LOG"),
+				getTableName(MmPicUrlLog.class.getSimpleName() + "HiveTable", "MM_PIC_URL_LOG"),
 				getCol2Col(), orderBy, null);
 		Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmPicUrlLog.class, "foundTime",
 				"recvTime");
@@ -296,7 +296,7 @@ public class MmLogSearchController extends BaseRestController {
 			orderBy = "found_Time";
 		}
 		String sql = HiveSqlService.getSql(page, mmVoipIpLog,
-				Configurations.getStringProperty(MmVoipIpLog.class.getSimpleName() + "HiveTable", "MM_VOIP_IP_LOG"),
+				getTableName(MmVoipIpLog.class.getSimpleName() + "HiveTable", "MM_VOIP_IP_LOG"),
 				getCol2Col(), orderBy, null);
 		Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmVoipIpLog.class, "foundTime",
 				"recvTime");
@@ -349,7 +349,7 @@ public class MmLogSearchController extends BaseRestController {
 			orderBy = "found_Time";
 		}
 		String sql = HiveSqlService.getSql(page, mmVoipLog,
-				Configurations.getStringProperty(MmVoipAccountLog.class.getSimpleName() + "HiveTable", "MM_VOIP_ACCOUNT_LOG"),
+				getTableName(MmVoipAccountLog.class.getSimpleName() + "HiveTable", "MM_VOIP_ACCOUNT_LOG"),
 				getCol2Col(), orderBy, null);
 		Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmVoipAccountLog.class, "foundTime",
 				"recvTime");
@@ -403,7 +403,7 @@ public class MmLogSearchController extends BaseRestController {
 			orderBy = "found_Time";
 		}
 		String sql = HiveSqlService.getSql(page, mmSampleAudioLog,
-				Configurations.getStringProperty(MmSampleAudioLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_AUDIO_LOG"),
+				getTableName(MmSampleAudioLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_AUDIO_LOG"),
 				getCol2Col(), orderBy, null);
 		Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmSampleAudioLog.class, "foundTime",
 				"recvTime");
@@ -456,7 +456,7 @@ public class MmLogSearchController extends BaseRestController {
 			orderBy = "found_Time";
 		}
 		String sql = HiveSqlService.getSql(page, mmSampleVideoLog,
-				Configurations.getStringProperty(MmSampleVideoLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_VIDEO_LOG"),
+				getTableName(MmSampleVideoLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_VIDEO_LOG"),
 				getCol2Col(), orderBy, null);
 		Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmSampleVideoLog.class, "foundTime",
 				"recvTime");
@@ -509,7 +509,7 @@ public class MmLogSearchController extends BaseRestController {
 			orderBy = "found_Time";
 		}
 		String sql = HiveSqlService.getSql(page, mmPornAudioLevelLog,
-				Configurations.getStringProperty(MmPornAudioLevelLog.class.getSimpleName() + "HiveTable", "MM_PORN_AUDIO_LEVEL_LOG"),
+				getTableName(MmPornAudioLevelLog.class.getSimpleName() + "HiveTable", "MM_PORN_AUDIO_LEVEL_LOG"),
 				getCol2Col(), orderBy, null);
 		Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmPornAudioLevelLog.class, "foundTime",
 				"recvTime");
@@ -562,7 +562,7 @@ public class MmLogSearchController extends BaseRestController {
 			orderBy = "found_Time";
 		}
 		String sql = HiveSqlService.getSql(page, mmPornVideoLevelLog,
-				Configurations.getStringProperty(MmPornVideoLevelLog.class.getSimpleName() + "HiveTable", "MM_PRON_VIDEO_LOG"),
+				getTableName(MmPornVideoLevelLog.class.getSimpleName() + "HiveTable", "MM_PRON_VIDEO_LOG"),
 				getCol2Col(), orderBy, null);
 		Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmPornVideoLevelLog.class, "foundTime",
 				"recvTime");
@@ -616,7 +616,7 @@ public class MmLogSearchController extends BaseRestController {
 			orderBy = "found_Time";
 		}
 		String sql = HiveSqlService.getSql(page, mmSamplePicLog,
-				Configurations.getStringProperty(MmSamplePicLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_PIC_LOG"),
+				getTableName(MmSamplePicLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_PIC_LOG"),
 				getCol2Col(), orderBy, null);
 		Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmSamplePicLog.class, "foundTime",
 				"recvTime");
@@ -669,7 +669,7 @@ public class MmLogSearchController extends BaseRestController {
 			orderBy = "found_Time";
 		}
 		String sql = HiveSqlService.getSql(page, mmSampleVoipLog,
-				Configurations.getStringProperty(MmSampleVoipLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_VOIP_LOG"),
+				getTableName(MmSampleVoipLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_VOIP_LOG"),
 				getCol2Col(), orderBy, null);
 		Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmSampleVoipLog.class, "foundTime",
 				"recvTime");

View File

@@ -84,12 +84,12 @@ public class NtcLogSearchController extends BaseRestController {
 			orderBy = "found_Time";
 		}
 //		ResultSet rs = HiveSqlService.getResultSet(page, ntcIpLog,
-//				Configurations.getStringProperty(NtcIpLog.class.getSimpleName() + "HiveTable", "ntc_ip_log"),
+//				getTableName(NtcIpLog.class.getSimpleName() + "HiveTable", "ntc_ip_log"),
 //				getCol2Col(), orderBy, null);
 //		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcIpLog.class, "foundTime",
 //				"recvTime");
 		String sql = HiveSqlService.getSql(page, ntcIpLog,
-				Configurations.getStringProperty(NtcIpLog.class.getSimpleName() + "HiveTable", "ntc_ip_log"),
+				getTableName(NtcIpLog.class.getSimpleName() + "HiveTable", "ntc_ip_log"),
 				getCol2Col(), orderBy, null);
 		Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcIpLog.class, "foundTime",
 				"recvTime");
@@ -112,7 +112,7 @@ public class NtcLogSearchController extends BaseRestController {
 //			ntcIpLogPage
 //					.setCount(
 //							HiveSqlService.getHivePageCount(ntcIpLog, null,
-//									Configurations.getStringProperty(
+//									getTableName(
 //											NtcIpLog.class.getSimpleName() + "HiveTable", "ntc_ip_log"),
 //									getCol2Col(), null));
 		} else {
@@ -155,7 +155,7 @@ public class NtcLogSearchController extends BaseRestController {
 			orderBy = "found_Time";
 		}
 		String sql = HiveSqlService.getSql(page, ntcHttpLog,
-				Configurations.getStringProperty(NtcHttpLog.class.getSimpleName() + "HiveTable", "ntc_http_log"),
+				getTableName(NtcHttpLog.class.getSimpleName() + "HiveTable", "ntc_http_log"),
 				getCol2Col(), orderBy, null);
 		Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcHttpLog.class, "foundTime",
 				"recvTime");
@@ -215,7 +215,7 @@ public class NtcLogSearchController extends BaseRestController {
 			orderBy = "found_Time";
 		}
 		String sql = HiveSqlService.getSql(page, ntcDnsLog,
-				Configurations.getStringProperty(NtcDnsLog.class.getSimpleName() + "HiveTable", "ntc_dns_log"),
+				getTableName(NtcDnsLog.class.getSimpleName() + "HiveTable", "ntc_dns_log"),
 				getCol2Col(), orderBy, null);
 		Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcDnsLog.class, "foundTime",
 				"recvTime");
@@ -275,7 +275,7 @@ public class NtcLogSearchController extends BaseRestController {
 			orderBy = "found_Time";
 		}
 		String sql = HiveSqlService.getSql(page, ntcMailLog,
-				Configurations.getStringProperty(NtcMailLog.class.getSimpleName() + "HiveTable", "ntc_mail_log"),
+				getTableName(NtcMailLog.class.getSimpleName() + "HiveTable", "ntc_mail_log"),
 				getCol2Col(), orderBy, null);
 		Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcMailLog.class, "foundTime",
 				"recvTime");
@@ -335,7 +335,7 @@ public class NtcLogSearchController extends BaseRestController {
 			orderBy = "found_Time";
 		}
 		String sql = HiveSqlService.getSql(page, ntcSslLog,
-				Configurations.getStringProperty(NtcSslLog.class.getSimpleName() + "HiveTable", "ntc_ssl_log"),
+				getTableName(NtcSslLog.class.getSimpleName() + "HiveTable", "ntc_ssl_log"),
 				getCol2Col(), orderBy, null);
 		Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcSslLog.class, "foundTime",
 				"recvTime");
@@ -395,7 +395,7 @@ public class NtcLogSearchController extends BaseRestController {
 			orderBy = "found_Time";
 		}
 		String sql = HiveSqlService.getSql(page, ntcPptpLog,
-				Configurations.getStringProperty(NtcPptpLog.class.getSimpleName() + "HiveTable", "ntc_pptp_log"),
+				getTableName(NtcPptpLog.class.getSimpleName() + "HiveTable", "ntc_pptp_log"),
 				getCol2Col(), orderBy, null);
 		Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcPptpLog.class, "foundTime",
 				"recvTime");
@@ -454,7 +454,7 @@ public class NtcLogSearchController extends BaseRestController {
 			orderBy = "found_Time";
 		}
 		String sql = HiveSqlService.getSql(page, ntcL2tpLog,
-				Configurations.getStringProperty(NtcL2tpLog.class.getSimpleName() + "HiveTable", "ntc_l2tp_log"),
+				getTableName(NtcL2tpLog.class.getSimpleName() + "HiveTable", "ntc_l2tp_log"),
 				getCol2Col(), orderBy, null);
 		Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcL2tpLog.class, "foundTime",
 				"recvTime");
@@ -514,7 +514,7 @@ public class NtcLogSearchController extends BaseRestController {
 			orderBy = "found_Time";
 		}
 		String sql = HiveSqlService.getSql(page, ntcOpenvpnLog,
-				Configurations.getStringProperty(NtcOpenvpnLog.class.getSimpleName() + "HiveTable", "ntc_openvpn_log"),
+				getTableName(NtcOpenvpnLog.class.getSimpleName() + "HiveTable", "ntc_openvpn_log"),
 				getCol2Col(), orderBy, null);
 		Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcOpenvpnLog.class, "foundTime",
 				"recvTime");
@@ -574,7 +574,7 @@ public class NtcLogSearchController extends BaseRestController {
 			orderBy = "found_Time";
 		}
 		String sql = HiveSqlService.getSql(page, ntcIpsecLog,
-				Configurations.getStringProperty(NtcIpsecLog.class.getSimpleName() + "HiveTable", "ntc_ipsec_log"),
+				getTableName(NtcIpsecLog.class.getSimpleName() + "HiveTable", "ntc_ipsec_log"),
 				getCol2Col(), orderBy, null);
 		Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcIpsecLog.class, "foundTime",
 				"recvTime");
@@ -634,7 +634,7 @@ public class NtcLogSearchController extends BaseRestController {
 			orderBy = "found_Time";
 		}
 		String sql = HiveSqlService.getSql(page, ntcSshLog,
-				Configurations.getStringProperty(NtcSshLog.class.getSimpleName() + "HiveTable", "ntc_ssh_log"),
+				getTableName(NtcSshLog.class.getSimpleName() + "HiveTable", "ntc_ssh_log"),
 				getCol2Col(), orderBy, null);
 		Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcSshLog.class, "foundTime",
 				"recvTime");
@@ -694,7 +694,7 @@ public class NtcLogSearchController extends BaseRestController {
 			orderBy = "found_Time";
 		}
 		String sql = HiveSqlService.getSql(page, ntcFtpLog,
-				Configurations.getStringProperty(NtcFtpLog.class.getSimpleName() + "HiveTable", "ntc_ftp_log"),
+				getTableName(NtcFtpLog.class.getSimpleName() + "HiveTable", "ntc_ftp_log"),
 				getCol2Col(), orderBy, null);
 		Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcFtpLog.class, "foundTime",
 				"recvTime");
@@ -752,7 +752,7 @@ public class NtcLogSearchController extends BaseRestController {
 			orderBy = "found_Time";
 		}
 		String sql = HiveSqlService.getSql(page, ntcAppLog,
-				Configurations.getStringProperty(NtcAppLog.class.getSimpleName() + "HiveTable", "ntc_app_log"),
+				getTableName(NtcAppLog.class.getSimpleName() + "HiveTable", "ntc_app_log"),
 				getCol2Col(), orderBy, null);
 		Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcAppLog.class, "foundTime",
 				"recvTime");
@@ -810,7 +810,7 @@ public class NtcLogSearchController extends BaseRestController {
 			orderBy = "found_Time";
 		}
 		String sql = HiveSqlService.getSql(page, ntcDdosLog,
-				Configurations.getStringProperty(NtcDdosLog.class.getSimpleName() + "HiveTable", "ntc_ddos_log"),
+				getTableName(NtcDdosLog.class.getSimpleName() + "HiveTable", "ntc_ddos_log"),
 				getCol2Col(), orderBy, null);
 		Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcDdosLog.class, "foundTime",
 				"recvTime");
@@ -870,7 +870,7 @@ public class NtcLogSearchController extends BaseRestController {
 		}
 		String sql = HiveSqlService.getSql(page, ntcP2pLog,
-				Configurations.getStringProperty(NtcP2pLog.class.getSimpleName() + "HiveTable", "ntc_p2p_log"),
+				getTableName(NtcP2pLog.class.getSimpleName() + "HiveTable", "ntc_p2p_log"),
 				getCol2Col(), orderBy, null);
 		Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcP2pLog.class, "foundTime",
@@ -931,7 +931,7 @@ public class NtcLogSearchController extends BaseRestController {
 		}
 		String sql = HiveSqlService.getSql(page, ntcBgpLog,
-				Configurations.getStringProperty(NtcBgpLog.class.getSimpleName() + "HiveTable", "ntc_bgp_log"),
+				getTableName(NtcBgpLog.class.getSimpleName() + "HiveTable", "ntc_bgp_log"),
 				getCol2Col(), orderBy, null);
 		Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcBgpLog.class, "foundTime",

View File

@@ -4,7 +4,6 @@ import java.lang.reflect.Field;
 import java.lang.reflect.Method;
 import java.sql.Connection;
 import java.sql.ResultSet;
-import java.sql.SQLException;
 import java.sql.Statement;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
@@ -34,13 +33,6 @@ public class HiveSqlService {
 	Connection conn = null;
 	ResultSet rs = null;
 	Statement st = null;

-	public static Connection getConnection() throws SQLException {
-		if (datasource == null) {
-			datasource = (DruidDataSource) SpringContextHolder.getBean("HiveDataSourceByDruid");
-		}
-		return datasource.getConnection();
-	}

 	private static SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
 	private static SimpleDateFormat sdf2 = new SimpleDateFormat("yyyyMMdd");
@@ -142,13 +134,23 @@
 			}
 		}
-		Integer startNum = (page.getPageNo() - 1) * page.getPageSize() + 1;
-		Integer endNum = startNum - 1 + page.getPageSize();
+		if (Constants.ISUSECLICKHOUSE) {
+			// Integer startNum = (page.getPageNo() - 1) * page.getPageSize() + 1;
+			Integer startNum = (page.getPageNo() - 1) * page.getPageSize();
+			// Integer endNum = startNum - 1 + page.getPageSize();
+			sql.append(" order by " + orderBy);
+			sql.append(" limit " + startNum + "," + page.getPageSize()); // ClickHouse pagination works like MySQL
+		} else {
 //			sql.append(" order by " + orderBy + " limit 10000) t1) t2 where
 //			row_Num between " + startNum + " and " + endNum);
 			sql.append(" limit " + Constants.EVERY_GETHIVEDATANUM);
 			logger.info("Data-center log sql==================={}", sql);
-		return sql.toString();
+		}
+		return sql.toString().toUpperCase();
 	}

 	public static Long getHivePageCount(Object bean, String countKey, String tableName,
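The paging change is easiest to see in isolation: ClickHouse accepts MySQL-style LIMIT offset,count, so the page boundaries move into the SQL itself, while the Hive branch keeps appending one flat LIMIT and leaves slicing to the caller. A sketch under those assumptions (the 10000 cap for EVERY_GETHIVEDATANUM is taken from the commented-out line above, not confirmed elsewhere):

```java
public class LogPagingSql {
    static final boolean USE_CLICKHOUSE = true;    // Constants.ISUSECLICKHOUSE
    static final int EVERY_GETHIVEDATANUM = 10000; // assumed value of Constants.EVERY_GETHIVEDATANUM

    // Append the paging clause the way HiveSqlService.getSql does after this change.
    static String withPaging(StringBuilder sql, String orderBy, int pageNo, int pageSize) {
        if (USE_CLICKHOUSE) {
            int offset = (pageNo - 1) * pageSize; // zero-based first row of the page
            sql.append(" order by ").append(orderBy);
            sql.append(" limit ").append(offset).append(",").append(pageSize);
        } else {
            sql.append(" limit ").append(EVERY_GETHIVEDATANUM); // over-fetch; caller slices the page
        }
        return sql.toString();
    }

    public static void main(String[] args) {
        StringBuilder sql = new StringBuilder("select * from TBS_ODS_NTC_IP_LOG");
        System.out.println(withPaging(sql, "FOUND_TIME", 3, 30));
        // select * from TBS_ODS_NTC_IP_LOG order by FOUND_TIME limit 60,30
    }
}
```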

View File

@@ -104,7 +104,7 @@
 		<!-- Minimum time, in milliseconds, a connection may sit idle in the pool -->
 		<property name="minEvictableIdleTimeMillis" value="${druid.log.minEvictableIdleTimeMillis}" />
 		<!-- SQL used to check whether a connection is valid; must be a query statement -->
-		<property name="validationQuery" value="${druid.log.validationQuery}" />
+		<property name="validationQuery" value="${druid.log.hive.validationQuery}" />
 		<!-- On borrow, if idle time exceeds timeBetweenEvictionRunsMillis, run validationQuery to check the connection -->
 		<property name="testWhileIdle" value="${druid.log.testWhileIdle}" />
 		<!-- Run validationQuery on borrow to check whether the connection is valid -->
@@ -141,7 +141,7 @@
 		<!-- Minimum time, in milliseconds, a connection may sit idle in the pool -->
 		<property name="minEvictableIdleTimeMillis" value="${druid.log.minEvictableIdleTimeMillis}" />
 		<!-- SQL used to check whether a connection is valid; must be a query statement -->
-		<property name="validationQuery" value="${druid.log.validationQuery}" />
+		<property name="validationQuery" value="${druid.log.clickHouse.validationQuery}" />
 		<!-- On borrow, if idle time exceeds timeBetweenEvictionRunsMillis, run validationQuery to check the connection -->
 		<property name="testWhileIdle" value="${druid.log.testWhileIdle}" />
 		<!-- Run validationQuery on borrow to check whether the connection is valid -->
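The two Druid pool beans now differ only in their validation query, because Hive and ClickHouse accept different probe statements. The same split expressed programmatically (a sketch using Druid's standard setters; the JDBC URLs are placeholders, not values from this commit):

```java
import com.alibaba.druid.pool.DruidDataSource;

public class LogDataSourcesSketch {
    // Same pool tuning, different validation query per backend.
    static DruidDataSource build(String url, String validationQuery) {
        DruidDataSource ds = new DruidDataSource();
        ds.setUrl(url);                          // placeholder URL, see note above
        ds.setValidationQuery(validationQuery);  // probe used by idle/borrow checks
        ds.setTestWhileIdle(true);
        ds.setMinEvictableIdleTimeMillis(300000);
        return ds;
    }

    public static void main(String[] args) {
        DruidDataSource hive = build("jdbc:hive2://hive-host:10000/maat", "select unix_timestamp()");
        DruidDataSource ch = build("jdbc:clickhouse://ch-host:8123/k18_ods", "show tables");
        System.out.println(hive.getValidationQuery() + " | " + ch.getValidationQuery());
    }
}
```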

View File

@@ -0,0 +1,43 @@
+# The data-center ClickHouse log table names can change, so this system keeps the
+# log table names in a configuration file for easy later adjustment. The key is the
+# bean name + "ClickHouseTable"; the value is the actual ClickHouse table name.
+NtcIpLogClickHouseTable=TBS_ODS_NTC_IP_LOG
+NtcHttpLogClickHouseTable=TBS_ODS_NTC_HTTP_LOG
+NtcDnsLogClickHouseTable=TBS_ODS_NTC_DNS_LOG
+NtcMailLogClickHouseTable=TBS_ODS_NTC_MAIL_LOG
+NtcSslLogClickHouseTable=TBS_ODS_NTC_SSL_LOG
+NtcPptpLogClickHouseTable=TBS_ODS_NTC_PPTP_LOG
+NtcL2tpLogClickHouseTable=TBS_ODS_NTC_L2TP_LOG
+NtcOpenvpnLogClickHouseTable=TBS_ODS_NTC_OPENVPN_LOG
+NtcIpsecLogClickHouseTable=TBS_ODS_NTC_IPSEC_LOG
+NtcSshLogClickHouseTable=TBS_ODS_NTC_SSH_LOG
+NtcFtpLogClickHouseTable=TBS_ODS_NTC_FTP_LOG
+NtcAppLogClickHouseTable=TBS_ODS_NTC_APP_LOG
+NtcDdosLogClickHouseTable=TBS_ODS_NTC_DDOS_LOG
+NtcP2pLogClickHouseTable=TBS_ODS_NTC_P2P_LOG
+NtcBgpLogClickHouseTable=TBS_ODS_NTC_BGP_LOG
+DkBehaviorLogClickHouseTable=TBS_ODS_DK_BEHAVIOR_LOG
+MmAvIpLogClickHouseTable=TBS_ODS_MM_AV_IP_LOG
+MmAvUrlLogClickHouseTable=TBS_ODS_MM_AV_URL_LOG
+MmPicIpLogClickHouseTable=TBS_ODS_MM_PIC_IP_LOG
+MmPicUrlLogClickHouseTable=TBS_ODS_MM_PIC_URL_LOG
+MmVoipIpLogClickHouseTable=TBS_ODS_MM_VOIP_IP_LOG
+MmVoipAccountLogClickHouseTable=TBS_ODS_MM_VOIP_ACCOUNT_LOG
+MmSampleAudioLogClickHouseTable=TBS_ODS_MM_SAMPLE_AUDIO_LOG
+MmSampleVideoLogClickHouseTable=TBS_ODS_MM_SAMPLE_VIDEO_LOG
+MmPornAudioLevelLogClickHouseTable=TBS_ODS_MM_PORN_AUDIO_LEVEL_LOG
+MmPornVideoLevelLogClickHouseTable=TBS_ODS_MM_PORN_VIDEO_LEVEL_LOG
+MmSamplePicLogClickHouseTable=TBS_ODS_MM_SAMPLE_PIC_LOG
+MmSampleVoipLogClickHouseTable=TBS_ODS_MM_SAMPLE_VOIP_LOG
+PxyHttpLogClickHouseTable=TBS_ODS_PXY_HTTP_LOG

View File

@@ -94,9 +94,9 @@ druid.log.timeBetweenEvictionRunsMillis=60000
 #Minimum time, in milliseconds, a connection may sit idle in the pool
 druid.log.minEvictableIdleTimeMillis=300000
 #SQL used to check whether a connection is valid; must be a query statement. This one is for Hive
-druid.log.validationQuery=select unix_timestamp()
+druid.log.hive.validationQuery=select unix_timestamp()
 #SQL used to check whether a connection is valid; must be a query statement. This one is for ClickHouse
-#druid.log.validationQuery=show tables
+druid.log.clickHouse.validationQuery=show tables
 #On borrow, if idle time exceeds timeBetweenEvictionRunsMillis, run validationQuery to check the connection
 druid.log.testWhileIdle=true
 #Run validationQuery on borrow to check whether the connection is valid
@@ -128,7 +128,7 @@ redis.host=10.0.6.249
 #redis.host=192.168.10.205
 #Yizhuang
 #redis.host=192.168.10.215
-redis.port=6379
+redis.port=6381
 redis.pass=
 redis.maxIdle=5
 redis.maxTotal=250

View File

@@ -171,11 +171,15 @@ isUseES=false
+#Whether log queries use ClickHouse; Hive is used otherwise
+isUseClickHouse=true
+#ClickHouse database name
+jdbc.clickHouse.DBName=k18_ods
 #Data-center Hive log database name, used by every query in the application (use dbName)
-jdbc.hive.Name=maat
+jdbc.hive.DBName=maat
 maxPageSize=100000