Add a ClickHouse data source for log queries
Add a redisdb property to nis.properties for storing group-reuse region configuration
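In short: HiveJDBCByDruid is renamed to LogJDBCByDruid, the Druid pool settings move from druid.hive.* to druid.log.* so the same pool configuration can serve either backend, and a ClickHouseDataSourceByDruid bean plus jdbc.clickhouse.* properties are introduced. A minimal sketch of the resulting connection lookup, assembled from the hunks below (the ClickHouse bean is left commented out in this commit):

// Sketch only; the real class carries more members than shown here.
import java.sql.Connection;
import java.sql.SQLException;

import com.alibaba.druid.pool.DruidDataSource;
import com.nis.web.service.SpringContextHolder;

public class LogJDBCByDruid {
    static DruidDataSource datasource = null;

    public static Connection getConnection() throws SQLException {
        if (datasource == null) {
            // Hive stays the active pool; moving log queries to ClickHouse
            // is a one-line swap to the new bean.
            datasource = (DruidDataSource) SpringContextHolder.getBean("HiveDataSourceByDruid");
            // datasource = (DruidDataSource) SpringContextHolder.getBean("ClickHouseDataSourceByDruid");
        }
        return datasource.getConnection();
    }
}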
pom.xml
@@ -567,9 +567,16 @@
 
 
         <dependency>
             <groupId>com.alibaba</groupId>
             <artifactId>druid</artifactId>
             <version>1.1.10</version>
+        </dependency>
+
+        <!-- clickhouse-jdbc -->
+        <dependency>
+            <groupId>ru.yandex.clickhouse</groupId>
+            <artifactId>clickhouse-jdbc</artifactId>
+            <version>0.1.40</version>
         </dependency>
 
     </dependencies>
@@ -33,8 +33,8 @@ import com.nis.web.service.SpringContextHolder;
  * @date 2018年8月20日
  *
  */
-public class HiveJDBCByDruid {
-    private final static Logger logger = LoggerFactory.getLogger(HiveJDBCByDruid.class);
+public class LogJDBCByDruid {
+    private final static Logger logger = LoggerFactory.getLogger(LogJDBCByDruid.class);
     static DruidDataSource datasource = null;
     Connection conn = null;
     ResultSet rs = null;
@@ -43,6 +43,7 @@ public class HiveJDBCByDruid {
     public static Connection getConnection() throws SQLException {
         if (datasource == null) {
             datasource = (DruidDataSource) SpringContextHolder.getBean("HiveDataSourceByDruid");
+            // datasource = (DruidDataSource) SpringContextHolder.getBean("ClickHouseDataSourceByDruid");
         }
         return datasource.getConnection();
     }
@@ -91,6 +91,14 @@ public class PropertyPlaceholderConfigurerCrypt extends PropertyPlaceholderConfi
                         new String(AESUtil.decrypt(Base64.decodeBase64(clusterPassword), clusterScretKey)));
             }
 
+            //clickhouse
+            String clickHousePassword = props.getProperty("jdbc.clickhouse.password");
+            String clickHouseScretKey = props.getProperty("jdbc.clickhouse.key");
+            if (null != clickHousePassword) {
+                props.setProperty("jdbc.clickhouse.password",
+                        new String(AESUtil.decrypt(Base64.decodeBase64(clickHousePassword), clickHouseScretKey)));
+            }
+
         } catch (Exception e) {
             e.printStackTrace();
         }
@@ -1,6 +1,5 @@
 package com.nis.web.controller.restful;
 
-import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
@@ -23,7 +22,7 @@ import com.nis.restful.RestServiceException;
 import com.nis.util.Configurations;
 import com.nis.util.Constants;
 import com.nis.util.DateUtils;
-import com.nis.util.HiveJDBCByDruid;
+import com.nis.util.LogJDBCByDruid;
 import com.nis.util.JsonMapper;
 import com.nis.util.StringUtil;
 import com.nis.web.controller.BaseRestController;
@@ -77,7 +76,7 @@ public class LogController extends BaseRestController {
         String sql = HiveSqlService.getSql(page, dkBehaviorLog,
                 Configurations.getStringProperty(DkBehaviorLog.class.getSimpleName() + "HiveTable", "DK_BEHAVIOR_LOG"),
                 getCol2Col(), orderBy, null);
-        Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, DkBehaviorLog.class, "foundTime",
+        Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, DkBehaviorLog.class, "foundTime",
                 "recvTime");
         if (tableMapping == null) {
             logPage.setList(new ArrayList());
@@ -130,7 +129,7 @@ public class LogController extends BaseRestController {
         String sql = HiveSqlService.getSql(page, pxyHttpLog,
                 Configurations.getStringProperty(PxyHttpLog.class.getSimpleName() + "HiveTable", "PXY_HTTP_LOG"),
                 getCol2Col(), orderBy, null);
-        Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, PxyHttpLog.class, "foundTime",
+        Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, PxyHttpLog.class, "foundTime",
                 "recvTime");
         if (tableMapping == null) {
             logPage.setList(new ArrayList());
@@ -32,7 +32,7 @@ import com.nis.restful.RestServiceException;
 import com.nis.util.Configurations;
 import com.nis.util.Constants;
 import com.nis.util.DateUtils;
-import com.nis.util.HiveJDBCByDruid;
+import com.nis.util.LogJDBCByDruid;
 import com.nis.util.JsonMapper;
 import com.nis.util.StringUtil;
 import com.nis.web.controller.BaseRestController;
@@ -85,7 +85,7 @@ public class MmLogSearchController extends BaseRestController {
         String sql = HiveSqlService.getSql(page, mmAvIpLog,
                 Configurations.getStringProperty(MmAvIpLog.class.getSimpleName() + "HiveTable", "MM_AV_IP_LOG"),
                 getCol2Col(), orderBy, null);
-        Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmAvIpLog.class, "foundTime",
+        Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmAvIpLog.class, "foundTime",
                 "recvTime");
         if (tableMapping == null) {
             logPage.setList(new ArrayList());
@@ -138,7 +138,7 @@ public class MmLogSearchController extends BaseRestController {
         String sql = HiveSqlService.getSql(page, mmAvUrlLog,
                 Configurations.getStringProperty(MmAvUrlLog.class.getSimpleName() + "HiveTable", "MM_AV_URL_LOG"),
                 getCol2Col(), orderBy, null);
-        Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmAvUrlLog.class, "foundTime",
+        Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmAvUrlLog.class, "foundTime",
                 "recvTime");
         if (tableMapping == null) {
             logPage.setList(new ArrayList());
@@ -191,7 +191,7 @@ public class MmLogSearchController extends BaseRestController {
         String sql = HiveSqlService.getSql(page, mmPicIpLog,
                 Configurations.getStringProperty(MmPicIpLog.class.getSimpleName() + "HiveTable", "MM_PIC_IP_LOG"),
                 getCol2Col(), orderBy, null);
-        Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmPicIpLog.class, "foundTime",
+        Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmPicIpLog.class, "foundTime",
                 "recvTime");
         if (tableMapping == null) {
             logPage.setList(new ArrayList());
@@ -244,7 +244,7 @@ public class MmLogSearchController extends BaseRestController {
         String sql = HiveSqlService.getSql(page, mmPicUrlLog,
                 Configurations.getStringProperty(MmPicUrlLog.class.getSimpleName() + "HiveTable", "MM_PIC_URL_LOG"),
                 getCol2Col(), orderBy, null);
-        Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmPicUrlLog.class, "foundTime",
+        Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmPicUrlLog.class, "foundTime",
                 "recvTime");
         if (tableMapping == null) {
             logPage.setList(new ArrayList());
@@ -298,7 +298,7 @@ public class MmLogSearchController extends BaseRestController {
         String sql = HiveSqlService.getSql(page, mmVoipIpLog,
                 Configurations.getStringProperty(MmVoipIpLog.class.getSimpleName() + "HiveTable", "MM_VOIP_IP_LOG"),
                 getCol2Col(), orderBy, null);
-        Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmVoipIpLog.class, "foundTime",
+        Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmVoipIpLog.class, "foundTime",
                 "recvTime");
         if (tableMapping == null) {
             logPage.setList(new ArrayList());
@@ -351,7 +351,7 @@ public class MmLogSearchController extends BaseRestController {
         String sql = HiveSqlService.getSql(page, mmVoipLog,
                 Configurations.getStringProperty(MmVoipAccountLog.class.getSimpleName() + "HiveTable", "MM_VOIP_ACCOUNT_LOG"),
                 getCol2Col(), orderBy, null);
-        Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmVoipAccountLog.class, "foundTime",
+        Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmVoipAccountLog.class, "foundTime",
                 "recvTime");
         if (tableMapping == null) {
             logPage.setList(new ArrayList());
@@ -405,7 +405,7 @@ public class MmLogSearchController extends BaseRestController {
         String sql = HiveSqlService.getSql(page, mmSampleAudioLog,
                 Configurations.getStringProperty(MmSampleAudioLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_AUDIO_LOG"),
                 getCol2Col(), orderBy, null);
-        Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmSampleAudioLog.class, "foundTime",
+        Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmSampleAudioLog.class, "foundTime",
                 "recvTime");
         if (tableMapping == null) {
             logPage.setList(new ArrayList());
@@ -458,7 +458,7 @@ public class MmLogSearchController extends BaseRestController {
         String sql = HiveSqlService.getSql(page, mmSampleVideoLog,
                 Configurations.getStringProperty(MmSampleVideoLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_VIDEO_LOG"),
                 getCol2Col(), orderBy, null);
-        Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmSampleVideoLog.class, "foundTime",
+        Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmSampleVideoLog.class, "foundTime",
                 "recvTime");
         if (tableMapping == null) {
             logPage.setList(new ArrayList());
@@ -511,7 +511,7 @@ public class MmLogSearchController extends BaseRestController {
         String sql = HiveSqlService.getSql(page, mmPornAudioLevelLog,
                 Configurations.getStringProperty(MmPornAudioLevelLog.class.getSimpleName() + "HiveTable", "MM_PORN_AUDIO_LEVEL_LOG"),
                 getCol2Col(), orderBy, null);
-        Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmPornAudioLevelLog.class, "foundTime",
+        Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmPornAudioLevelLog.class, "foundTime",
                 "recvTime");
         if (tableMapping == null) {
             logPage.setList(new ArrayList());
@@ -564,7 +564,7 @@ public class MmLogSearchController extends BaseRestController {
         String sql = HiveSqlService.getSql(page, mmPornVideoLevelLog,
                 Configurations.getStringProperty(MmPornVideoLevelLog.class.getSimpleName() + "HiveTable", "MM_PRON_VIDEO_LOG"),
                 getCol2Col(), orderBy, null);
-        Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmPornVideoLevelLog.class, "foundTime",
+        Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmPornVideoLevelLog.class, "foundTime",
                 "recvTime");
         if (tableMapping == null) {
             logPage.setList(new ArrayList());
@@ -618,7 +618,7 @@ public class MmLogSearchController extends BaseRestController {
         String sql = HiveSqlService.getSql(page, mmSamplePicLog,
                 Configurations.getStringProperty(MmSamplePicLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_PIC_LOG"),
                 getCol2Col(), orderBy, null);
-        Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmSamplePicLog.class, "foundTime",
+        Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmSamplePicLog.class, "foundTime",
                 "recvTime");
         if (tableMapping == null) {
             logPage.setList(new ArrayList());
@@ -671,7 +671,7 @@ public class MmLogSearchController extends BaseRestController {
         String sql = HiveSqlService.getSql(page, mmSampleVoipLog,
                 Configurations.getStringProperty(MmSampleVoipLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_VOIP_LOG"),
                 getCol2Col(), orderBy, null);
-        Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmSampleVoipLog.class, "foundTime",
+        Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmSampleVoipLog.class, "foundTime",
                 "recvTime");
         if (tableMapping == null) {
             logPage.setList(new ArrayList());
@@ -35,7 +35,7 @@ import com.nis.restful.RestServiceException;
 import com.nis.util.Configurations;
 import com.nis.util.Constants;
 import com.nis.util.DateUtils;
-import com.nis.util.HiveJDBCByDruid;
+import com.nis.util.LogJDBCByDruid;
 import com.nis.util.JsonMapper;
 import com.nis.util.StringUtil;
 import com.nis.web.controller.BaseRestController;
@@ -91,7 +91,7 @@ public class NtcLogSearchController extends BaseRestController {
         String sql = HiveSqlService.getSql(page, ntcIpLog,
                 Configurations.getStringProperty(NtcIpLog.class.getSimpleName() + "HiveTable", "ntc_ip_log"),
                 getCol2Col(), orderBy, null);
-        Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null,sql , NtcIpLog.class, "foundTime",
+        Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null,sql , NtcIpLog.class, "foundTime",
                 "recvTime");
         if (tableMapping == null) {
             ntcIpLogPage.setList(new ArrayList<NtcIpLog>());
@@ -157,7 +157,7 @@ public class NtcLogSearchController extends BaseRestController {
         String sql = HiveSqlService.getSql(page, ntcHttpLog,
                 Configurations.getStringProperty(NtcHttpLog.class.getSimpleName() + "HiveTable", "ntc_http_log"),
                 getCol2Col(), orderBy, null);
-        Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcHttpLog.class, "foundTime",
+        Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcHttpLog.class, "foundTime",
                 "recvTime");
         if (tableMapping == null) {
             ntcHttpLogPage.setList(new ArrayList());
@@ -217,7 +217,7 @@ public class NtcLogSearchController extends BaseRestController {
         String sql = HiveSqlService.getSql(page, ntcDnsLog,
                 Configurations.getStringProperty(NtcDnsLog.class.getSimpleName() + "HiveTable", "ntc_dns_log"),
                 getCol2Col(), orderBy, null);
-        Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcDnsLog.class, "foundTime",
+        Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcDnsLog.class, "foundTime",
                 "recvTime");
         if (tableMapping == null) {
             ntcDnsLogPage.setList(new ArrayList());
@@ -277,7 +277,7 @@ public class NtcLogSearchController extends BaseRestController {
         String sql = HiveSqlService.getSql(page, ntcMailLog,
                 Configurations.getStringProperty(NtcMailLog.class.getSimpleName() + "HiveTable", "ntc_mail_log"),
                 getCol2Col(), orderBy, null);
-        Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcMailLog.class, "foundTime",
+        Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcMailLog.class, "foundTime",
                 "recvTime");
         if (tableMapping == null) {
             ntcMailLogPage.setList(new ArrayList());
@@ -337,7 +337,7 @@ public class NtcLogSearchController extends BaseRestController {
         String sql = HiveSqlService.getSql(page, ntcSslLog,
                 Configurations.getStringProperty(NtcSslLog.class.getSimpleName() + "HiveTable", "ntc_ssl_log"),
                 getCol2Col(), orderBy, null);
-        Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcSslLog.class, "foundTime",
+        Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcSslLog.class, "foundTime",
                 "recvTime");
         if (tableMapping == null) {
             ntcSslLogPage.setList(new ArrayList());
@@ -397,7 +397,7 @@ public class NtcLogSearchController extends BaseRestController {
         String sql = HiveSqlService.getSql(page, ntcPptpLog,
                 Configurations.getStringProperty(NtcPptpLog.class.getSimpleName() + "HiveTable", "ntc_pptp_log"),
                 getCol2Col(), orderBy, null);
-        Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcPptpLog.class, "foundTime",
+        Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcPptpLog.class, "foundTime",
                 "recvTime");
         if (tableMapping == null) {
             ntcPptpLogPage.setList(new ArrayList());
@@ -456,7 +456,7 @@ public class NtcLogSearchController extends BaseRestController {
         String sql = HiveSqlService.getSql(page, ntcL2tpLog,
                 Configurations.getStringProperty(NtcL2tpLog.class.getSimpleName() + "HiveTable", "ntc_l2tp_log"),
                 getCol2Col(), orderBy, null);
-        Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcL2tpLog.class, "foundTime",
+        Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcL2tpLog.class, "foundTime",
                 "recvTime");
         if (tableMapping == null) {
             ntcL2tpLogPage.setList(new ArrayList());
@@ -516,7 +516,7 @@ public class NtcLogSearchController extends BaseRestController {
         String sql = HiveSqlService.getSql(page, ntcOpenvpnLog,
                 Configurations.getStringProperty(NtcOpenvpnLog.class.getSimpleName() + "HiveTable", "ntc_openvpn_log"),
                 getCol2Col(), orderBy, null);
-        Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcOpenvpnLog.class, "foundTime",
+        Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcOpenvpnLog.class, "foundTime",
                 "recvTime");
         if (tableMapping == null) {
             ntcOpenvpnLogPage.setList(new ArrayList());
@@ -576,7 +576,7 @@ public class NtcLogSearchController extends BaseRestController {
         String sql = HiveSqlService.getSql(page, ntcIpsecLog,
                 Configurations.getStringProperty(NtcIpsecLog.class.getSimpleName() + "HiveTable", "ntc_ipsec_log"),
                 getCol2Col(), orderBy, null);
-        Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcIpsecLog.class, "foundTime",
+        Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcIpsecLog.class, "foundTime",
                 "recvTime");
         if (tableMapping == null) {
             ntcIpsecLogPage.setList(new ArrayList());
@@ -636,7 +636,7 @@ public class NtcLogSearchController extends BaseRestController {
         String sql = HiveSqlService.getSql(page, ntcSshLog,
                 Configurations.getStringProperty(NtcSshLog.class.getSimpleName() + "HiveTable", "ntc_ssh_log"),
                 getCol2Col(), orderBy, null);
-        Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcSshLog.class, "foundTime",
+        Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcSshLog.class, "foundTime",
                 "recvTime");
         if (tableMapping == null) {
             ntcSshLogPage.setList(new ArrayList());
@@ -696,7 +696,7 @@ public class NtcLogSearchController extends BaseRestController {
         String sql = HiveSqlService.getSql(page, ntcFtpLog,
                 Configurations.getStringProperty(NtcFtpLog.class.getSimpleName() + "HiveTable", "ntc_ftp_log"),
                 getCol2Col(), orderBy, null);
-        Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcFtpLog.class, "foundTime",
+        Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcFtpLog.class, "foundTime",
                 "recvTime");
         if (tableMapping == null) {
             ntcFtpLogPage.setList(new ArrayList());
@@ -754,7 +754,7 @@ public class NtcLogSearchController extends BaseRestController {
         String sql = HiveSqlService.getSql(page, ntcAppLog,
                 Configurations.getStringProperty(NtcAppLog.class.getSimpleName() + "HiveTable", "ntc_app_log"),
                 getCol2Col(), orderBy, null);
-        Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcAppLog.class, "foundTime",
+        Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcAppLog.class, "foundTime",
                 "recvTime");
         if (tableMapping == null) {
             ntcAppLogPage.setList(new ArrayList());
@@ -812,7 +812,7 @@ public class NtcLogSearchController extends BaseRestController {
         String sql = HiveSqlService.getSql(page, ntcDdosLog,
                 Configurations.getStringProperty(NtcDdosLog.class.getSimpleName() + "HiveTable", "ntc_ddos_log"),
                 getCol2Col(), orderBy, null);
-        Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcDdosLog.class, "foundTime",
+        Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcDdosLog.class, "foundTime",
                 "recvTime");
         if (tableMapping == null) {
             ntcDdosLogPage.setList(new ArrayList());
@@ -873,7 +873,7 @@ public class NtcLogSearchController extends BaseRestController {
                 Configurations.getStringProperty(NtcP2pLog.class.getSimpleName() + "HiveTable", "ntc_p2p_log"),
                 getCol2Col(), orderBy, null);
 
-        Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcP2pLog.class, "foundTime",
+        Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcP2pLog.class, "foundTime",
                 "recvTime");
         if (tableMapping == null) {
             ntcP2pLogPage.setList(new ArrayList());
@@ -934,7 +934,7 @@ public class NtcLogSearchController extends BaseRestController {
                 Configurations.getStringProperty(NtcBgpLog.class.getSimpleName() + "HiveTable", "ntc_bgp_log"),
                 getCol2Col(), orderBy, null);
 
-        Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcBgpLog.class, "foundTime",
+        Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcBgpLog.class, "foundTime",
                 "recvTime");
         if (tableMapping == null) {
             ntcBgpLogPage.setList(new ArrayList());
@@ -90,33 +90,70 @@
         <property name="username" value="${jdbc.hive.username}" />
         <property name="password" value="${jdbc.hive.password}" />
         <!-- 配置初始化连接池数量 -->
-        <property name="initialSize" value="${druid.hive.initialSize}" />
+        <property name="initialSize" value="${druid.log.initialSize}" />
         <!-- 配置最小连接池数量 -->
-        <property name="minIdle" value="${druid.hive.minIdle}" />
+        <property name="minIdle" value="${druid.log.minIdle}" />
         <!-- 配置最大连接池数量 -->
-        <property name="maxActive" value="${druid.hive.maxActive}" />
+        <property name="maxActive" value="${druid.log.maxActive}" />
         <!-- 配置获取连接等待超时的时间 单位毫秒 -->
-        <property name="maxWait" value="${druid.hive.maxWait}" />
+        <property name="maxWait" value="${druid.log.maxWait}" />
        <!--使用非公平锁 -->
-        <property name="useUnfairLock" value="${druid.hive.useUnfairLock}" />
+        <property name="useUnfairLock" value="${druid.log.useUnfairLock}" />
         <!-- 配置间隔多久才进行一次检测,检测需要关闭的空闲连接,单位是毫秒 -->
-        <property name="timeBetweenEvictionRunsMillis" value="${druid.hive.timeBetweenEvictionRunsMillis}" />
+        <property name="timeBetweenEvictionRunsMillis" value="${druid.log.timeBetweenEvictionRunsMillis}" />
         <!-- 配置一个连接在池中最小生存的时间,单位是毫秒 -->
-        <property name="minEvictableIdleTimeMillis" value="${druid.hive.minEvictableIdleTimeMillis}" />
+        <property name="minEvictableIdleTimeMillis" value="${druid.log.minEvictableIdleTimeMillis}" />
         <!--用来检测连接是否有效的sql,要求是一个查询语句。 -->
-        <property name="validationQuery" value="${druid.hive.validationQuery}" />
+        <property name="validationQuery" value="${druid.log.validationQuery}" />
         <!--申请连接的时候检测,如果空闲时间大于timeBetweenEvictionRunsMillis,执行validationQuery检测连接是否有效。 -->
-        <property name="testWhileIdle" value="${druid.hive.testWhileIdle}" />
+        <property name="testWhileIdle" value="${druid.log.testWhileIdle}" />
         <!--申请连接时执行validationQuery检测连接是否有效, -->
-        <property name="testOnBorrow" value="${druid.hive.testOnBorrow}" />
+        <property name="testOnBorrow" value="${druid.log.testOnBorrow}" />
         <!--归还连接时执行validationQuery检测连接是否有效, -->
-        <property name="testOnReturn" value="${druid.hive.testOnReturn}" />
+        <property name="testOnReturn" value="${druid.log.testOnReturn}" />
         <!-- 打开PSCache,并且指定每个连接上PSCache的大小 -->
-        <property name="poolPreparedStatements" value="${druid.hive.poolPreparedStatements}" />
+        <property name="poolPreparedStatements" value="${druid.log.poolPreparedStatements}" />
         <!--要启用PSCache,必须配置大于0,当大于0时,poolPreparedStatements自动触发修改为true。在Druid中,不会存在Oracle下PSCache占用内存过多的问题,可以把这个数值配置大一些,比如说100 -->
-        <property name="maxOpenPreparedStatements" value="${druid.hive.maxOpenPreparedStatements}" />
+        <property name="maxOpenPreparedStatements" value="${druid.log.maxOpenPreparedStatements}" />
         <!-- 配置监控统计拦截的filters,去掉后监控界面sql无法统计 -->
-        <property name="filters" value="${druid.hive.filters}" />
+        <property name="filters" value="${druid.log.filters}" />
+
+    </bean>
+
+    <bean id="ClickHouseDataSourceByDruid" class="com.alibaba.druid.pool.DruidDataSource"
+        init-method="init" destroy-method="close">
+        <property name="driverClassName" value="${jdbc.clickhouse.driver}" />
+        <property name="url" value="${jdbc.clickhouse.url}" />
+        <property name="username" value="${jdbc.clickhouse.username}" />
+        <property name="password" value="${jdbc.clickhouse.password}" />
+        <!-- 配置初始化连接池数量 -->
+        <property name="initialSize" value="${druid.log.initialSize}" />
+        <!-- 配置最小连接池数量 -->
+        <property name="minIdle" value="${druid.log.minIdle}" />
+        <!-- 配置最大连接池数量 -->
+        <property name="maxActive" value="${druid.log.maxActive}" />
+        <!-- 配置获取连接等待超时的时间 单位毫秒 -->
+        <property name="maxWait" value="${druid.log.maxWait}" />
+        <!--使用非公平锁 -->
+        <property name="useUnfairLock" value="${druid.log.useUnfairLock}" />
+        <!-- 配置间隔多久才进行一次检测,检测需要关闭的空闲连接,单位是毫秒 -->
+        <property name="timeBetweenEvictionRunsMillis" value="${druid.log.timeBetweenEvictionRunsMillis}" />
+        <!-- 配置一个连接在池中最小生存的时间,单位是毫秒 -->
+        <property name="minEvictableIdleTimeMillis" value="${druid.log.minEvictableIdleTimeMillis}" />
+        <!--用来检测连接是否有效的sql,要求是一个查询语句。 -->
+        <property name="validationQuery" value="${druid.log.validationQuery}" />
+        <!--申请连接的时候检测,如果空闲时间大于timeBetweenEvictionRunsMillis,执行validationQuery检测连接是否有效。 -->
+        <property name="testWhileIdle" value="${druid.log.testWhileIdle}" />
+        <!--申请连接时执行validationQuery检测连接是否有效, -->
+        <property name="testOnBorrow" value="${druid.log.testOnBorrow}" />
+        <!--归还连接时执行validationQuery检测连接是否有效, -->
+        <property name="testOnReturn" value="${druid.log.testOnReturn}" />
+        <!-- 打开PSCache,并且指定每个连接上PSCache的大小 -->
+        <property name="poolPreparedStatements" value="${druid.log.poolPreparedStatements}" />
+        <!--要启用PSCache,必须配置大于0,当大于0时,poolPreparedStatements自动触发修改为true。在Druid中,不会存在Oracle下PSCache占用内存过多的问题,可以把这个数值配置大一些,比如说100 -->
+        <property name="maxOpenPreparedStatements" value="${druid.log.maxOpenPreparedStatements}" />
+        <!-- 配置监控统计拦截的filters,去掉后监控界面sql无法统计 -->
+        <property name="filters" value="${druid.log.filters}" />
 
     </bean>
 
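The bean above wires ClickHouse into the same Druid pooling already used for Hive. A hypothetical stand-alone connectivity check, not part of this commit; the driver class, URL and pool sizes are the placeholder values from the properties file below:

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;

import com.alibaba.druid.pool.DruidDataSource;

public class ClickHousePoolCheck {
    public static void main(String[] args) throws Exception {
        DruidDataSource ds = new DruidDataSource();
        ds.setDriverClassName("ru.yandex.clickhouse.ClickHouseDriver");
        ds.setUrl("jdbc:clickhouse://clickhouse.baifendian.com:80/k18_ods");
        ds.setInitialSize(5);
        ds.setMinIdle(1);
        ds.setMaxActive(200);
        // "show tables" is the ClickHouse validation query suggested in the
        // properties file; the Hive pool uses "select unix_timestamp()".
        ds.setValidationQuery("show tables");
        ds.setTestWhileIdle(true);
        try (Connection conn = ds.getConnection();
                Statement st = conn.createStatement();
                ResultSet rs = st.executeQuery("show tables")) {
            while (rs.next()) {
                System.out.println(rs.getString(1));
            }
        } finally {
            ds.close();
        }
    }
}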
@@ -67,36 +67,48 @@ bonecp.hive.acquireIncrement=5
 bonecp.hive.statementsCacheSize=100
 
 
-#########################配置hive使用druid连接池#########################################
+#################日志查询clickhouse数据源信息#############
+jdbc.clickhouse.driver=ru.yandex.clickhouse.ClickHouseDriver
+jdbc.clickhouse.url=jdbc:clickhouse://clickhouse.baifendian.com:80/k18_ods
+jdbc.clickhouse.username=
+jdbc.clickhouse.key=
+jdbc.clickhouse.password=
+
+
+
+
+#########################配置日志查询使用druid连接池#########################################
 #配置初始化连接池数量
-druid.hive.initialSize=5
+druid.log.initialSize=5
 #配置最小连接池数量
-druid.hive.minIdle=1
+druid.log.minIdle=1
 #配置最大连接池数量
-druid.hive.maxActive=200
+druid.log.maxActive=200
 # 配置获取连接等待超时的时间 单位毫秒
-druid.hive.maxWait=600000
+druid.log.maxWait=600000
 #使用非公平锁
-druid.hive.useUnfairLock=true
+druid.log.useUnfairLock=true
 #配置间隔多久才进行一次检测,检测需要关闭的空闲连接,单位是毫秒
-druid.hive.timeBetweenEvictionRunsMillis=60000
+druid.log.timeBetweenEvictionRunsMillis=60000
 #配置一个连接在池中最小生存的时间,单位是毫秒
-druid.hive.minEvictableIdleTimeMillis=300000
+druid.log.minEvictableIdleTimeMillis=300000
-#用来检测连接是否有效的sql,要求是一个查询语句
-druid.hive.validationQuery=select unix_timestamp()
+#用来检测连接是否有效的sql,要求是一个查询语句,下面是hive的
+druid.log.validationQuery=select unix_timestamp()
+#用来检测连接是否有效的sql,要求是一个查询语句,下面是clickhouse的
+#druid.log.validationQuery=show tables
 #申请连接的时候检测,如果空闲时间大于timeBetweenEvictionRunsMillis,执行validationQuery检测连接是否有效
-druid.hive.testWhileIdle=true
+druid.log.testWhileIdle=true
 #申请连接时执行validationQuery检测连接是否有效
-druid.hive.testOnBorrow=true
+druid.log.testOnBorrow=true
 #归还连接时执行validationQuery检测连接是否有效
-druid.hive.testOnReturn=false
+druid.log.testOnReturn=false
 #打开PSCache,并且指定每个连接上PSCache的大小
-druid.hive.poolPreparedStatements=true
+druid.log.poolPreparedStatements=true
 #要启用PSCache,必须配置大于0,当大于0时,poolPreparedStatements自动触发修改为true。在Druid中,不会存在Oracle下PSCache占用内存过多的问题,可以把这个数值配置大一些,比如说100
-druid.hive.maxOpenPreparedStatements=100
+druid.log.maxOpenPreparedStatements=100
 #配置监控统计拦截的filters,去掉后监控界面sql无法统计
-druid.hive.filters=stat
+druid.log.filters=stat
 
 
 
@@ -190,6 +190,8 @@ digest.gen.tool.path=maat-redis/digest_gen
 maxRedisDBIndex=16
 ##存放编译,分组,域配置id关系的redis数据库编号
 idRelaRedisDBIndex=15
+#存储分组复用域配置的redisdb
+tmpStorageReuseRegionDB=15
 ##阀门配置在redisdb的序号
 tapRedisDb=7
 
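For the second change: tmpStorageReuseRegionDB names the redis database that holds group-reuse region configuration, alongside the existing index properties. A hypothetical consumer sketch; the Jedis client and the helper class below are illustrative assumptions rather than code from this commit, while Configurations.getStringProperty mirrors the accessor the controllers use:

import redis.clients.jedis.Jedis;

import com.nis.util.Configurations;

public class ReuseRegionStore {
    // Opens a connection pinned to the configured redis database.
    public static Jedis openReuseRegionDb(String host, int port) {
        int db = Integer.parseInt(
                Configurations.getStringProperty("tmpStorageReuseRegionDB", "15"));
        Jedis jedis = new Jedis(host, port);
        jedis.select(db); // same index as idRelaRedisDBIndex in this commit
        return jedis;
    }
}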