Switch Hive access over to the Druid connection pool
Remove compiler warnings from several methods
 pom.xml | 8 ++++++++
@@ -637,6 +637,14 @@
 			<artifactId>fastdfs-client-java</artifactId>
 			<version>1.27-SNAPSHOT</version>
 		</dependency>
+
+
+		<dependency>
+			<groupId>com.alibaba</groupId>
+			<artifactId>druid</artifactId>
+			<version>1.1.10</version>
+		</dependency>
+
 	</dependencies>
 
 </project>
@@ -40,24 +40,16 @@ public class HiveJDBC {
 	static {
 		try {
 			prop.load(Configurations.class.getResourceAsStream("/jdbc.properties"));
 			driverName = prop.getProperty("jdbc.hive.driver").trim();
 			url = prop.getProperty("jdbc.hive.url").trim();
 			username = prop.getProperty("jdbc.hive.username").trim();
 			password = prop.getProperty("jdbc.hive.password").trim();
 		} catch (IOException e) {
 			e.printStackTrace();
 		}
 	}
 
-	public static void getConn(String searchActiveSys) throws Exception {
-
-		if (null != searchActiveSys && searchActiveSys.equals("4")) {
-			driverName = prop.getProperty("jdbc.hiveA.driver").trim();
-			url = prop.getProperty("jdbc.hiveA.url").trim();
-			username = prop.getProperty("jdbc.hiveA.username").trim();
-			password = prop.getProperty("jdbc.hiveA.password").trim();
-		} else {
-			driverName = prop.getProperty("jdbc.hiveB.driver").trim();
-			url = prop.getProperty("jdbc.hiveB.url").trim();
-			username = prop.getProperty("jdbc.hiveB.username").trim();
-			password = prop.getProperty("jdbc.hiveB.password").trim();
-		}
+	public static void getConn() throws Exception {
 		Class.forName(driverName);
 		conn = DriverManager.getConnection(url, username, password);
 
@@ -65,7 +57,7 @@ public class HiveJDBC {
 
 	public static ResultSet query(String sql, String searchActiveSys) throws Exception {
 		logger.info("Connecting to the data center log database--------------------------");
-		getConn(searchActiveSys);
+		getConn();
 		logger.info("Connected to the data center log database--------------------------");
 		st = conn.createStatement();
 		if (null != searchActiveSys && searchActiveSys.equals("4")) {
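Note: the HiveJDBCByDruid class that the controllers below switch to is referenced but not included in this diff. A minimal sketch of what such a helper might look like, assuming it borrows connections through the HiveSqlService.getConnection() method added further down in this commit (everything except the class name is an assumption):

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Hypothetical sketch of HiveJDBCByDruid; its source is not in this commit.
public class HiveJDBCByDruid {
	public List<Map<String, Object>> query(String sql) throws SQLException {
		// try-with-resources hands the connection back to the Druid pool on close()
		try (Connection conn = HiveSqlService.getConnection();
				Statement st = conn.createStatement();
				ResultSet rs = st.executeQuery(sql)) {
			List<Map<String, Object>> rows = new ArrayList<Map<String, Object>>();
			ResultSetMetaData meta = rs.getMetaData();
			while (rs.next()) {
				// copy each row into a column-name -> value map
				Map<String, Object> row = new HashMap<String, Object>();
				for (int i = 1; i <= meta.getColumnCount(); i++) {
					row.put(meta.getColumnLabel(i), rs.getObject(i));
				}
				rows.add(row);
			}
			return rows;
		}
	}
}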
@@ -1,6 +1,5 @@
 package com.nis.web.controller.restful;
 
-import java.sql.ResultSet;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -24,7 +23,7 @@ import com.nis.restful.RestServiceException;
 import com.nis.util.Configurations;
 import com.nis.util.Constants;
 import com.nis.util.DateUtils;
-import com.nis.util.HiveJDBC;
+import com.nis.util.HiveJDBCByDruid;
 import com.nis.util.JsonMapper;
 import com.nis.web.controller.BaseRestController;
 import com.nis.web.service.AuditLogThread;
@@ -74,10 +73,10 @@ public class LogController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, dkBehaviorLog,
+		String sql = HiveSqlService.getSql(page, dkBehaviorLog,
 				Configurations.getStringProperty(DkBehaviorLog.class.getSimpleName() + "HiveTable", "DK_BEHAVIOR_LOG"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, DkBehaviorLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, DkBehaviorLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			logPage.setList(new ArrayList());
@@ -127,10 +126,10 @@ public class LogController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, pxyHttpLog,
+		String sql = HiveSqlService.getSql(page, pxyHttpLog,
 				Configurations.getStringProperty(PxyHttpLog.class.getSimpleName() + "HiveTable", "PXY_HTTP_LOG"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, PxyHttpLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, PxyHttpLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			logPage.setList(new ArrayList());
@@ -1,6 +1,5 @@
 package com.nis.web.controller.restful;
 
-import java.sql.ResultSet;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -34,7 +33,7 @@ import com.nis.restful.RestServiceException;
 import com.nis.util.Configurations;
 import com.nis.util.Constants;
 import com.nis.util.DateUtils;
-import com.nis.util.HiveJDBC;
+import com.nis.util.HiveJDBCByDruid;
 import com.nis.util.JsonMapper;
 import com.nis.web.controller.BaseRestController;
 import com.nis.web.service.AuditLogThread;
@@ -83,10 +82,10 @@ public class MmLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, mmAvIpLog,
+		String sql = HiveSqlService.getSql(page, mmAvIpLog,
 				Configurations.getStringProperty(MmAvIpLog.class.getSimpleName() + "HiveTable", "MM_AV_IP_LOG"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, MmAvIpLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmAvIpLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			logPage.setList(new ArrayList());
@@ -136,10 +135,10 @@ public class MmLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, mmAvUrlLog,
+		String sql = HiveSqlService.getSql(page, mmAvUrlLog,
 				Configurations.getStringProperty(MmAvUrlLog.class.getSimpleName() + "HiveTable", "MM_AV_URL_LOG"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, MmAvUrlLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmAvUrlLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			logPage.setList(new ArrayList());
@@ -189,10 +188,10 @@ public class MmLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, mmPicIpLog,
+		String sql = HiveSqlService.getSql(page, mmPicIpLog,
 				Configurations.getStringProperty(MmPicIpLog.class.getSimpleName() + "HiveTable", "MM_PIC_IP_LOG"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, MmPicIpLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmPicIpLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			logPage.setList(new ArrayList());
@@ -242,10 +241,10 @@ public class MmLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, mmPicUrlLog,
+		String sql = HiveSqlService.getSql(page, mmPicUrlLog,
 				Configurations.getStringProperty(MmPicUrlLog.class.getSimpleName() + "HiveTable", "MM_PIC_URL_LOG"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, MmPicUrlLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmPicUrlLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			logPage.setList(new ArrayList());
@@ -296,10 +295,10 @@ public class MmLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, mmVoipIpLog,
+		String sql = HiveSqlService.getSql(page, mmVoipIpLog,
 				Configurations.getStringProperty(MmVoipIpLog.class.getSimpleName() + "HiveTable", "MM_VOIP_IP_LOG"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, MmVoipIpLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmVoipIpLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			logPage.setList(new ArrayList());
@@ -349,10 +348,10 @@ public class MmLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, mmVoipLog,
+		String sql = HiveSqlService.getSql(page, mmVoipLog,
 				Configurations.getStringProperty(MmVoipAccountLog.class.getSimpleName() + "HiveTable", "MM_VOIP_ACCOUNT_LOG"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, MmVoipAccountLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmVoipAccountLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			logPage.setList(new ArrayList());
@@ -403,10 +402,10 @@ public class MmLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, mmSampleAudioLog,
+		String sql = HiveSqlService.getSql(page, mmSampleAudioLog,
 				Configurations.getStringProperty(MmSampleAudioLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_AUDIO_LOG"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, MmSampleAudioLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmSampleAudioLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			logPage.setList(new ArrayList());
@@ -456,10 +455,10 @@ public class MmLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, mmSampleVideoLog,
+		String sql = HiveSqlService.getSql(page, mmSampleVideoLog,
 				Configurations.getStringProperty(MmSampleVideoLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_VIDEO_LOG"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, MmSampleVideoLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmSampleVideoLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			logPage.setList(new ArrayList());
@@ -509,10 +508,10 @@ public class MmLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, mmPornAudioLevelLog,
+		String sql = HiveSqlService.getSql(page, mmPornAudioLevelLog,
 				Configurations.getStringProperty(MmPornAudioLevelLog.class.getSimpleName() + "HiveTable", "MM_PORN_AUDIO_LEVEL_LOG"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, MmPornAudioLevelLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmPornAudioLevelLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			logPage.setList(new ArrayList());
@@ -562,10 +561,10 @@ public class MmLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, mmPornVideoLevelLog,
+		String sql = HiveSqlService.getSql(page, mmPornVideoLevelLog,
 				Configurations.getStringProperty(MmPornVideoLevelLog.class.getSimpleName() + "HiveTable", "MM_PRON_VIDEO_LOG"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, MmPornVideoLevelLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmPornVideoLevelLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			logPage.setList(new ArrayList());
@@ -616,10 +615,10 @@ public class MmLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, mmSamplePicLog,
+		String sql = HiveSqlService.getSql(page, mmSamplePicLog,
 				Configurations.getStringProperty(MmSamplePicLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_PIC_LOG"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, MmSamplePicLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmSamplePicLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			logPage.setList(new ArrayList());
@@ -669,10 +668,10 @@ public class MmLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, mmSampleVoipLog,
+		String sql = HiveSqlService.getSql(page, mmSampleVoipLog,
 				Configurations.getStringProperty(MmSampleVoipLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_VOIP_LOG"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, MmSampleVoipLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmSampleVoipLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			logPage.setList(new ArrayList());
@@ -36,6 +36,7 @@ import com.nis.util.Configurations;
 import com.nis.util.Constants;
 import com.nis.util.DateUtils;
 import com.nis.util.HiveJDBC;
+import com.nis.util.HiveJDBCByDruid;
 import com.nis.util.JsonMapper;
 import com.nis.web.controller.BaseRestController;
 import com.nis.web.service.AuditLogThread;
@@ -82,15 +83,20 @@ public class NtcLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, ntcIpLog,
+		// ResultSet rs = HiveSqlService.getResultSet(page, ntcIpLog,
+		// Configurations.getStringProperty(NtcIpLog.class.getSimpleName() + "HiveTable", "ntc_ip_log"),
+		// getCol2Col(), orderBy, null);
+		// Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcIpLog.class, "foundTime",
+		// "recvTime");
+		String sql = HiveSqlService.getSql(page, ntcIpLog,
 				Configurations.getStringProperty(NtcIpLog.class.getSimpleName() + "HiveTable", "ntc_ip_log"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcIpLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcIpLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
-			ntcIpLogPage.setList(new ArrayList());
+			ntcIpLogPage.setList(new ArrayList<NtcIpLog>());
 		} else {
-			List list = new ArrayList();
+			List<Object> list = new ArrayList<Object>();
 			list = tableMapping.get("obj");
 			// if (tableMapping.get("obj").size() > page.getPageSize()) {
 			// list = tableMapping.get("obj").subList(0, page.getPageSize());
@@ -148,10 +154,10 @@ public class NtcLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, ntcHttpLog,
+		String sql = HiveSqlService.getSql(page, ntcHttpLog,
 				Configurations.getStringProperty(NtcHttpLog.class.getSimpleName() + "HiveTable", "ntc_http_log"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcHttpLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcHttpLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			ntcHttpLogPage.setList(new ArrayList());
@@ -208,10 +214,10 @@ public class NtcLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, ntcDnsLog,
+		String sql = HiveSqlService.getSql(page, ntcDnsLog,
 				Configurations.getStringProperty(NtcDnsLog.class.getSimpleName() + "HiveTable", "ntc_dns_log"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcDnsLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcDnsLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			ntcDnsLogPage.setList(new ArrayList());
@@ -268,10 +274,10 @@ public class NtcLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, ntcMailLog,
+		String sql = HiveSqlService.getSql(page, ntcMailLog,
 				Configurations.getStringProperty(NtcMailLog.class.getSimpleName() + "HiveTable", "ntc_mail_log"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcMailLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcMailLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			ntcMailLogPage.setList(new ArrayList());
@@ -328,10 +334,10 @@ public class NtcLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, ntcSslLog,
+		String sql = HiveSqlService.getSql(page, ntcSslLog,
 				Configurations.getStringProperty(NtcSslLog.class.getSimpleName() + "HiveTable", "ntc_ssl_log"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcSslLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcSslLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			ntcSslLogPage.setList(new ArrayList());
@@ -388,10 +394,10 @@ public class NtcLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, ntcPptpLog,
+		String sql = HiveSqlService.getSql(page, ntcPptpLog,
 				Configurations.getStringProperty(NtcPptpLog.class.getSimpleName() + "HiveTable", "ntc_pptp_log"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcPptpLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcPptpLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			ntcPptpLogPage.setList(new ArrayList());
@@ -447,10 +453,10 @@ public class NtcLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, ntcL2tpLog,
+		String sql = HiveSqlService.getSql(page, ntcL2tpLog,
 				Configurations.getStringProperty(NtcL2tpLog.class.getSimpleName() + "HiveTable", "ntc_l2tp_log"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcL2tpLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcL2tpLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			ntcL2tpLogPage.setList(new ArrayList());
@@ -507,10 +513,10 @@ public class NtcLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, ntcOpenvpnLog,
+		String sql = HiveSqlService.getSql(page, ntcOpenvpnLog,
 				Configurations.getStringProperty(NtcOpenvpnLog.class.getSimpleName() + "HiveTable", "ntc_openvpn_log"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcOpenvpnLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcOpenvpnLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			ntcOpenvpnLogPage.setList(new ArrayList());
@@ -567,10 +573,10 @@ public class NtcLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, ntcIpsecLog,
+		String sql = HiveSqlService.getSql(page, ntcIpsecLog,
 				Configurations.getStringProperty(NtcIpsecLog.class.getSimpleName() + "HiveTable", "ntc_ipsec_log"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcIpsecLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcIpsecLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			ntcIpsecLogPage.setList(new ArrayList());
@@ -627,10 +633,10 @@ public class NtcLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, ntcSshLog,
+		String sql = HiveSqlService.getSql(page, ntcSshLog,
 				Configurations.getStringProperty(NtcSshLog.class.getSimpleName() + "HiveTable", "ntc_ssh_log"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcSshLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcSshLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			ntcSshLogPage.setList(new ArrayList());
@@ -687,10 +693,10 @@ public class NtcLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, ntcFtpLog,
+		String sql = HiveSqlService.getSql(page, ntcFtpLog,
 				Configurations.getStringProperty(NtcFtpLog.class.getSimpleName() + "HiveTable", "ntc_ftp_log"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcFtpLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcFtpLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			ntcFtpLogPage.setList(new ArrayList());
@@ -745,10 +751,10 @@ public class NtcLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, ntcAppLog,
+		String sql = HiveSqlService.getSql(page, ntcAppLog,
 				Configurations.getStringProperty(NtcAppLog.class.getSimpleName() + "HiveTable", "ntc_app_log"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcAppLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcAppLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			ntcAppLogPage.setList(new ArrayList());
@@ -803,10 +809,10 @@ public class NtcLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, ntcDdosLog,
+		String sql = HiveSqlService.getSql(page, ntcDdosLog,
 				Configurations.getStringProperty(NtcDdosLog.class.getSimpleName() + "HiveTable", "ntc_ddos_log"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcDdosLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcDdosLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			ntcDdosLogPage.setList(new ArrayList());
@@ -2,7 +2,10 @@ package com.nis.web.service;
 
 import java.lang.reflect.Field;
 import java.lang.reflect.Method;
+import java.sql.Connection;
 import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
@@ -17,6 +20,7 @@ import org.apache.ibatis.session.SqlSessionFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.alibaba.druid.pool.DruidDataSource;
 import com.nis.domain.Page;
 import com.nis.util.Configurations;
 import com.nis.util.Constants;
@@ -26,9 +30,21 @@ import com.nis.util.redis.SaveRedisThread;
 
 public class HiveSqlService {
 	private final static Logger logger = LoggerFactory.getLogger(HiveSqlService.class);
+	static DruidDataSource datasource = null;
+	Connection conn = null;
+	ResultSet rs = null;
+	Statement st = null;
+
+	public static Connection getConnection() throws SQLException {
+		if (datasource == null) {
+			datasource = (DruidDataSource) SpringContextHolder.getBean("HiveDataSourceByDruid");
+		}
+		return datasource.getConnection();
+	}
 	private static SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
 	private static SimpleDateFormat sdf2 = new SimpleDateFormat("yyyyMMdd");
-	public static ResultSet getResultSet(Page page, Object bean, String tableName,
+	public static String getSql(Page page, Object bean, String tableName,
 			Map<String, Map<String, String>> col2col, String orderBy, String searchActiveSys) throws Exception {
 		tableName = tableName.toLowerCase();
 		String showColmun = getFiledsSql(bean.getClass().getSimpleName(), page.getFields());
@@ -132,10 +148,7 @@ public class HiveSqlService {
 		// row_Num between " + startNum + " and " + endNum);
 		sql.append(" limit " + Constants.EVERY_GETHIVEDATANUM);
 		logger.info("SQL for fetching data center logs==================={}", sql);
-		// ResultSet query = HiveJDBC.query(sql.toString());
-		ResultSet query = HiveDataSource.query(sql.toString());
-		logger.info("Fetched data center logs successfully");
-		return query;
+		return sql.toString();
 	}
 
 	public static Long getHivePageCount(Object bean, String countKey, String tableName,
@@ -444,7 +457,6 @@ public class HiveSqlService {
 
 	public static String getFiledsSql(String mapName, String fileds) throws Exception {
 		String[] fieldsColoumn = null;
-		String orderByStr = "";
 		// all column names
 		List<String> columnList = new ArrayList<String>();
 		// all property names
@@ -489,7 +501,7 @@ public class HiveSqlService {
 		return fileds;
 	}
 
-	public static Map<String, String> getFiledAndColumnMap(Class clazz) {
+	public static Map<String, String> getFiledAndColumnMap(Class<?> clazz) {
 		Map<String, String> map = new HashMap<String, String>();
 		SqlSessionFactory sqlSessionFactory = SpringContextHolder.getBean(SqlSessionFactory.class);
 		ResultMap resultMap = sqlSessionFactory.getConfiguration().getResultMap(clazz.getSimpleName() + "Map");
@@ -605,10 +617,4 @@ public class HiveSqlService {
 		}
 
 	}
-
-	public static void main(String[] args) {
-		Long datacenterTime = Constants.DATACENTER_TIME;
-		double doubleValue = datacenterTime.doubleValue();
-		System.out.println(doubleValue);
-	}
 }
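Since getSql() now returns plain SQL text instead of an open ResultSet, whoever executes the query owns the connection lifecycle. A minimal caller sketch under that assumption (not code from this commit; runQuery is a hypothetical name), using try-with-resources so the connection always goes back to the Druid pool:

	// Hypothetical caller of the new API; assumes the bean/page types above.
	static void runQuery(Page page, Object bean, String tableName,
			Map<String, Map<String, String>> col2col, String orderBy) throws Exception {
		String sql = HiveSqlService.getSql(page, bean, tableName, col2col, orderBy, null);
		try (Connection conn = HiveSqlService.getConnection();
				Statement st = conn.createStatement();
				ResultSet rs = st.executeQuery(sql)) {
			while (rs.next()) {
				// map each row onto the target log bean here, as tableMapping() does
			}
		} // connection is returned to the pool even if the query throws
	}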
@@ -81,6 +81,35 @@
 	</bean>
 
+
+	<bean id="HiveDataSourceByDruid" class="com.alibaba.druid.pool.DruidDataSource" init-method="init" destroy-method="close">
+		<property name="driverClassName" value="${jdbc.hive.driver}" />
+		<property name="url" value="${jdbc.hive.url}" />
+		<property name="username" value="${jdbc.hive.username}"/>
+		<property name="password" value="${jdbc.hive.password}"/>
+		<property name="initialSize" value="${druid.hive.initialSize}" /><!-- initial number of pooled connections -->
+		<property name="minIdle" value="${druid.hive.minIdle}" /><!-- minimum number of pooled connections -->
+		<property name="maxActive" value="${druid.hive.maxActive}" /><!-- maximum number of pooled connections -->
+		<property name="maxWait" value="${druid.hive.maxWait}" /><!-- timeout when waiting for a connection, in milliseconds -->
+		<property name="useUnfairLock" value="${druid.hive.useUnfairLock}"/><!-- use an unfair lock -->
+		<property name="timeBetweenEvictionRunsMillis" value="${druid.hive.timeBetweenEvictionRunsMillis}" /><!-- interval between checks for idle connections that should be closed, in milliseconds -->
+		<property name="minEvictableIdleTimeMillis" value="${druid.hive.minEvictableIdleTimeMillis}" /><!-- minimum time a connection stays in the pool, in milliseconds -->
+		<property name="validationQuery" value="${druid.hive.validationQuery}" /><!-- SQL used to test whether a connection is still valid; must be a query statement -->
+		<property name="testWhileIdle" value="${druid.hive.testWhileIdle}" /><!-- on borrow, run validationQuery if the connection has been idle longer than timeBetweenEvictionRunsMillis -->
+		<property name="testOnBorrow" value="${druid.hive.testOnBorrow}" /><!-- run validationQuery every time a connection is borrowed -->
+		<property name="testOnReturn" value="${druid.hive.testOnReturn}" /><!-- run validationQuery every time a connection is returned -->
+		<property name="poolPreparedStatements" value="${druid.hive.poolPreparedStatements}" /><!-- enable PSCache and set its size per connection -->
+		<property name="maxOpenPreparedStatements" value="${druid.hive.maxOpenPreparedStatements}" /><!-- must be > 0 to enable PSCache; a value > 0 automatically switches poolPreparedStatements to true. Druid does not have Oracle's problem of PSCache using too much memory, so this can be set fairly high, e.g. 100 -->
+		<property name="filters" value="${druid.hive.filters}" /><!-- filters for monitoring statistics; without them the console cannot track SQL -->
+	</bean>
+
+
 	<bean id="dynamicDataSource" class="com.nis.datasource.DynamicDataSource">
 		<property name="targetDataSources">
 			<map key-type="java.lang.String">
@@ -66,6 +66,45 @@ bonecp.hive.partitionCount=3
 bonecp.hive.acquireIncrement=5
 bonecp.hive.statementsCacheSize=100
 
+
+######################### Druid connection pool settings for Hive #########################################
+# initial number of pooled connections
+druid.hive.initialSize=5
+# minimum number of pooled connections
+druid.hive.minIdle=1
+# maximum number of pooled connections
+druid.hive.maxActive=200
+# timeout when waiting for a connection, in milliseconds
+druid.hive.maxWait=600000
+# use an unfair lock
+druid.hive.useUnfairLock=true
+# interval between checks for idle connections that should be closed, in milliseconds
+druid.hive.timeBetweenEvictionRunsMillis=60000
+# minimum time a connection stays in the pool, in milliseconds
+druid.hive.minEvictableIdleTimeMillis=300000
+# SQL used to test whether a connection is still valid; must be a query statement
+druid.hive.validationQuery=select unix_timestamp()
+# on borrow, run validationQuery if the connection has been idle longer than timeBetweenEvictionRunsMillis
+druid.hive.testWhileIdle=true
+# run validationQuery every time a connection is borrowed
+druid.hive.testOnBorrow=true
+# run validationQuery every time a connection is returned
+druid.hive.testOnReturn=false
+# enable PSCache and set its size per connection
+druid.hive.poolPreparedStatements=true
+# must be > 0 to enable PSCache; a value > 0 automatically switches poolPreparedStatements to true. Druid does not have Oracle's problem of PSCache using too much memory, so this can be set fairly high, e.g. 100
+druid.hive.maxOpenPreparedStatements=100
+# filters for monitoring statistics; without them the console cannot track SQL
+druid.hive.filters=stat
+
 
 #####################################################################################################################################
 ## Redis connection
 #####################################################################################################################################
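For reference, the HiveDataSourceByDruid bean together with the properties above amounts to the following programmatic Druid configuration. This is an illustrative sketch, not part of the commit; the ${jdbc.hive.*} values are not shown in this diff, so the Hive driver class name below is an assumption:

import java.sql.SQLException;
import com.alibaba.druid.pool.DruidDataSource;

public class HivePoolFactory {
	public static DruidDataSource build(String url, String user, String password) throws SQLException {
		DruidDataSource ds = new DruidDataSource();
		ds.setDriverClassName("org.apache.hive.jdbc.HiveDriver"); // assumed value of ${jdbc.hive.driver}
		ds.setUrl(url);
		ds.setUsername(user);
		ds.setPassword(password);
		ds.setInitialSize(5);
		ds.setMinIdle(1);
		ds.setMaxActive(200);
		ds.setMaxWait(600000);                      // ms
		ds.setUseUnfairLock(true);
		ds.setTimeBetweenEvictionRunsMillis(60000); // ms
		ds.setMinEvictableIdleTimeMillis(300000);   // ms
		ds.setValidationQuery("select unix_timestamp()");
		ds.setTestWhileIdle(true);
		ds.setTestOnBorrow(true);
		ds.setTestOnReturn(false);
		ds.setPoolPreparedStatements(true);
		ds.setMaxOpenPreparedStatements(100);
		ds.setFilters("stat");                      // enables the stat filter for monitoring
		ds.init();                                  // matches init-method="init" in the XML
		return ds;
	}
}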
@@ -176,6 +176,25 @@
 	<!-- </listener-class> -->
 	<!-- </listener> -->
 
+	<servlet>
+		<servlet-name>DruidStatView</servlet-name>
+		<servlet-class>com.alibaba.druid.support.http.StatViewServlet</servlet-class>
+	</servlet>
+	<servlet-mapping>
+		<servlet-name>DruidStatView</servlet-name>
+		<url-pattern>/druid/*</url-pattern>
+	</servlet-mapping>
+	<filter>
+		<filter-name>DruidWebStatFilter</filter-name>
+		<filter-class>com.alibaba.druid.support.http.WebStatFilter</filter-class>
+		<init-param>
+			<param-name>exclusions</param-name>
+			<param-value>*.js,*.gif,*.jpg,*.png,*.css,*.ico,/druid/*</param-value>
+		</init-param>
+	</filter>
+	<filter-mapping>
+		<filter-name>DruidWebStatFilter</filter-name>
+		<url-pattern>/*</url-pattern>
+	</filter-mapping>
+
 </web-app>
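With StatViewServlet mapped to /druid/*, Druid's built-in monitoring console is served at /druid/index.html on the deployed application; WebStatFilter collects the URI and SQL statistics it displays, and the exclusions init-param keeps static resources out of those statistics.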