Merge remote-tracking branch 'origin/Release-1.1' into develop

Author: zhangdongxu
Date: 2018-08-20 17:42:34 +08:00
10 changed files with 433 additions and 99 deletions

View File

@@ -557,6 +557,14 @@
<artifactId>fastdfs-client-java</artifactId>
<version>1.27-SNAPSHOT</version>
</dependency>
+<dependency>
+<groupId>com.alibaba</groupId>
+<artifactId>druid</artifactId>
+<version>1.1.10</version>
+</dependency>
</dependencies>
</project>

View File

@@ -40,24 +40,16 @@ public class HiveJDBC {
static {
try {
prop.load(Configurations.class.getResourceAsStream("/jdbc.properties"));
+driverName = prop.getProperty("jdbc.hive.driver").trim();
+url = prop.getProperty("jdbc.hive.url").trim();
+username = prop.getProperty("jdbc.hive.username").trim();
+password = prop.getProperty("jdbc.hive.password").trim();
} catch (IOException e) {
e.printStackTrace();
}
}
-public static void getConn(String searchActiveSys) throws Exception {
+public static void getConn() throws Exception {
-if (null != searchActiveSys && searchActiveSys.equals("4")) {
-driverName = prop.getProperty("jdbc.hiveA.driver").trim();
-url = prop.getProperty("jdbc.hiveA.url").trim();
-username = prop.getProperty("jdbc.hiveA.username").trim();
-password = prop.getProperty("jdbc.hiveA.password").trim();
-} else {
-driverName = prop.getProperty("jdbc.hiveB.driver").trim();
-url = prop.getProperty("jdbc.hiveB.url").trim();
-username = prop.getProperty("jdbc.hiveB.username").trim();
-password = prop.getProperty("jdbc.hiveB.password").trim();
-}
Class.forName(driverName);
conn = DriverManager.getConnection(url, username, password);
@@ -65,7 +57,7 @@ public class HiveJDBC {
public static ResultSet query(String sql, String searchActiveSys) throws Exception {
logger.info("开始连接数据中心日志库--------------------------");
-getConn(searchActiveSys);
+getConn();
logger.info("连接数据中心日志库成功--------------------------");
st = conn.createStatement();
if (null != searchActiveSys && searchActiveSys.equals("4")) {
@@ -135,15 +127,15 @@ public class HiveJDBC {
Map<String, String> filedAndColumnMap = getColumn2FiledMap(entityClass);
List<String> listString = new ArrayList<String>();
List listObject = new ArrayList();
-//不从Object... obj中获取需要date类型的字段了,调用的时候容易漏写,改为反射获取date类型的字段
+// 不从Object... obj中获取需要date类型的字段了,调用的时候容易漏写,改为反射获取date类型的字段
-List<String> columnList =getDateColumn(entityClass);
+List<String> columnList = getDateColumn(entityClass);
// List<String> columnList =null;
// if (null != obj && obj.length > 0) {
// columnList = new ArrayList<String>();
// for (int i = 0; i < obj.length; i++) {
// columnList.add(obj[i].toString().toLowerCase());
// }
// }
// ResultSet rs = HiveJDBC.query(sql.toString());
ResultSetMetaData metaData = rs.getMetaData();
while (rs.next()) {
@@ -152,11 +144,11 @@ public class HiveJDBC {
Object value = rs.getObject(i);
String filedName = filedAndColumnMap.get(metaData.getColumnName(i).toString().toLowerCase());
if (!StringUtil.isEmpty(value)) {
-//如果是日期类型的属性需要把时间戳转换成日期如果时间戳为0直接把值设置为null
+// 如果是日期类型的属性需要把时间戳转换成日期如果时间戳为0直接把值设置为null
if (null != columnList && columnList.contains(filedName.toLowerCase())) {
long time = 0l;
time = Long.parseLong(value.toString());
-map.put(filedName, time==0l?null:new Date(time * 1000));
+map.put(filedName, time == 0l ? null : new Date(time * 1000));
// map.put(filedName, new
// Date(Long.parseLong("1476583810000")));
} else {
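
Taken together, these hunks drop the per-request switch between the jdbc.hiveA.* and jdbc.hiveB.* settings: HiveJDBC now reads a single jdbc.hive.* configuration once in its static initializer, and getConn() only loads the driver and opens the connection. A minimal sketch of the resulting flow, reconstructed from the hunks above (the surrounding fields and the rest of the class are unchanged):

// Sketch reconstructed from the diff above, not the complete class.
static {
    try {
        prop.load(Configurations.class.getResourceAsStream("/jdbc.properties"));
        driverName = prop.getProperty("jdbc.hive.driver").trim();
        url = prop.getProperty("jdbc.hive.url").trim();
        username = prop.getProperty("jdbc.hive.username").trim();
        password = prop.getProperty("jdbc.hive.password").trim();
    } catch (IOException e) {
        e.printStackTrace();
    }
}

public static void getConn() throws Exception {
    Class.forName(driverName);                                    // load the Hive JDBC driver named in jdbc.properties
    conn = DriverManager.getConnection(url, username, password);  // single fixed Hive endpoint
}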

View File

@@ -0,0 +1,237 @@
package com.nis.util;
import java.beans.BeanInfo;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.math.BigDecimal;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.ibatis.mapping.ResultMap;
import org.apache.ibatis.mapping.ResultMapping;
import org.apache.ibatis.session.SqlSessionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.alibaba.druid.pool.DruidDataSource;
import com.nis.domain.Page;
import com.nis.web.service.SpringContextHolder;
/**
*
* <p>Title: HiveJDBCByDruid</p>
* <p>Description: 使用druid连接池对hive进行查询并解析结果</p>
* <p>Company: IIE</p>
* @author rkg
* @date 2018年8月20日
*
*/
public class HiveJDBCByDruid {
private final static Logger logger = LoggerFactory.getLogger(HiveJDBCByDruid.class);
static DruidDataSource datasource = null;
Connection conn = null;
ResultSet rs = null;
Statement st = null;
public static Connection getConnection() throws SQLException {
if (datasource == null) {
datasource = (DruidDataSource) SpringContextHolder.getBean("HiveDataSourceByDruid");
}
return datasource.getConnection();
}
/**
* 将结果利用反射映射成对象集合
*
* @param rs
* resultSet
* @param entityClass
* 实体类
* @param obj
* 那些字段需要转换为date类型(由于数据中心表结构中没有date类型数据,其日期用long型表示,界面中需要显示yyyy-MM-dd
* hh:mm:ss形式,所以需要将long转换为date)
* @return
* @throws Exception
*/
public Map<String, List<Object>> tableMapping(Page<?> page, String redisKey, String sql, Class<?> entityClass, Object... obj)
throws Exception {
Map<String, List<Object>> mapList = new HashMap<String, List<Object>>();
try {
Map<String, String> filedAndColumnMap = getColumn2FiledMap(entityClass);
List<Object> listString = new ArrayList<Object>();
List<Object> listObject = new ArrayList<Object>();
// 不从Object... obj中获取需要date类型的字段了,调用的时候容易漏写,改为反射获取date类型的字段
List<String> columnList = getDateColumn(entityClass);
conn = getConnection();
logger.info("连接数据中心日志库成功--------------------------");
st = conn.createStatement();
rs = st.executeQuery(sql);
logger.info("执行查询语句成功sql={}", sql);
ResultSetMetaData metaData = rs.getMetaData();
while (rs.next()) {
Map<String,Object> map = new HashMap<String,Object>();
for (int i = 1; i <= metaData.getColumnCount(); i++) {
Object value = rs.getObject(i);
String filedName = filedAndColumnMap.get(metaData.getColumnName(i).toString().toLowerCase());
if (!StringUtil.isEmpty(value)) {
// 如果是日期类型的属性需要把时间戳转换成日期如果时间戳为0直接把值设置为null
if (null != columnList && columnList.contains(filedName.toLowerCase())) {
long time = 0l;
time = Long.parseLong(value.toString());
map.put(filedName, time == 0l ? null : new Date(time * 1000));
// map.put(filedName, new
// Date(Long.parseLong("1476583810000")));
} else {
map.put(filedName, value);
}
} else {
map.put(filedName, null);
}
}
listString.add(JsonMapper.toJsonString(map2Obj(map, entityClass)));
listObject.add(map2Obj(map, entityClass));
}
logger.info("开始关闭数据中心连接");
HiveDataSource.closeConn();
if (null == listString || listString.size() == 0 || null == listObject || listObject.size() == 0) {
return null;
} else {
//暂时没有往缓存写的操作
// if (Constants.IS_OPEN_REDIS && Constants.DATACENTER_OPEN_REDIS) {
// new SaveRedisListThread(redisKey, listString, Constants.HIVE_EXPIRE).start();
// }
}
// sublist包前不包后,0-30实际获取的是0-29的数据
Integer startNum = (page.getPageNo() - 1) * page.getPageSize();
Integer endNum = startNum - 1 + page.getPageSize() + 1;
if (listString.size() >= startNum) {
if (listString.size() >= endNum) {
mapList.put("str", listString.subList(startNum, endNum));
} else {
mapList.put("str", listString.subList(startNum, listString.size()));
}
} else {
mapList.put("str", new ArrayList<Object>());
}
if (listObject.size() >= startNum) {
if (listObject.size() >= endNum) {
mapList.put("obj", listObject.subList(startNum, endNum));
} else {
mapList.put("obj", listObject.subList(startNum, listObject.size()));
}
} else {
mapList.put("obj", new ArrayList<Object>());
}
} finally {
closeConn();
}
return mapList;
}
public void closeConn() {
try {
if (rs != null) {
rs.close();
rs = null;
}
if (st != null) {
st.close();
st = null;
}
if (conn != null) {
conn.close();
conn = null;
}
logger.info("关闭数据中心连接成功");
} catch (Exception e) {
e.printStackTrace();
logger.error("关闭数据中心连接失败,失败原因" + e);
}
}
/**
* 反射获取类中date类型的字段名称
* @param type
* @return
* @throws Exception
*/
public static List<String> getDateColumn(Class<?> type) throws Exception {
List<String> columnList = new ArrayList<String>();
BeanInfo beanInfo = Introspector.getBeanInfo(type);
PropertyDescriptor[] propertyDescriptors = beanInfo.getPropertyDescriptors();
for (int i = 0; i < propertyDescriptors.length; i++) {
PropertyDescriptor propertyDescriptor = propertyDescriptors[i];
String name = propertyDescriptor.getName();
String fieldTypeName = propertyDescriptor.getPropertyType().getName();
if (fieldTypeName.equals("java.util.Date")) {
columnList.add(name.toLowerCase());
}
}
return columnList;
}
public static Object map2Obj(Map<String, Object> map, Class<?> beanClass) throws Exception {
BeanInfo beanInfo = Introspector.getBeanInfo(beanClass);
Object obj = beanClass.newInstance();
PropertyDescriptor[] propertyDescriptors = beanInfo.getPropertyDescriptors();
for (int i = 0; i < propertyDescriptors.length; i++) {
PropertyDescriptor propertyDescriptor = propertyDescriptors[i];
String name = propertyDescriptor.getName();
String fieldTypeName = propertyDescriptor.getPropertyType().getName();
if (map.containsKey(name)) {
Object value = map.get(name);
if (!StringUtil.isEmpty(value)) {
if (fieldTypeName.equals("java.lang.String")) {
propertyDescriptor.getWriteMethod().invoke(obj, value.toString());
} else if (fieldTypeName.equals("java.lang.Integer")) {
propertyDescriptor.getWriteMethod().invoke(obj, Integer.parseInt(value.toString()));
} else if (fieldTypeName.equals("java.lang.Long")) {
propertyDescriptor.getWriteMethod().invoke(obj, Long.parseLong(value.toString()));
} else if (fieldTypeName.equals("java.lang.Boolean")) {
propertyDescriptor.getWriteMethod().invoke(obj, Boolean.parseBoolean(value.toString()));
} else if (fieldTypeName.equals("java.lang.Character")) {
propertyDescriptor.getWriteMethod().invoke(obj, value.toString().toCharArray());
} else if (fieldTypeName.equals("java.lang.Byte")) {
propertyDescriptor.getWriteMethod().invoke(obj, value.toString().getBytes());
} else if (fieldTypeName.equals("java.lang.Short")) {
propertyDescriptor.getWriteMethod().invoke(obj, Short.parseShort(value.toString()));
} else if (fieldTypeName.equals("java.lang.Float")) {
propertyDescriptor.getWriteMethod().invoke(obj, Float.parseFloat(value.toString()));
} else if (fieldTypeName.equals("java.lang.Double")) {
propertyDescriptor.getWriteMethod().invoke(obj, Double.parseDouble(value.toString()));
} else if (fieldTypeName.equals("java.math.BigDecimal")) {
propertyDescriptor.getWriteMethod().invoke(obj,
BigDecimal.valueOf(Long.parseLong(value.toString())));
} else if (fieldTypeName.equals("java.util.Date")) {
propertyDescriptor.getWriteMethod().invoke(obj, (Date) value);
}
}
}
}
return obj;
}
public static Map<String, String> getColumn2FiledMap(Class<?> clazz) {
Map<String, String> map = new HashMap<String, String>();
SqlSessionFactory sqlSessionFactory = SpringContextHolder.getBean(SqlSessionFactory.class);
ResultMap resultMap = sqlSessionFactory.getConfiguration().getResultMap(clazz.getSimpleName() + "Map");
List<ResultMapping> mapping = resultMap.getResultMappings();
for (ResultMapping mapp : mapping) {
map.put(mapp.getColumn().toLowerCase(), mapp.getProperty());
}
return map;
}
}
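
The class above replaces the old static HiveJDBC.query/tableMapping path: the caller now builds the SQL separately and hands it in, and the connection is borrowed from the pooled HiveDataSourceByDruid bean instead of being opened per call through DriverManager. A minimal usage sketch, with names taken from the controllers further down (page and ntcIpLog come from the request; error handling omitted):

// Usage sketch following the controller code below.
String sql = HiveSqlService.getSql(page, ntcIpLog,
        Configurations.getStringProperty(NtcIpLog.class.getSimpleName() + "HiveTable", "ntc_ip_log"),
        getCol2Col(), "found_Time", null);                         // only builds the SELECT text, no JDBC yet
Map<String, List<Object>> tableMapping =
        new HiveJDBCByDruid().tableMapping(page, null, sql, NtcIpLog.class);
// the trailing date-column varargs are no longer required; date fields are found by reflection
if (tableMapping == null) {
    ntcIpLogPage.setList(new ArrayList<NtcIpLog>());               // no rows for the requested page
} else {
    ntcIpLogPage.setList(tableMapping.get("obj"));                 // "obj" holds mapped entities, "str" their JSON form
}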

View File

@@ -1,6 +1,5 @@
package com.nis.web.controller.restful;
-import java.sql.ResultSet;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.HashMap;
@@ -24,7 +23,7 @@ import com.nis.restful.RestServiceException;
import com.nis.util.Configurations;
import com.nis.util.Constants;
import com.nis.util.DateUtils;
-import com.nis.util.HiveJDBC;
+import com.nis.util.HiveJDBCByDruid;
import com.nis.util.JsonMapper;
import com.nis.web.controller.BaseRestController;
import com.nis.web.service.AuditLogThread;
@@ -74,10 +73,10 @@ public class LogController extends BaseRestController {
} else {
orderBy = "found_Time";
}
-ResultSet rs = HiveSqlService.getResultSet(page, dkBehaviorLog,
+String sql = HiveSqlService.getSql(page, dkBehaviorLog,
Configurations.getStringProperty(DkBehaviorLog.class.getSimpleName() + "HiveTable", "DK_BEHAVIOR_LOG"),
getCol2Col(), orderBy, null);
-Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, DkBehaviorLog.class, "foundTime",
+Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, DkBehaviorLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
logPage.setList(new ArrayList());
@@ -127,10 +126,10 @@ public class LogController extends BaseRestController {
} else {
orderBy = "found_Time";
}
-ResultSet rs = HiveSqlService.getResultSet(page, pxyHttpLog,
+String sql = HiveSqlService.getSql(page, pxyHttpLog,
Configurations.getStringProperty(PxyHttpLog.class.getSimpleName() + "HiveTable", "PXY_HTTP_LOG"),
getCol2Col(), orderBy, null);
-Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, PxyHttpLog.class, "foundTime",
+Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, PxyHttpLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
logPage.setList(new ArrayList());

View File

@@ -1,6 +1,5 @@
package com.nis.web.controller.restful;
-import java.sql.ResultSet;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.HashMap;
@@ -34,7 +33,7 @@ import com.nis.restful.RestServiceException;
import com.nis.util.Configurations;
import com.nis.util.Constants;
import com.nis.util.DateUtils;
-import com.nis.util.HiveJDBC;
+import com.nis.util.HiveJDBCByDruid;
import com.nis.util.JsonMapper;
import com.nis.web.controller.BaseRestController;
import com.nis.web.service.AuditLogThread;
@@ -83,10 +82,10 @@ public class MmLogSearchController extends BaseRestController {
} else {
orderBy = "found_Time";
}
-ResultSet rs = HiveSqlService.getResultSet(page, mmAvIpLog,
+String sql = HiveSqlService.getSql(page, mmAvIpLog,
Configurations.getStringProperty(MmAvIpLog.class.getSimpleName() + "HiveTable", "MM_AV_IP_LOG"),
getCol2Col(), orderBy, null);
-Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, MmAvIpLog.class, "foundTime",
+Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmAvIpLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
logPage.setList(new ArrayList());
@@ -136,10 +135,10 @@ public class MmLogSearchController extends BaseRestController {
} else {
orderBy = "found_Time";
}
-ResultSet rs = HiveSqlService.getResultSet(page, mmAvUrlLog,
+String sql = HiveSqlService.getSql(page, mmAvUrlLog,
Configurations.getStringProperty(MmAvUrlLog.class.getSimpleName() + "HiveTable", "MM_AV_URL_LOG"),
getCol2Col(), orderBy, null);
-Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, MmAvUrlLog.class, "foundTime",
+Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmAvUrlLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
logPage.setList(new ArrayList());
@@ -189,10 +188,10 @@ public class MmLogSearchController extends BaseRestController {
} else {
orderBy = "found_Time";
}
-ResultSet rs = HiveSqlService.getResultSet(page, mmPicIpLog,
+String sql = HiveSqlService.getSql(page, mmPicIpLog,
Configurations.getStringProperty(MmPicIpLog.class.getSimpleName() + "HiveTable", "MM_PIC_IP_LOG"),
getCol2Col(), orderBy, null);
-Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, MmPicIpLog.class, "foundTime",
+Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmPicIpLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
logPage.setList(new ArrayList());
@@ -242,10 +241,10 @@ public class MmLogSearchController extends BaseRestController {
} else {
orderBy = "found_Time";
}
-ResultSet rs = HiveSqlService.getResultSet(page, mmPicUrlLog,
+String sql = HiveSqlService.getSql(page, mmPicUrlLog,
Configurations.getStringProperty(MmPicUrlLog.class.getSimpleName() + "HiveTable", "MM_PIC_URL_LOG"),
getCol2Col(), orderBy, null);
-Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, MmPicUrlLog.class, "foundTime",
+Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmPicUrlLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
logPage.setList(new ArrayList());
@@ -296,10 +295,10 @@ public class MmLogSearchController extends BaseRestController {
} else {
orderBy = "found_Time";
}
-ResultSet rs = HiveSqlService.getResultSet(page, mmVoipIpLog,
+String sql = HiveSqlService.getSql(page, mmVoipIpLog,
Configurations.getStringProperty(MmVoipIpLog.class.getSimpleName() + "HiveTable", "MM_VOIP_IP_LOG"),
getCol2Col(), orderBy, null);
-Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, MmVoipIpLog.class, "foundTime",
+Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmVoipIpLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
logPage.setList(new ArrayList());
@@ -349,10 +348,10 @@ public class MmLogSearchController extends BaseRestController {
} else {
orderBy = "found_Time";
}
-ResultSet rs = HiveSqlService.getResultSet(page, mmVoipLog,
+String sql = HiveSqlService.getSql(page, mmVoipLog,
Configurations.getStringProperty(MmVoipAccountLog.class.getSimpleName() + "HiveTable", "MM_VOIP_ACCOUNT_LOG"),
getCol2Col(), orderBy, null);
-Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, MmVoipAccountLog.class, "foundTime",
+Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmVoipAccountLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
logPage.setList(new ArrayList());
@@ -403,10 +402,10 @@ public class MmLogSearchController extends BaseRestController {
} else {
orderBy = "found_Time";
}
-ResultSet rs = HiveSqlService.getResultSet(page, mmSampleAudioLog,
+String sql = HiveSqlService.getSql(page, mmSampleAudioLog,
Configurations.getStringProperty(MmSampleAudioLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_AUDIO_LOG"),
getCol2Col(), orderBy, null);
-Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, MmSampleAudioLog.class, "foundTime",
+Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmSampleAudioLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
logPage.setList(new ArrayList());
@@ -456,10 +455,10 @@ public class MmLogSearchController extends BaseRestController {
} else {
orderBy = "found_Time";
}
-ResultSet rs = HiveSqlService.getResultSet(page, mmSampleVideoLog,
+String sql = HiveSqlService.getSql(page, mmSampleVideoLog,
Configurations.getStringProperty(MmSampleVideoLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_VIDEO_LOG"),
getCol2Col(), orderBy, null);
-Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, MmSampleVideoLog.class, "foundTime",
+Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmSampleVideoLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
logPage.setList(new ArrayList());
@@ -509,10 +508,10 @@ public class MmLogSearchController extends BaseRestController {
} else {
orderBy = "found_Time";
}
-ResultSet rs = HiveSqlService.getResultSet(page, mmPornAudioLevelLog,
+String sql = HiveSqlService.getSql(page, mmPornAudioLevelLog,
Configurations.getStringProperty(MmPornAudioLevelLog.class.getSimpleName() + "HiveTable", "MM_PORN_AUDIO_LEVEL_LOG"),
getCol2Col(), orderBy, null);
-Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, MmPornAudioLevelLog.class, "foundTime",
+Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmPornAudioLevelLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
logPage.setList(new ArrayList());
@@ -562,10 +561,10 @@ public class MmLogSearchController extends BaseRestController {
} else {
orderBy = "found_Time";
}
-ResultSet rs = HiveSqlService.getResultSet(page, mmPornVideoLevelLog,
+String sql = HiveSqlService.getSql(page, mmPornVideoLevelLog,
Configurations.getStringProperty(MmPornVideoLevelLog.class.getSimpleName() + "HiveTable", "MM_PRON_VIDEO_LOG"),
getCol2Col(), orderBy, null);
-Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, MmPornVideoLevelLog.class, "foundTime",
+Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmPornVideoLevelLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
logPage.setList(new ArrayList());
@@ -616,10 +615,10 @@ public class MmLogSearchController extends BaseRestController {
} else {
orderBy = "found_Time";
}
-ResultSet rs = HiveSqlService.getResultSet(page, mmSamplePicLog,
+String sql = HiveSqlService.getSql(page, mmSamplePicLog,
Configurations.getStringProperty(MmSamplePicLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_PIC_LOG"),
getCol2Col(), orderBy, null);
-Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, MmSamplePicLog.class, "foundTime",
+Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmSamplePicLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
logPage.setList(new ArrayList());
@@ -669,10 +668,10 @@ public class MmLogSearchController extends BaseRestController {
} else {
orderBy = "found_Time";
}
-ResultSet rs = HiveSqlService.getResultSet(page, mmSampleVoipLog,
+String sql = HiveSqlService.getSql(page, mmSampleVoipLog,
Configurations.getStringProperty(MmSampleVoipLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_VOIP_LOG"),
getCol2Col(), orderBy, null);
-Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, MmSampleVoipLog.class, "foundTime",
+Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmSampleVoipLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
logPage.setList(new ArrayList());

View File

@@ -37,6 +37,7 @@ import com.nis.util.Configurations;
import com.nis.util.Constants;
import com.nis.util.DateUtils;
import com.nis.util.HiveJDBC;
+import com.nis.util.HiveJDBCByDruid;
import com.nis.util.JsonMapper;
import com.nis.web.controller.BaseRestController;
import com.nis.web.service.AuditLogThread;
@@ -83,15 +84,20 @@ public class NtcLogSearchController extends BaseRestController {
} else {
orderBy = "found_Time";
}
-ResultSet rs = HiveSqlService.getResultSet(page, ntcIpLog,
+// ResultSet rs = HiveSqlService.getResultSet(page, ntcIpLog,
+// Configurations.getStringProperty(NtcIpLog.class.getSimpleName() + "HiveTable", "ntc_ip_log"),
+// getCol2Col(), orderBy, null);
+// Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcIpLog.class, "foundTime",
+// "recvTime");
+String sql = HiveSqlService.getSql(page, ntcIpLog,
Configurations.getStringProperty(NtcIpLog.class.getSimpleName() + "HiveTable", "ntc_ip_log"),
getCol2Col(), orderBy, null);
-Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcIpLog.class, "foundTime",
+Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcIpLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
-ntcIpLogPage.setList(new ArrayList());
+ntcIpLogPage.setList(new ArrayList<NtcIpLog>());
} else {
-List list = new ArrayList();
+List<Object> list = new ArrayList<Object>();
list = tableMapping.get("obj");
// if (tableMapping.get("obj").size() > page.getPageSize()) {
// list = tableMapping.get("obj").subList(0, page.getPageSize());
@@ -149,10 +155,10 @@ public class NtcLogSearchController extends BaseRestController {
} else {
orderBy = "found_Time";
}
-ResultSet rs = HiveSqlService.getResultSet(page, ntcHttpLog,
+String sql = HiveSqlService.getSql(page, ntcHttpLog,
Configurations.getStringProperty(NtcHttpLog.class.getSimpleName() + "HiveTable", "ntc_http_log"),
getCol2Col(), orderBy, null);
-Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcHttpLog.class, "foundTime",
+Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcHttpLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
ntcHttpLogPage.setList(new ArrayList());
@@ -209,10 +215,10 @@ public class NtcLogSearchController extends BaseRestController {
} else {
orderBy = "found_Time";
}
-ResultSet rs = HiveSqlService.getResultSet(page, ntcDnsLog,
+String sql = HiveSqlService.getSql(page, ntcDnsLog,
Configurations.getStringProperty(NtcDnsLog.class.getSimpleName() + "HiveTable", "ntc_dns_log"),
getCol2Col(), orderBy, null);
-Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcDnsLog.class, "foundTime",
+Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcDnsLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
ntcDnsLogPage.setList(new ArrayList());
@@ -269,10 +275,10 @@ public class NtcLogSearchController extends BaseRestController {
} else {
orderBy = "found_Time";
}
-ResultSet rs = HiveSqlService.getResultSet(page, ntcMailLog,
+String sql = HiveSqlService.getSql(page, ntcMailLog,
Configurations.getStringProperty(NtcMailLog.class.getSimpleName() + "HiveTable", "ntc_mail_log"),
getCol2Col(), orderBy, null);
-Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcMailLog.class, "foundTime",
+Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcMailLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
ntcMailLogPage.setList(new ArrayList());
@@ -329,10 +335,10 @@ public class NtcLogSearchController extends BaseRestController {
} else {
orderBy = "found_Time";
}
-ResultSet rs = HiveSqlService.getResultSet(page, ntcSslLog,
+String sql = HiveSqlService.getSql(page, ntcSslLog,
Configurations.getStringProperty(NtcSslLog.class.getSimpleName() + "HiveTable", "ntc_ssl_log"),
getCol2Col(), orderBy, null);
-Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcSslLog.class, "foundTime",
+Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcSslLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
ntcSslLogPage.setList(new ArrayList());
@@ -389,10 +395,10 @@ public class NtcLogSearchController extends BaseRestController {
} else {
orderBy = "found_Time";
}
-ResultSet rs = HiveSqlService.getResultSet(page, ntcPptpLog,
+String sql = HiveSqlService.getSql(page, ntcPptpLog,
Configurations.getStringProperty(NtcPptpLog.class.getSimpleName() + "HiveTable", "ntc_pptp_log"),
getCol2Col(), orderBy, null);
-Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcPptpLog.class, "foundTime",
+Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcPptpLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
ntcPptpLogPage.setList(new ArrayList());
@@ -448,10 +454,10 @@ public class NtcLogSearchController extends BaseRestController {
} else {
orderBy = "found_Time";
}
-ResultSet rs = HiveSqlService.getResultSet(page, ntcL2tpLog,
+String sql = HiveSqlService.getSql(page, ntcL2tpLog,
Configurations.getStringProperty(NtcL2tpLog.class.getSimpleName() + "HiveTable", "ntc_l2tp_log"),
getCol2Col(), orderBy, null);
-Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcL2tpLog.class, "foundTime",
+Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcL2tpLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
ntcL2tpLogPage.setList(new ArrayList());
@@ -508,10 +514,10 @@ public class NtcLogSearchController extends BaseRestController {
} else {
orderBy = "found_Time";
}
-ResultSet rs = HiveSqlService.getResultSet(page, ntcOpenvpnLog,
+String sql = HiveSqlService.getSql(page, ntcOpenvpnLog,
Configurations.getStringProperty(NtcOpenvpnLog.class.getSimpleName() + "HiveTable", "ntc_openvpn_log"),
getCol2Col(), orderBy, null);
-Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcOpenvpnLog.class, "foundTime",
+Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcOpenvpnLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
ntcOpenvpnLogPage.setList(new ArrayList());
@@ -568,10 +574,10 @@ public class NtcLogSearchController extends BaseRestController {
} else {
orderBy = "found_Time";
}
-ResultSet rs = HiveSqlService.getResultSet(page, ntcIpsecLog,
+String sql = HiveSqlService.getSql(page, ntcIpsecLog,
Configurations.getStringProperty(NtcIpsecLog.class.getSimpleName() + "HiveTable", "ntc_ipsec_log"),
getCol2Col(), orderBy, null);
-Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcIpsecLog.class, "foundTime",
+Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcIpsecLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
ntcIpsecLogPage.setList(new ArrayList());
@@ -628,10 +634,10 @@ public class NtcLogSearchController extends BaseRestController {
} else {
orderBy = "found_Time";
}
-ResultSet rs = HiveSqlService.getResultSet(page, ntcSshLog,
+String sql = HiveSqlService.getSql(page, ntcSshLog,
Configurations.getStringProperty(NtcSshLog.class.getSimpleName() + "HiveTable", "ntc_ssh_log"),
getCol2Col(), orderBy, null);
-Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcSshLog.class, "foundTime",
+Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcSshLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
ntcSshLogPage.setList(new ArrayList());
@@ -688,10 +694,10 @@ public class NtcLogSearchController extends BaseRestController {
} else {
orderBy = "found_Time";
}
-ResultSet rs = HiveSqlService.getResultSet(page, ntcFtpLog,
+String sql = HiveSqlService.getSql(page, ntcFtpLog,
Configurations.getStringProperty(NtcFtpLog.class.getSimpleName() + "HiveTable", "ntc_ftp_log"),
getCol2Col(), orderBy, null);
-Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcFtpLog.class, "foundTime",
+Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcFtpLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
ntcFtpLogPage.setList(new ArrayList());
@@ -746,10 +752,10 @@ public class NtcLogSearchController extends BaseRestController {
} else {
orderBy = "found_Time";
}
-ResultSet rs = HiveSqlService.getResultSet(page, ntcAppLog,
+String sql = HiveSqlService.getSql(page, ntcAppLog,
Configurations.getStringProperty(NtcAppLog.class.getSimpleName() + "HiveTable", "ntc_app_log"),
getCol2Col(), orderBy, null);
-Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcAppLog.class, "foundTime",
+Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcAppLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
ntcAppLogPage.setList(new ArrayList());
@@ -804,10 +810,10 @@ public class NtcLogSearchController extends BaseRestController {
} else {
orderBy = "found_Time";
}
-ResultSet rs = HiveSqlService.getResultSet(page, ntcDdosLog,
+String sql = HiveSqlService.getSql(page, ntcDdosLog,
Configurations.getStringProperty(NtcDdosLog.class.getSimpleName() + "HiveTable", "ntc_ddos_log"),
getCol2Col(), orderBy, null);
-Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcDdosLog.class, "foundTime",
+Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcDdosLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
ntcDdosLogPage.setList(new ArrayList());

View File

@@ -2,7 +2,10 @@ package com.nis.web.service;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
+import java.sql.Connection;
import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
@@ -17,6 +20,7 @@ import org.apache.ibatis.session.SqlSessionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import com.alibaba.druid.pool.DruidDataSource;
import com.nis.domain.Page;
import com.nis.util.Configurations;
import com.nis.util.Constants;
@@ -26,9 +30,21 @@ import com.nis.util.redis.SaveRedisThread;
public class HiveSqlService {
private final static Logger logger = LoggerFactory.getLogger(HiveSqlService.class);
+static DruidDataSource datasource = null;
+Connection conn = null;
+ResultSet rs = null;
+Statement st = null;
+public static Connection getConnection() throws SQLException {
+if (datasource == null) {
+datasource = (DruidDataSource) SpringContextHolder.getBean("HiveDataSourceByDruid");
+}
+return datasource.getConnection();
+}
private static SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
private static SimpleDateFormat sdf2 = new SimpleDateFormat("yyyyMMdd");
-public static ResultSet getResultSet(Page page, Object bean, String tableName,
+public static String getSql(Page page, Object bean, String tableName,
Map<String, Map<String, String>> col2col, String orderBy, String searchActiveSys) throws Exception {
tableName = tableName.toLowerCase();
String showColmun = getFiledsSql(bean.getClass().getSimpleName(), page.getFields());
@@ -132,10 +148,7 @@ public class HiveSqlService {
// row_Num between " + startNum + " and " + endNum);
sql.append(" limit " + Constants.EVERY_GETHIVEDATANUM);
logger.info("获取数据中心日志sql==================={}", sql);
-// ResultSet query = HiveJDBC.query(sql.toString());
-ResultSet query = HiveDataSource.query(sql.toString());
-logger.info("获取数据中心日志成功");
-return query;
+return sql.toString();
}
public static Long getHivePageCount(Object bean, String countKey, String tableName,
@@ -444,7 +457,6 @@ public class HiveSqlService {
public static String getFiledsSql(String mapName, String fileds) throws Exception {
String[] fieldsColoumn = null;
-String orderByStr = "";
// 所有字段名
List<String> columnList = new ArrayList<String>();
// 所有属性名
@@ -489,7 +501,7 @@ public class HiveSqlService {
return fileds;
}
-public static Map<String, String> getFiledAndColumnMap(Class clazz) {
+public static Map<String, String> getFiledAndColumnMap(Class<?> clazz) {
Map<String, String> map = new HashMap<String, String>();
SqlSessionFactory sqlSessionFactory = SpringContextHolder.getBean(SqlSessionFactory.class);
ResultMap resultMap = sqlSessionFactory.getConfiguration().getResultMap(clazz.getSimpleName() + "Map");
@@ -605,10 +617,4 @@ public class HiveSqlService {
}
}
-public static void main(String[] args) {
-Long datacenterTime = Constants.DATACENTER_TIME;
-double doubleValue = datacenterTime.doubleValue();
-System.out.println(doubleValue);
-}
}

View File

@@ -79,6 +79,35 @@
<!-- 缓存prepared statements的大小默认值0 -->
<property name="statementsCacheSize" value="${bonecp.hive.statementsCacheSize}" />
</bean>
<bean id="HiveDataSourceByDruid" class="com.alibaba.druid.pool.DruidDataSource" init-method="init" destroy-method="close">
<property name="driverClassName" value="${jdbc.hive.driver}" />
<property name="url" value="${jdbc.hive.url}" />
<property name="username" value="${jdbc.hive.username}"/>
<property name="password" value="${jdbc.hive.password}"/>
<property name="initialSize" value="${druid.hive.initialSize}" /><!-- 配置初始化连接池数量-->
<property name="minIdle" value="${druid.hive.minIdle}" /><!-- 配置最小连接池数量-->
<property name="maxActive" value="${druid.hive.maxActive}" /><!-- 配置最大连接池数量-->
<property name="maxWait" value="${druid.hive.maxWait}" /><!-- 配置获取连接等待超时的时间 单位毫秒-->
<property name="useUnfairLock" value="${druid.hive.useUnfairLock}"/><!--使用非公平锁-->
<property name="timeBetweenEvictionRunsMillis" value="${druid.hive.timeBetweenEvictionRunsMillis}" /><!-- 配置间隔多久才进行一次检测,检测需要关闭的空闲连接,单位是毫秒 -->
<property name="minEvictableIdleTimeMillis" value="${druid.hive.minEvictableIdleTimeMillis}" /><!-- 配置一个连接在池中最小生存的时间,单位是毫秒 -->
<property name="validationQuery" value="${druid.hive.validationQuery}" /><!--用来检测连接是否有效的sql要求是一个查询语句。 -->
<property name="testWhileIdle" value="${druid.hive.testWhileIdle}" /><!--申请连接的时候检测如果空闲时间大于timeBetweenEvictionRunsMillis执行validationQuery检测连接是否有效。-->
<property name="testOnBorrow" value="${druid.hive.testOnBorrow}" /><!--申请连接时执行validationQuery检测连接是否有效-->
<property name="testOnReturn" value="${druid.hive.testOnReturn}" /><!--归还连接时执行validationQuery检测连接是否有效-->
<property name="poolPreparedStatements" value="${druid.hive.poolPreparedStatements}" /><!-- 打开PSCache并且指定每个连接上PSCache的大小 -->
<property name="maxOpenPreparedStatements" value="${druid.hive.maxOpenPreparedStatements}" /><!--要启用PSCache必须配置大于0当大于0时poolPreparedStatements自动触发修改为true。在Druid中不会存在Oracle下PSCache占用内存过多的问题可以把这个数值配置大一些比如说100 -->
<property name="filters" value="${druid.hive.filters}" /><!-- 配置监控统计拦截的filters去掉后监控界面sql无法统计 -->
</bean>
<bean id="dynamicDataSource" class="com.nis.datasource.DynamicDataSource"> <bean id="dynamicDataSource" class="com.nis.datasource.DynamicDataSource">

View File

@@ -66,6 +66,45 @@ bonecp.hive.partitionCount=3
bonecp.hive.acquireIncrement=5
bonecp.hive.statementsCacheSize=100
#########################配置hive使用druid连接池#########################################
#配置初始化连接池数量
druid.hive.initialSize=5
#配置最小连接池数量
druid.hive.minIdle=1
#配置最大连接池数量
druid.hive.maxActive=200
# 配置获取连接等待超时的时间 单位毫秒
druid.hive.maxWait=600000
#使用非公平锁
druid.hive.useUnfairLock=true
#配置间隔多久才进行一次检测,检测需要关闭的空闲连接,单位是毫秒
druid.hive.timeBetweenEvictionRunsMillis=60000
#配置一个连接在池中最小生存的时间,单位是毫秒
druid.hive.minEvictableIdleTimeMillis=300000
#用来检测连接是否有效的sql要求是一个查询语句
druid.hive.validationQuery=select unix_timestamp()
#申请连接的时候检测如果空闲时间大于timeBetweenEvictionRunsMillis执行validationQuery检测连接是否有效
druid.hive.testWhileIdle=true
#申请连接时执行validationQuery检测连接是否有效
druid.hive.testOnBorrow=true
#归还连接时执行validationQuery检测连接是否有效
druid.hive.testOnReturn=false
#打开PSCache并且指定每个连接上PSCache的大小
druid.hive.poolPreparedStatements=true
#要启用PSCache必须配置大于0当大于0时poolPreparedStatements自动触发修改为true。在Druid中不会存在Oracle下PSCache占用内存过多的问题可以把这个数值配置大一些比如说100
druid.hive.maxOpenPreparedStatements=100
#配置监控统计拦截的filters去掉后监控界面sql无法统计
druid.hive.filters=stat
#####################################################################################################################################
##redis连接
#####################################################################################################################################

View File

@@ -176,6 +176,25 @@
<!-- </listener-class> -->
<!-- </listener> -->
<servlet>
<servlet-name>DruidStatView</servlet-name>
<servlet-class>com.alibaba.druid.support.http.StatViewServlet</servlet-class>
</servlet>
<servlet-mapping>
<servlet-name>DruidStatView</servlet-name>
<url-pattern>/druid/*</url-pattern>
</servlet-mapping>
<filter>
<filter-name>DruidWebStatFilter</filter-name>
<filter-class>com.alibaba.druid.support.http.WebStatFilter</filter-class>
<init-param>
<param-name>exclusions</param-name>
<param-value>*.js,*.gif,*.jpg,*.png,*.css,*.ico,/druid/*</param-value>
</init-param>
</filter>
<filter-mapping>
<filter-name>DruidWebStatFilter</filter-name>
<url-pattern>/*</url-pattern>
</filter-mapping>
</web-app>
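
With StatViewServlet mapped to /druid/* and the stat filter enabled on the data source, Druid's built-in monitoring console should be reachable at <context-path>/druid/index.html after deployment; DruidWebStatFilter collects the URI and session statistics shown there, and the exclusions parameter keeps static resources (js, css, images) and the console itself out of those statistics.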