Merge remote-tracking branch 'origin/Release-1.1' into develop
@@ -40,24 +40,16 @@ public class HiveJDBC {
 	static {
 		try {
 			prop.load(Configurations.class.getResourceAsStream("/jdbc.properties"));
 			driverName = prop.getProperty("jdbc.hive.driver").trim();
 			url = prop.getProperty("jdbc.hive.url").trim();
 			username = prop.getProperty("jdbc.hive.username").trim();
 			password = prop.getProperty("jdbc.hive.password").trim();
 		} catch (IOException e) {
 			e.printStackTrace();
 		}
 	}

-	public static void getConn(String searchActiveSys) throws Exception {
-		if (null != searchActiveSys && searchActiveSys.equals("4")) {
-			driverName = prop.getProperty("jdbc.hiveA.driver").trim();
-			url = prop.getProperty("jdbc.hiveA.url").trim();
-			username = prop.getProperty("jdbc.hiveA.username").trim();
-			password = prop.getProperty("jdbc.hiveA.password").trim();
-		} else {
-			driverName = prop.getProperty("jdbc.hiveB.driver").trim();
-			url = prop.getProperty("jdbc.hiveB.url").trim();
-			username = prop.getProperty("jdbc.hiveB.username").trim();
-			password = prop.getProperty("jdbc.hiveB.password").trim();
-		}
+	public static void getConn() throws Exception {
 		Class.forName(driverName);
 		conn = DriverManager.getConnection(url, username, password);
@@ -65,7 +57,7 @@ public class HiveJDBC {

 	public static ResultSet query(String sql, String searchActiveSys) throws Exception {
 		logger.info("Connecting to the data-center log store--------------------------");
-		getConn(searchActiveSys);
+		getConn();
 		logger.info("Connected to the data-center log store--------------------------");
 		st = conn.createStatement();
 		if (null != searchActiveSys && searchActiveSys.equals("4")) {
@@ -135,15 +127,15 @@ public class HiveJDBC {
 			Map<String, String> filedAndColumnMap = getColumn2FiledMap(entityClass);
 			List<String> listString = new ArrayList<String>();
 			List listObject = new ArrayList();
-			//No longer read the Date-typed field names from Object... obj (callers tend to forget them); use reflection to find them instead
-			List<String> columnList =getDateColumn(entityClass);
+			// No longer read the Date-typed field names from Object... obj (callers tend to forget them); use reflection to find them instead
+			List<String> columnList = getDateColumn(entityClass);
 			// List<String> columnList =null;
 			// if (null != obj && obj.length > 0) {
 			// columnList = new ArrayList<String>();
 			// for (int i = 0; i < obj.length; i++) {
 			// columnList.add(obj[i].toString().toLowerCase());
 			// }
 			// }
 			// ResultSet rs = HiveJDBC.query(sql.toString());
 			ResultSetMetaData metaData = rs.getMetaData();
 			while (rs.next()) {
@@ -152,11 +144,11 @@ public class HiveJDBC {
 					Object value = rs.getObject(i);
 					String filedName = filedAndColumnMap.get(metaData.getColumnName(i).toString().toLowerCase());
 					if (!StringUtil.isEmpty(value)) {
-						//If the property is a Date, convert the timestamp to a Date; if the timestamp is 0, set the value to null
+						// If the property is a Date, convert the timestamp to a Date; if the timestamp is 0, set the value to null
 						if (null != columnList && columnList.contains(filedName.toLowerCase())) {
 							long time = 0l;
 							time = Long.parseLong(value.toString());
-							map.put(filedName, time==0l?null:new Date(time * 1000));
+							map.put(filedName, time == 0l ? null : new Date(time * 1000));
 							// map.put(filedName, new
 							// Date(Long.parseLong("1476583810000")));
 						} else {
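The hunk above only reformats the timestamp conversion, but the conversion itself is worth spelling out: the data center stores dates as epoch seconds, so the value is multiplied by 1000 to get epoch milliseconds, and 0 is treated as "no date". A minimal sketch of the same logic:

	// Sketch only: epoch-seconds value -> java.util.Date, as in the hunk above.
	long epochSeconds = Long.parseLong(value.toString());
	Date date = (epochSeconds == 0L) ? null : new Date(epochSeconds * 1000L);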
237	src/main/java/com/nis/util/HiveJDBCByDruid.java	Normal file
@@ -0,0 +1,237 @@
package com.nis.util;

import java.beans.BeanInfo;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.math.BigDecimal;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.ibatis.mapping.ResultMap;
import org.apache.ibatis.mapping.ResultMapping;
import org.apache.ibatis.session.SqlSessionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.alibaba.druid.pool.DruidDataSource;
import com.nis.domain.Page;
import com.nis.web.service.SpringContextHolder;

/**
 * <p>Title: HiveJDBCByDruid</p>
 * <p>Description: Queries Hive through a Druid connection pool and parses the results</p>
 * <p>Company: IIE</p>
 * @author rkg
 * @date 2018-08-20
 */
public class HiveJDBCByDruid {
	private final static Logger logger = LoggerFactory.getLogger(HiveJDBCByDruid.class);
	static DruidDataSource datasource = null;
	Connection conn = null;
	ResultSet rs = null;
	Statement st = null;

	public static Connection getConnection() throws SQLException {
		if (datasource == null) {
			datasource = (DruidDataSource) SpringContextHolder.getBean("HiveDataSourceByDruid");
		}
		return datasource.getConnection();
	}
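getConnection() resolves the pool from the Spring context by bean name. The bean definition is not part of this commit; a minimal sketch of what a HiveDataSourceByDruid bean might look like in Spring Java config, with the driver class, URL, and credentials as placeholders rather than values taken from this repository:

	// Hypothetical Java-config equivalent of the bean this class looks up.
	@Bean(name = "HiveDataSourceByDruid")
	public DruidDataSource hiveDataSourceByDruid() {
		DruidDataSource ds = new DruidDataSource();
		ds.setDriverClassName("org.apache.hive.jdbc.HiveDriver"); // assumed HiveServer2 driver
		ds.setUrl("jdbc:hive2://hive-host:10000/default");        // placeholder URL
		ds.setUsername("hive");                                   // placeholder
		ds.setPassword("");                                       // placeholder
		ds.setMaxActive(8);
		return ds;
	}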
	/**
	 * Maps the result set into a collection of objects via reflection
	 *
	 * @param rs
	 *            the ResultSet
	 * @param entityClass
	 *            the entity class
	 * @param obj
	 *            the fields that must be converted to Date (the data-center tables have no date columns;
	 *            dates are stored as longs, and the UI needs them shown as yyyy-MM-dd hh:mm:ss,
	 *            so the longs must be converted to Date)
	 * @return
	 * @throws Exception
	 */
	public Map<String, List<Object>> tableMapping(Page<?> page, String redisKey, String sql, Class<?> entityClass, Object... obj)
			throws Exception {
		Map<String, List<Object>> mapList = new HashMap<String, List<Object>>();
		try {
			Map<String, String> filedAndColumnMap = getColumn2FiledMap(entityClass);
			List<Object> listString = new ArrayList<Object>();
			List<Object> listObject = new ArrayList<Object>();
			// No longer read the Date-typed field names from Object... obj (callers tend to forget them); use reflection to find them instead
			List<String> columnList = getDateColumn(entityClass);
			conn = getConnection();
			logger.info("Connected to the data-center log store successfully--------------------------");
			st = conn.createStatement();
			rs = st.executeQuery(sql);
			logger.info("Query executed successfully, sql={}", sql);
			ResultSetMetaData metaData = rs.getMetaData();
			while (rs.next()) {
				Map<String, Object> map = new HashMap<String, Object>();
				for (int i = 1; i <= metaData.getColumnCount(); i++) {
					Object value = rs.getObject(i);
					String filedName = filedAndColumnMap.get(metaData.getColumnName(i).toString().toLowerCase());
					if (!StringUtil.isEmpty(value)) {
						// If the property is a Date, convert the timestamp to a Date; if the timestamp is 0, set the value to null
						if (null != columnList && columnList.contains(filedName.toLowerCase())) {
							long time = 0l;
							time = Long.parseLong(value.toString());
							map.put(filedName, time == 0l ? null : new Date(time * 1000));
							// map.put(filedName, new
							// Date(Long.parseLong("1476583810000")));
						} else {
							map.put(filedName, value);
						}
					} else {
						map.put(filedName, null);
					}
				}
				listString.add(JsonMapper.toJsonString(map2Obj(map, entityClass)));
				listObject.add(map2Obj(map, entityClass));
			}
			logger.info("Closing the data-center connection");
			HiveDataSource.closeConn();
			if (null == listString || listString.size() == 0 || null == listObject || listObject.size() == 0) {
				return null;
			} else {
				// No cache writes for now
				// if (Constants.IS_OPEN_REDIS && Constants.DATACENTER_OPEN_REDIS) {
				// new SaveRedisListThread(redisKey, listString, Constants.HIVE_EXPIRE).start();
				// }
			}
			// subList is inclusive of the start and exclusive of the end: 0-30 actually fetches rows 0-29
			Integer startNum = (page.getPageNo() - 1) * page.getPageSize();
			Integer endNum = startNum - 1 + page.getPageSize() + 1;
			if (listString.size() >= startNum) {
				if (listString.size() >= endNum) {
					mapList.put("str", listString.subList(startNum, endNum));
				} else {
					mapList.put("str", listString.subList(startNum, listString.size()));
				}
			} else {
				mapList.put("str", new ArrayList<Object>());
			}
			if (listObject.size() >= startNum) {
				if (listObject.size() >= endNum) {
					mapList.put("obj", listObject.subList(startNum, endNum));
				} else {
					mapList.put("obj", listObject.subList(startNum, listObject.size()));
				}
			} else {
				mapList.put("obj", new ArrayList<Object>());
			}
		} finally {
			closeConn();
		}
		return mapList;
	}
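Pagination in tableMapping happens in memory over the rows already fetched. A worked example of the window arithmetic above, assuming the usual Page semantics (pageNo starting at 1):

	// pageNo = 2, pageSize = 30
	// startNum = (2 - 1) * 30 = 30
	// endNum   = 30 - 1 + 30 + 1 = 60
	// listString.subList(30, 60) -> rows 30..59, i.e. the second page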
	public void closeConn() {
		try {
			if (rs != null) {
				rs.close();
				rs = null;
			}
			if (st != null) {
				st.close();
				st = null;
			}
			if (conn != null) {
				conn.close();
				conn = null;
			}
			logger.info("Data-center connection closed successfully");
		} catch (Exception e) {
			e.printStackTrace();
			logger.error("Failed to close the data-center connection, cause: " + e);
		}
	}
	/**
	 * Uses reflection to collect the names of the Date-typed fields of a class
	 * @param type
	 * @return
	 * @throws Exception
	 */
	public static List<String> getDateColumn(Class<?> type) throws Exception {
		List<String> columnList = new ArrayList<String>();
		BeanInfo beanInfo = Introspector.getBeanInfo(type);
		PropertyDescriptor[] propertyDescriptors = beanInfo.getPropertyDescriptors();
		for (int i = 0; i < propertyDescriptors.length; i++) {
			PropertyDescriptor propertyDescriptor = propertyDescriptors[i];
			String name = propertyDescriptor.getName();
			String fieldTypeName = propertyDescriptor.getPropertyType().getName();
			if (fieldTypeName.equals("java.util.Date")) {
				columnList.add(name.toLowerCase());
			}
		}
		return columnList;
	}
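For a typical log entity this yields the lower-cased names of its Date properties; the entity below is illustrative, mirroring the foundTime/recvTime properties the controllers in this commit pass around:

	// Hypothetical: DkBehaviorLog declares java.util.Date foundTime, recvTime.
	// getDateColumn(DkBehaviorLog.class) -> ["foundtime", "recvtime"]
	List<String> dateFields = HiveJDBCByDruid.getDateColumn(DkBehaviorLog.class);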
	public static Object map2Obj(Map<String, Object> map, Class<?> beanClass) throws Exception {
		BeanInfo beanInfo = Introspector.getBeanInfo(beanClass);
		Object obj = beanClass.newInstance();
		PropertyDescriptor[] propertyDescriptors = beanInfo.getPropertyDescriptors();
		for (int i = 0; i < propertyDescriptors.length; i++) {
			PropertyDescriptor propertyDescriptor = propertyDescriptors[i];
			String name = propertyDescriptor.getName();
			String fieldTypeName = propertyDescriptor.getPropertyType().getName();
			if (map.containsKey(name)) {
				Object value = map.get(name);
				if (!StringUtil.isEmpty(value)) {
					if (fieldTypeName.equals("java.lang.String")) {
						propertyDescriptor.getWriteMethod().invoke(obj, value.toString());
					} else if (fieldTypeName.equals("java.lang.Integer")) {
						propertyDescriptor.getWriteMethod().invoke(obj, Integer.parseInt(value.toString()));
					} else if (fieldTypeName.equals("java.lang.Long")) {
						propertyDescriptor.getWriteMethod().invoke(obj, Long.parseLong(value.toString()));
					} else if (fieldTypeName.equals("java.lang.Boolean")) {
						propertyDescriptor.getWriteMethod().invoke(obj, Boolean.parseBoolean(value.toString()));
					} else if (fieldTypeName.equals("java.lang.Character")) {
						propertyDescriptor.getWriteMethod().invoke(obj, value.toString().charAt(0));
					} else if (fieldTypeName.equals("java.lang.Byte")) {
						propertyDescriptor.getWriteMethod().invoke(obj, Byte.parseByte(value.toString()));
					} else if (fieldTypeName.equals("java.lang.Short")) {
						propertyDescriptor.getWriteMethod().invoke(obj, Short.parseShort(value.toString()));
					} else if (fieldTypeName.equals("java.lang.Float")) {
						propertyDescriptor.getWriteMethod().invoke(obj, Float.parseFloat(value.toString()));
					} else if (fieldTypeName.equals("java.lang.Double")) {
						propertyDescriptor.getWriteMethod().invoke(obj, Double.parseDouble(value.toString()));
					} else if (fieldTypeName.equals("java.math.BigDecimal")) {
						propertyDescriptor.getWriteMethod().invoke(obj,
								BigDecimal.valueOf(Long.parseLong(value.toString())));
					} else if (fieldTypeName.equals("java.util.Date")) {
						propertyDescriptor.getWriteMethod().invoke(obj, (Date) value);
					}
				}
			}
		}
		return obj;
	}
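A minimal sketch of how map2Obj is exercised by tableMapping above; the keys are bean property names (the values of the getColumn2FiledMap map), and the entity is illustrative:

	// Sketch only: property-name -> value map into a typed bean.
	Map<String, Object> row = new HashMap<String, Object>();
	row.put("foundTime", new Date()); // hits the java.util.Date branch
	DkBehaviorLog bean = (DkBehaviorLog) HiveJDBCByDruid.map2Obj(row, DkBehaviorLog.class);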
	public static Map<String, String> getColumn2FiledMap(Class<?> clazz) {
		Map<String, String> map = new HashMap<String, String>();
		SqlSessionFactory sqlSessionFactory = SpringContextHolder.getBean(SqlSessionFactory.class);
		ResultMap resultMap = sqlSessionFactory.getConfiguration().getResultMap(clazz.getSimpleName() + "Map");
		List<ResultMapping> mapping = resultMap.getResultMappings();
		for (ResultMapping mapp : mapping) {
			map.put(mapp.getColumn().toLowerCase(), mapp.getProperty());
		}
		return map;
	}

}
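Taken together, the new call pattern that the controller changes below adopt is: build the SQL with HiveSqlService.getSql, then execute and map it with HiveJDBCByDruid.tableMapping. A sketch assembled from the calls in this commit:

	String sql = HiveSqlService.getSql(page, dkBehaviorLog,
			Configurations.getStringProperty(DkBehaviorLog.class.getSimpleName() + "HiveTable", "DK_BEHAVIOR_LOG"),
			getCol2Col(), orderBy, null);
	Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql,
			DkBehaviorLog.class, "foundTime", "recvTime");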
@@ -1,6 +1,5 @@
 package com.nis.web.controller.restful;

-import java.sql.ResultSet;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -24,7 +23,7 @@ import com.nis.restful.RestServiceException;
 import com.nis.util.Configurations;
 import com.nis.util.Constants;
 import com.nis.util.DateUtils;
-import com.nis.util.HiveJDBC;
+import com.nis.util.HiveJDBCByDruid;
 import com.nis.util.JsonMapper;
 import com.nis.web.controller.BaseRestController;
 import com.nis.web.service.AuditLogThread;
@@ -74,10 +73,10 @@ public class LogController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, dkBehaviorLog,
+		String sql = HiveSqlService.getSql(page, dkBehaviorLog,
 				Configurations.getStringProperty(DkBehaviorLog.class.getSimpleName() + "HiveTable", "DK_BEHAVIOR_LOG"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, DkBehaviorLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, DkBehaviorLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			logPage.setList(new ArrayList());
@@ -127,10 +126,10 @@ public class LogController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, pxyHttpLog,
+		String sql = HiveSqlService.getSql(page, pxyHttpLog,
 				Configurations.getStringProperty(PxyHttpLog.class.getSimpleName() + "HiveTable", "PXY_HTTP_LOG"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, PxyHttpLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, PxyHttpLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			logPage.setList(new ArrayList());
@@ -1,6 +1,5 @@
 package com.nis.web.controller.restful;

-import java.sql.ResultSet;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -34,7 +33,7 @@ import com.nis.restful.RestServiceException;
 import com.nis.util.Configurations;
 import com.nis.util.Constants;
 import com.nis.util.DateUtils;
-import com.nis.util.HiveJDBC;
+import com.nis.util.HiveJDBCByDruid;
 import com.nis.util.JsonMapper;
 import com.nis.web.controller.BaseRestController;
 import com.nis.web.service.AuditLogThread;
@@ -83,10 +82,10 @@ public class MmLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, mmAvIpLog,
+		String sql = HiveSqlService.getSql(page, mmAvIpLog,
 				Configurations.getStringProperty(MmAvIpLog.class.getSimpleName() + "HiveTable", "MM_AV_IP_LOG"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, MmAvIpLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmAvIpLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			logPage.setList(new ArrayList());
@@ -136,10 +135,10 @@ public class MmLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, mmAvUrlLog,
+		String sql = HiveSqlService.getSql(page, mmAvUrlLog,
 				Configurations.getStringProperty(MmAvUrlLog.class.getSimpleName() + "HiveTable", "MM_AV_URL_LOG"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, MmAvUrlLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmAvUrlLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			logPage.setList(new ArrayList());
@@ -189,10 +188,10 @@ public class MmLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, mmPicIpLog,
+		String sql = HiveSqlService.getSql(page, mmPicIpLog,
 				Configurations.getStringProperty(MmPicIpLog.class.getSimpleName() + "HiveTable", "MM_PIC_IP_LOG"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, MmPicIpLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmPicIpLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			logPage.setList(new ArrayList());
@@ -242,10 +241,10 @@ public class MmLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, mmPicUrlLog,
+		String sql = HiveSqlService.getSql(page, mmPicUrlLog,
 				Configurations.getStringProperty(MmPicUrlLog.class.getSimpleName() + "HiveTable", "MM_PIC_URL_LOG"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, MmPicUrlLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmPicUrlLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			logPage.setList(new ArrayList());
@@ -296,10 +295,10 @@ public class MmLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, mmVoipIpLog,
+		String sql = HiveSqlService.getSql(page, mmVoipIpLog,
 				Configurations.getStringProperty(MmVoipIpLog.class.getSimpleName() + "HiveTable", "MM_VOIP_IP_LOG"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, MmVoipIpLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmVoipIpLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			logPage.setList(new ArrayList());
@@ -349,10 +348,10 @@ public class MmLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, mmVoipLog,
+		String sql = HiveSqlService.getSql(page, mmVoipLog,
 				Configurations.getStringProperty(MmVoipAccountLog.class.getSimpleName() + "HiveTable", "MM_VOIP_ACCOUNT_LOG"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, MmVoipAccountLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmVoipAccountLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			logPage.setList(new ArrayList());
@@ -403,10 +402,10 @@ public class MmLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, mmSampleAudioLog,
+		String sql = HiveSqlService.getSql(page, mmSampleAudioLog,
 				Configurations.getStringProperty(MmSampleAudioLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_AUDIO_LOG"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, MmSampleAudioLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmSampleAudioLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			logPage.setList(new ArrayList());
@@ -456,10 +455,10 @@ public class MmLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, mmSampleVideoLog,
+		String sql = HiveSqlService.getSql(page, mmSampleVideoLog,
 				Configurations.getStringProperty(MmSampleVideoLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_VIDEO_LOG"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, MmSampleVideoLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmSampleVideoLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			logPage.setList(new ArrayList());
@@ -509,10 +508,10 @@ public class MmLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, mmPornAudioLevelLog,
+		String sql = HiveSqlService.getSql(page, mmPornAudioLevelLog,
 				Configurations.getStringProperty(MmPornAudioLevelLog.class.getSimpleName() + "HiveTable", "MM_PORN_AUDIO_LEVEL_LOG"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, MmPornAudioLevelLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmPornAudioLevelLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			logPage.setList(new ArrayList());
@@ -562,10 +561,10 @@ public class MmLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, mmPornVideoLevelLog,
+		String sql = HiveSqlService.getSql(page, mmPornVideoLevelLog,
 				Configurations.getStringProperty(MmPornVideoLevelLog.class.getSimpleName() + "HiveTable", "MM_PRON_VIDEO_LOG"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, MmPornVideoLevelLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmPornVideoLevelLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			logPage.setList(new ArrayList());
@@ -616,10 +615,10 @@ public class MmLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, mmSamplePicLog,
+		String sql = HiveSqlService.getSql(page, mmSamplePicLog,
 				Configurations.getStringProperty(MmSamplePicLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_PIC_LOG"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, MmSamplePicLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmSamplePicLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			logPage.setList(new ArrayList());
@@ -669,10 +668,10 @@ public class MmLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, mmSampleVoipLog,
+		String sql = HiveSqlService.getSql(page, mmSampleVoipLog,
 				Configurations.getStringProperty(MmSampleVoipLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_VOIP_LOG"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, MmSampleVoipLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, MmSampleVoipLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			logPage.setList(new ArrayList());
@@ -37,6 +37,7 @@ import com.nis.util.Configurations;
 import com.nis.util.Constants;
 import com.nis.util.DateUtils;
 import com.nis.util.HiveJDBC;
+import com.nis.util.HiveJDBCByDruid;
 import com.nis.util.JsonMapper;
 import com.nis.web.controller.BaseRestController;
 import com.nis.web.service.AuditLogThread;
@@ -83,15 +84,20 @@ public class NtcLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, ntcIpLog,
+		// ResultSet rs = HiveSqlService.getResultSet(page, ntcIpLog,
+		// Configurations.getStringProperty(NtcIpLog.class.getSimpleName() + "HiveTable", "ntc_ip_log"),
+		// getCol2Col(), orderBy, null);
+		// Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcIpLog.class, "foundTime",
+		// "recvTime");
+		String sql = HiveSqlService.getSql(page, ntcIpLog,
 				Configurations.getStringProperty(NtcIpLog.class.getSimpleName() + "HiveTable", "ntc_ip_log"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcIpLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcIpLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
-			ntcIpLogPage.setList(new ArrayList());
+			ntcIpLogPage.setList(new ArrayList<NtcIpLog>());
 		} else {
-			List list = new ArrayList();
+			List<Object> list = new ArrayList<Object>();
 			list = tableMapping.get("obj");
 			// if (tableMapping.get("obj").size() > page.getPageSize()) {
 			// list = tableMapping.get("obj").subList(0, page.getPageSize());
@@ -149,10 +155,10 @@ public class NtcLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, ntcHttpLog,
+		String sql = HiveSqlService.getSql(page, ntcHttpLog,
 				Configurations.getStringProperty(NtcHttpLog.class.getSimpleName() + "HiveTable", "ntc_http_log"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcHttpLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcHttpLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			ntcHttpLogPage.setList(new ArrayList());
@@ -209,10 +215,10 @@ public class NtcLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, ntcDnsLog,
+		String sql = HiveSqlService.getSql(page, ntcDnsLog,
 				Configurations.getStringProperty(NtcDnsLog.class.getSimpleName() + "HiveTable", "ntc_dns_log"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcDnsLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcDnsLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			ntcDnsLogPage.setList(new ArrayList());
@@ -269,10 +275,10 @@ public class NtcLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, ntcMailLog,
+		String sql = HiveSqlService.getSql(page, ntcMailLog,
 				Configurations.getStringProperty(NtcMailLog.class.getSimpleName() + "HiveTable", "ntc_mail_log"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcMailLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcMailLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			ntcMailLogPage.setList(new ArrayList());
@@ -329,10 +335,10 @@ public class NtcLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, ntcSslLog,
+		String sql = HiveSqlService.getSql(page, ntcSslLog,
 				Configurations.getStringProperty(NtcSslLog.class.getSimpleName() + "HiveTable", "ntc_ssl_log"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcSslLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcSslLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			ntcSslLogPage.setList(new ArrayList());
@@ -389,10 +395,10 @@ public class NtcLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, ntcPptpLog,
+		String sql = HiveSqlService.getSql(page, ntcPptpLog,
 				Configurations.getStringProperty(NtcPptpLog.class.getSimpleName() + "HiveTable", "ntc_pptp_log"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcPptpLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcPptpLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			ntcPptpLogPage.setList(new ArrayList());
@@ -448,10 +454,10 @@ public class NtcLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, ntcL2tpLog,
+		String sql = HiveSqlService.getSql(page, ntcL2tpLog,
 				Configurations.getStringProperty(NtcL2tpLog.class.getSimpleName() + "HiveTable", "ntc_l2tp_log"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcL2tpLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcL2tpLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			ntcL2tpLogPage.setList(new ArrayList());
@@ -508,10 +514,10 @@ public class NtcLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, ntcOpenvpnLog,
+		String sql = HiveSqlService.getSql(page, ntcOpenvpnLog,
 				Configurations.getStringProperty(NtcOpenvpnLog.class.getSimpleName() + "HiveTable", "ntc_openvpn_log"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcOpenvpnLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcOpenvpnLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			ntcOpenvpnLogPage.setList(new ArrayList());
@@ -568,10 +574,10 @@ public class NtcLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, ntcIpsecLog,
+		String sql = HiveSqlService.getSql(page, ntcIpsecLog,
 				Configurations.getStringProperty(NtcIpsecLog.class.getSimpleName() + "HiveTable", "ntc_ipsec_log"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcIpsecLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcIpsecLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			ntcIpsecLogPage.setList(new ArrayList());
@@ -628,10 +634,10 @@ public class NtcLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, ntcSshLog,
+		String sql = HiveSqlService.getSql(page, ntcSshLog,
 				Configurations.getStringProperty(NtcSshLog.class.getSimpleName() + "HiveTable", "ntc_ssh_log"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcSshLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcSshLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			ntcSshLogPage.setList(new ArrayList());
@@ -688,10 +694,10 @@ public class NtcLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, ntcFtpLog,
+		String sql = HiveSqlService.getSql(page, ntcFtpLog,
 				Configurations.getStringProperty(NtcFtpLog.class.getSimpleName() + "HiveTable", "ntc_ftp_log"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcFtpLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcFtpLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			ntcFtpLogPage.setList(new ArrayList());
@@ -746,10 +752,10 @@ public class NtcLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, ntcAppLog,
+		String sql = HiveSqlService.getSql(page, ntcAppLog,
 				Configurations.getStringProperty(NtcAppLog.class.getSimpleName() + "HiveTable", "ntc_app_log"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcAppLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcAppLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			ntcAppLogPage.setList(new ArrayList());
@@ -804,10 +810,10 @@ public class NtcLogSearchController extends BaseRestController {
 		} else {
 			orderBy = "found_Time";
 		}
-		ResultSet rs = HiveSqlService.getResultSet(page, ntcDdosLog,
+		String sql = HiveSqlService.getSql(page, ntcDdosLog,
 				Configurations.getStringProperty(NtcDdosLog.class.getSimpleName() + "HiveTable", "ntc_ddos_log"),
 				getCol2Col(), orderBy, null);
-		Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcDdosLog.class, "foundTime",
+		Map<String, List<Object>> tableMapping = new HiveJDBCByDruid().tableMapping(page, null, sql, NtcDdosLog.class, "foundTime",
 				"recvTime");
 		if (tableMapping == null) {
 			ntcDdosLogPage.setList(new ArrayList());
@@ -2,7 +2,10 @@ package com.nis.web.service;

 import java.lang.reflect.Field;
 import java.lang.reflect.Method;
+import java.sql.Connection;
 import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
@@ -17,6 +20,7 @@ import org.apache.ibatis.session.SqlSessionFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

+import com.alibaba.druid.pool.DruidDataSource;
 import com.nis.domain.Page;
 import com.nis.util.Configurations;
 import com.nis.util.Constants;
@@ -26,9 +30,21 @@ import com.nis.util.redis.SaveRedisThread;

 public class HiveSqlService {
 	private final static Logger logger = LoggerFactory.getLogger(HiveSqlService.class);
+	static DruidDataSource datasource = null;
+	Connection conn = null;
+	ResultSet rs = null;
+	Statement st = null;
+
+	public static Connection getConnection() throws SQLException {
+		if (datasource == null) {
+			datasource = (DruidDataSource) SpringContextHolder.getBean("HiveDataSourceByDruid");
+		}
+		return datasource.getConnection();
+	}
 	private static SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
 	private static SimpleDateFormat sdf2 = new SimpleDateFormat("yyyyMMdd");
-	public static ResultSet getResultSet(Page page, Object bean, String tableName,
+	public static String getSql(Page page, Object bean, String tableName,
 			Map<String, Map<String, String>> col2col, String orderBy, String searchActiveSys) throws Exception {
 		tableName = tableName.toLowerCase();
 		String showColmun = getFiledsSql(bean.getClass().getSimpleName(), page.getFields());
@@ -132,10 +148,7 @@ public class HiveSqlService {
 		// row_Num between " + startNum + " and " + endNum);
 		sql.append(" limit " + Constants.EVERY_GETHIVEDATANUM);
 		logger.info("Data-center log SQL==================={}", sql);
-		// ResultSet query = HiveJDBC.query(sql.toString());
-		ResultSet query = HiveDataSource.query(sql.toString());
-		logger.info("Fetched data-center logs successfully");
-		return query;
+		return sql.toString();
 	}

 	public static Long getHivePageCount(Object bean, String countKey, String tableName,
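How the two halves fit: getSql caps every query with "limit " + Constants.EVERY_GETHIVEDATANUM, and HiveJDBCByDruid.tableMapping then pages through that capped result in memory via subList. Assuming a cap of 1000 (the constant's value is not shown in this commit):

	// With EVERY_GETHIVEDATANUM = 1000 and pageSize = 30, roughly the first 33
	// pages are served from a single Hive query; pages past row 1000 come back empty.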
@@ -444,7 +457,6 @@ public class HiveSqlService {

 	public static String getFiledsSql(String mapName, String fileds) throws Exception {
 		String[] fieldsColoumn = null;
-		String orderByStr = "";
 		// all column names
 		List<String> columnList = new ArrayList<String>();
 		// all property names
@@ -489,7 +501,7 @@ public class HiveSqlService {
 		return fileds;
 	}

-	public static Map<String, String> getFiledAndColumnMap(Class clazz) {
+	public static Map<String, String> getFiledAndColumnMap(Class<?> clazz) {
 		Map<String, String> map = new HashMap<String, String>();
 		SqlSessionFactory sqlSessionFactory = SpringContextHolder.getBean(SqlSessionFactory.class);
 		ResultMap resultMap = sqlSessionFactory.getConfiguration().getResultMap(clazz.getSimpleName() + "Map");
@@ -605,10 +617,4 @@ public class HiveSqlService {
 		}

 	}

-	public static void main(String[] args) {
-		Long datacenterTime = Constants.DATACENTER_TIME;
-		double doubleValue = datacenterTime.doubleValue();
-		System.out.println(doubleValue);
-	}
 }