();
+ // for (int i = 0; i < obj.length; i++) {
+ // columnList.add(obj[i].toString().toLowerCase());
+ // }
+ // }
// ResultSet rs = HiveJDBC.query(sql.toString());
ResultSetMetaData metaData = rs.getMetaData();
while (rs.next()) {
@@ -152,11 +144,11 @@ public class HiveJDBC {
Object value = rs.getObject(i);
String filedName = filedAndColumnMap.get(metaData.getColumnName(i).toString().toLowerCase());
if (!StringUtil.isEmpty(value)) {
- //If the attribute is a date type, convert the timestamp to a date; if the timestamp is 0, set the value to null
+ // If the attribute is a date type, convert the timestamp to a date; if the timestamp is 0, set the value to null
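+ // (the stored value is evidently epoch seconds, while java.util.Date expects milliseconds, hence the * 1000 below)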
if (null != columnList && columnList.contains(filedName.toLowerCase())) {
long time = 0l;
time = Long.parseLong(value.toString());
- map.put(filedName, time==0l?null:new Date(time * 1000));
+ map.put(filedName, time == 0l ? null : new Date(time * 1000));
// map.put(filedName, new
// Date(Long.parseLong("1476583810000")));
} else {
diff --git a/src/main/java/com/nis/util/HiveJDBCByDruid.java b/src/main/java/com/nis/util/HiveJDBCByDruid.java
new file mode 100644
index 0000000..e048b2f
--- /dev/null
+++ b/src/main/java/com/nis/util/HiveJDBCByDruid.java
@@ -0,0 +1,237 @@
+package com.nis.util;
+
+import java.beans.BeanInfo;
+import java.beans.Introspector;
+import java.beans.PropertyDescriptor;
+import java.math.BigDecimal;
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.ibatis.mapping.ResultMap;
+import org.apache.ibatis.mapping.ResultMapping;
+import org.apache.ibatis.session.SqlSessionFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.alibaba.druid.pool.DruidDataSource;
+import com.nis.domain.Page;
+import com.nis.web.service.SpringContextHolder;
+/**
+ *
+ * Title: HiveJDBCByDruid
+ * Description: queries Hive through a Druid connection pool and parses the result set
+ * Company: IIE
+ * @author rkg
+ * @date 2018-08-20
+ *
+ */
+public class HiveJDBCByDruid {
+ private final static Logger logger = LoggerFactory.getLogger(HiveJDBCByDruid.class);
+ static DruidDataSource datasource = null;
+ Connection conn = null;
+ ResultSet rs = null;
+ Statement st = null;
+
+ public static Connection getConnection() throws SQLException {
+ if (datasource == null) {
+ datasource = (DruidDataSource) SpringContextHolder.getBean("HiveDataSourceByDruid");
+ }
+ return datasource.getConnection();
+ }
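+
+ // A minimal usage sketch (assumes the Spring context actually defines a DruidDataSource
+ // bean named "HiveDataSourceByDruid", as looked up above):
+ //
+ // Connection conn = HiveJDBCByDruid.getConnection();
+ // try (Statement st = conn.createStatement();
+ // ResultSet rs = st.executeQuery("select 1")) {
+ // while (rs.next()) { /* read columns */ }
+ // } finally {
+ // conn.close(); // returns the connection to the Druid pool instead of closing it
+ // }
+ //
+ // Note: the unsynchronized lazy init above is benign, since getBean returns the same singleton.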
+
+ /**
+ * Maps the result set onto a collection of objects via reflection.
+ *
+ * @param rs
+ * resultSet
+ * @param entityClass
+ * the entity class
+ * @param obj
+ * which fields need to be converted to the Date type (the data-center table schema has
+ * no date-typed columns; dates are stored as long values, and the UI must display them in
+ * yyyy-MM-dd hh:mm:ss form, so the long values have to be converted to Date)
+ * @return
+ * @throws Exception
+ */
+ public Map<String, Object> tableMapping(Page<?> page, String redisKey, String sql, Class<?> entityClass, Object... obj)
+ throws Exception {
+ Map<String, Object> mapList = new HashMap<String, Object>();
+ try {
+ Map<String, String> filedAndColumnMap = getColumn2FiledMap(entityClass);
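+ // filedAndColumnMap maps lowercased column names to bean property names; judging by the
+ // org.apache.ibatis imports above, it is presumably derived from the entity's MyBatis ResultMap.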
+ List