package cn.ac.iie.dao;

import cn.ac.iie.config.ApplicationConfig;
import cn.ac.iie.utils.ClickhouseConnect;
import com.alibaba.druid.pool.DruidPooledConnection;
import com.arangodb.entity.BaseDocument;
import com.arangodb.entity.BaseEdgeDocument;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.ResultSet;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.function.Function;
import java.util.function.Supplier;

import static cn.ac.iie.service.read.ReadClickhouseData.putMapByHashcode;

/**
 * Reads data from ClickHouse and packs it into nested maps.
 * The outer key is the worker-thread index pre-created by {@link #initializeMap};
 * documents are distributed into the nested buckets by {@code putMapByHashcode}.
 *
 * @author wlh
 */
public class BaseClickhouseData {

    private static final Logger LOG = LoggerFactory.getLogger(BaseClickhouseData.class);

    // Vertex documents read from ClickHouse, bucketed per worker thread.
    static HashMap<Integer, HashMap<Integer, ArrayList<BaseDocument>>> newVertexFqdnMap = new HashMap<>();
    static HashMap<Integer, HashMap<Integer, ArrayList<BaseDocument>>> newVertexIpMap = new HashMap<>();
    static HashMap<Integer, HashMap<Integer, ArrayList<BaseDocument>>> newVertexSubscriberMap = new HashMap<>();

    // Edge (relation) documents read from ClickHouse, bucketed per worker thread.
    static HashMap<Integer, HashMap<Integer, ArrayList<BaseEdgeDocument>>> newRelationFqdnAddressIpMap = new HashMap<>();
    static HashMap<Integer, HashMap<Integer, ArrayList<BaseEdgeDocument>>> newRelationIpVisitFqdnMap = new HashMap<>();
    static HashMap<Integer, HashMap<Integer, ArrayList<BaseEdgeDocument>>> newRelationSubsciberLocateIpMap = new HashMap<>();
    static HashMap<Integer, HashMap<Integer, ArrayList<BaseEdgeDocument>>> newRelationFqdnSameFqdnMap = new HashMap<>();

    private static ClickhouseConnect manger = ClickhouseConnect.getInstance();
    private DruidPooledConnection connection;
    private Statement statement;

    /**
     * Runs the SQL produced by {@code getSqlSupplier}, converts every row with
     * {@code formatResultFunc}, and stores the resulting documents in {@code newMap}.
     * Rows for which the converter returns {@code null} are skipped.
     */
    <T> void baseDocumentFromClickhouse(HashMap<Integer, HashMap<Integer, ArrayList<T>>> newMap,
                                        Supplier<String> getSqlSupplier,
                                        Function<ResultSet, T> formatResultFunc) {
        long start = System.currentTimeMillis();
        initializeMap(newMap);
        String sql = getSqlSupplier.get();
        try {
            connection = manger.getConnection();
            statement = connection.createStatement();
            ResultSet resultSet = statement.executeQuery(sql);
            int i = 0;
            while (resultSet.next()) {
                T newDoc = formatResultFunc.apply(resultSet);
                if (newDoc != null) {
                    i += 1;
                    putMapByHashcode(newDoc, newMap);
                }
            }
            long last = System.currentTimeMillis();
            LOG.info(sql + "\nRead " + i + " rows, elapsed time: " + (last - start) + " ms");
        } catch (Exception e) {
            LOG.error("Failed to read data from ClickHouse: " + sql, e);
        } finally {
            manger.clear(statement, connection);
        }
    }

    /**
     * Pre-creates one empty bucket per worker thread so that readers can
     * write into the map without further null checks.
     */
    private <T> void initializeMap(HashMap<Integer, HashMap<Integer, ArrayList<T>>> map) {
        try {
            for (int i = 0; i < ApplicationConfig.THREAD_POOL_NUMBER; i++) {
                map.put(i, new HashMap<>(16));
            }
        } catch (Exception e) {
            LOG.error("Failed to initialize map", e);
        }
    }
}
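
/*
 * Illustrative sketch (not part of the original class): one way a caller in the
 * same package might drive baseDocumentFromClickhouse. The table name "dns_record",
 * the column name "fqdn", and the helper names below are hypothetical assumptions
 * made for this example only; substitute the project's real SQL and mapping logic.
 */
class ReadFqdnVertexExample extends BaseClickhouseData {

    void loadFqdnVertices() {
        // Supplier builds the query text; Function maps one ResultSet row to a document.
        baseDocumentFromClickhouse(
                newVertexFqdnMap,
                () -> "SELECT DISTINCT fqdn FROM dns_record",   // hypothetical table/column
                this::formatFqdnVertex);
    }

    private BaseDocument formatFqdnVertex(ResultSet rs) {
        try {
            String fqdn = rs.getString("fqdn");
            BaseDocument doc = new BaseDocument(fqdn);          // FQDN doubles as the document key
            doc.addAttribute("fqdn", fqdn);
            return doc;
        } catch (Exception e) {
            // Returning null makes baseDocumentFromClickhouse skip the row.
            return null;
        }
    }
}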