Merge branch 'feature-181031' into develop

This commit is contained in:
doufenghu
2018-11-09 15:09:34 +08:00
8 changed files with 88 additions and 92 deletions

View File

@@ -472,7 +472,7 @@
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-jdbc</artifactId>
<version>2.1.1</version>
<version>2.3.3</version>
<exclusions>
<exclusion>
<groupId>org.apache.logging.log4j</groupId>

View File

@@ -8,7 +8,6 @@ import org.springframework.beans.BeansException;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.beans.factory.config.PropertyPlaceholderConfigurer;
public class PropertyPlaceholderConfigurerCrypt extends PropertyPlaceholderConfigurer {
@Override
@@ -16,20 +15,7 @@ public class PropertyPlaceholderConfigurerCrypt extends PropertyPlaceholderConfi
throws BeansException {
try {
String productPassword = props.getProperty("jdbc.product.password");
String productScretKey = props.getProperty("jdbc.product.key");
if (null != productPassword) {
props.setProperty("jdbc.product.password",
new String(AESUtil.decrypt(Base64.decodeBase64(productPassword), productScretKey)));
}
String devlopPassword = props.getProperty("jdbc.devlop.password");
String devlopScretKey = props.getProperty("jdbc.devlop.key");
if (null != devlopPassword) {
props.setProperty("jdbc.devlop.password",
new String(AESUtil.decrypt(Base64.decodeBase64(devlopPassword), devlopScretKey)));
}
// mysql
String logPassword = props.getProperty("jdbc.log.password");
String logScretKey = props.getProperty("jdbc.log.key");
if (null != logPassword) {
@@ -37,67 +23,20 @@ public class PropertyPlaceholderConfigurerCrypt extends PropertyPlaceholderConfi
new String(AESUtil.decrypt(Base64.decodeBase64(logPassword), logScretKey)));
}
// 日志A版
String logAPassword = props.getProperty("jdbc.logA.password");
String logAScretKey = props.getProperty("jdbc.logA.key");
if (null != logAPassword) {
props.setProperty("jdbc.logA.password",
new String(AESUtil.decrypt(Base64.decodeBase64(logAPassword), logAScretKey)));
}
// 日志C版
String logCPassword = props.getProperty("jdbc.logC.password");
String logCScretKey = props.getProperty("jdbc.logC.key");
if (null != logCPassword) {
props.setProperty("jdbc.logC.password",
new String(AESUtil.decrypt(Base64.decodeBase64(logCPassword), logCScretKey)));
}
// 测试使用,后期会删除
String testPassword = props.getProperty("jdbc.test.password");
String testScretKey = props.getProperty("jdbc.test.key");
if (null != testPassword) {
props.setProperty("jdbc.test.password",
new String(AESUtil.decrypt(Base64.decodeBase64(testPassword), testScretKey)));
}
String jkPzPassword = props.getProperty("jdbc.jk.password");
String jkPzScretKey = props.getProperty("jdbc.jk.key");
if (null != jkPzPassword) {
props.setProperty("jdbc.jk.password",
new String(AESUtil.decrypt(Base64.decodeBase64(jkPzPassword), jkPzScretKey)));
}
//A版hive库
String hiveAPassword = props.getProperty("jdbc.hiveA.password");
String hiveAScretKey = props.getProperty("jdbc.hiveA.key");
if (null != hiveAPassword) {
props.setProperty("jdbc.hiveA.password",
new String(AESUtil.decrypt(Base64.decodeBase64(hiveAPassword), hiveAScretKey)));
}
//B版hive库
String hiveBPassword = props.getProperty("jdbc.hiveB.password");
String hiveBScretKey = props.getProperty("jdbc.hiveB.key");
if (null != hiveBPassword) {
props.setProperty("jdbc.hiveB.password",
new String(AESUtil.decrypt(Base64.decodeBase64(hiveBPassword), hiveBScretKey)));
}
//神通数据库
String clusterPassword = props.getProperty("jdbc.log.cluster.password");
String clusterScretKey = props.getProperty("jdbc.log.cluster.key");
if (null != clusterPassword) {
props.setProperty("jdbc.log.cluster.password",
new String(AESUtil.decrypt(Base64.decodeBase64(clusterPassword), clusterScretKey)));
}
//clickhouse
// clickhouse
String clickHousePassword = props.getProperty("jdbc.clickhouse.password");
String clickHouseScretKey = props.getProperty("jdbc.clickhouse.key");
if (null != clickHousePassword) {
props.setProperty("jdbc.clickhouse.password",
new String(AESUtil.decrypt(Base64.decodeBase64(clickHousePassword), clickHouseScretKey)));
}
// hive
String hivePassword = props.getProperty("jdbc.hive.password");
String hiveScretKey = props.getProperty("jdbc.hive.key");
if (null != hivePassword) {
props.setProperty("jdbc.hive.password",
new String(AESUtil.decrypt(Base64.decodeBase64(hivePassword), hiveScretKey)));
}
} catch (Exception e) {
e.printStackTrace();

View File

@@ -1409,7 +1409,7 @@
<result column="found_Time" jdbcType="TIMESTAMP" property="foundTime" />
<result column="recv_Time" jdbcType="TIMESTAMP" property="recvTime" />
<result column="cap_ip" jdbcType="VARCHAR" property="capIp" />
<result column="voip_rotocol" jdbcType="VARCHAR" property="voipProtocol" />
<result column="voip_protocol" jdbcType="VARCHAR" property="voipProtocol" />
<result column="rtp_d_ip" jdbcType="VARCHAR" property="rtpDIp" />
<result column="rtp_s_ip" jdbcType="VARCHAR" property="rtpSIp" />
<result column="rtp_d_port" jdbcType="VARCHAR" property="rtpDPort" />

View File

@@ -1,4 +1,4 @@
package com.nis.util;
package com.nis.web.dao.impl;
import java.beans.BeanInfo;
import java.beans.Introspector;
@@ -20,9 +20,11 @@ import org.apache.ibatis.mapping.ResultMapping;
import org.apache.ibatis.session.SqlSessionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Repository;
import com.alibaba.druid.pool.DruidDataSource;
import com.nis.domain.Page;
import com.nis.util.Constants;
import com.nis.web.service.SpringContextHolder;
import com.zdjizhi.utils.StringUtil;
@@ -42,6 +44,7 @@ import com.zdjizhi.utils.StringUtil;
* @date 2018年8月20日
*
*/
@Repository
public class LogJDBCByDruid {
private final static Logger logger = LoggerFactory.getLogger(LogJDBCByDruid.class);
static DruidDataSource datasource = null;

View File

@@ -2,9 +2,6 @@ package com.nis.web.service;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.HashMap;
@@ -14,13 +11,13 @@ import java.util.Map;
import org.apache.ibatis.mapping.ResultMap;
import org.apache.ibatis.mapping.ResultMapping;
import org.apache.ibatis.session.SqlSessionFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.alibaba.druid.pool.DruidDataSource;
import com.nis.domain.Page;
import com.nis.util.Configurations;
import com.nis.util.Constants;
import com.nis.util.LogJDBCByDruid;
import com.nis.web.dao.impl.LogJDBCByDruid;
import com.zdjizhi.utils.StringUtil;
/**
@@ -33,10 +30,9 @@ import com.zdjizhi.utils.StringUtil;
public class LogDataService {
// private final static Logger logger =
// LoggerFactory.getLogger(LogDataService.class);
static DruidDataSource datasource = null;
Connection conn = null;
ResultSet rs = null;
Statement st = null;
@Autowired
private LogJDBCByDruid logJDBCByDruid;
private static SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
private static SimpleDateFormat sdf2 = new SimpleDateFormat("yyyyMMdd");
private static Map<String, Map<String, String>> col2col = new HashMap<String, Map<String, String>>();
@@ -189,7 +185,7 @@ public class LogDataService {
}
Integer startNum = (page.getPageNo() - 1) * page.getPageSize();
StringBuffer foundTimeSql = new StringBuffer();
foundTimeSql.append("select distinct found_time from " + tableName + " where ");
foundTimeSql.append("select found_time from " + tableName + " where ");
if (whereSB.length() == 0) {// 没有其他查询条件只有默认的found_time条件
if (whereFoundTime.length() > 0) {
int indexOf = whereFoundTime.indexOf("and") + "and".length();
@@ -365,10 +361,11 @@ public class LogDataService {
*/
private <T> void searchFromDataCenter(Page<T> page, Object bean, StringBuffer selSql, StringBuffer countSql)
throws Exception {
new LogJDBCByDruid().getTableData(page, selSql.toString(), bean.getClass());
// new LogJDBCByDruid().getTableData(page, selSql.toString(), bean.getClass());
logJDBCByDruid.getTableData(page, selSql.toString(), bean.getClass());
if (Constants.ISOPENLOGCOUNTANDLAST) {
if (page.getList() != null && page.getList().size() > 0) {
new LogJDBCByDruid().getCount(page, countSql.toString().toLowerCase());
logJDBCByDruid.getCount(page, countSql.toString().toLowerCase());
}
}

View File

@@ -16,7 +16,6 @@ import com.nis.domain.restful.ConfigPzIdSource;
import com.zdjizhi.utils.StringUtil;
import com.nis.web.dao.ConfigPzIdDao;
import com.nis.web.service.BaseLogService;
import com.sun.tools.internal.xjc.reader.xmlschema.bindinfo.BIConversion.Static;
/**
* @ClassName:ConfigPzIdService

View File

@@ -0,0 +1,47 @@
#由于数据中心hive日志表的表名会变动,所以本系统中将日志的表名提取到配置文件中方便后期修改,key是bean的名称+HiveTable,value是hive的实际表名
NtcIpLogHiveTable=singhand_NTC_IP_LOG
NtcHttpLogHiveTable=singhand_NTC_HTTP_LOG
NtcDnsLogHiveTable=singhand_NTC_DNS_LOG
NtcMailLogHiveTable=singhand_NTC_MAIL_LOG
NtcSslLogHiveTable=singhand_NTC_SSL_LOG
NtcPptpLogHiveTable=singhand_NTC_PPTP_LOG
NtcL2tpLogHiveTable=singhand_NTC_L2TP_LOG
NtcOpenvpnLogHiveTable=singhand_NTC_OPENVPN_LOG
NtcIpsecLogHiveTable=singhand_NTC_IPSEC_LOG
NtcSshLogHiveTable=singhand_NTC_SSH_LOG
NtcFtpLogHiveTable=singhand_NTC_FTP_LOG
NtcAppLogHiveTable=singhand_NTC_APP_LOG
NtcDdosLogHiveTable=singhand_NTC_DDOS_LOG
NtcP2pLogHiveTable=singhand_NTC_P2P_LOG
NtcBgpLogHiveTable=singhand_NTC_BGP_LOG
DkBehaviorLogHiveTable=singhand_DK_BEHAVIOR_LOG
MmAvIpLogHiveTable=singhand_MM_AV_IP_LOG
MmAvUrlLogHiveTable=singhand_MM_AV_URL_LOG
MmPicIpLogHiveTable=singhand_MM_PIC_IP_LOG
MmPicUrlLogHiveTable=singhand_MM_PIC_URL_LOG
MmVoipIpLogHiveTable=singhand_MM_VOIP_IP_LOG
MmVoipAccountLogHiveTable=singhand_MM_VOIP_ACCOUNT_LOG
MmSampleAudioLogHiveTable=singhand_MM_SAMPLE_AUDIO_LOG
MmSampleVideoLogHiveTable=singhand_MM_SAMPLE_VIDEO_LOG
MmPornAudioLevelLogHiveTable=singhand_MM_PORN_AUDIO_LEVEL_LOG
MmPornVideoLevelLogHiveTable=singhand_MM_PORN_VIDEO_LEVEL_LOG
MmSamplePicLogHiveTable=singhand_MM_SAMPLE_PIC_LOG
MmSampleVoipLogHiveTable=singhand_MM_SAMPLE_VOIP_LOG
PxyHttpLogHiveTable=singhand_PXY_HTTP_LOG
MmFaceRecognizationLogHiveTable=singhand_MM_FACE_RECOGNIZATION_LOG
MmLogoDetectionLogHiveTable=singhand_MM_LOGO_DETECTION_LOG
MmSpeakerRecognizationLogHiveTable=singhand_MM_SPEAKER_RECOGNIZATION_LOG
NtcVoipLogHiveTable=singhand_NTC_VOIP_LOG
NtcStreamingMediaLogHiveTable=singhand_NTC_STREAMING_MEDIA_LOG
MmFileDigestLogHiveTable=singhand_MM_FILE_DIGEST_LOG
NtcKeywordsUrlLogHiveTable=singhand_NTC_KEYWORDS_URL_LOG
NtcCollectVoipLogHiveTable=singhand_NTC_COLLECT_VOIP_LOG

View File

@@ -40,7 +40,7 @@ bonecp.statementsCacheSize=100
#数据中心hive接口配置
############################################################################################################################################
#数据中心hive日志库数据库名称,程序中每次查询时使用的数据库名称 use dbName
jdbc.hive.DBName=maat
#jdbc.hive.DBName=maat
#A版日志库
jdbc.hive.driver=org.apache.hive.jdbc.HiveDriver
#元辰鑫内网
@@ -48,16 +48,27 @@ jdbc.hive.driver=org.apache.hive.jdbc.HiveDriver
#华严
#jdbc.hive.url=jdbc:hive2://192.168.11.243:2181,192.168.10.76:2181,192.168.10.77:2181/maat;serviceDiscoveryMode=zooKeeper;zooKeeperNamespace=hiveserver2
#元辰鑫外网
jdbc.hive.url=jdbc:hive2://192.168.10.22:10000/maat
#jdbc.hive.url=jdbc:hive2://192.168.10.22:10000/maat
#亦庄演示环境高可用集群连接,注意使用此种方法需要配置hosts,主机名与ip的对应关系
#jdbc.hive.url=jdbc:hive2://10.3.48.2:2181,10.3.48.3:2181,10.3.48.4:2181/maat;serviceDiscoveryMode=zooKeeper;zooKeeperNamespace=hiveserver2
jdbc.hive.username=xa_z2_mesa
jdbc.hive.key=aC/8fTC9vfPVhCk+CDzbAQ==
#jdbc.hive.username=xa_z2_mesa
#jdbc.hive.key=aC/8fTC9vfPVhCk+CDzbAQ==
#加密后密码
jdbc.hive.password=V3GyFlG8Mg01bTt8ykFVaA==
#jdbc.hive.password=V3GyFlG8Mg01bTt8ykFVaA==
#实际密码
#jdbc.hiveA.password=123!@#qwe
#星汉hive数据源
jdbc.hive.DBName=singhand_ntcstore
jdbc.hive.url=jdbc:hive2://218.76.55.165:30000/singhand_ntcstore
jdbc.hive.username=singhand
jdbc.hive.key=uKYTzWsRN1+X5Tt3W+CA4A==
jdbc.hive.password=f+IYYX73TPEqUp3fxLzxow==
bonecp.hive.idleMaxAgeInMinutes=60
bonecp.hive.idleConnectionTestPeriodInMinutes=240
bonecp.hive.maxConnectionsPerPartition=20