diff --git a/.idea/uiDesigner.xml b/.idea/uiDesigner.xml
new file mode 100644
index 0000000..e96534f
--- /dev/null
+++ b/.idea/uiDesigner.xml
@@ -0,0 +1,124 @@
+
+
+
+
+ -
+
+
+ -
+
+
+ -
+
+
+ -
+
+
+ -
+
+
+
+
+
+ -
+
+
+
+
+
+ -
+
+
+
+
+
+ -
+
+
+
+
+
+ -
+
+
+
+
+ -
+
+
+
+
+ -
+
+
+
+
+ -
+
+
+
+
+ -
+
+
+
+
+ -
+
+
+
+
+ -
+
+
+ -
+
+
+
+
+ -
+
+
+
+
+ -
+
+
+
+
+ -
+
+
+
+
+ -
+
+
+
+
+ -
+
+
+ -
+
+
+ -
+
+
+ -
+
+
+ -
+
+
+
+
+ -
+
+
+ -
+
+
+
+
+
\ No newline at end of file
diff --git a/src/main/java/com/nis/domain/LogEntity.java b/src/main/java/com/nis/domain/LogEntity.java
index 0f5bab4..1a3d821 100644
--- a/src/main/java/com/nis/domain/LogEntity.java
+++ b/src/main/java/com/nis/domain/LogEntity.java
@@ -48,12 +48,10 @@ public abstract class LogEntity implements Serializable {
protected Date foundTime;
@ApiModelProperty(value="接收时间", required=true)
protected Date recvTime;
- /*@ApiModelProperty(value="外层嵌套关联信息ID", required=true)
- protected Long overId;*/
- @ApiModelProperty(value="外层嵌套关联信息ID", required=true)
- protected String overId;
@ApiModelProperty(value="协议类型", required=true)
protected String protocol;
+ @ApiModelProperty(value="IP地址类型", required=true)
+ protected Integer addrType;
@ApiModelProperty(value="服务端ip地址", required=true)
protected String serverIp;
@ApiModelProperty(value="客户端ip地址", required=true)
@@ -62,29 +60,25 @@ public abstract class LogEntity implements Serializable {
protected Integer serverPort;
@ApiModelProperty(value="客户端端口", required=true)
protected Integer clientPort;
- @ApiModelProperty(value="嵌套协议类型", required=true)
- protected String nestProtocol;
- @ApiModelProperty(value="嵌套服务端ip地址", required=true)
- protected String nestServerIp;
- @ApiModelProperty(value="嵌套客户端ip地址", required=true)
- protected String nestClientIp;
- @ApiModelProperty(value="嵌套服务端端口", required=true)
- protected Integer nestServerPort;
- @ApiModelProperty(value="嵌套客户端端口", required=true)
- protected Integer nestClientPort;
- @ApiModelProperty(value="业务类型", required=true)
+ @ApiModelProperty(value="业务类型", required=true)
protected Integer serviceType;
+ @ApiModelProperty(value="串联设备编号", required=true)
+ protected Integer deviceId;
+ @ApiModelProperty(value="传输方向", required=true, notes = "0:域内->域外,1:域外->域内,描述的是CLIENT_IP信息")
+ protected Integer direction;
+	@ApiModelProperty(value="流类型", required=true, notes = "0:c2s,1:s2c,2:double")
+ protected Integer streamType;
@ApiModelProperty(value="出入口编号", required=true)
protected Long entranceId;
@ApiModelProperty(value="处理机IP", required=true)
protected String cljIp;
- @ApiModelProperty(value="封堵包记录文件", required=true)
- protected String injectedPktFile;
-
- @ApiModelProperty(value="存放现场日志文件的URL地址", required=true)
- protected String sceneFile;
- @ApiModelProperty(value="管控动作", required=true)
- protected Integer action;
+
+ @ApiModelProperty(value="用户嵌套地址列表", required=true)
+ protected String nestAddrList;
+
+ @ApiModelProperty(value="用户自定义域", required=true)
+ private String userRegion;
+
@ApiModelProperty(value="服务端地址定位信息", required=true)
protected String serverLocate;
@ApiModelProperty(value="客户端地址定位信息", required=true)
@@ -94,21 +88,21 @@ public abstract class LogEntity implements Serializable {
protected Long foundTimeCluster;
protected Long recvTimeCluster;
-
-
+
+ protected String searchCfgId;
protected String searchFoundStartTime;
protected String searchFoundEndTime;
protected Long searchFoundStartTimeCluster;
protected Long searchFoundEndTimeCluster;
- protected String searchCfgId;
protected String searchProtocol;
- protected String searchServiceType;
protected String searchServerIp;
protected String searchClientIp;
+ protected Integer searchDirection;
+ protected String searchServiceType;
protected String searchEntranceId;
protected String searchCljIp;
- protected String tableName;//神通数据库根据A/B版,动态切换表名
+
@JsonIgnore
public Long getFoundTimeCluster() {
@@ -139,19 +133,7 @@ public abstract class LogEntity implements Serializable {
public void setSearchFoundEndTimeCluster(Long searchFoundEndTimeCluster) {
this.searchFoundEndTimeCluster = searchFoundEndTimeCluster;
}
- @JsonIgnore
- public String getTableName() {
- return tableName;
- }
- public void setTableName(String tableName) {
- this.tableName = tableName;
- }
- public String getOverId() {
- return overId;
- }
- public void setOverId(String overId) {
- this.overId = overId;
- }
+
/**
* 当前实体分页对象
*/
@@ -165,19 +147,12 @@ public abstract class LogEntity implements Serializable {
/**
* @Title:
* @Description: TODO
- * @param 入参
+ * @param
*/
public LogEntity() {
super();
}
-
- public String getInjectedPktFile() {
- return injectedPktFile;
- }
-
- public void setInjectedPktFile(String injectedPktFile) {
- this.injectedPktFile = injectedPktFile;
- }
+
/**
@@ -268,26 +243,6 @@ public abstract class LogEntity implements Serializable {
- /**
- * @return overId
- */
- /*public Long getOverId() {
- return overId;
- }
-
-
-
-
- *//**
- * @param overId 要设置的 overId
- *//*
- public void setOverId(Long overId) {
- this.overId = overId;
- }*/
-
-
-
-
/**
* @return protocol
*/
@@ -388,105 +343,6 @@ public abstract class LogEntity implements Serializable {
- /**
- * @return nestProtocol
- */
- public String getNestProtocol() {
- return nestProtocol;
- }
-
-
-
-
- /**
- * @param nestProtocol 要设置的 nestProtocol
- */
- public void setNestProtocol(String nestProtocol) {
- this.nestProtocol = nestProtocol;
- }
-
-
-
-
- /**
- * @return nestServerIp
- */
- public String getNestServerIp() {
- return nestServerIp;
- }
-
-
-
-
- /**
- * @param nestServerIp 要设置的 nestServerIp
- */
- public void setNestServerIp(String nestServerIp) {
- this.nestServerIp = nestServerIp;
- }
-
-
-
-
- /**
- * @return nestClientIp
- */
- public String getNestClientIp() {
- return nestClientIp;
- }
-
-
-
-
- /**
- * @param nestClientIp 要设置的 nestClientIp
- */
- public void setNestClientIp(String nestClientIp) {
- this.nestClientIp = nestClientIp;
- }
-
-
-
-
- /**
- * @return nestServerPort
- */
- public Integer getNestServerPort() {
- return nestServerPort;
- }
-
-
-
-
- /**
- * @param nestServerPort 要设置的 nestServerPort
- */
- public void setNestServerPort(Integer nestServerPort) {
- this.nestServerPort = nestServerPort;
- }
-
-
-
-
- /**
- * @return nestClientPort
- */
- public Integer getNestClientPort() {
- return nestClientPort;
- }
-
-
-
-
- /**
- * @param nestClientPort 要设置的 nestClientPort
- */
- public void setNestClientPort(Integer nestClientPort) {
- this.nestClientPort = nestClientPort;
- }
-
-
-
/**
* @return serviceType
@@ -742,8 +598,6 @@ public abstract class LogEntity implements Serializable {
}
-
-
/**
* @param searchClientIp 要设置的 searchClientIp
*/
@@ -763,8 +617,6 @@ public abstract class LogEntity implements Serializable {
}
-
-
/**
* @param searchEntranceId 要设置的 searchEntranceId
*/
@@ -795,8 +647,6 @@ public abstract class LogEntity implements Serializable {
}
-
-
/**
* @return searchServiceType
*/
@@ -805,9 +655,6 @@ public abstract class LogEntity implements Serializable {
return searchServiceType;
}
-
-
-
/**
* @param searchServiceType 要设置的 searchServiceType
*/
@@ -815,25 +662,64 @@ public abstract class LogEntity implements Serializable {
this.searchServiceType = searchServiceType;
}
-
+ public Integer getAddrType() {
+ return addrType;
+ }
- public String getSceneFile() {
- return sceneFile;
- }
- public void setSceneFile(String sceneFile) {
- this.sceneFile = sceneFile;
- }
-
- public Integer getAction() {
- return action;
- }
- public void setAction(Integer action) {
- this.action = action;
- }
-
-
+ public void setAddrType(Integer addrType) {
+ this.addrType = addrType;
+ }
- @Override
+ public Integer getDeviceId() {
+ return deviceId;
+ }
+
+ public void setDeviceId(Integer deviceId) {
+ this.deviceId = deviceId;
+ }
+
+ public Integer getDirection() {
+ return direction;
+ }
+
+ public void setDirection(Integer direction) {
+ this.direction = direction;
+ }
+
+ public Integer getStreamType() {
+ return streamType;
+ }
+
+ public void setStreamType(Integer streamType) {
+ this.streamType = streamType;
+ }
+
+ public String getNestAddrList() {
+ return nestAddrList;
+ }
+
+ public void setNestAddrList(String nestAddrList) {
+ this.nestAddrList = nestAddrList;
+ }
+
+ @JsonIgnore
+ public Integer getSearchDirection() {
+ return searchDirection;
+ }
+
+ public void setSearchDirection(Integer searchDirection) {
+ this.searchDirection = searchDirection;
+ }
+
+ public String getUserRegion() {
+ return userRegion;
+ }
+
+ public void setUserRegion(String userRegion) {
+ this.userRegion = userRegion;
+ }
+
+ @Override
public String toString() {
return ReflectionToStringBuilder.toString(this);
}
diff --git a/src/main/java/com/nis/domain/restful/DfHttpKeywordLog.java b/src/main/java/com/nis/domain/restful/DfHttpKeywordLog.java
deleted file mode 100644
index 06f9cd0..0000000
--- a/src/main/java/com/nis/domain/restful/DfHttpKeywordLog.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/**
-* @Title: DfHttpKeywordLog.java
-* @Package com.nis.domain.restful
-* @Description: TODO(用一句话描述该文件做什么)
-* @author (ddm)
-* @date 2016年9月7日 上午10:16:30
-* @version V1.0
-*/
-package com.nis.domain.restful;
-
-import com.fasterxml.jackson.annotation.JsonIgnore;
-import com.nis.domain.LogEntity;
-import com.wordnik.swagger.annotations.ApiModelProperty;
-
-/**
-* @ClassName: DfHttpKeywordLog
-* @Description: HTTP关键字XX日志
-* @author (ddm)
-* @date 2016年9月7日 上午10:16:30
-* @version V1.0
-*/
-public class DfHttpKeywordLog extends LogEntity{
-
- /**
- * @Fields serialVersionUID : TODO(用一句话描述这个变量表示什么)
- */
- private static final long serialVersionUID = -44600714074823563L;
- @ApiModelProperty(value="TCP初始序列号", required=true)
- protected Long c2sIsn;
- @ApiModelProperty(value="是否HTTP代理标志", required=true)
- protected Long httpProxyFlag;
- @ApiModelProperty(value="HTTP会话序列号", required=true)
- protected Long httpSeq;
- @ApiModelProperty(value="url地址", required=true)
- protected String url;
- @ApiModelProperty(value="请求行", required=true)
- protected String reqLine;
- @ApiModelProperty(value="请求头转储文件", required=true)
- protected String reqHdrFile;
- @ApiModelProperty(value="请求体转储文件", required=true)
- protected String reqBodyFile;
- @ApiModelProperty(value="应答行", required=true)
- protected String resLine;
- @ApiModelProperty(value="应答头转储文件", required=true)
- protected String resHdrFile;
- @ApiModelProperty(value="应答体体转储文件", required=true)
- protected String resBodyFile;
-
-
- protected String searchUrl;
- @JsonIgnore
- public String getSearchUrl() {
- return searchUrl;
- }
-
- public void setSearchUrl(String searchUrl) {
- this.searchUrl = searchUrl;
- }
-
- public Long getC2sIsn() {
- return c2sIsn;
- }
- public void setC2sIsn(Long c2sIsn) {
- this.c2sIsn = c2sIsn;
- }
- public Long getHttpProxyFlag() {
- return httpProxyFlag;
- }
- public void setHttpProxyFlag(Long httpProxyFlag) {
- this.httpProxyFlag = httpProxyFlag;
- }
- public Long getHttpSeq() {
- return httpSeq;
- }
- public void setHttpSeq(Long httpSeq) {
- this.httpSeq = httpSeq;
- }
- public String getUrl() {
- return url;
- }
- public void setUrl(String url) {
- this.url = url;
- }
- public String getReqLine() {
- return reqLine;
- }
- public void setReqLine(String reqLine) {
- this.reqLine = reqLine;
- }
- public String getReqHdrFile() {
- return reqHdrFile;
- }
- public void setReqHdrFile(String reqHdrFile) {
- this.reqHdrFile = reqHdrFile;
- }
- public String getReqBodyFile() {
- return reqBodyFile;
- }
- public void setReqBodyFile(String reqBodyFile) {
- this.reqBodyFile = reqBodyFile;
- }
- public String getResLine() {
- return resLine;
- }
- public void setResLine(String resLine) {
- this.resLine = resLine;
- }
- public String getResHdrFile() {
- return resHdrFile;
- }
- public void setResHdrFile(String resHdrFile) {
- this.resHdrFile = resHdrFile;
- }
- public String getResBodyFile() {
- return resBodyFile;
- }
- public void setResBodyFile(String resBodyFile) {
- this.resBodyFile = resBodyFile;
- }
-
-}
diff --git a/src/main/java/com/nis/domain/restful/DfHttpReqLog.java b/src/main/java/com/nis/domain/restful/DfHttpReqLog.java
deleted file mode 100644
index 5df16c0..0000000
--- a/src/main/java/com/nis/domain/restful/DfHttpReqLog.java
+++ /dev/null
@@ -1,146 +0,0 @@
-/**
-* @Title: DfHttpReqLog.java
-* @Package com.nis.domain.restful
-* @Description: TODO(用一句话描述该文件做什么)
-* @author (ddm)
-* @date 2016年9月5日 下午10:18:32
-* @version V1.0
-*/
-package com.nis.domain.restful;
-
-import com.fasterxml.jackson.annotation.JsonIgnore;
-import com.nis.domain.LogEntity;
-import com.wordnik.swagger.annotations.ApiModelProperty;
-
-/**
-* @ClassName: DfHttpReqLog
-* @Description: 协议请求XX日志
-* @author (ddm)
-* @date 2016年9月5日 下午1:58:33
-* @version V1.0
-*/
-public class DfHttpReqLog extends LogEntity{
-
- /**
- * @Fields serialVersionUID : TODO(用一句话描述这个变量表示什么)
- */
- private static final long serialVersionUID = 1040386874238362829L;
- @ApiModelProperty(value="TCP初始序列号", required=true)
- protected Long c2sIsn;
- @ApiModelProperty(value="是否HTTP代理标志", required=true)
- protected Long httpProxyFlag;
- @ApiModelProperty(value="HTTP会话序列号", required=true)
- protected Long httpSeq;
- @ApiModelProperty(value="url地址", required=true)
- protected String url;
- @ApiModelProperty(value="请求行", required=true)
- protected String reqLine;
- @ApiModelProperty(value="请求头转储文件", required=true)
- protected String reqHdrFile;
- @ApiModelProperty(value="请求体转储文件", required=true)
- protected String reqBodyFile;
- @ApiModelProperty(value="Cookie值", required=true)
- protected String cookie;
- @ApiModelProperty(value="referer值", required=true)
- protected String referer;
- @ApiModelProperty(value="UA值", required=true)
- protected String ua;
- @ApiModelProperty(value="请求头用户自定义域名称", required=true)
- protected String reqUserDefineKey;
- @ApiModelProperty(value="请求头用户自定义域值", required=true)
- protected String reqUserDefineValue;
-
- protected String searchUrl;
- @JsonIgnore
- public String getSearchUrl() {
- return searchUrl;
- }
-
- public void setSearchUrl(String searchUrl) {
- this.searchUrl = searchUrl;
- }
-
- public Long getC2sIsn() {
- return c2sIsn;
- }
- public void setC2sIsn(Long c2sIsn) {
- this.c2sIsn = c2sIsn;
- }
- public Long getHttpProxyFlag() {
- return httpProxyFlag;
- }
- public void setHttpProxyFlag(Long httpProxyFlag) {
- this.httpProxyFlag = httpProxyFlag;
- }
- public Long getHttpSeq() {
- return httpSeq;
- }
- public void setHttpSeq(Long httpSeq) {
- this.httpSeq = httpSeq;
- }
- public String getUrl() {
- return url;
- }
- public void setUrl(String url) {
- this.url = url;
- }
- public String getReqLine() {
- return reqLine;
- }
- public void setReqLine(String reqLine) {
- this.reqLine = reqLine;
- }
- public String getReqHdrFile() {
- return reqHdrFile;
- }
- public void setReqHdrFile(String reqHdrFile) {
- this.reqHdrFile = reqHdrFile;
- }
- public String getReqBodyFile() {
- return reqBodyFile;
- }
- public void setReqBodyFile(String reqBodyFile) {
- this.reqBodyFile = reqBodyFile;
- }
-
- public String getCookie() {
- return cookie;
- }
-
- public void setCookie(String cookie) {
- this.cookie = cookie;
- }
-
- public String getReferer() {
- return referer;
- }
-
- public void setReferer(String referer) {
- this.referer = referer;
- }
-
- public String getUa() {
- return ua;
- }
-
- public void setUa(String ua) {
- this.ua = ua;
- }
-
- public String getReqUserDefineKey() {
- return reqUserDefineKey;
- }
-
- public void setReqUserDefineKey(String reqUserDefineKey) {
- this.reqUserDefineKey = reqUserDefineKey;
- }
-
- public String getReqUserDefineValue() {
- return reqUserDefineValue;
- }
-
- public void setReqUserDefineValue(String reqUserDefineValue) {
- this.reqUserDefineValue = reqUserDefineValue;
- }
-
-}
diff --git a/src/main/java/com/nis/domain/restful/DfHttpResLog.java b/src/main/java/com/nis/domain/restful/DfHttpResLog.java
deleted file mode 100644
index ec4c116..0000000
--- a/src/main/java/com/nis/domain/restful/DfHttpResLog.java
+++ /dev/null
@@ -1,172 +0,0 @@
-/**
-* @Title: DfHttpResLog.java
-* @Package com.nis.domain.restful
-* @Description: TODO(用一句话描述该文件做什么)
-* @author (ddm)
-* @date 2016年9月7日 上午9:59:30
-* @version V1.0
-*/
-package com.nis.domain.restful;
-
-import com.fasterxml.jackson.annotation.JsonIgnore;
-import com.nis.domain.LogEntity;
-import com.wordnik.swagger.annotations.ApiModelProperty;
-
-/**
-* @ClassName: DfHttpResLog
-* @Description: 协议响应XX日志
-* @author (ddm)
-* @date 2016年9月7日 上午9:59:30
-* @version V1.0
-*/
-public class DfHttpResLog extends LogEntity{
-
- /**
- * @Fields serialVersionUID : TODO(用一句话描述这个变量表示什么)
- */
- private static final long serialVersionUID = -6230615037776378250L;
- @ApiModelProperty(value="TCP初始序列号", required=true)
- protected Long c2sIsn;
- @ApiModelProperty(value="是否HTTP代理标志", required=true)
- protected Long httpProxyFlag;
- @ApiModelProperty(value="HTTP会话序列号", required=true)
- protected Long httpSeq;
- @ApiModelProperty(value="url地址", required=true)
- protected String url;
- @ApiModelProperty(value="请求行", required=true)
- protected String reqLine;
- @ApiModelProperty(value="请求头转储文件", required=true)
- protected String reqHdrFile;
- @ApiModelProperty(value="请求体转储文件", required=true)
- protected String reqBodyFile;
- @ApiModelProperty(value="应答行", required=true)
- protected String resLine;
- @ApiModelProperty(value="应答头转储文件", required=true)
- protected String resHdrFile;
- @ApiModelProperty(value="应答体体转储文件", required=true)
- protected String resBodyFile;
-
- @ApiModelProperty(value="SET-Cookie", required=true)
- protected String setCookie;
- @ApiModelProperty(value="CONTENT-LEN值", required=true)
- protected String contentLen;
- @ApiModelProperty(value="CONTENT-TYPE值", required=true)
- protected String contentType;
- @ApiModelProperty(value="请求头用户自定义域名称", required=true)
- protected String resUserDefineKey;
- @ApiModelProperty(value="请求头用户自定义域值", required=true)
- protected String resUserDefineValue;
-
- protected String searchUrl;
-
- @JsonIgnore
- public String getSearchUrl() {
- return searchUrl;
- }
-
- public void setSearchUrl(String searchUrl) {
- this.searchUrl = searchUrl;
- }
- public Long getC2sIsn() {
- return c2sIsn;
- }
- public void setC2sIsn(Long c2sIsn) {
- this.c2sIsn = c2sIsn;
- }
- public Long getHttpProxyFlag() {
- return httpProxyFlag;
- }
- public void setHttpProxyFlag(Long httpProxyFlag) {
- this.httpProxyFlag = httpProxyFlag;
- }
- public Long getHttpSeq() {
- return httpSeq;
- }
- public void setHttpSeq(Long httpSeq) {
- this.httpSeq = httpSeq;
- }
- public String getUrl() {
- return url;
- }
- public void setUrl(String url) {
- this.url = url;
- }
- public String getReqLine() {
- return reqLine;
- }
- public void setReqLine(String reqLine) {
- this.reqLine = reqLine;
- }
- public String getReqHdrFile() {
- return reqHdrFile;
- }
- public void setReqHdrFile(String reqHdrFile) {
- this.reqHdrFile = reqHdrFile;
- }
- public String getReqBodyFile() {
- return reqBodyFile;
- }
- public void setReqBodyFile(String reqBodyFile) {
- this.reqBodyFile = reqBodyFile;
- }
- public String getResLine() {
- return resLine;
- }
- public void setResLine(String resLine) {
- this.resLine = resLine;
- }
- public String getResHdrFile() {
- return resHdrFile;
- }
- public void setResHdrFile(String resHdrFile) {
- this.resHdrFile = resHdrFile;
- }
- public String getResBodyFile() {
- return resBodyFile;
- }
- public void setResBodyFile(String resBodyFile) {
- this.resBodyFile = resBodyFile;
- }
-
- public String getSetCookie() {
- return setCookie;
- }
-
- public void setSetCookie(String setCookie) {
- this.setCookie = setCookie;
- }
-
- public String getContentLen() {
- return contentLen;
- }
-
- public void setContentLen(String contentLen) {
- this.contentLen = contentLen;
- }
-
- public String getContentType() {
- return contentType;
- }
-
- public void setContentType(String contentType) {
- this.contentType = contentType;
- }
-
- public String getResUserDefineKey() {
- return resUserDefineKey;
- }
-
- public void setResUserDefineKey(String resUserDefineKey) {
- this.resUserDefineKey = resUserDefineKey;
- }
-
- public String getResUserDefineValue() {
- return resUserDefineValue;
- }
-
- public void setResUserDefineValue(String resUserDefineValue) {
- this.resUserDefineValue = resUserDefineValue;
- }
-
-
-}
diff --git a/src/main/java/com/nis/domain/restful/DfIpPortLog.java b/src/main/java/com/nis/domain/restful/DfIpPortLog.java
deleted file mode 100644
index f863835..0000000
--- a/src/main/java/com/nis/domain/restful/DfIpPortLog.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/**
-* @Title: DfIpPortLog.java
-* @Package com.nis.domain.restful
-* @Description: TODO(用一句话描述该文件做什么)
-* @author (darnell)
-* @date 2016年8月31日 下午9:58:33
-* @version V1.0
-*/
-package com.nis.domain.restful;
-
-import com.nis.domain.LogEntity;
-
-/**
-* @ClassName: DfIpPortLog
-* @Description: 端口XX日志实体
-* @author (darnell)
-* @date 2016年8月31日 下午9:58:33
-* @version V1.0
-*/
-public class DfIpPortLog extends LogEntity{
- /**
- * @Fields serialVersionUID : TODO(用一句话描述这个变量表示什么)
- */
- private static final long serialVersionUID = 340154456985429911L;
-
-
-}
diff --git a/src/main/java/com/nis/domain/restful/NtcDnsLog.java b/src/main/java/com/nis/domain/restful/NtcDnsLog.java
new file mode 100644
index 0000000..5ffdc46
--- /dev/null
+++ b/src/main/java/com/nis/domain/restful/NtcDnsLog.java
@@ -0,0 +1,124 @@
+package com.nis.domain.restful;
+
+import com.nis.domain.LogEntity;
+import com.wordnik.swagger.annotations.ApiModelProperty;
+
+
+/**
+ * Created by darnell on 2018/6/11.
+ */
+public class NtcDnsLog extends LogEntity {
+
+ private static final long serialVersionUID = -6980951963455847281L;
+
+ @ApiModelProperty(value="递归请求", required=true)
+ protected Integer rd;
+
+ @ApiModelProperty(value="请求/应答", required=true)
+ protected Integer qr;
+
+ @ApiModelProperty(value="递归应答", required=true)
+ protected Integer ra;
+
+ @ApiModelProperty(value="资源记录", required=true)
+ protected String rr;
+
+ @ApiModelProperty(value="查询类型", required=true)
+ protected Integer qType;
+
+ @ApiModelProperty(value="查询类", required=true)
+ protected Integer qclass;
+
+ @ApiModelProperty(value="OPCODE", required=true)
+ protected Integer opcode;
+
+ @ApiModelProperty(value="查询内容", required=true)
+ protected String qname;
+
+
+ @ApiModelProperty(value="别名", required=true)
+ protected String cname;
+
+ @ApiModelProperty(value="DNS_SUB", required=true)
+ protected Integer dnsSub ;
+
+ public Integer getRd() {
+ return rd;
+ }
+
+ public void setRd(Integer rd) {
+ this.rd = rd;
+ }
+
+ public Integer getQr() {
+ return qr;
+ }
+
+ public void setQr(Integer qr) {
+ this.qr = qr;
+ }
+
+ public Integer getRa() {
+ return ra;
+ }
+
+ public void setRa(Integer ra) {
+ this.ra = ra;
+ }
+
+ public String getRr() {
+ return rr;
+ }
+
+ public void setRr(String rr) {
+ this.rr = rr;
+ }
+
+ public Integer getqType() {
+ return qType;
+ }
+
+ public void setqType(Integer qType) {
+ this.qType = qType;
+ }
+
+ public Integer getQclass() {
+ return qclass;
+ }
+
+ public void setQclass(Integer qclass) {
+ this.qclass = qclass;
+ }
+
+ public Integer getOpcode() {
+ return opcode;
+ }
+
+ public void setOpcode(Integer opcode) {
+ this.opcode = opcode;
+ }
+
+ public String getQname() {
+ return qname;
+ }
+
+ public void setQname(String qname) {
+ this.qname = qname;
+ }
+
+ public String getCname() {
+ return cname;
+ }
+
+ public void setCname(String cname) {
+ this.cname = cname;
+ }
+
+ public Integer getDnsSub() {
+ return dnsSub;
+ }
+
+ public void setDnsSub(Integer dnsSub) {
+ this.dnsSub = dnsSub;
+ }
+}
diff --git a/src/main/java/com/nis/domain/restful/NtcFtpLog.java b/src/main/java/com/nis/domain/restful/NtcFtpLog.java
new file mode 100644
index 0000000..00d9ae9
--- /dev/null
+++ b/src/main/java/com/nis/domain/restful/NtcFtpLog.java
@@ -0,0 +1,36 @@
+package com.nis.domain.restful;
+
+import com.nis.domain.LogEntity;
+import com.wordnik.swagger.annotations.ApiModelProperty;
+
+/**
+ * Created by darnell on 2018/6/11.
+ */
+public class NtcFtpLog extends LogEntity {
+
+ private static final long serialVersionUID = -4856757288296929083L;
+
+ @ApiModelProperty(value = "FTP链接", required = true)
+ protected String ftpUrl;
+
+
+ @ApiModelProperty(value = "FTP内容", required = true)
+ protected String ftpContent;
+
+
+ public String getFtpUrl() {
+ return ftpUrl;
+ }
+
+ public void setFtpUrl(String ftpUrl) {
+ this.ftpUrl = ftpUrl;
+ }
+
+ public String getFtpContent() {
+ return ftpContent;
+ }
+
+ public void setFtpContent(String ftpContent) {
+ this.ftpContent = ftpContent;
+ }
+}
diff --git a/src/main/java/com/nis/domain/restful/NtcHttpLog.java b/src/main/java/com/nis/domain/restful/NtcHttpLog.java
new file mode 100644
index 0000000..fe2309f
--- /dev/null
+++ b/src/main/java/com/nis/domain/restful/NtcHttpLog.java
@@ -0,0 +1,79 @@
+package com.nis.domain.restful;
+
+import com.nis.domain.LogEntity;
+import com.wordnik.swagger.annotations.ApiModelProperty;
+
+/**
+ * Created by darnell on 2018/6/11.
+ */
+public class NtcHttpLog extends LogEntity {
+
+ private static final long serialVersionUID = 2553033624540656138L;
+
+ @ApiModelProperty(value="TCP初始序列号", required=true)
+ protected Long c2sIsn;
+
+ @ApiModelProperty(value="是否HTTP代理标志", required=true)
+ protected Long httpProxyFlag;
+
+ @ApiModelProperty(value="HTTP会话序列号", required=true)
+ protected Long httpSeq;
+
+ @ApiModelProperty(value="url地址", required=true)
+ protected String url;
+
+ @ApiModelProperty(value="请求行", required=true)
+ protected String reqLine;
+
+ @ApiModelProperty(value="应答行", required=true)
+ protected String resLine;
+
+
+ public Long getC2sIsn() {
+ return c2sIsn;
+ }
+
+ public void setC2sIsn(Long c2sIsn) {
+ this.c2sIsn = c2sIsn;
+ }
+
+ public Long getHttpProxyFlag() {
+ return httpProxyFlag;
+ }
+
+ public void setHttpProxyFlag(Long httpProxyFlag) {
+ this.httpProxyFlag = httpProxyFlag;
+ }
+
+ public Long getHttpSeq() {
+ return httpSeq;
+ }
+
+ public void setHttpSeq(Long httpSeq) {
+ this.httpSeq = httpSeq;
+ }
+
+ public String getUrl() {
+ return url;
+ }
+
+ public void setUrl(String url) {
+ this.url = url;
+ }
+
+ public String getReqLine() {
+ return reqLine;
+ }
+
+ public void setReqLine(String reqLine) {
+ this.reqLine = reqLine;
+ }
+
+ public String getResLine() {
+ return resLine;
+ }
+
+ public void setResLine(String resLine) {
+ this.resLine = resLine;
+ }
+}
diff --git a/src/main/java/com/nis/domain/restful/NtcIpLog.java b/src/main/java/com/nis/domain/restful/NtcIpLog.java
new file mode 100644
index 0000000..5659709
--- /dev/null
+++ b/src/main/java/com/nis/domain/restful/NtcIpLog.java
@@ -0,0 +1,12 @@
+package com.nis.domain.restful;
+
+import com.nis.domain.LogEntity;
+
+/**
+ * Created by darnell on 2018/6/10.
+ */
+public class NtcIpLog extends LogEntity {
+
+
+ private static final long serialVersionUID = 1325588394265592830L;
+}
diff --git a/src/main/java/com/nis/domain/restful/NtcIpsecLog.java b/src/main/java/com/nis/domain/restful/NtcIpsecLog.java
new file mode 100644
index 0000000..51860df
--- /dev/null
+++ b/src/main/java/com/nis/domain/restful/NtcIpsecLog.java
@@ -0,0 +1,34 @@
+package com.nis.domain.restful;
+
+import com.nis.domain.LogEntity;
+import com.wordnik.swagger.annotations.ApiModelProperty;
+
+/**
+ * Created by darnell on 2018/6/11.
+ */
+public class NtcIpsecLog extends LogEntity {
+
+ private static final long serialVersionUID = 7413267107457939517L;
+
+
+ @ApiModelProperty(value="交换协议", required=true)
+ protected Integer exProtocol;
+ @ApiModelProperty(value="ISAKMP模式", required=true)
+ protected Integer isakmpMode;
+
+ public Integer getExProtocol() {
+ return exProtocol;
+ }
+
+ public void setExProtocol(Integer exProtocol) {
+ this.exProtocol = exProtocol;
+ }
+
+ public Integer getIsakmpMode() {
+ return isakmpMode;
+ }
+
+ public void setIsakmpMode(Integer isakmpMode) {
+ this.isakmpMode = isakmpMode;
+ }
+}
diff --git a/src/main/java/com/nis/domain/restful/NtcL2tpLog.java b/src/main/java/com/nis/domain/restful/NtcL2tpLog.java
new file mode 100644
index 0000000..95135c0
--- /dev/null
+++ b/src/main/java/com/nis/domain/restful/NtcL2tpLog.java
@@ -0,0 +1,44 @@
+package com.nis.domain.restful;
+
+import com.nis.domain.LogEntity;
+import com.wordnik.swagger.annotations.ApiModelProperty;
+
+/**
+ * Created by darnell on 2018/6/11.
+ */
+public class NtcL2tpLog extends LogEntity {
+
+ private static final long serialVersionUID = -3793435902270137597L;
+
+
+ @ApiModelProperty(value="通道类型", required=true)
+ protected Integer tunnelType;
+ @ApiModelProperty(value="加密方式", required=true)
+ protected Integer encryptMode;
+ @ApiModelProperty(value="用户名称", required=true)
+ protected String chapName;
+
+ public Integer getTunnelType() {
+ return tunnelType;
+ }
+
+ public void setTunnelType(Integer tunnelType) {
+ this.tunnelType = tunnelType;
+ }
+
+ public Integer getEncryptMode() {
+ return encryptMode;
+ }
+
+ public void setEncryptMode(Integer encryptMode) {
+ this.encryptMode = encryptMode;
+ }
+
+ public String getChapName() {
+ return chapName;
+ }
+
+ public void setChapName(String chapName) {
+ this.chapName = chapName;
+ }
+}
diff --git a/src/main/java/com/nis/domain/restful/NtcMailLog.java b/src/main/java/com/nis/domain/restful/NtcMailLog.java
new file mode 100644
index 0000000..fcd0c39
--- /dev/null
+++ b/src/main/java/com/nis/domain/restful/NtcMailLog.java
@@ -0,0 +1,63 @@
+package com.nis.domain.restful;
+
+import com.nis.domain.LogEntity;
+import com.wordnik.swagger.annotations.ApiModelProperty;
+
+/**
+ * Created by darnell on 2018/6/11.
+ */
+public class NtcMailLog extends LogEntity{
+
+ private static final long serialVersionUID = 2541894088824218908L;
+
+ @ApiModelProperty(value="邮件协议类型", required=true)
+ protected String mailProto;
+ @ApiModelProperty(value="发件人", required=true)
+ protected String mailFrom;
+ @ApiModelProperty(value="收件人", required=true)
+ protected String mailTo;
+ @ApiModelProperty(value="主题", required=true)
+ protected String subject;
+ @ApiModelProperty(value="EML文件转储路径", required=true)
+ protected String emlFile;
+
+ public String getMailProto() {
+ return mailProto;
+ }
+
+ public void setMailProto(String mailProto) {
+ this.mailProto = mailProto;
+ }
+
+ public String getMailFrom() {
+ return mailFrom;
+ }
+
+ public void setMailFrom(String mailFrom) {
+ this.mailFrom = mailFrom;
+ }
+
+ public String getMailTo() {
+ return mailTo;
+ }
+
+ public void setMailTo(String mailTo) {
+ this.mailTo = mailTo;
+ }
+
+ public String getSubject() {
+ return subject;
+ }
+
+ public void setSubject(String subject) {
+ this.subject = subject;
+ }
+
+ public String getEmlFile() {
+ return emlFile;
+ }
+
+ public void setEmlFile(String emlFile) {
+ this.emlFile = emlFile;
+ }
+}
diff --git a/src/main/java/com/nis/domain/restful/NtcOpenvpnLog.java b/src/main/java/com/nis/domain/restful/NtcOpenvpnLog.java
new file mode 100644
index 0000000..13f8c4b
--- /dev/null
+++ b/src/main/java/com/nis/domain/restful/NtcOpenvpnLog.java
@@ -0,0 +1,54 @@
+package com.nis.domain.restful;
+
+import com.nis.domain.LogEntity;
+import com.wordnik.swagger.annotations.ApiModelProperty;
+
+/**
+ * Created by darnell on 2018/6/11.
+ */
+public class NtcOpenvpnLog extends LogEntity {
+
+ private static final long serialVersionUID = 1114102412562665190L;
+
+
+ @ApiModelProperty(value="版本信息", required=true)
+ protected String version;
+ @ApiModelProperty(value="加密方式", required=true)
+ protected String encryptMode;
+ @ApiModelProperty(value="是否有HMAC", required=true)
+ protected Integer hmac;
+ @ApiModelProperty(value="通道类型", required=true)
+ protected Integer tunnelType;
+
+ public String getVersion() {
+ return version;
+ }
+
+ public void setVersion(String version) {
+ this.version = version;
+ }
+
+ public String getEncryptMode() {
+ return encryptMode;
+ }
+
+ public void setEncryptMode(String encryptMode) {
+ this.encryptMode = encryptMode;
+ }
+
+ public Integer getHmac() {
+ return hmac;
+ }
+
+ public void setHmac(Integer hmac) {
+ this.hmac = hmac;
+ }
+
+ public Integer getTunnelType() {
+ return tunnelType;
+ }
+
+ public void setTunnelType(Integer tunnelType) {
+ this.tunnelType = tunnelType;
+ }
+}
diff --git a/src/main/java/com/nis/domain/restful/NtcPptpLog.java b/src/main/java/com/nis/domain/restful/NtcPptpLog.java
new file mode 100644
index 0000000..4169b6f
--- /dev/null
+++ b/src/main/java/com/nis/domain/restful/NtcPptpLog.java
@@ -0,0 +1,39 @@
+package com.nis.domain.restful;
+
+import com.nis.domain.LogEntity;
+import com.wordnik.swagger.annotations.ApiModelProperty;
+
+/**
+ * PPTP traffic log entry: records the detected tunnel type and encryption mode. Created by darnell on 2018/6/11.
+ */
+public class NtcPptpLog extends LogEntity {
+
+    private static final long serialVersionUID = -1558260429830428665L;
+
+
+    @ApiModelProperty(value="通道类型", required=true)
+    protected Integer tunnelType;
+    @ApiModelProperty(value="加密方式", required=true)
+    protected Integer encryptMode;
+
+    public Integer getTunnelType() {
+        return tunnelType;
+    }
+
+    public void setTunnelType(Integer tunnelType) {
+        this.tunnelType = tunnelType;
+    }
+
+    public Integer getEncryptMode() {
+        return encryptMode;
+    }
+
+    public void setEncryptMode(Integer encryptMode) {
+        this.encryptMode = encryptMode;
+    }
+
+
+
+
+
+}
diff --git a/src/main/java/com/nis/domain/restful/NtcSshLog.java b/src/main/java/com/nis/domain/restful/NtcSshLog.java
new file mode 100644
index 0000000..e05ac42
--- /dev/null
+++ b/src/main/java/com/nis/domain/restful/NtcSshLog.java
@@ -0,0 +1,74 @@
+package com.nis.domain.restful;
+
+import com.nis.domain.LogEntity;
+import com.wordnik.swagger.annotations.ApiModelProperty;
+
+/**
+ * SSH traffic log entry: records version, host key, host cookie, cipher, MAC algorithm and tunnel type. Created by darnell on 2018/6/11.
+ */
+public class NtcSshLog extends LogEntity {
+
+    private static final long serialVersionUID = -5032732529386809500L;
+
+
+    @ApiModelProperty(value="版本信息", required=true)
+    protected String version;
+    @ApiModelProperty(value="主机密钥", required=true)
+    protected String hostKey;
+    @ApiModelProperty(value="主机cookie", required=true)
+    protected String hostCookie;
+    @ApiModelProperty(value="加密方式", required=true)
+    protected String encryptMode;
+    @ApiModelProperty(value="消息认证码", required=true)
+    protected String mac;
+    @ApiModelProperty(value="通道类型", required=true)
+    protected Integer tunnelType;
+
+    public String getVersion() {
+        return version;
+    }
+
+    public void setVersion(String version) {
+        this.version = version;
+    }
+
+    public String getHostKey() {
+        return hostKey;
+    }
+
+    public void setHostKey(String hostKey) {
+        this.hostKey = hostKey;
+    }
+
+    public String getHostCookie() {
+        return hostCookie;
+    }
+
+    public void setHostCookie(String hostCookie) {
+        this.hostCookie = hostCookie;
+    }
+
+    public String getEncryptMode() {
+        return encryptMode;
+    }
+
+    public void setEncryptMode(String encryptMode) {
+        this.encryptMode = encryptMode;
+    }
+
+    public String getMac() {
+        return mac;
+    }
+
+    public void setMac(String mac) {
+        this.mac = mac;
+    }
+
+    public Integer getTunnelType() {
+        return tunnelType;
+    }
+
+    public void setTunnelType(Integer tunnelType) {
+        this.tunnelType = tunnelType;
+    }
+}
diff --git a/src/main/java/com/nis/domain/restful/NtcSslLog.java b/src/main/java/com/nis/domain/restful/NtcSslLog.java
new file mode 100644
index 0000000..2d666fb
--- /dev/null
+++ b/src/main/java/com/nis/domain/restful/NtcSslLog.java
@@ -0,0 +1,57 @@
+package com.nis.domain.restful;
+
+import com.nis.domain.LogEntity;
+import com.wordnik.swagger.annotations.ApiModelProperty;
+
+/**
+ * SSL/TLS traffic log entry: records version, SNI, SAN and certificate authority. Created by darnell on 2018/6/11.
+ */
+public class NtcSslLog extends LogEntity {
+
+    private static final long serialVersionUID = 164434539180653065L;
+
+
+    @ApiModelProperty(value="版本信息", required=true)
+    protected String version;
+
+    @ApiModelProperty(value="Server Name Indication", required=true, notes = "主要解决一台服务器只能使用一个证书-域名的缺点,通过sni可支持一台服务器为多台域名提供服务")
+    protected String sni;
+
+    @ApiModelProperty(value="SAN-Subject Alternative Name Certificate", required=true, notes = "数字证书的一种形式,运行SSL证书绑定多个域名和子域名。另EV证书,扩展验证证书,只能绑定一个域名")
+    protected String san;
+
+    @ApiModelProperty(value="证书授权机构", required=true)
+    protected String ca;
+
+    public String getVersion() {
+        return version;
+    }
+
+    public void setVersion(String version) {
+        this.version = version;
+    }
+
+    public String getSni() {
+        return sni;
+    }
+
+    public void setSni(String sni) {
+        this.sni = sni;
+    }
+
+    public String getSan() {
+        return san;
+    }
+
+    public void setSan(String san) {
+        this.san = san;
+    }
+
+    public String getCa() {
+        return ca;
+    }
+
+    public void setCa(String ca) {
+        this.ca = ca;
+    }
+}
diff --git a/src/main/java/com/nis/restful/DefaultRestErrorResolver.java b/src/main/java/com/nis/restful/DefaultRestErrorResolver.java
index d3fda62..987cf67 100644
--- a/src/main/java/com/nis/restful/DefaultRestErrorResolver.java
+++ b/src/main/java/com/nis/restful/DefaultRestErrorResolver.java
@@ -40,14 +40,7 @@ public class DefaultRestErrorResolver implements RestErrorResolver,InitializingB
RestResult error = new RestResult();
error.setStatus(this.getHttpStatusByEx(ex));// 设置http状态
//获取日志源[只有日志需要返回日志源和ActiveSys]
- String logSource = ((RestServiceException) ex).getLogSource();
- String activeSys = ((RestServiceException) ex).getActiveSys();
- if(logSource != null ){
- error.setLogSource(logSource);
- }
- if(activeSys != null ){
- error.setActiveSys(activeSys);
- }
+ int logSource = ((RestServiceException) ex).getLogSource();
//RestServiceException 包含有错误code
if(ex instanceof RestServiceException){
@@ -71,8 +64,9 @@ public class DefaultRestErrorResolver implements RestErrorResolver,InitializingB
if(this.exceptionMappingDefinitions.containsKey(ex.getClass().getName())){
return HttpStatus.valueOf(Integer.parseInt(this.exceptionMappingDefinitions.get(ex.getClass()
.getName())));
+ } else {
+ return HttpStatus.INTERNAL_SERVER_ERROR;
}
- else return HttpStatus.INTERNAL_SERVER_ERROR;
}
diff --git a/src/main/java/com/nis/restful/RestResult.java b/src/main/java/com/nis/restful/RestResult.java
index d921f83..08444ef 100644
--- a/src/main/java/com/nis/restful/RestResult.java
+++ b/src/main/java/com/nis/restful/RestResult.java
@@ -48,7 +48,7 @@ public class RestResult {
/**
* 日志数据来源本地:0 数据中心:1
*/
- private String logSource;
+ private Integer logSource;
/**
* 追踪状态码
@@ -59,14 +59,13 @@ public class RestResult {
}
- public RestResult(HttpStatus status, RestBusinessCode businessCode, String msg,String fromUri,String activeSys,String fromSign,String traceCode) {
+ public RestResult(HttpStatus status, RestBusinessCode businessCode, String msg, String fromUri, Integer logSource,String traceCode) {
super();
this.status = status;
this.businessCode = businessCode;
this.msg = msg;
this.fromUri = fromUri;
- this.activeSys=activeSys;
- this.logSource=logSource;
+ this.logSource= logSource;
this.traceCode=traceCode;
}
@@ -137,14 +136,15 @@ public class RestResult {
this.activeSys = activeSys;
}
- public String getLogSource() {
+ public Integer getLogSource() {
return logSource;
}
- public void setLogSource(String logSource) {
+
+ public void setLogSource(Integer logSource) {
this.logSource = logSource;
}
-
- /**
+
+ /**
* @return data
*/
public Object getData() {
@@ -166,7 +166,9 @@ public class RestResult {
this.traceCode = traceCode;
}
+ @Override
public String toString() {
+
return new StringBuilder().append("HttpStatus:").append(getStatus().value())
.append(" errorcode:")
.append(this.getBusinessCode().getValue())
diff --git a/src/main/java/com/nis/restful/RestServiceException.java b/src/main/java/com/nis/restful/RestServiceException.java
index 410791a..7a2b40c 100644
--- a/src/main/java/com/nis/restful/RestServiceException.java
+++ b/src/main/java/com/nis/restful/RestServiceException.java
@@ -8,9 +8,7 @@ public class RestServiceException extends RuntimeException{
private int errorCode;
- private String logSource;
-
- private String activeSys;
+ private int logSource;
private String traceCode;
/**
@@ -33,8 +31,10 @@ public class RestServiceException extends RuntimeException{
this.traceCode = thread.getTraceCode();
thread.setConsumerTime(time);
thread.setBusinessCode(this.getErrorCode());
- if(StringUtils.isEmpty(thread.getExceptionInfo()))
- thread.setExceptionInfo(message);
+ if(StringUtils.isEmpty(thread.getExceptionInfo())) {
+ thread.setExceptionInfo(message);
+ }
+
new Thread(thread).start();
}
@@ -48,8 +48,10 @@ public class RestServiceException extends RuntimeException{
this.traceCode = thread.getTraceCode();
thread.setConsumerTime(time);
thread.setBusinessCode(this.getErrorCode());
- if(StringUtils.isEmpty(thread.getExceptionInfo()))
- thread.setExceptionInfo(message);
+ if(StringUtils.isEmpty(thread.getExceptionInfo())) {
+ thread.setExceptionInfo(message);
+ }
+
new Thread(thread).start();
}
@@ -61,19 +63,14 @@ public class RestServiceException extends RuntimeException{
public void setErrorCode(int errorCode) {
this.errorCode = errorCode;
}
-
- public String getLogSource() {
- return logSource;
- }
- public void setLogSource(String logSource) {
- this.logSource = logSource;
- }
- public String getActiveSys() {
- return activeSys;
- }
- public void setActiveSys(String activeSys) {
- this.activeSys = activeSys;
- }
+
+ public int getLogSource() {
+ return logSource;
+ }
+
+ public void setLogSource(int logSource) {
+ this.logSource = logSource;
+ }
public String getTraceCode() {
return traceCode;
diff --git a/src/main/java/com/nis/util/JsonMapper.java b/src/main/java/com/nis/util/JsonMapper.java
index faa9d59..6c7efa2 100644
--- a/src/main/java/com/nis/util/JsonMapper.java
+++ b/src/main/java/com/nis/util/JsonMapper.java
@@ -31,7 +31,7 @@ import com.fasterxml.jackson.module.jaxb.JaxbAnnotationModule;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.nis.domain.Page;
-import com.nis.domain.restful.DfIpPortLog;
+
/**
* 简单封装Jackson,实现JSON String<->Java Object的Mapper. 封装不同的输出风格,
@@ -264,15 +264,7 @@ public class JsonMapper extends ObjectMapper {
* map.put("id", 2); map.put("pId", 1); map.put("name", "你好");
* map.put("open", true); list.add(map);
*/
- List ipPort = new ArrayList();
- DfIpPortLog ip = new DfIpPortLog();
- ip.setCljIp("aa");
- ip.setId(null);
- ipPort.add(ip);
- Page page = new Page();
- page.setList(ipPort);
- String json = JsonMapper.getInstance().toJsonString(ipPort);
- System.out.println(json);
+
}
}
diff --git a/src/main/java/com/nis/util/httpclient/HttpClientUtil.java b/src/main/java/com/nis/util/httpclient/HttpClientUtil.java
index 1e45481..d3f393a 100644
--- a/src/main/java/com/nis/util/httpclient/HttpClientUtil.java
+++ b/src/main/java/com/nis/util/httpclient/HttpClientUtil.java
@@ -15,6 +15,7 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
+import com.nis.util.JsonMapper;
import org.apache.http.NameValuePair;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.entity.UrlEncodedFormEntity;
@@ -107,10 +108,9 @@ public class HttpClientUtil {
}
public static void main(String[] args) throws ClientProtocolException, IOException {
HttpClientUtil hd = new HttpClientUtil();
- hd.get("http://10.0.6.115:9200/_sql?sql=select * from dfipportlog-2016-09-07-15 limit 1 10");
- Map map = new HashMap();
- map.put("id","1");
- hd.post("http://localhost:8080/springMVC/menu/getChildren.do",map);
+ String result = hd.get("http://10.0.6.104:8080/galaxy/service/cfg/v1/configPzIdSources");
+ Object mapResult = JsonMapper.fromJsonString(result,Map.class);
+ System.out.println(result);
}
}
diff --git a/src/main/java/com/nis/web/controller/BaseController.java b/src/main/java/com/nis/web/controller/BaseController.java
index 6f62a47..25bd644 100644
--- a/src/main/java/com/nis/web/controller/BaseController.java
+++ b/src/main/java/com/nis/web/controller/BaseController.java
@@ -111,7 +111,7 @@ public class BaseController {
/**
* 添加Model消息
- * @param message
+ * @param
*/
protected void addMessage(Model model, String... messages) {
StringBuilder sb = new StringBuilder();
@@ -123,7 +123,7 @@ public class BaseController {
/**
* 添加Flash消息
- * @param message
+ * @param
*/
protected void addMessage(RedirectAttributes redirectAttributes, String... messages) {
StringBuilder sb = new StringBuilder();
diff --git a/src/main/java/com/nis/web/controller/BaseRestController.java b/src/main/java/com/nis/web/controller/BaseRestController.java
index 1632757..b27a30b 100644
--- a/src/main/java/com/nis/web/controller/BaseRestController.java
+++ b/src/main/java/com/nis/web/controller/BaseRestController.java
@@ -219,9 +219,18 @@ public class BaseRestController {
return convert(restResult);
}
- public Map serviceResponse(SaveRequestLogThread thread, long time, HttpServletRequest request,
- HttpServletResponse response, String msg, Object data, String activeSys, String logSource) {
- logger.info("结果集处理开始----" + System.currentTimeMillis());
+ /**
+ * 日志结果响应格式规范
+ * @param auditLogThread
+ * @param executedTime
+ * @param request
+ * @param msg
+ * @param data
+ * @param logSource
+ * @return
+ */
+ public Map serviceLogResponse(SaveRequestLogThread auditLogThread, long executedTime, HttpServletRequest request,
+ String msg, Object data, Integer logSource) {
RestResult restResult = new RestResult();
String requestMethod = request.getMethod();
if (requestMethod.equals(RequestMethod.GET.name())) {
@@ -240,12 +249,11 @@ public class BaseRestController {
restResult.setFromUri(request.getRequestURI());
restResult.setData(data);
restResult.setMsg(msg);
- restResult.setActiveSys(activeSys);
restResult.setLogSource(logSource);
- restResult.setTraceCode(thread.getTraceCode());
- thread.setConsumerTime(time);
- thread.setBusinessCode(restResult.getBusinessCode().getValue());
- new Thread(thread).start();
+ restResult.setTraceCode(auditLogThread.getTraceCode());
+ auditLogThread.setConsumerTime(executedTime);
+ auditLogThread.setBusinessCode(restResult.getBusinessCode().getValue());
+ new Thread(auditLogThread).start();
return convert(restResult);
}
@@ -264,7 +272,6 @@ public class BaseRestController {
}
successMap.put(RestConstants.TRACE_CODE, re.getTraceCode());
successMap.put(RestConstants.REST_SERVICE_DATA, re.getData());
- logger.info("结果集处理结束----" + System.currentTimeMillis());
return successMap;
}
diff --git a/src/main/java/com/nis/web/controller/restful/DfKeyConvertUrlController.java b/src/main/java/com/nis/web/controller/restful/DfKeyConvertUrlController.java
index 95d2aaf..4edf3a7 100644
--- a/src/main/java/com/nis/web/controller/restful/DfKeyConvertUrlController.java
+++ b/src/main/java/com/nis/web/controller/restful/DfKeyConvertUrlController.java
@@ -50,7 +50,7 @@ public class DfKeyConvertUrlController extends BaseRestController{
@Autowired
protected ServicesRequestLogService servicesRequestLogService;
- protected String logSource = "0";
+ protected int logSource = 0;
@RequestMapping(value="/dfKeyConvertUrlSources", method = RequestMethod.GET)
@ApiOperation(value="关键字业务转换URL日志信息获取" , httpMethod = "GET", notes="get log list")
@@ -61,7 +61,9 @@ public class DfKeyConvertUrlController extends BaseRestController{
if(!Constants.ACTIVESYS_A.equals(searchActiveSys)
&& !Constants.ACTIVESYS_C.equals(searchActiveSys)
- ) searchActiveSys=Constants.ACTIVESYS_B;
+ ) {
+ searchActiveSys=Constants.ACTIVESYS_B;
+ }
long start=System.currentTimeMillis();
SaveRequestLogThread thread=super.saveRequestLog(servicesRequestLogService,Constants.OPACTION_GET,request, null);
@@ -80,11 +82,10 @@ public class DfKeyConvertUrlController extends BaseRestController{
if (!(e instanceof RestServiceException)) {
e = new RestServiceException(thread, System.currentTimeMillis() - start, "关键字业务转换URL日志信息检索失败");
}
- ((RestServiceException) e).setActiveSys(searchActiveSys);
((RestServiceException) e).setLogSource(logSource);
throw ((RestServiceException) e);
}
- return serviceResponse(thread,System.currentTimeMillis()-start,request, response, "关键字业务转换URL日志信息检索成功",dfKeyConvertUrlPage
- ,searchActiveSys, logSource);
+ return serviceLogResponse(thread, System.currentTimeMillis()-start, request, "关键字业务转换URL日志信息检索成功",dfKeyConvertUrlPage
+ , logSource);
}
}
diff --git a/src/main/java/com/nis/web/controller/restful/DfKeyMailAddController.java b/src/main/java/com/nis/web/controller/restful/DfKeyMailAddController.java
index 15d37a3..91b7b7c 100644
--- a/src/main/java/com/nis/web/controller/restful/DfKeyMailAddController.java
+++ b/src/main/java/com/nis/web/controller/restful/DfKeyMailAddController.java
@@ -50,7 +50,7 @@ public class DfKeyMailAddController extends BaseRestController {
@Autowired
protected DfKeyMailAddService dfKeyMailAddService;
- protected String logSource = "0";
+ protected int logSource = 0;
@RequestMapping(value = "/dfKeyMailAddSources", method = RequestMethod.GET)
@ApiOperation(value = "关键字业务转换邮件地址日志信息获取", httpMethod = "GET", notes = "get log list")
@@ -61,7 +61,9 @@ public class DfKeyMailAddController extends BaseRestController {
if(!Constants.ACTIVESYS_A.equals(searchActiveSys)
&& !Constants.ACTIVESYS_C.equals(searchActiveSys)
- ) searchActiveSys=Constants.ACTIVESYS_B;
+ ) {
+ searchActiveSys=Constants.ACTIVESYS_B;
+ }
long start = System.currentTimeMillis();
SaveRequestLogThread thread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request, null);
@@ -81,11 +83,11 @@ public class DfKeyMailAddController extends BaseRestController {
if (!(e instanceof RestServiceException)) {
e = new RestServiceException(thread, System.currentTimeMillis() - start, "关键字业务转换邮件地址日志信息检索失败");
}
- ((RestServiceException) e).setActiveSys(searchActiveSys);
+
((RestServiceException) e).setLogSource(logSource);
throw ((RestServiceException) e);
}
- return serviceResponse(thread, System.currentTimeMillis() - start, request, response, "关键字业务转换邮件地址日志信息检索成功",
- dfKeyMailAddPage,searchActiveSys, logSource);
+ return serviceLogResponse(thread, System.currentTimeMillis() - start, request, "关键字业务转换邮件地址日志信息检索成功",
+ dfKeyMailAddPage, logSource);
}
}
diff --git a/src/main/java/com/nis/web/controller/restful/DfLogSearchController.java b/src/main/java/com/nis/web/controller/restful/DfLogSearchController.java
deleted file mode 100644
index 8d14ec4..0000000
--- a/src/main/java/com/nis/web/controller/restful/DfLogSearchController.java
+++ /dev/null
@@ -1,3152 +0,0 @@
-/**
-* @Title: DfLogSearchController.java
-* @Package com.nis.web.controller
-* @Description: TODO(用一句话描述该文件做什么)
-* @author (ddm)
-* @date 2016年9月5日 下午10:52:37
-* @version V1.0
-*/
-package com.nis.web.controller.restful;
-
-import java.sql.ResultSet;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import org.apache.cxf.common.util.StringUtils;
-import org.apache.log4j.Logger;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.ui.Model;
-import org.springframework.web.bind.annotation.RequestMapping;
-import org.springframework.web.bind.annotation.RequestMethod;
-import org.springframework.web.bind.annotation.RequestParam;
-import org.springframework.web.bind.annotation.RestController;
-
-import com.nis.datasource.CustomerContextHolder;
-import com.nis.domain.LogEntity;
-import com.nis.domain.Page;
-import com.nis.domain.restful.DfDnsLog;
-import com.nis.domain.restful.DfFtpLog;
-import com.nis.domain.restful.DfHttpKeywordLog;
-import com.nis.domain.restful.DfHttpReqLog;
-import com.nis.domain.restful.DfHttpResLog;
-import com.nis.domain.restful.DfIpPortLog;
-import com.nis.domain.restful.DfIpsecLog;
-import com.nis.domain.restful.DfL2tpLog;
-import com.nis.domain.restful.DfMailLog;
-import com.nis.domain.restful.DfOpenvpnLog;
-import com.nis.domain.restful.DfPptpLog;
-import com.nis.domain.restful.DfSshLog;
-import com.nis.domain.restful.DfSslLog;
-import com.nis.domain.restful.DfTunnelRandomLog;
-import com.nis.restful.RestBusinessCode;
-import com.nis.restful.RestServiceException;
-import com.nis.util.Configurations;
-import com.nis.util.Constants;
-import com.nis.util.DateUtils;
-import com.nis.util.HiveJDBC;
-import com.nis.util.JsonMapper;
-import com.nis.util.StringUtil;
-import com.nis.util.redis.RedisDao;
-import com.nis.util.redis.SaveRedisThread;
-import com.nis.web.controller.BaseRestController;
-import com.nis.web.service.HiveSqlService;
-import com.nis.web.service.SaveRequestLogThread;
-import com.nis.web.service.ServicesRequestLogService;
-import com.nis.web.service.restful.DfLogSearchService;
-import com.wordnik.swagger.annotations.ApiOperation;
-
-/**
- * @ClassName: DfLogSearchController
- * @Description: TODO(这里用一句话描述这个类的作用)
- * @author (ddm)
- * @date 2016年9月5日 下午10:52:37
- * @version V1.0
- */
-@RestController
-//@RequestMapping("${servicePath}/log/v1")
-@SuppressWarnings({ "rawtypes", "unchecked" })
-public class DfLogSearchController extends BaseRestController {
- protected final Logger logger1 = Logger.getLogger(this.getClass());
-
- protected String logSource = "0";
-
- @Autowired
- protected DfLogSearchService dfLogService;
- @Autowired
- protected RedisDao redisDao;
- @Autowired
- protected ServicesRequestLogService servicesRequestLogService;
-
- @RequestMapping(value = "/dfIpPortLogs", method = RequestMethod.GET)
- @ApiOperation(value = "端口封堵分页获取", httpMethod = "GET", notes = "get log list")
- public Map dfIpPortLogList(
- @RequestParam(value = "searchActiveSys", required = false, defaultValue = Constants.ACTIVESYS_B) String searchActiveSys,
- Page page, DfIpPortLog ipPortLog, Model model, HttpServletRequest request, HttpServletResponse response) {
- String whichHive = "&HIVEB";
- if (!Constants.ACTIVESYS_A.equals(searchActiveSys) && !Constants.ACTIVESYS_C.equals(searchActiveSys)) {
- searchActiveSys = Constants.ACTIVESYS_B;
- whichHive = "&HIVEB";
- } else {
- whichHive = "&HIVEA";
- }
- long start = System.currentTimeMillis();
-
- SaveRequestLogThread thread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
- null);
- Page ipPortLogPage = new Page();
- boolean keyExists = false;
- String key = "";
- String countKey = "";
- try {
- resetTime(ipPortLog);
- logger1.info("请求参数检验开始---" + System.currentTimeMillis());
- // 请求参数校验
- dfLogService.queryConditionCheck(thread, start, ipPortLog, DfIpPortLog.class, page);
- logger1.info("请求参数检验结束---" + System.currentTimeMillis());
- if (Constants.ONLY_SEL_FROM_HIVE || (HiveSqlService.ifTimeGreaterThan48(ipPortLog.getSearchFoundStartTime(),
- ipPortLog.getSearchFoundEndTime()) && Constants.SEL_FROM_HIVE)) {// 从数据库中查询数据
- logSource = "1";
- // 神通数据库查询打开 && (( A版&&查询开始时间>神通数据库数据A最早时间)||(
- // B版&&查询开始时间>神通数据库数据B最早时间))
- if (Constants.IS_SELECT_CLUSTER && ((Constants.ACTIVESYS_A.equals(searchActiveSys)
- && (ipPortLog.getSearchFoundStartTimeCluster()*1000) > Constants.CLUSTER_A_START_TIME)
- || (Constants.ACTIVESYS_B.equals(searchActiveSys)
- && (ipPortLog.getSearchFoundStartTimeCluster()*1000) > Constants.CLUSTER_B_START_TIME))) {
- // 打开神通数据库
- logger.info("开启神通数据库---" + System.currentTimeMillis());
- CustomerContextHolder.setCustomerType(CustomerContextHolder.DATA_SOURCE_H);// 开启数据源F
- // 拼凑rediskeyA、B版
- if (Constants.ACTIVESYS_A.equals(searchActiveSys)) {
- whichHive = "&CLUSTERA";
- // A版表名动态设置
- ipPortLog.setTableName(Configurations.getStringProperty(DfIpPortLog.class.getSimpleName() + "A",
- "t_xa_df_ip_port_log_hit_mpp").trim());
- } else if (Constants.ACTIVESYS_B.equals(searchActiveSys)) {
- whichHive = "&CLUSTERB";
- // B版表名动态设置
- ipPortLog.setTableName(Configurations
- .getStringProperty(DfIpPortLog.class.getSimpleName() + "B", "t_xa_df_ip_port_log_mpp")
- .trim());
- }
-
- if (Constants.IS_OPEN_REDIS) {
- // 根据查询条件获取key
- key = dfLogService.getJedisKey(request, false);
- // 加上AB版的条件
- key = key + whichHive;
- // 判断key是否存在
- keyExists = redisDao.exists(key);
- }
- // 存在则直接从redis中查询
- if (keyExists) {
- long startTime = System.currentTimeMillis();
- logger1.info("Redis查询开始:" + startTime);
-
- ipPortLogPage = (Page) JsonMapper.fromJsonString(redisDao.getString(key),
- Page.class);
-
- long endTime = System.currentTimeMillis();
- logger1.info(ipPortLogPage.getList().size());
- } else {
- // 不存在则查询数据库并保存查询结果到redis中
- long startTime = System.currentTimeMillis();
- ipPortLogPage = dfLogService.findIpPortPageCluster(
- new Page(request, response, DfIpPortLog.class), ipPortLog,
- searchActiveSys);
- long endTime = System.currentTimeMillis();
- if (Constants.IS_OPEN_REDIS) {
- new SaveRedisThread(key, ipPortLogPage, Constants.ORACLE_EXPIRE).start();
- }
- }
-
- } else {
- int startNum = (page.getPageNo() - 1) * page.getPageSize();
- int endNum = startNum + page.getPageSize() - 1;
- if (Constants.IS_OPEN_REDIS && Constants.DATACENTER_OPEN_REDIS) {
- // 根据查询条件获取key
- key = dfLogService.getJedisKey(request, true) + whichHive;
- // 判断key是否存在
- keyExists = redisDao.exists(key);
- }
- if (keyExists) {
- logger1.info("hive-redis查询开始---" + System.currentTimeMillis());
- List list = new ArrayList();
- ipPortLogPage = new Page();
- List strList = redisDao.getList(key, startNum, endNum);
- if (null != strList && strList.size() > 0) {
- for (String str : strList) {
- DfIpPortLog bean = (DfIpPortLog) JsonMapper.fromJsonString(str, DfIpPortLog.class);
- list.add(bean);
- }
- ipPortLogPage.setList(list);
-
- } else {
- ipPortLogPage.setList(new ArrayList());
- }
- logger1.info("hive-redis查询结束---" + System.currentTimeMillis());
- } else {
- logger1.info("hive查询开始---" + System.currentTimeMillis());
- ipPortLogPage = new Page();
- String orderBy = "";
- if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
- orderBy = page.getOrderBySql(DfIpPortLog.class.getSimpleName(), page.getOrderBy());
- } else {
- orderBy = "found_Time";
- }
- ResultSet rs = HiveSqlService.getResultSet(page, ipPortLog, "DF_IP_PORT_LOG", getCol2Col(), orderBy,
- searchActiveSys);
- Map tableMapping = HiveJDBC.tableMapping(page, key, rs, DfIpPortLog.class,
- "foundTime", "recvTime");
- if (tableMapping == null) {
- ipPortLogPage.setList(new ArrayList());
-
- } else {
- List strList = new ArrayList();
- if (tableMapping.get("obj").size() > page.getPageSize()) {
- strList = tableMapping.get("obj").subList(0, page.getPageSize());
- } else {
- strList = tableMapping.get("obj").subList(0, tableMapping.get("obj").size());
- }
- if (strList.size() > 0) {
- String jsonString = JsonMapper.toJsonString(strList);
- List List = (java.util.List) JsonMapper.fromJsonList(jsonString,
- DfIpPortLog.class);
- ipPortLogPage.setList(List);
-
- } else {
- ipPortLogPage.setList(new ArrayList());
- }
- }
- }
- if (Constants.IS_OPEN_REDIS && Constants.IS_GET_HIVECOUNT) {
- countKey = dfLogService.getJedisKey(request, true) + "&pageCount" + whichHive;
- // 判断key是否存在
- boolean countKeyExists = redisDao.exists(countKey);
- if (countKeyExists) {
- String count = redisDao.getString(countKey);
- ipPortLogPage.setCount(Long.valueOf(count));
- } else {
- Long hivePageCount = HiveSqlService.getHivePageCount(ipPortLog, countKey, "DF_IP_PORT_LOG",
- getCol2Col(), searchActiveSys);
- ipPortLogPage.setCount(Long.valueOf(hivePageCount));
- }
- } else {
- Long hivePageCount = HiveSqlService.getHivePageCount(ipPortLog, countKey, "DF_IP_PORT_LOG",
- getCol2Col(), searchActiveSys);
- ipPortLogPage.setCount(Long.valueOf(hivePageCount));
- }
- ipPortLogPage.setPageNo(page.getPageNo());
- ipPortLogPage.setPageSize(page.getPageSize());
- }
- } else {// 从oracle中查询数据
- if (Constants.IS_OPEN_REDIS) {
- // 根据查询条件获取key
- key = dfLogService.getJedisKey(request, false);
- // 判断key是否存在
- keyExists = redisDao.exists(key);
- }
- // 存在则直接从redis中查询
- if (keyExists) {
- long startTime = System.currentTimeMillis();
- logger1.info("Redis查询开始:" + startTime);
-
- ipPortLogPage = (Page) JsonMapper.fromJsonString(redisDao.getString(key), Page.class);
-
- long endTime = System.currentTimeMillis();
- logger1.info(ipPortLogPage.getList().size());
- logger1.info("Redis查询结束:" + endTime);
- logger1.info("Redis时长:" + ((endTime - startTime) / 1000));
- } else {
- // 不存在则查询数据库并保存查询结果到redis中
- long startTime = System.currentTimeMillis();
- logger1.info("orcl查询开始时间:" + startTime);
- ipPortLogPage = dfLogService.findIpPortPage(
- new Page(request, response, DfIpPortLog.class), ipPortLog, searchActiveSys);
- long endTime = System.currentTimeMillis();
- logger1.info("orcl查询结束时间:" + endTime);
- logger1.info("orcl时长:" + ((endTime - startTime) / 1000));
- if (Constants.IS_OPEN_REDIS) {
- logger1.info("redis存储开始时间:" + endTime);
- new SaveRedisThread(key, ipPortLogPage, Constants.ORACLE_EXPIRE).start();
- logger1.info("redis存储结束时间:" + endTime);
- }
- }
- }
-
- } catch (Exception e) {
- thread.setExceptionInfo(e.getMessage() + " " + e.getCause());
- e.printStackTrace();
- logger1.error(e);
- if (!(e instanceof RestServiceException)) {
- e = new RestServiceException(thread, System.currentTimeMillis() - start, "端口封堵日志检索失败");
- }
- ((RestServiceException) e).setActiveSys(searchActiveSys);
- ((RestServiceException) e).setLogSource(logSource);
- throw ((RestServiceException) e);
- }
- return serviceResponse(thread, System.currentTimeMillis() - start, request, response, "端口封堵日志检索成功",
- ipPortLogPage, searchActiveSys, logSource);
- }
-
- @RequestMapping(value = "/dfHttpReqLogs", method = RequestMethod.GET)
- @ApiOperation(value = "HTTP协议请求封堵分页获取", httpMethod = "GET", notes = "get log list")
- public Map dfHttpReqLogList(
- @RequestParam(value = "searchActiveSys", required = false, defaultValue = Constants.ACTIVESYS_B) String searchActiveSys,
- // @RequestParam(value = "type", required = false, defaultValue =
- // "oracle") String type,
- Page page, DfHttpReqLog httpReqLog, HttpServletRequest request, HttpServletResponse response, Model model) {
- String whichHive = "&HIVEB";
- if (!Constants.ACTIVESYS_A.equals(searchActiveSys) && !Constants.ACTIVESYS_C.equals(searchActiveSys)) {
- searchActiveSys = Constants.ACTIVESYS_B;
- whichHive = "&HIVEB";
- } else {
- whichHive = "&HIVEA";
- }
-
- long start = System.currentTimeMillis();
- SaveRequestLogThread thread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
- null);
- boolean keyExists = false;
- String key = "";
- String countKey = "";
- Page httpReqLogPage = new Page();
- try {
- resetTime(httpReqLog);
- // 请求参数校验
- dfLogService.queryConditionCheck(thread, start, httpReqLog, DfHttpReqLog.class, page);
- if (Constants.ONLY_SEL_FROM_HIVE
- || (HiveSqlService.ifTimeGreaterThan48(httpReqLog.getSearchFoundStartTime(),
- httpReqLog.getSearchFoundEndTime()) && Constants.SEL_FROM_HIVE)) {// 从数据库中查询数据
- logSource = "1";
- // 神通数据库查询打开 && (( A版&&查询开始时间>神通数据库数据A最早时间)||(
- // B版&&查询开始时间>神通数据库数据B最早时间))
- if (Constants.IS_SELECT_CLUSTER && ((Constants.ACTIVESYS_A.equals(searchActiveSys)
- && (httpReqLog.getSearchFoundStartTimeCluster()*1000) > Constants.CLUSTER_A_START_TIME)
- || (Constants.ACTIVESYS_B.equals(searchActiveSys)
- && (httpReqLog.getSearchFoundStartTimeCluster()*1000) > Constants.CLUSTER_B_START_TIME))) {
- // 打开神通数据库
- logger.info("开启神通数据库---" + System.currentTimeMillis());
- CustomerContextHolder.setCustomerType(CustomerContextHolder.DATA_SOURCE_H);// 开启数据源F
- // 拼凑rediskeyA、B版
- if (Constants.ACTIVESYS_A.equals(searchActiveSys)) {
- whichHive = "&CLUSTERA";
- // A版表名动态设置
- httpReqLog.setTableName(Configurations.getStringProperty(DfHttpReqLog.class.getSimpleName() + "A",
- "t_xa_df_ip_port_log_hit_mpp").trim());
- } else if (Constants.ACTIVESYS_B.equals(searchActiveSys)) {
- whichHive = "&CLUSTERB";
- // B版表名动态设置
- httpReqLog.setTableName(Configurations
- .getStringProperty(DfHttpReqLog.class.getSimpleName() + "B", "t_xa_df_http_req_log_mpp")
- .trim());
- }
-
- if (Constants.IS_OPEN_REDIS) {
- // 根据查询条件获取key
- key = dfLogService.getJedisKey(request, false);
- // 加上AB版的条件
- key = key + whichHive;
- // 判断key是否存在
- keyExists = redisDao.exists(key);
- }
- // 存在则直接从redis中查询
- if (keyExists) {
- long startTime = System.currentTimeMillis();
- logger1.info("Redis查询开始:" + startTime);
-
- httpReqLogPage = (Page) JsonMapper.fromJsonString(redisDao.getString(key),
- Page.class);
-
- long endTime = System.currentTimeMillis();
- logger1.info(httpReqLogPage.getList().size());
- } else {
- // 不存在则查询数据库并保存查询结果到redis中
- long startTime = System.currentTimeMillis();
- httpReqLogPage = dfLogService.findHttpReqPageCluster(
- new Page(request, response, DfHttpReqLog.class), httpReqLog,
- searchActiveSys);
- long endTime = System.currentTimeMillis();
- if (Constants.IS_OPEN_REDIS) {
- new SaveRedisThread(key, httpReqLogPage, Constants.ORACLE_EXPIRE).start();
- }
- }
-
- } else {
- int startNum = (page.getPageNo() - 1) * page.getPageSize();
- int endNum = startNum + page.getPageSize() - 1;
- if (Constants.IS_OPEN_REDIS && Constants.DATACENTER_OPEN_REDIS) {
- // 根据查询条件获取key
- key = dfLogService.getJedisKey(request, true) + whichHive;
- // 判断key是否存在
- keyExists = redisDao.exists(key);
- }
- if (keyExists) {
- List list = new ArrayList();
- httpReqLogPage = new Page();
- List strList = redisDao.getList(key, startNum, endNum);
- if (null != strList && strList.size() > 0) {
- for (String str : strList) {
- DfHttpReqLog bean = (DfHttpReqLog) JsonMapper.fromJsonString(str, DfHttpReqLog.class);
- list.add(bean);
- }
- httpReqLogPage.setList(list);
-
- } else {
- httpReqLogPage.setList(new ArrayList());
- }
- } else {
-
- httpReqLogPage = new Page();
- String orderBy = "";
- if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
- orderBy = page.getOrderBySql(DfHttpReqLog.class.getSimpleName(), page.getOrderBy());
- } else {
- orderBy = "found_Time";
- }
- ResultSet rs = HiveSqlService.getResultSet(page, httpReqLog, "DF_HTTP_REQ_LOG", getCol2Col(),
- orderBy, searchActiveSys);
- Map tableMapping = HiveJDBC.tableMapping(page, key, rs, DfHttpReqLog.class,
- "foundTime", "recvTime");
- if (tableMapping == null) {
- httpReqLogPage.setList(new ArrayList());
- } else {
- List strList = new ArrayList();
- if (tableMapping.get("obj").size() > page.getPageSize()) {
- strList = tableMapping.get("obj").subList(0, page.getPageSize());
- } else {
- strList = tableMapping.get("obj").subList(0, tableMapping.get("obj").size());
- }
- if (strList.size() > 0) {
- String jsonString = JsonMapper.toJsonString(strList);
- List List = (java.util.List) JsonMapper.fromJsonList(jsonString,
- DfHttpReqLog.class);
- httpReqLogPage.setList(List);
- } else {
- httpReqLogPage.setList(new ArrayList());
- }
- }
- }
-
- if (Constants.IS_OPEN_REDIS && Constants.IS_GET_HIVECOUNT) {
- countKey = dfLogService.getJedisKey(request, true) + "&pageCount" + whichHive;
- // 判断key是否存在
- boolean countKeyExists = redisDao.exists(countKey);
- if (countKeyExists) {
- String count = redisDao.getString(countKey);
- httpReqLogPage.setCount(Long.valueOf(count));
- } else {
- Long hivePageCount = HiveSqlService.getHivePageCount(httpReqLog, countKey, "DF_HTTP_REQ_LOG",
- getCol2Col(), searchActiveSys);
- httpReqLogPage.setCount(Long.valueOf(hivePageCount));
- }
- } else {
- Long hivePageCount = HiveSqlService.getHivePageCount(httpReqLog, countKey, "DF_HTTP_REQ_LOG",
- getCol2Col(), searchActiveSys);
- httpReqLogPage.setCount(Long.valueOf(hivePageCount));
- }
-
- httpReqLogPage.setPageNo(page.getPageNo());
- httpReqLogPage.setPageSize(page.getPageSize());
- }
- } else {// 从oracle中查询数据
- if (Constants.IS_OPEN_REDIS) {
- // 根据查询条件获取key
- key = dfLogService.getJedisKey(request, false);
- // 判断key是否存在
- keyExists = redisDao.exists(key);
- }
-
- // 存在则直接从redis中查询
- if (keyExists) {
- httpReqLogPage = (Page) JsonMapper.fromJsonString(redisDao.getString(key),
- Page.class);
- } else {
- // 不存在则查询数据库并保存查询结果到redis中
- httpReqLogPage = dfLogService.findHttpReqPage(
- new Page(request, response, DfHttpReqLog.class), httpReqLog,
- searchActiveSys);
- if (Constants.IS_OPEN_REDIS)
- new SaveRedisThread(key, httpReqLogPage, Constants.ORACLE_EXPIRE).start();
- }
- }
- } catch (Exception e) {
- thread.setExceptionInfo(e.getMessage() + " " + e.getCause());
- e.printStackTrace();
- logger1.error(e);
- if (!(e instanceof RestServiceException)) {
- e = new RestServiceException(thread, System.currentTimeMillis() - start, "HTTP协议请求日志检索失败");
- }
- ((RestServiceException) e).setActiveSys(searchActiveSys);
- ((RestServiceException) e).setLogSource(logSource);
- throw ((RestServiceException) e);
- }
-
- return serviceResponse(thread, System.currentTimeMillis() - start, request, response, "HTTP协议请求日志检索成功",
- httpReqLogPage, searchActiveSys, logSource);
- }
-
/**
 * Paged retrieval of HTTP-protocol response blocking logs.
 *
 * Storage routing, in priority order:
 *  1. Cluster (MPP) database — when {@code Constants.IS_SELECT_CLUSTER} is on and the
 *     query start time falls inside the cluster's data range for the selected system;
 *  2. Hive — when {@code ONLY_SEL_FROM_HIVE} is set, or the query window is older
 *     than 48h and {@code SEL_FROM_HIVE} is on;
 *  3. Oracle — otherwise.
 * Each path consults a Redis cache first (key derived from the request parameters
 * plus an A/B-system suffix) and repopulates it on a miss.
 *
 * @param searchActiveSys which system's logs to search; any value other than
 *        ACTIVESYS_A / ACTIVESYS_C is coerced to ACTIVESYS_B
 * @param page       paging and ordering parameters bound from the request
 * @param httpResLog query-condition bean bound from request parameters
 * @return the standard service-response map wrapping the result {@link Page}
 * @throws RestServiceException if validation or any backend lookup fails
 */
@RequestMapping(value = "/dfHttpResLogs", method = RequestMethod.GET)
@ApiOperation(value = "HTTP协议响应封堵日志获取", httpMethod = "GET", notes = "get log list")
public Map dfHttpResLogList(
        @RequestParam(value = "searchActiveSys", required = false, defaultValue = Constants.ACTIVESYS_B) String searchActiveSys,
        Page page, DfHttpResLog httpResLog, HttpServletRequest request, HttpServletResponse response, Model model) {
    // Suffix appended to Redis keys so A-system and B-system caches never collide.
    String whichHive = "&HIVEB";
    // NOTE(review): ACTIVESYS_C falls through to the "else" branch and is tagged
    // "&HIVEA" while searchActiveSys stays "C" — confirm this routing is intended.
    if (!Constants.ACTIVESYS_A.equals(searchActiveSys) && !Constants.ACTIVESYS_C.equals(searchActiveSys)) {
        searchActiveSys = Constants.ACTIVESYS_B;
        whichHive = "&HIVEB";
    } else {
        whichHive = "&HIVEA";
    }

    long start = System.currentTimeMillis();
    // Audit record for this request, persisted by a background thread.
    SaveRequestLogThread thread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
            null);
    boolean keyExists = false;
    String key = "";
    String countKey = "";
    Page httpResLogPage = new Page();

    try {
        resetTime(httpResLog);
        // Validate the query conditions (throws on invalid input).
        dfLogService.queryConditionCheck(thread, start, httpResLog, DfHttpResLog.class, page);
        if (Constants.ONLY_SEL_FROM_HIVE
                || (HiveSqlService.ifTimeGreaterThan48(httpResLog.getSearchFoundStartTime(),
                        httpResLog.getSearchFoundEndTime()) && Constants.SEL_FROM_HIVE)) {// query Hive / cluster instead of Oracle
            logSource = "1";
            // Cluster lookup is enabled AND the requested start time lies within the
            // cluster's earliest-data bound for the selected system (A or B).
            if (Constants.IS_SELECT_CLUSTER && ((Constants.ACTIVESYS_A.equals(searchActiveSys)
                    && (httpResLog.getSearchFoundStartTimeCluster()*1000) > Constants.CLUSTER_A_START_TIME)
                    || (Constants.ACTIVESYS_B.equals(searchActiveSys)
                    && (httpResLog.getSearchFoundStartTimeCluster()*1000) > Constants.CLUSTER_B_START_TIME))) {
                // Route this thread's queries to the cluster data source.
                logger.info("开启神通数据库---" + System.currentTimeMillis());
                CustomerContextHolder.setCustomerType(CustomerContextHolder.DATA_SOURCE_H);// data source H (original comment said "F" — see review note)
                // Per-system Redis key suffix and configurable physical table name.
                if (Constants.ACTIVESYS_A.equals(searchActiveSys)) {
                    whichHive = "&CLUSTERA";
                    // A-system table name, overridable via configuration.
                    httpResLog.setTableName(Configurations.getStringProperty(DfHttpResLog.class.getSimpleName() + "A",
                            "t_xa_df_ip_port_log_hit_mpp").trim());
                } else if (Constants.ACTIVESYS_B.equals(searchActiveSys)) {
                    whichHive = "&CLUSTERB";
                    // B-system table name, overridable via configuration.
                    httpResLog.setTableName(Configurations
                            .getStringProperty(DfHttpResLog.class.getSimpleName() + "B", "t_xa_df_http_res_log_mpp")
                            .trim());
                }

                if (Constants.IS_OPEN_REDIS) {
                    // Cache key derived from the query conditions ...
                    key = dfLogService.getJedisKey(request, false);
                    // ... plus the A/B-system suffix.
                    key = key + whichHive;
                    keyExists = redisDao.exists(key);
                }
                // Cache hit: deserialize the whole page straight from Redis.
                if (keyExists) {
                    long startTime = System.currentTimeMillis();
                    logger1.info("Redis查询开始:" + startTime);

                    httpResLogPage = (Page) JsonMapper.fromJsonString(redisDao.getString(key),
                            Page.class);

                    long endTime = System.currentTimeMillis();
                    logger1.info(httpResLogPage.getList().size());
                } else {
                    // Cache miss: query the cluster and repopulate Redis asynchronously.
                    long startTime = System.currentTimeMillis();
                    httpResLogPage = dfLogService.findHttpResPageCluster(
                            new Page(request, response, DfHttpResLog.class), httpResLog,
                            searchActiveSys);
                    long endTime = System.currentTimeMillis();
                    if (Constants.IS_OPEN_REDIS) {
                        new SaveRedisThread(key, httpResLogPage, Constants.ORACLE_EXPIRE).start();
                    }
                }

            } else {
                // Hive path: page window as [startNum, endNum] (inclusive) for the Redis list cache.
                int startNum = (page.getPageNo() - 1) * page.getPageSize();
                int endNum = startNum + page.getPageSize() - 1;
                if (Constants.IS_OPEN_REDIS && Constants.DATACENTER_OPEN_REDIS) {
                    // Cache key derived from the query conditions.
                    key = dfLogService.getJedisKey(request, true) + whichHive;
                    keyExists = redisDao.exists(key);
                }
                if (keyExists) {
                    // Cache hit: read just this page's rows from the cached Redis list.
                    List list = new ArrayList();
                    httpResLogPage = new Page();
                    List strList = redisDao.getList(key, startNum, endNum);
                    if (null != strList && strList.size() > 0) {
                        for (String str : strList) {
                            DfHttpResLog bean = (DfHttpResLog) JsonMapper.fromJsonString(str, DfHttpResLog.class);
                            list.add(bean);
                        }
                        httpResLogPage.setList(list);
                    } else {
                        httpResLogPage.setList(new ArrayList());
                    }
                } else {
                    // Cache miss: run the Hive query directly.
                    httpResLogPage = new Page();
                    String orderBy = "";
                    if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
                        orderBy = page.getOrderBySql(DfHttpResLog.class.getSimpleName(), page.getOrderBy());
                    } else {
                        // Default sort column when the caller supplies no ordering.
                        orderBy = "found_Time";
                    }
                    ResultSet rs = HiveSqlService.getResultSet(page, httpResLog, "DF_HTTP_RES_LOG", getCol2Col(),
                            orderBy, searchActiveSys);
                    // Maps the raw result set to beans; "obj" holds the row list.
                    Map tableMapping = HiveJDBC.tableMapping(page, key, rs, DfHttpResLog.class,
                            "foundTime", "recvTime");
                    if (tableMapping == null) {
                        httpResLogPage.setList(new ArrayList());
                    } else {
                        // Truncate the mapped rows to at most one page.
                        List strList = new ArrayList();
                        if (tableMapping.get("obj").size() > page.getPageSize()) {
                            strList = tableMapping.get("obj").subList(0, page.getPageSize());
                        } else {
                            strList = tableMapping.get("obj").subList(0, tableMapping.get("obj").size());
                        }
                        if (strList.size() > 0) {
                            // Round-trip through JSON to convert mapped rows into typed beans.
                            String jsonString = JsonMapper.toJsonString(strList);
                            List List = (java.util.List) JsonMapper.fromJsonList(jsonString,
                                    DfHttpResLog.class);
                            httpResLogPage.setList(List);
                        } else {
                            httpResLogPage.setList(new ArrayList());
                        }
                    }
                }
                // Total-row count: served from the Redis count cache when possible,
                // otherwise recomputed against Hive.
                if (Constants.IS_OPEN_REDIS && Constants.IS_GET_HIVECOUNT) {
                    countKey = dfLogService.getJedisKey(request, true) + "&pageCount" + whichHive;
                    boolean countKeyExists = redisDao.exists(countKey);
                    if (countKeyExists) {
                        String count = redisDao.getString(countKey);
                        httpResLogPage.setCount(Long.valueOf(count));
                    } else {
                        Long hivePageCount = HiveSqlService.getHivePageCount(httpResLog, countKey, "DF_HTTP_RES_LOG",
                                getCol2Col(), searchActiveSys);
                        httpResLogPage.setCount(Long.valueOf(hivePageCount));
                    }
                } else {
                    Long hivePageCount = HiveSqlService.getHivePageCount(httpResLog, countKey, "DF_HTTP_RES_LOG",
                            getCol2Col(), searchActiveSys);
                    httpResLogPage.setCount(Long.valueOf(hivePageCount));
                }
                httpResLogPage.setPageNo(page.getPageNo());
                httpResLogPage.setPageSize(page.getPageSize());
            }
        } else {// query from Oracle
            if (Constants.IS_OPEN_REDIS) {
                // Cache key derived from the query conditions.
                key = dfLogService.getJedisKey(request, false);
                keyExists = redisDao.exists(key);
            }
            // Cache hit: deserialize the whole page from Redis.
            if (keyExists) {
                httpResLogPage = (Page) JsonMapper.fromJsonString(redisDao.getString(key),
                        Page.class);
            } else {
                // Cache miss: query Oracle and repopulate Redis asynchronously.
                httpResLogPage = dfLogService.findHttpResPage(
                        new Page(request, response, DfHttpResLog.class), httpResLog, searchActiveSys);

                if (Constants.IS_OPEN_REDIS)
                    new SaveRedisThread(key, httpResLogPage, Constants.ORACLE_EXPIRE).start();
            }
        }
    } catch (Exception e) {
        thread.setExceptionInfo(e.getMessage() + " " + e.getCause());
        e.printStackTrace();
        logger1.error(e);
        // NOTE(review): wrapping discards the original exception as cause — consider
        // a RestServiceException constructor that preserves it.
        if (!(e instanceof RestServiceException)) {
            e = new RestServiceException(thread, System.currentTimeMillis() - start, "HTTP协议响应日志检索失败");
        }
        ((RestServiceException) e).setActiveSys(searchActiveSys);
        ((RestServiceException) e).setLogSource(logSource);
        throw ((RestServiceException) e);

    }

    return serviceResponse(thread, System.currentTimeMillis() - start, request, response, "HTTP协议响应日志检索成功",
            httpResLogPage, searchActiveSys, logSource);
}
-
/**
 * Paged retrieval of HTTP keyword / request-multipart / response-multipart
 * blocking logs (all stored in the same keyword-log table, distinguished by
 * the service-type code).
 *
 * Storage routing mirrors the other df*LogList endpoints: cluster (MPP)
 * database when enabled and in range, Hive for windows older than 48h,
 * Oracle otherwise; each path is fronted by a Redis cache keyed on the
 * request parameters plus an A/B-system suffix.
 *
 * @param searchActiveSys which system's logs to search; any value other than
 *        ACTIVESYS_A / ACTIVESYS_C is coerced to ACTIVESYS_B
 * @param page           paging and ordering parameters bound from the request
 * @param httpKeywordLog query-condition bean; serviceType defaults to "5" (0x05)
 * @return the standard service-response map wrapping the result {@link Page}
 * @throws RestServiceException if validation or any backend lookup fails
 */
@RequestMapping(value = "/dfHttpKeywordLogs", method = RequestMethod.GET)
@ApiOperation(value = "HTTP协议关键字、请求多域、响应多域封堵日志获取", httpMethod = "GET", notes = "get log list")
public Map dfHttpKeywordLogList(
        @RequestParam(value = "searchActiveSys", required = false, defaultValue = Constants.ACTIVESYS_B) String searchActiveSys,
        Page page, DfHttpKeywordLog httpKeywordLog, HttpServletRequest request, HttpServletResponse response,
        Model model) {
    // Suffix appended to Redis keys so A-system and B-system caches never collide.
    String whichHive = "&HIVEB";
    // NOTE(review): ACTIVESYS_C falls through to the "else" branch and is tagged
    // "&HIVEA" — confirm this routing is intended.
    if (!Constants.ACTIVESYS_A.equals(searchActiveSys) && !Constants.ACTIVESYS_C.equals(searchActiveSys)) {
        searchActiveSys = Constants.ACTIVESYS_B;
        whichHive = "&HIVEB";
    } else {
        whichHive = "&HIVEA";
    }

    // Default service type: keyword logs (0x05) when the caller gives none.
    String searchServiceType = httpKeywordLog.getSearchServiceType();
    if (StringUtils.isEmpty(searchServiceType)) {
        httpKeywordLog.setSearchServiceType("5"); // 0x05
    }
    long start = System.currentTimeMillis();
    // Audit record for this request, persisted by a background thread.
    SaveRequestLogThread thread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
            null);
    boolean keyExists = false;
    String key = "";
    String countKey = "";
    Page httpKeywordLogPage = new Page();

    try {
        resetTime(httpKeywordLog);
        // Validate the query conditions (throws on invalid input).
        dfLogService.queryConditionCheck(thread, start, httpKeywordLog, DfHttpKeywordLog.class, page);
        if (Constants.ONLY_SEL_FROM_HIVE
                || (HiveSqlService.ifTimeGreaterThan48(httpKeywordLog.getSearchFoundStartTime(),
                        httpKeywordLog.getSearchFoundEndTime()) && Constants.SEL_FROM_HIVE)) {// query Hive / cluster instead of Oracle
            logSource = "1";
            // Cluster lookup is enabled AND the requested start time lies within the
            // cluster's earliest-data bound for the selected system (A or B).
            if (Constants.IS_SELECT_CLUSTER && ((Constants.ACTIVESYS_A.equals(searchActiveSys)
                    && (httpKeywordLog.getSearchFoundStartTimeCluster()*1000) > Constants.CLUSTER_A_START_TIME)
                    || (Constants.ACTIVESYS_B.equals(searchActiveSys)
                    && (httpKeywordLog.getSearchFoundStartTimeCluster()*1000) > Constants.CLUSTER_B_START_TIME))) {
                // Route this thread's queries to the cluster data source.
                logger.info("开启神通数据库---" + System.currentTimeMillis());
                CustomerContextHolder.setCustomerType(CustomerContextHolder.DATA_SOURCE_H);// data source H (original comment said "F" — see review note)
                // Per-system Redis key suffix and configurable physical table name.
                if (Constants.ACTIVESYS_A.equals(searchActiveSys)) {
                    whichHive = "&CLUSTERA";
                    // A-system table name, overridable via configuration.
                    httpKeywordLog.setTableName(Configurations.getStringProperty(DfHttpKeywordLog.class.getSimpleName() + "A",
                            "t_xa_df_ip_port_log_hit_mpp").trim());
                } else if (Constants.ACTIVESYS_B.equals(searchActiveSys)) {
                    whichHive = "&CLUSTERB";
                    // B-system table name, overridable via configuration.
                    httpKeywordLog.setTableName(Configurations
                            .getStringProperty(DfHttpKeywordLog.class.getSimpleName() + "B", "t_xa_df_http_keyword_log_mpp")
                            .trim());
                }

                if (Constants.IS_OPEN_REDIS) {
                    // Cache key derived from the query conditions ...
                    key = dfLogService.getJedisKey(request, false);
                    // ... plus the A/B-system suffix.
                    key = key + whichHive;
                    keyExists = redisDao.exists(key);
                }
                // Cache hit: deserialize the whole page straight from Redis.
                if (keyExists) {
                    long startTime = System.currentTimeMillis();
                    logger1.info("Redis查询开始:" + startTime);

                    httpKeywordLogPage = (Page) JsonMapper.fromJsonString(redisDao.getString(key),
                            Page.class);

                    long endTime = System.currentTimeMillis();
                    logger1.info(httpKeywordLogPage.getList().size());
                } else {
                    // Cache miss: query the cluster and repopulate Redis asynchronously.
                    long startTime = System.currentTimeMillis();
                    httpKeywordLogPage = dfLogService.findHttpKeywordPageCluster(
                            new Page(request, response, DfHttpKeywordLog.class), httpKeywordLog,
                            searchActiveSys);
                    long endTime = System.currentTimeMillis();
                    if (Constants.IS_OPEN_REDIS) {
                        new SaveRedisThread(key, httpKeywordLogPage, Constants.ORACLE_EXPIRE).start();
                    }
                }

            } else {
                // Hive path: page window as [startNum, endNum] (inclusive) for the Redis list cache.
                int startNum = (page.getPageNo() - 1) * page.getPageSize();
                int endNum = startNum + page.getPageSize() - 1;
                if (Constants.IS_OPEN_REDIS && Constants.DATACENTER_OPEN_REDIS) {
                    // Cache key derived from the query conditions.
                    key = dfLogService.getJedisKey(request, true) + whichHive;
                    keyExists = redisDao.exists(key);
                }
                if (keyExists) {
                    // Cache hit: read just this page's rows from the cached Redis list.
                    List list = new ArrayList();
                    httpKeywordLogPage = new Page();
                    List strList = redisDao.getList(key, startNum, endNum);
                    if (null != strList && strList.size() > 0) {
                        for (String str : strList) {
                            DfHttpKeywordLog bean = (DfHttpKeywordLog) JsonMapper.fromJsonString(str,
                                    DfHttpKeywordLog.class);
                            list.add(bean);
                        }
                        httpKeywordLogPage.setList(list);

                    } else {
                        httpKeywordLogPage.setList(new ArrayList());
                    }
                } else {
                    // Cache miss: run the Hive query directly.
                    httpKeywordLogPage = new Page();
                    String orderBy = "";
                    if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
                        orderBy = page.getOrderBySql(DfHttpKeywordLog.class.getSimpleName(), page.getOrderBy());
                    } else {
                        // Default sort column when the caller supplies no ordering.
                        orderBy = "found_Time";
                    }
                    ResultSet rs = HiveSqlService.getResultSet(page, httpKeywordLog, "DF_HTTP_KEYWORD_LOG",
                            getCol2Col(), orderBy, searchActiveSys);
                    // Maps the raw result set to beans; "obj" holds the row list.
                    Map tableMapping = HiveJDBC.tableMapping(page, key, rs, DfHttpKeywordLog.class,
                            "foundTime", "recvTime");
                    if (tableMapping == null) {
                        httpKeywordLogPage.setList(new ArrayList());
                    } else {
                        // Truncate the mapped rows to at most one page.
                        List strList = new ArrayList();
                        if (tableMapping.get("obj").size() > page.getPageSize()) {
                            strList = tableMapping.get("obj").subList(0, page.getPageSize());
                        } else {
                            strList = tableMapping.get("obj").subList(0, tableMapping.get("obj").size());
                        }
                        if (strList.size() > 0) {
                            // Round-trip through JSON to convert mapped rows into typed beans.
                            String jsonString = JsonMapper.toJsonString(strList);
                            List List = (java.util.List) JsonMapper
                                    .fromJsonList(jsonString, DfHttpKeywordLog.class);
                            httpKeywordLogPage.setList(List);

                        } else {
                            httpKeywordLogPage.setList(new ArrayList());
                        }

                    }
                }
                // Total-row count: served from the Redis count cache when possible,
                // otherwise recomputed against Hive.
                if (Constants.IS_OPEN_REDIS && Constants.IS_GET_HIVECOUNT) {
                    countKey = dfLogService.getJedisKey(request, true) + "&pageCount" + whichHive;
                    boolean countKeyExists = redisDao.exists(countKey);
                    if (countKeyExists) {
                        String count = redisDao.getString(countKey);
                        httpKeywordLogPage.setCount(Long.valueOf(count));
                    } else {
                        Long hivePageCount = HiveSqlService.getHivePageCount(httpKeywordLog, countKey,
                                "DF_HTTP_KEYWORD_LOG", getCol2Col(), searchActiveSys);
                        httpKeywordLogPage.setCount(Long.valueOf(hivePageCount));
                    }
                } else {
                    Long hivePageCount = HiveSqlService.getHivePageCount(httpKeywordLog, countKey,
                            "DF_HTTP_KEYWORD_LOG", getCol2Col(), searchActiveSys);
                    httpKeywordLogPage.setCount(Long.valueOf(hivePageCount));
                }
                httpKeywordLogPage.setPageNo(page.getPageNo());
                httpKeywordLogPage.setPageSize(page.getPageSize());
            }
        } else {// query from Oracle
            if (Constants.IS_OPEN_REDIS) {
                // Cache key derived from the query conditions.
                key = dfLogService.getJedisKey(request, false);
                keyExists = redisDao.exists(key);
            }
            // Cache hit: deserialize the whole page from Redis.
            if (keyExists) {
                httpKeywordLogPage = (Page) JsonMapper.fromJsonString(redisDao.getString(key),
                        Page.class);
            } else {
                // Cache miss: query Oracle and repopulate Redis asynchronously.
                httpKeywordLogPage = dfLogService.findHttpKeywordPage(
                        new Page(request, response, DfHttpKeywordLog.class), httpKeywordLog,
                        searchActiveSys);

                if (Constants.IS_OPEN_REDIS)
                    new SaveRedisThread(key, httpKeywordLogPage, Constants.ORACLE_EXPIRE).start();
            }
        }
    } catch (Exception e) {
        thread.setExceptionInfo(e.getMessage() + " " + e.getCause());
        e.printStackTrace();
        logger1.error(e);
        // NOTE(review): wrapping discards the original exception as cause.
        if (!(e instanceof RestServiceException)) {
            e = new RestServiceException(thread, System.currentTimeMillis() - start, "HTTP协议关键字、请求多域、响应多域封堵日志检索失败");
        }
        ((RestServiceException) e).setActiveSys(searchActiveSys);
        ((RestServiceException) e).setLogSource(logSource);
        throw ((RestServiceException) e);
    }

    return serviceResponse(thread, System.currentTimeMillis() - start, request, response,
            "HTTP协议关键字、请求多域、响应多域封堵日志检索成功", httpKeywordLogPage, searchActiveSys, logSource);
}
-
/**
 * Paged retrieval of HTTP request-multipart blocking logs.
 *
 * Same keyword-log table as {@code dfHttpKeywordLogList}, but the service type
 * is forced to "17" (0x11) to select request-multipart records. Unlike the
 * other endpoints there is no cluster (MPP) branch here: data comes from Hive
 * (window older than 48h or ONLY_SEL_FROM_HIVE) or Oracle, each fronted by a
 * Redis cache keyed on the request parameters plus an A/B-system suffix.
 *
 * @param searchActiveSys which system's logs to search; any value other than
 *        ACTIVESYS_A / ACTIVESYS_C is coerced to ACTIVESYS_B
 * @param page            paging and ordering parameters bound from the request
 * @param httpReqMultiLog query-condition bean (service type overwritten to "17")
 * @return the standard service-response map wrapping the result {@link Page}
 * @throws RestServiceException if validation or any backend lookup fails
 */
@RequestMapping(value = "/dfHttpReqMultiPartLogs", method = RequestMethod.GET)
@ApiOperation(value = "Http协议请求多域封堵日志获取", httpMethod = "GET", notes = "get log list")
public Map dfHttpReqMultiPartLogList(
        @RequestParam(value = "searchActiveSys", required = false, defaultValue = Constants.ACTIVESYS_B) String searchActiveSys,
        Page page, DfHttpKeywordLog httpReqMultiLog, HttpServletRequest request, HttpServletResponse response,
        Model model) {
    // Suffix appended to Redis keys so A-system and B-system caches never collide.
    String whichHive = "&HIVEB";
    // NOTE(review): ACTIVESYS_C falls through to the "else" branch and is tagged
    // "&HIVEA" — confirm this routing is intended.
    if (!Constants.ACTIVESYS_A.equals(searchActiveSys) && !Constants.ACTIVESYS_C.equals(searchActiveSys)) {
        searchActiveSys = Constants.ACTIVESYS_B;
        whichHive = "&HIVEB";
    } else {
        whichHive = "&HIVEA";
    }

    // Request-multipart records carry service type 0x11.
    httpReqMultiLog.setSearchServiceType("17"); // 0x11

    long start = System.currentTimeMillis();
    // Audit record for this request, persisted by a background thread.
    SaveRequestLogThread thread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
            null);
    boolean keyExists = false;
    String key = "";
    String countKey = "";
    Page httpReqMultiPage = new Page();

    try {
        resetTime(httpReqMultiLog);
        // Validate the query conditions (throws on invalid input).
        dfLogService.queryConditionCheck(thread, start, httpReqMultiLog, DfHttpKeywordLog.class, page);
        if (Constants.ONLY_SEL_FROM_HIVE
                || (HiveSqlService.ifTimeGreaterThan48(httpReqMultiLog.getSearchFoundStartTime(),
                        httpReqMultiLog.getSearchFoundEndTime()) && Constants.SEL_FROM_HIVE)) {// query Hive instead of Oracle
            logSource = "1";
            // Hive path: page window as [startNum, endNum] (inclusive) for the Redis list cache.
            int startNum = (page.getPageNo() - 1) * page.getPageSize();
            int endNum = startNum + page.getPageSize() - 1;
            if (Constants.IS_OPEN_REDIS && Constants.DATACENTER_OPEN_REDIS) {
                // Cache key derived from the query conditions.
                key = dfLogService.getJedisKey(request, true) + whichHive;
                keyExists = redisDao.exists(key);
            }
            if (keyExists) {
                // Cache hit: read just this page's rows from the cached Redis list.
                List list = new ArrayList();
                httpReqMultiPage = new Page();
                List strList = redisDao.getList(key, startNum, endNum);
                if (null != strList && strList.size() > 0) {
                    for (String str : strList) {
                        DfHttpKeywordLog bean = (DfHttpKeywordLog) JsonMapper.fromJsonString(str,
                                DfHttpKeywordLog.class);
                        list.add(bean);
                    }
                    httpReqMultiPage.setList(list);

                } else {
                    httpReqMultiPage.setList(new ArrayList());
                }
            } else {
                // Cache miss: run the Hive query directly.
                httpReqMultiPage = new Page();
                String orderBy = "";
                if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
                    orderBy = page.getOrderBySql(DfHttpKeywordLog.class.getSimpleName(), page.getOrderBy());
                } else {
                    // Default sort column when the caller supplies no ordering.
                    orderBy = "found_Time";
                }
                ResultSet rs = HiveSqlService.getResultSet(page, httpReqMultiLog, "DF_HTTP_KEYWORD_LOG",
                        getCol2Col(), orderBy, searchActiveSys);
                // Maps the raw result set to beans; "obj" holds the row list.
                Map tableMapping = HiveJDBC.tableMapping(page, key, rs, DfHttpKeywordLog.class,
                        "foundTime", "recvTime");
                if (tableMapping == null) {
                    httpReqMultiPage.setList(new ArrayList());
                } else {
                    // Truncate the mapped rows to at most one page.
                    List strList = new ArrayList();
                    if (tableMapping.get("obj").size() > page.getPageSize()) {
                        strList = tableMapping.get("obj").subList(0, page.getPageSize());
                    } else {
                        strList = tableMapping.get("obj").subList(0, tableMapping.get("obj").size());
                    }
                    if (strList.size() > 0) {
                        // Round-trip through JSON to convert mapped rows into typed beans.
                        String jsonString = JsonMapper.toJsonString(strList);
                        List List = (java.util.List) JsonMapper
                                .fromJsonList(jsonString, DfHttpKeywordLog.class);
                        httpReqMultiPage.setList(List);

                    } else {
                        httpReqMultiPage.setList(new ArrayList());
                    }

                }
            }
            // Total-row count: served from the Redis count cache when possible,
            // otherwise recomputed against Hive.
            if (Constants.IS_OPEN_REDIS && Constants.IS_GET_HIVECOUNT) {
                countKey = dfLogService.getJedisKey(request, true) + "&pageCount" + whichHive;
                boolean countKeyExists = redisDao.exists(countKey);
                if (countKeyExists) {
                    String count = redisDao.getString(countKey);
                    httpReqMultiPage.setCount(Long.valueOf(count));
                } else {
                    Long hivePageCount = HiveSqlService.getHivePageCount(httpReqMultiLog, countKey,
                            "DF_HTTP_KEYWORD_LOG", getCol2Col(), searchActiveSys);
                    httpReqMultiPage.setCount(Long.valueOf(hivePageCount));
                }
            } else {
                Long hivePageCount = HiveSqlService.getHivePageCount(httpReqMultiLog, countKey,
                        "DF_HTTP_KEYWORD_LOG", getCol2Col(), searchActiveSys);
                httpReqMultiPage.setCount(Long.valueOf(hivePageCount));
            }
            httpReqMultiPage.setPageNo(page.getPageNo());
            httpReqMultiPage.setPageSize(page.getPageSize());
        } else {// query from Oracle
            if (Constants.IS_OPEN_REDIS) {
                // Cache key derived from the query conditions.
                key = dfLogService.getJedisKey(request, false);
                keyExists = redisDao.exists(key);
            }
            // Cache hit: deserialize the whole page from Redis.
            if (keyExists) {
                httpReqMultiPage = (Page) JsonMapper.fromJsonString(redisDao.getString(key),
                        Page.class);
            } else {
                // Cache miss: query Oracle and repopulate Redis asynchronously.
                httpReqMultiPage = dfLogService.findHttpMultiPartPage(
                        new Page(request, response, DfHttpKeywordLog.class), httpReqMultiLog,
                        searchActiveSys);

                if (Constants.IS_OPEN_REDIS)
                    new SaveRedisThread(key, httpReqMultiPage, Constants.ORACLE_EXPIRE).start();
            }
        }
    } catch (Exception e) {
        thread.setExceptionInfo(e.getMessage() + " " + e.getCause());
        e.printStackTrace();
        logger1.error(e);
        // NOTE(review): wrapping discards the original exception as cause.
        if (!(e instanceof RestServiceException)) {
            e = new RestServiceException(thread, System.currentTimeMillis() - start, "Http协议请求多域封堵日志检索失败");
        }
        ((RestServiceException) e).setActiveSys(searchActiveSys);
        ((RestServiceException) e).setLogSource(logSource);
        throw ((RestServiceException) e);
    }

    return serviceResponse(thread, System.currentTimeMillis() - start, request, response, "Http协议请求多域封堵日志检索成功",
            httpReqMultiPage, searchActiveSys, logSource);
}
-
/**
 * Paged retrieval of HTTP response-multipart blocking logs.
 *
 * Same keyword-log table as {@code dfHttpKeywordLogList}, but the service type
 * is forced to "18" (0x12) to select response-multipart records. Like the
 * request-multipart endpoint there is no cluster (MPP) branch: data comes from
 * Hive (window older than 48h or ONLY_SEL_FROM_HIVE) or Oracle, each fronted
 * by a Redis cache keyed on the request parameters plus an A/B-system suffix.
 *
 * @param searchActiveSys which system's logs to search; any value other than
 *        ACTIVESYS_A / ACTIVESYS_C is coerced to ACTIVESYS_B
 * @param page            paging and ordering parameters bound from the request
 * @param httpResMultiLog query-condition bean (service type overwritten to "18")
 * @return the standard service-response map wrapping the result {@link Page}
 * @throws RestServiceException if validation or any backend lookup fails
 */
@RequestMapping(value = "/dfHttpResMultiPartLogs", method = RequestMethod.GET)
@ApiOperation(value = "Http协议响应多域封堵日志获取", httpMethod = "GET", notes = "get log list")
public Map dfHttpResMultiPartLogList(
        @RequestParam(value = "searchActiveSys", required = false, defaultValue = Constants.ACTIVESYS_B) String searchActiveSys,
        Page page, DfHttpKeywordLog httpResMultiLog, HttpServletRequest request, HttpServletResponse response,
        Model model) {
    // Suffix appended to Redis keys so A-system and B-system caches never collide.
    String whichHive = "&HIVEB";
    // NOTE(review): ACTIVESYS_C falls through to the "else" branch and is tagged
    // "&HIVEA" — confirm this routing is intended.
    if (!Constants.ACTIVESYS_A.equals(searchActiveSys) && !Constants.ACTIVESYS_C.equals(searchActiveSys)) {
        searchActiveSys = Constants.ACTIVESYS_B;
        whichHive = "&HIVEB";
    } else {
        whichHive = "&HIVEA";
    }

    // Response-multipart records carry service type 0x12.
    httpResMultiLog.setSearchServiceType("18"); // 0x12

    long start = System.currentTimeMillis();
    // Audit record for this request, persisted by a background thread.
    SaveRequestLogThread thread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
            null);
    boolean keyExists = false;
    String key = "";
    String countKey = "";
    Page httpResMultiPage = new Page();

    try {
        resetTime(httpResMultiLog);
        // Validate the query conditions (throws on invalid input).
        dfLogService.queryConditionCheck(thread, start, httpResMultiLog, DfHttpKeywordLog.class, page);
        if (Constants.ONLY_SEL_FROM_HIVE
                || (HiveSqlService.ifTimeGreaterThan48(httpResMultiLog.getSearchFoundStartTime(),
                        httpResMultiLog.getSearchFoundEndTime()) && Constants.SEL_FROM_HIVE)) {// query Hive instead of Oracle
            logSource = "1";
            // Hive path: page window as [startNum, endNum] (inclusive) for the Redis list cache.
            int startNum = (page.getPageNo() - 1) * page.getPageSize();
            int endNum = startNum + page.getPageSize() - 1;
            if (Constants.IS_OPEN_REDIS && Constants.DATACENTER_OPEN_REDIS) {
                // Cache key derived from the query conditions.
                key = dfLogService.getJedisKey(request, true) + whichHive;
                keyExists = redisDao.exists(key);
            }
            if (keyExists) {
                // Cache hit: read just this page's rows from the cached Redis list.
                List list = new ArrayList();
                httpResMultiPage = new Page();
                List strList = redisDao.getList(key, startNum, endNum);
                if (null != strList && strList.size() > 0) {
                    for (String str : strList) {
                        DfHttpKeywordLog bean = (DfHttpKeywordLog) JsonMapper.fromJsonString(str,
                                DfHttpKeywordLog.class);
                        list.add(bean);
                    }
                    httpResMultiPage.setList(list);

                } else {
                    httpResMultiPage.setList(new ArrayList());
                }
            } else {
                // Cache miss: run the Hive query directly.
                httpResMultiPage = new Page();
                String orderBy = "";
                if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
                    orderBy = page.getOrderBySql(DfHttpKeywordLog.class.getSimpleName(), page.getOrderBy());
                } else {
                    // Default sort column when the caller supplies no ordering.
                    orderBy = "found_Time";
                }
                ResultSet rs = HiveSqlService.getResultSet(page, httpResMultiLog, "DF_HTTP_KEYWORD_LOG",
                        getCol2Col(), orderBy, searchActiveSys);
                // Maps the raw result set to beans; "obj" holds the row list.
                Map tableMapping = HiveJDBC.tableMapping(page, key, rs, DfHttpKeywordLog.class,
                        "foundTime", "recvTime");
                if (tableMapping == null) {
                    httpResMultiPage.setList(new ArrayList());
                } else {
                    // Truncate the mapped rows to at most one page.
                    List strList = new ArrayList();
                    if (tableMapping.get("obj").size() > page.getPageSize()) {
                        strList = tableMapping.get("obj").subList(0, page.getPageSize());
                    } else {
                        strList = tableMapping.get("obj").subList(0, tableMapping.get("obj").size());
                    }
                    if (strList.size() > 0) {
                        // Round-trip through JSON to convert mapped rows into typed beans.
                        String jsonString = JsonMapper.toJsonString(strList);
                        List List = (java.util.List) JsonMapper
                                .fromJsonList(jsonString, DfHttpKeywordLog.class);
                        httpResMultiPage.setList(List);
                    } else {
                        httpResMultiPage.setList(new ArrayList());
                    }
                }
            }

            // Total-row count: served from the Redis count cache when possible,
            // otherwise recomputed against Hive.
            if (Constants.IS_OPEN_REDIS && Constants.IS_GET_HIVECOUNT) {
                countKey = dfLogService.getJedisKey(request, true) + "&pageCount" + whichHive;
                boolean countKeyExists = redisDao.exists(countKey);
                if (countKeyExists) {
                    String count = redisDao.getString(countKey);
                    httpResMultiPage.setCount(Long.valueOf(count));
                } else {
                    Long hivePageCount = HiveSqlService.getHivePageCount(httpResMultiLog, countKey,
                            "DF_HTTP_KEYWORD_LOG", getCol2Col(), searchActiveSys);
                    httpResMultiPage.setCount(Long.valueOf(hivePageCount));
                }
            } else {
                Long hivePageCount = HiveSqlService.getHivePageCount(httpResMultiLog, countKey,
                        "DF_HTTP_KEYWORD_LOG", getCol2Col(), searchActiveSys);
                httpResMultiPage.setCount(Long.valueOf(hivePageCount));
            }
            httpResMultiPage.setPageNo(page.getPageNo());
            httpResMultiPage.setPageSize(page.getPageSize());
        } else {// query from Oracle
            if (Constants.IS_OPEN_REDIS) {
                // Cache key derived from the query conditions.
                key = dfLogService.getJedisKey(request, false);
                keyExists = redisDao.exists(key);
            }
            // Cache hit: deserialize the whole page from Redis.
            if (keyExists) {
                httpResMultiPage = (Page) JsonMapper.fromJsonString(redisDao.getString(key),
                        Page.class);
            } else {
                // Cache miss: query Oracle and repopulate Redis asynchronously.
                httpResMultiPage = dfLogService.findHttpMultiPartPage(
                        new Page(request, response, DfHttpKeywordLog.class), httpResMultiLog,
                        searchActiveSys);

                if (Constants.IS_OPEN_REDIS)
                    new SaveRedisThread(key, httpResMultiPage, Constants.ORACLE_EXPIRE).start();
            }
        }
    } catch (Exception e) {
        thread.setExceptionInfo(e.getMessage() + " " + e.getCause());
        e.printStackTrace();
        logger1.error(e);
        // NOTE(review): wrapping discards the original exception as cause.
        if (!(e instanceof RestServiceException)) {
            e = new RestServiceException(thread, System.currentTimeMillis() - start, "Http协议响应多域封堵日志检索失败");
        }
        ((RestServiceException) e).setActiveSys(searchActiveSys);
        ((RestServiceException) e).setLogSource(logSource);
        throw ((RestServiceException) e);
    }

    return serviceResponse(thread, System.currentTimeMillis() - start, request, response, "Http协议响应多域封堵日志检索成功",
            httpResMultiPage, searchActiveSys, logSource);
}
-
- @RequestMapping(value = "/dfMailLogs", method = RequestMethod.GET)
- @ApiOperation(value = "MAIL封堵日志获取", httpMethod = "GET", notes = "get log list")
- public Map dfMailLogList(
- @RequestParam(value = "searchActiveSys", required = false, defaultValue = Constants.ACTIVESYS_B) String searchActiveSys,
- Page page, DfMailLog mailLog, HttpServletRequest request, HttpServletResponse response, Model model) {
- String whichHive = "&HIVEB";
- if (!Constants.ACTIVESYS_A.equals(searchActiveSys) && !Constants.ACTIVESYS_C.equals(searchActiveSys)) {
- searchActiveSys = Constants.ACTIVESYS_B;
- whichHive = "&HIVEB";
- } else {
- whichHive = "&HIVEA";
- }
-
- long start = System.currentTimeMillis();
- SaveRequestLogThread thread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
- null);
- boolean keyExists = false;
- String key = "";
- String countKey = "";
- Page mailLogPage = new Page();
-
- try {
- resetTime(mailLog);
- // 请求参数校验
- dfLogService.queryConditionCheck(thread, start, mailLog, DfMailLog.class, page);
- if (Constants.ONLY_SEL_FROM_HIVE || (HiveSqlService.ifTimeGreaterThan48(mailLog.getSearchFoundStartTime(),
- mailLog.getSearchFoundEndTime()) && Constants.SEL_FROM_HIVE)) {// 从数据库中查询数据
- logSource = "1";
- // 神通数据库查询打开 && (( A版&&查询开始时间>神通数据库数据A最早时间)||(
- // B版&&查询开始时间>神通数据库数据B最早时间))
- if (Constants.IS_SELECT_CLUSTER && ((Constants.ACTIVESYS_A.equals(searchActiveSys)
- && (mailLog.getSearchFoundStartTimeCluster()*1000) > Constants.CLUSTER_A_START_TIME)
- || (Constants.ACTIVESYS_B.equals(searchActiveSys)
- && (mailLog.getSearchFoundStartTimeCluster()*1000) > Constants.CLUSTER_B_START_TIME))) {
- // 打开神通数据库
- logger.info("开启神通数据库---" + System.currentTimeMillis());
- CustomerContextHolder.setCustomerType(CustomerContextHolder.DATA_SOURCE_H);// 开启数据源F
- // 拼凑rediskeyA、B版
- if (Constants.ACTIVESYS_A.equals(searchActiveSys)) {
- whichHive = "&CLUSTERA";
- // A版表名动态设置
- mailLog.setTableName(Configurations.getStringProperty(DfMailLog.class.getSimpleName() + "A",
- "t_xa_df_ip_port_log_hit_mpp").trim());
- } else if (Constants.ACTIVESYS_B.equals(searchActiveSys)) {
- whichHive = "&CLUSTERB";
- // B版表名动态设置
- mailLog.setTableName(Configurations
- .getStringProperty(DfMailLog.class.getSimpleName() + "B", "t_xa_df_mail_log_mpp")
- .trim());
- }
-
- if (Constants.IS_OPEN_REDIS) {
- // 根据查询条件获取key
- key = dfLogService.getJedisKey(request, false);
- // 加上AB版的条件
- key = key + whichHive;
- // 判断key是否存在
- keyExists = redisDao.exists(key);
- }
- // 存在则直接从redis中查询
- if (keyExists) {
- long startTime = System.currentTimeMillis();
- logger1.info("Redis查询开始:" + startTime);
-
- mailLogPage = (Page) JsonMapper.fromJsonString(redisDao.getString(key),
- Page.class);
-
- long endTime = System.currentTimeMillis();
- logger1.info(mailLogPage.getList().size());
- } else {
- // 不存在则查询数据库并保存查询结果到redis中
- long startTime = System.currentTimeMillis();
- mailLogPage = dfLogService.findMailPageCluster(
- new Page(request, response, DfMailLog.class), mailLog,
- searchActiveSys);
- long endTime = System.currentTimeMillis();
- if (Constants.IS_OPEN_REDIS) {
- new SaveRedisThread(key, mailLogPage, Constants.ORACLE_EXPIRE).start();
- }
- }
-
- } else {
- int startNum = (page.getPageNo() - 1) * page.getPageSize();
- int endNum = startNum + page.getPageSize() - 1;
- if (Constants.IS_OPEN_REDIS && Constants.DATACENTER_OPEN_REDIS) {
- // 根据查询条件获取key
- key = dfLogService.getJedisKey(request, true) + whichHive;
- // 判断key是否存在
- keyExists = redisDao.exists(key);
- }
- if (keyExists) {
- List list = new ArrayList();
- mailLogPage = new Page();
- List strList = redisDao.getList(key, startNum, endNum);
- if (null != strList && strList.size() > 0) {
- for (String str : strList) {
- DfMailLog bean = (DfMailLog) JsonMapper.fromJsonString(str, DfMailLog.class);
- list.add(bean);
- }
- mailLogPage.setList(list);
-
- } else {
- mailLogPage.setList(new ArrayList());
- }
- } else {
-
- mailLogPage = new Page();
- String orderBy = "";
- if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
- orderBy = page.getOrderBySql(DfMailLog.class.getSimpleName(), page.getOrderBy());
- } else {
- orderBy = "found_Time";
- }
- ResultSet rs = HiveSqlService.getResultSet(page, mailLog, "DF_MAIL_LOG", getCol2Col(), orderBy,
- searchActiveSys);
- Map tableMapping = HiveJDBC.tableMapping(page, key, rs, DfMailLog.class, "foundTime",
- "recvTime");
- if (tableMapping == null) {
- mailLogPage.setList(new ArrayList());
- } else {
- List strList = new ArrayList();
- if (tableMapping.get("obj").size() > page.getPageSize()) {
- strList = tableMapping.get("obj").subList(0, page.getPageSize());
- } else {
- strList = tableMapping.get("obj").subList(0, tableMapping.get("obj").size());
- }
- if (strList.size() > 0) {
- String jsonString = JsonMapper.toJsonString(strList);
- List List = (java.util.List) JsonMapper.fromJsonList(jsonString,
- DfMailLog.class);
- mailLogPage.setList(List);
- } else {
- mailLogPage.setList(new ArrayList());
- }
- }
- }
- if (Constants.IS_OPEN_REDIS && Constants.IS_GET_HIVECOUNT) {
- countKey = dfLogService.getJedisKey(request, true) + "&pageCount" + whichHive;
- // 判断key是否存在
- boolean countKeyExists = redisDao.exists(countKey);
- if (countKeyExists) {
- String count = redisDao.getString(countKey);
- mailLogPage.setCount(Long.valueOf(count));
- } else {
- Long hivePageCount = HiveSqlService.getHivePageCount(mailLog, countKey, "DF_MAIL_LOG",
- getCol2Col(), searchActiveSys);
- mailLogPage.setCount(Long.valueOf(hivePageCount));
- }
- } else {
- Long hivePageCount = HiveSqlService.getHivePageCount(mailLog, countKey, "DF_MAIL_LOG", getCol2Col(),
- searchActiveSys);
- mailLogPage.setCount(Long.valueOf(hivePageCount));
- }
-
- mailLogPage.setPageNo(page.getPageNo());
- mailLogPage.setPageSize(page.getPageSize());
- }
- } else {// 从oracle中查询数据
- if (Constants.IS_OPEN_REDIS) {
- // 根据查询条件获取key
- key = dfLogService.getJedisKey(request, false);
- // 判断key是否存在
- keyExists = redisDao.exists(key);
- }
- // 存在则直接从redis中查询
- if (keyExists) {
- mailLogPage = (Page) JsonMapper.fromJsonString(redisDao.getString(key), Page.class);
- } else {
- // 不存在则查询数据库并保存查询结果到redis中
- mailLogPage = dfLogService.findMailPage(new Page(request, response, DfMailLog.class),
- mailLog, searchActiveSys);
-
- if (Constants.IS_OPEN_REDIS)
- new SaveRedisThread(key, mailLogPage, Constants.ORACLE_EXPIRE).start();
- }
- }
- } catch (Exception e) {
- thread.setExceptionInfo(e.getMessage() + " " + e.getCause());
- e.printStackTrace();
- logger1.error(e);
- if (!(e instanceof RestServiceException)) {
- e = new RestServiceException(thread, System.currentTimeMillis() - start, "MAIL日志检索失败");
- }
- ((RestServiceException) e).setActiveSys(searchActiveSys);
- ((RestServiceException) e).setLogSource(logSource);
- throw ((RestServiceException) e);
- }
-
- return serviceResponse(thread, System.currentTimeMillis() - start, request, response, "MAIL日志检索成功", mailLogPage,
- searchActiveSys, logSource);
- }
-
- @RequestMapping(value = "/dfDnsLogs", method = RequestMethod.GET)
- @ApiOperation(value = "DNS封堵日志获取", httpMethod = "GET", notes = "get log list")
- public Map dfDnsLogList(
- @RequestParam(value = "searchActiveSys", required = false, defaultValue = Constants.ACTIVESYS_B) String searchActiveSys,
- Page page, DfDnsLog dnsLog, HttpServletRequest request, HttpServletResponse response, Model model) {
- logger1.info("dfDnsLogList接口请求方法开始----" + System.currentTimeMillis());
- String whichHive = "&HIVEB";
- if (!Constants.ACTIVESYS_A.equals(searchActiveSys) && !Constants.ACTIVESYS_C.equals(searchActiveSys)) {
- searchActiveSys = Constants.ACTIVESYS_B;
- whichHive = "&HIVEB";
- } else {
- whichHive = "&HIVEA";
- }
-
- long start = System.currentTimeMillis();
- SaveRequestLogThread thread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
- null);
-
- boolean keyExists = false;
- String key = "";
- String countKey = "";
- Page dnsLogPage = new Page();
-
- try {
- resetTime(dnsLog);
- // 请求参数校验
- dfLogService.queryConditionCheck(thread, start, dnsLog, DfDnsLog.class, page);
- if (Constants.ONLY_SEL_FROM_HIVE || (HiveSqlService.ifTimeGreaterThan48(dnsLog.getSearchFoundStartTime(),
- dnsLog.getSearchFoundEndTime()) && Constants.SEL_FROM_HIVE)) {// 从数据库中查询数据
- logger1.info("hive开始----" + System.currentTimeMillis());
- logSource = "1";
- // 神通数据库查询打开 && (( A版&&查询开始时间>神通数据库数据A最早时间)||(
- // B版&&查询开始时间>神通数据库数据B最早时间))
- if (Constants.IS_SELECT_CLUSTER && ((Constants.ACTIVESYS_A.equals(searchActiveSys)
- && (dnsLog.getSearchFoundStartTimeCluster()*1000) > Constants.CLUSTER_A_START_TIME)
- || (Constants.ACTIVESYS_B.equals(searchActiveSys)
- && (dnsLog.getSearchFoundStartTimeCluster()*1000) > Constants.CLUSTER_B_START_TIME))) {
- // 打开神通数据库
- logger.info("开启神通数据库---" + System.currentTimeMillis());
- CustomerContextHolder.setCustomerType(CustomerContextHolder.DATA_SOURCE_H);// 开启数据源F
- // 拼凑rediskeyA、B版
- if (Constants.ACTIVESYS_A.equals(searchActiveSys)) {
- whichHive = "&CLUSTERA";
- // A版表名动态设置
- dnsLog.setTableName(Configurations.getStringProperty(DfDnsLog.class.getSimpleName() + "A",
- "t_xa_df_ip_port_log_hit_mpp").trim());
- } else if (Constants.ACTIVESYS_B.equals(searchActiveSys)) {
- whichHive = "&CLUSTERB";
- // B版表名动态设置
- dnsLog.setTableName(Configurations
- .getStringProperty(DfDnsLog.class.getSimpleName() + "B", "t_xa_df_dns_log_mpp")
- .trim());
- }
-
- if (Constants.IS_OPEN_REDIS) {
- // 根据查询条件获取key
- key = dfLogService.getJedisKey(request, false);
- // 加上AB版的条件
- key = key + whichHive;
- // 判断key是否存在
- keyExists = redisDao.exists(key);
- }
- // 存在则直接从redis中查询
- if (keyExists) {
- long startTime = System.currentTimeMillis();
- logger1.info("Redis查询开始:" + startTime);
-
- dnsLogPage = (Page) JsonMapper.fromJsonString(redisDao.getString(key),
- Page.class);
-
- long endTime = System.currentTimeMillis();
- logger1.info(dnsLogPage.getList().size());
- } else {
- // 不存在则查询数据库并保存查询结果到redis中
- long startTime = System.currentTimeMillis();
- dnsLogPage = dfLogService.findDnsPageCluster(
- new Page(request, response, DfDnsLog.class), dnsLog,
- searchActiveSys);
- long endTime = System.currentTimeMillis();
- if (Constants.IS_OPEN_REDIS) {
- new SaveRedisThread(key, dnsLogPage, Constants.ORACLE_EXPIRE).start();
- }
- }
-
- } else {
- int startNum = (page.getPageNo() - 1) * page.getPageSize();
- int endNum = startNum + page.getPageSize() - 1;
- if (Constants.IS_OPEN_REDIS && Constants.DATACENTER_OPEN_REDIS) {
- // 根据查询条件获取key
- key = dfLogService.getJedisKey(request, true) + whichHive;
- // 判断key是否存在
- keyExists = redisDao.exists(key);
- }
- if (keyExists) {
- List list = new ArrayList();
- dnsLogPage = new Page();
- List strList = redisDao.getList(key, startNum, endNum);
- if (null != strList && strList.size() > 0) {
- for (String str : strList) {
- DfDnsLog bean = (DfDnsLog) JsonMapper.fromJsonString(str, DfDnsLog.class);
- list.add(bean);
- }
- dnsLogPage.setList(list);
-
- } else {
- dnsLogPage.setList(new ArrayList());
- }
- } else {
-
- dnsLogPage = new Page();
- String orderBy = "";
- if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
- orderBy = page.getOrderBySql(DfDnsLog.class.getSimpleName(), page.getOrderBy());
- } else {
- orderBy = "found_Time";
- }
- ResultSet rs = HiveSqlService.getResultSet(page, dnsLog, "DF_DNS_LOG", getCol2Col(), orderBy,
- searchActiveSys);
- Map tableMapping = HiveJDBC.tableMapping(page, key, rs, DfDnsLog.class, "foundTime",
- "recvTime");
- if (tableMapping == null) {
- dnsLogPage.setList(new ArrayList());
- } else {
- List strList = new ArrayList();
- if (tableMapping.get("obj").size() > page.getPageSize()) {
- strList = tableMapping.get("obj").subList(0, page.getPageSize());
- } else {
- strList = tableMapping.get("obj").subList(0, tableMapping.get("obj").size());
- }
- if (strList.size() > 0) {
- String jsonString = JsonMapper.toJsonString(strList);
- List List = (java.util.List) JsonMapper.fromJsonList(jsonString,
- DfDnsLog.class);
- dnsLogPage.setList(List);
-
- } else {
- dnsLogPage.setList(new ArrayList());
- }
-
- }
- }
-
- if (Constants.IS_OPEN_REDIS && Constants.IS_GET_HIVECOUNT) {
- countKey = dfLogService.getJedisKey(request, true) + "&pageCount" + whichHive;
- // 判断key是否存在
- boolean countKeyExists = redisDao.exists(countKey);
- if (countKeyExists) {
- String count = redisDao.getString(countKey);
- dnsLogPage.setCount(Long.valueOf(count));
- } else {
- Long hivePageCount = HiveSqlService.getHivePageCount(dnsLog, countKey, "DF_DNS_LOG",
- getCol2Col(), searchActiveSys);
- dnsLogPage.setCount(Long.valueOf(hivePageCount));
- }
- } else {
- Long hivePageCount = HiveSqlService.getHivePageCount(dnsLog, countKey, "DF_DNS_LOG", getCol2Col(),
- searchActiveSys);
- dnsLogPage.setCount(Long.valueOf(hivePageCount));
- }
-
- dnsLogPage.setPageNo(page.getPageNo());
- dnsLogPage.setPageSize(page.getPageSize());
- logger1.info("hive结束----" + System.currentTimeMillis());
- }
- } else {// 从oracle中查询数据
- if (Constants.IS_OPEN_REDIS) {
- logger1.info("redis Key校验开始----" + System.currentTimeMillis());
- // 根据查询条件获取key
- key = dfLogService.getJedisKey(request, false);
- // 判断key是否存在
- keyExists = redisDao.exists(key);
- logger1.info("redis Key校验结束----" + System.currentTimeMillis());
- }
- // 存在则直接从redis中查询
- if (keyExists) {
- logger1.info("redis查询开始----" + System.currentTimeMillis());
- dnsLogPage = (Page) JsonMapper.fromJsonString(redisDao.getString(key), Page.class);
- logger1.info("redis查询结束----" + System.currentTimeMillis());
- } else {
- logger1.info("oracle查询开始----" + System.currentTimeMillis());
- // 不存在则查询数据库并保存查询结果到redis中
- dnsLogPage = dfLogService.findDnsPage(new Page(request, response, DfDnsLog.class), dnsLog,
- searchActiveSys);
- logger1.info("oracle查询结束----" + System.currentTimeMillis());
- if (Constants.IS_OPEN_REDIS)
- new SaveRedisThread(key, dnsLogPage, Constants.ORACLE_EXPIRE).start();
- }
- }
- } catch (Exception e) {
- thread.setExceptionInfo(e.getMessage() + " " + e.getCause());
- e.printStackTrace();
- logger1.error(e);
- if (!(e instanceof RestServiceException)) {
- e = new RestServiceException(thread, System.currentTimeMillis() - start, "DNS日志检索失败");
- }
- ((RestServiceException) e).setActiveSys(searchActiveSys);
- ((RestServiceException) e).setLogSource(logSource);
- throw ((RestServiceException) e);
- } finally {
- logger1.info("dfDnsLogList接口请求方法结束----" + System.currentTimeMillis());
- }
- return serviceResponse(thread, System.currentTimeMillis() - start, request, response, "DNS日志检索成功", dnsLogPage,
- searchActiveSys, logSource);
- }
-
- @RequestMapping(value = "/dfFtpLogs", method = RequestMethod.GET)
- @ApiOperation(value = "FTP封堵日志获取", httpMethod = "GET", notes = "get log list")
- public Map dfFtpLogList(
- @RequestParam(value = "searchActiveSys", required = false, defaultValue = Constants.ACTIVESYS_B) String searchActiveSys,
- Page page, DfFtpLog ftpLog, HttpServletRequest request, HttpServletResponse response, Model model) {
- String whichHive = "&HIVEB";
- if (!Constants.ACTIVESYS_A.equals(searchActiveSys) && !Constants.ACTIVESYS_C.equals(searchActiveSys)) {
- searchActiveSys = Constants.ACTIVESYS_B;
- whichHive = "&HIVEB";
- } else {
- whichHive = "&HIVEA";
- }
-
- long start = System.currentTimeMillis();
- SaveRequestLogThread thread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
- null);
- boolean keyExists = false;
- String key = "";
- String countKey = "";
- Page ftpLogPage = new Page();
-
- try {
- resetTime(ftpLog);
- // 请求参数校验
- dfLogService.queryConditionCheck(thread, start, ftpLog, DfFtpLog.class, page);
- if (Constants.ONLY_SEL_FROM_HIVE || (HiveSqlService.ifTimeGreaterThan48(ftpLog.getSearchFoundStartTime(),
- ftpLog.getSearchFoundEndTime()) && Constants.SEL_FROM_HIVE)) {// 从数据库中查询数据
- logSource = "1";
- // if (true) {
- int startNum = (page.getPageNo() - 1) * page.getPageSize();
- int endNum = startNum + page.getPageSize() - 1;
- if (Constants.IS_OPEN_REDIS && Constants.DATACENTER_OPEN_REDIS) {
- // 根据查询条件获取key
- key = dfLogService.getJedisKey(request, true) + whichHive;
- // 判断key是否存在
- keyExists = redisDao.exists(key);
- }
- if (keyExists) {
- List list = new ArrayList();
- ftpLogPage = new Page();
- List strList = redisDao.getList(key, startNum, endNum);
- if (null != strList && strList.size() > 0) {
- for (String str : strList) {
- DfFtpLog bean = (DfFtpLog) JsonMapper.fromJsonString(str, DfFtpLog.class);
- list.add(bean);
- }
-
- ftpLogPage.setList(list);
-
- } else {
- ftpLogPage.setList(new ArrayList());
- }
- } else {
-
- ftpLogPage = new Page();
- String orderBy = "";
- if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
- orderBy = page.getOrderBySql(DfFtpLog.class.getSimpleName(), page.getOrderBy());
- } else {
- orderBy = "found_Time";
- }
- ResultSet rs = HiveSqlService.getResultSet(page, ftpLog, "DF_FTP_LOG", getCol2Col(), orderBy,
- searchActiveSys);
- Map tableMapping = HiveJDBC.tableMapping(page, key, rs, DfFtpLog.class, "foundTime",
- "recvTime");
- if (tableMapping == null) {
- ftpLogPage.setList(new ArrayList());
- } else {
- List strList = new ArrayList();
- if (tableMapping.get("obj").size() > page.getPageSize()) {
- strList = tableMapping.get("obj").subList(0, page.getPageSize());
- } else {
- strList = tableMapping.get("obj").subList(0, tableMapping.get("obj").size());
- }
- if (strList.size() > 0) {
- String jsonString = JsonMapper.toJsonString(strList);
- List List = (java.util.List) JsonMapper.fromJsonList(jsonString,
- DfFtpLog.class);
- ftpLogPage.setList(List);
-
- } else {
- ftpLogPage.setList(new ArrayList());
- }
- }
- }
- if (Constants.IS_OPEN_REDIS && Constants.IS_GET_HIVECOUNT) {
- countKey = dfLogService.getJedisKey(request, true) + "&pageCount" + whichHive;
- // 判断key是否存在
- boolean countKeyExists = redisDao.exists(countKey);
- if (countKeyExists) {
- String count = redisDao.getString(countKey);
- ftpLogPage.setCount(Long.valueOf(count));
- } else {
- Long hivePageCount = HiveSqlService.getHivePageCount(ftpLog, countKey, "DF_FTP_LOG",
- getCol2Col(), searchActiveSys);
- ftpLogPage.setCount(Long.valueOf(hivePageCount));
- }
- } else {
- Long hivePageCount = HiveSqlService.getHivePageCount(ftpLog, countKey, "DF_FTP_LOG", getCol2Col(),
- searchActiveSys);
- ftpLogPage.setCount(Long.valueOf(hivePageCount));
- }
- ftpLogPage.setPageNo(page.getPageNo());
- ftpLogPage.setPageSize(page.getPageSize());
- } else {// 从oracle中查询数据
- if (Constants.IS_OPEN_REDIS) {
- // 根据查询条件获取key
- key = dfLogService.getJedisKey(request, false);
- // 判断key是否存在
- keyExists = redisDao.exists(key);
- }
- // 存在则直接从redis中查询
- if (keyExists) {
- ftpLogPage = (Page) JsonMapper.fromJsonString(redisDao.getString(key), Page.class);
- } else {
- // 不存在则查询数据库并保存查询结果到redis中
- ftpLogPage = dfLogService.findFtpPage(new Page(request, response, DfFtpLog.class), ftpLog,
- searchActiveSys);
-
- if (Constants.IS_OPEN_REDIS)
- new SaveRedisThread(key, ftpLogPage, Constants.ORACLE_EXPIRE).start();
- }
- }
- } catch (Exception e) {
- thread.setExceptionInfo(e.getMessage() + " " + e.getCause());
- e.printStackTrace();
- logger1.error(e);
- if (!(e instanceof RestServiceException)) {
- e = new RestServiceException(thread, System.currentTimeMillis() - start, "FTP日志检索失败");
- }
- ((RestServiceException) e).setActiveSys(searchActiveSys);
- ((RestServiceException) e).setLogSource(logSource);
- throw ((RestServiceException) e);
- }
-
- return serviceResponse(thread, System.currentTimeMillis() - start, request, response, "FTP日志检索成功", ftpLogPage,
- searchActiveSys, logSource);
- }
-
- @RequestMapping(value = "/dfPPTPLogs", method = RequestMethod.GET)
- @ApiOperation(value = "PPTP封堵日志获取", httpMethod = "GET", notes = "get log list")
- public Map dfPptpLogList(
- @RequestParam(value = "searchActiveSys", required = false, defaultValue = Constants.ACTIVESYS_B) String searchActiveSys,
- Page page, DfPptpLog pptpLog, HttpServletRequest request, HttpServletResponse response, Model model) {
- String whichHive = "&HIVEB";
- if (!Constants.ACTIVESYS_A.equals(searchActiveSys) && !Constants.ACTIVESYS_C.equals(searchActiveSys)) {
- searchActiveSys = Constants.ACTIVESYS_B;
- whichHive = "&HIVEB";
- } else {
- whichHive = "&HIVEA";
- }
-
- long start = System.currentTimeMillis();
- SaveRequestLogThread thread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
- null);
- boolean keyExists = false;
- String key = "";
- String countKey = "";
- Page pptpLogPage = new Page();
-
- try {
- resetTime(pptpLog);
- // 请求参数校验
- dfLogService.queryConditionCheck(thread, start, pptpLog, DfPptpLog.class, page);
-
- // 单独对PPTP的EncryptMode字段校验
- if (!StringUtil.isBlank(pptpLog.getSearchEncryptMode())) {
- Integer.parseInt(pptpLog.getSearchEncryptMode());
- }
- if (Constants.ONLY_SEL_FROM_HIVE || (HiveSqlService.ifTimeGreaterThan48(pptpLog.getSearchFoundStartTime(),
- pptpLog.getSearchFoundEndTime()) && Constants.SEL_FROM_HIVE)) {// 从数据库中查询数据
- logSource = "1";
- // 神通数据库查询打开 && (( A版&&查询开始时间>神通数据库数据A最早时间)||(
- // B版&&查询开始时间>神通数据库数据B最早时间))
- if (Constants.IS_SELECT_CLUSTER && ((Constants.ACTIVESYS_A.equals(searchActiveSys)
- && (pptpLog.getSearchFoundStartTimeCluster()*1000) > Constants.CLUSTER_A_START_TIME)
- || (Constants.ACTIVESYS_B.equals(searchActiveSys)
- && (pptpLog.getSearchFoundStartTimeCluster()*1000) > Constants.CLUSTER_B_START_TIME))) {
- // 打开神通数据库
- logger.info("开启神通数据库---" + System.currentTimeMillis());
- CustomerContextHolder.setCustomerType(CustomerContextHolder.DATA_SOURCE_H);// 开启数据源F
- // 拼凑rediskeyA、B版
- if (Constants.ACTIVESYS_A.equals(searchActiveSys)) {
- whichHive = "&CLUSTERA";
- // A版表名动态设置
- pptpLog.setTableName(Configurations.getStringProperty(DfPptpLog.class.getSimpleName() + "A",
- "t_xa_df_ip_port_log_hit_mpp").trim());
- } else if (Constants.ACTIVESYS_B.equals(searchActiveSys)) {
- whichHive = "&CLUSTERB";
- // B版表名动态设置
- pptpLog.setTableName(Configurations
- .getStringProperty(DfPptpLog.class.getSimpleName() + "B", "t_xa_df_pptp_log_mpp")
- .trim());
- }
-
- if (Constants.IS_OPEN_REDIS) {
- // 根据查询条件获取key
- key = dfLogService.getJedisKey(request, false);
- // 加上AB版的条件
- key = key + whichHive;
- // 判断key是否存在
- keyExists = redisDao.exists(key);
- }
- // 存在则直接从redis中查询
- if (keyExists) {
- long startTime = System.currentTimeMillis();
- logger1.info("Redis查询开始:" + startTime);
-
- pptpLogPage = (Page) JsonMapper.fromJsonString(redisDao.getString(key),
- Page.class);
-
- long endTime = System.currentTimeMillis();
- logger1.info(pptpLogPage.getList().size());
- } else {
- // 不存在则查询数据库并保存查询结果到redis中
- long startTime = System.currentTimeMillis();
- pptpLogPage = dfLogService.findPptpPageCluster(
- new Page(request, response, DfPptpLog.class), pptpLog,
- searchActiveSys);
- long endTime = System.currentTimeMillis();
- if (Constants.IS_OPEN_REDIS) {
- new SaveRedisThread(key, pptpLogPage, Constants.ORACLE_EXPIRE).start();
- }
- }
-
- } else {
- int startNum = (page.getPageNo() - 1) * page.getPageSize();
- int endNum = startNum + page.getPageSize() - 1;
- if (Constants.IS_OPEN_REDIS && Constants.DATACENTER_OPEN_REDIS) {
- // 根据查询条件获取key
- key = dfLogService.getJedisKey(request, true) + whichHive;
- // 判断key是否存在
- keyExists = redisDao.exists(key);
- }
- if (keyExists) {
- List list = new ArrayList();
- pptpLogPage = new Page();
- List strList = redisDao.getList(key, startNum, endNum);
- if (null != strList && strList.size() > 0) {
- for (String str : strList) {
- DfPptpLog bean = (DfPptpLog) JsonMapper.fromJsonString(str, DfPptpLog.class);
- list.add(bean);
- }
- pptpLogPage.setList(list);
-
- } else {
- pptpLogPage.setList(new ArrayList());
- }
- } else {
-
- pptpLogPage = new Page();
- String orderBy = "";
- if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
- orderBy = page.getOrderBySql(DfPptpLog.class.getSimpleName(), page.getOrderBy());
- } else {
- orderBy = "found_Time";
- }
- ResultSet rs = HiveSqlService.getResultSet(page, pptpLog, "DF_PPTP_LOG", getCol2Col(), orderBy,
- searchActiveSys);
- Map tableMapping = HiveJDBC.tableMapping(page, key, rs, DfPptpLog.class, "foundTime",
- "recvTime");
- if (tableMapping == null) {
- pptpLogPage.setList(new ArrayList());
- } else {
- List strList = new ArrayList();
- if (tableMapping.get("obj").size() > page.getPageSize()) {
- strList = tableMapping.get("obj").subList(0, page.getPageSize());
- } else {
- strList = tableMapping.get("obj").subList(0, tableMapping.get("obj").size());
- }
- if (strList.size() > 0) {
- String jsonString = JsonMapper.toJsonString(strList);
- List List = (java.util.List) JsonMapper.fromJsonList(jsonString,
- DfPptpLog.class);
- pptpLogPage.setList(List);
-
- } else {
- pptpLogPage.setList(new ArrayList());
- }
- }
- }
- if (Constants.IS_OPEN_REDIS && Constants.IS_GET_HIVECOUNT) {
- countKey = dfLogService.getJedisKey(request, true) + "&pageCount" + whichHive;
- // 判断key是否存在
- boolean countKeyExists = redisDao.exists(countKey);
- if (countKeyExists) {
- String count = redisDao.getString(countKey);
- pptpLogPage.setCount(Long.valueOf(count));
- } else {
- Long hivePageCount = HiveSqlService.getHivePageCount(pptpLog, countKey, "DF_PPTP_LOG",
- getCol2Col(), searchActiveSys);
- pptpLogPage.setCount(Long.valueOf(hivePageCount));
- }
- } else {
- Long hivePageCount = HiveSqlService.getHivePageCount(pptpLog, countKey, "DF_PPTP_LOG", getCol2Col(),
- searchActiveSys);
- pptpLogPage.setCount(Long.valueOf(hivePageCount));
- }
- pptpLogPage.setPageNo(page.getPageNo());
- pptpLogPage.setPageSize(page.getPageSize());
- }
- } else {// 从oracle中查询数据
- if (Constants.IS_OPEN_REDIS) {
- // 根据查询条件获取key
- key = dfLogService.getJedisKey(request, false);
- // 判断key是否存在
- keyExists = redisDao.exists(key);
- }
- // 存在则直接从redis中查询
- if (keyExists) {
- pptpLogPage = (Page) JsonMapper.fromJsonString(redisDao.getString(key), Page.class);
- } else {
- // 不存在则查询数据库并保存查询结果到redis中
- pptpLogPage = dfLogService.findPptpPage(new Page(request, response, DfPptpLog.class),
- pptpLog, searchActiveSys);
-
- if (Constants.IS_OPEN_REDIS)
- new SaveRedisThread(key, pptpLogPage, Constants.ORACLE_EXPIRE).start();
- }
- }
- } catch (NumberFormatException e) {
- thread.setExceptionInfo(e.getMessage() + " " + e.getCause());
- e.printStackTrace();
- logger1.error(e);
- RestServiceException restE = new RestServiceException(thread, System.currentTimeMillis() - start,
- "searchEncryptMode参数格式错误", RestBusinessCode.param_formate_error.getValue());
- restE.setActiveSys(searchActiveSys);
- restE.setLogSource(logSource);
- throw restE;
- } catch (Exception e) {
- thread.setExceptionInfo(e.getMessage() + " " + e.getCause());
- e.printStackTrace();
- logger1.error(e);
- if (!(e instanceof RestServiceException)) {
- e = new RestServiceException(thread, System.currentTimeMillis() - start, "PPTP日志检索失败");
- }
- ((RestServiceException) e).setActiveSys(searchActiveSys);
- ((RestServiceException) e).setLogSource(logSource);
- throw ((RestServiceException) e);
- }
-
- return serviceResponse(thread, System.currentTimeMillis() - start, request, response, "PPTP日志检索成功", pptpLogPage,
- searchActiveSys, logSource);
- }
-
- @RequestMapping(value = "/dfL2tpLogs", method = RequestMethod.GET)
- @ApiOperation(value = "L2TP封堵日志获取", httpMethod = "GET", notes = "get log list")
- public Map dfL2tpLogList(
- @RequestParam(value = "searchActiveSys", required = false, defaultValue = Constants.ACTIVESYS_B) String searchActiveSys,
- Page page, DfL2tpLog l2tpLog, HttpServletRequest request, HttpServletResponse response, Model model) {
- String whichHive = "&HIVEB";
- if (!Constants.ACTIVESYS_A.equals(searchActiveSys) && !Constants.ACTIVESYS_C.equals(searchActiveSys)) {
- searchActiveSys = Constants.ACTIVESYS_B;
- whichHive = "&HIVEB";
- } else {
- whichHive = "&HIVEA";
- }
-
- long start = System.currentTimeMillis();
- SaveRequestLogThread thread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
- null);
- boolean keyExists = false;
- String key = "";
- String countKey = "";
- Page l2tpLogPage = new Page();
-
- try {
- resetTime(l2tpLog);
- // 请求参数校验
- dfLogService.queryConditionCheck(thread, start, l2tpLog, DfL2tpLog.class, page);
-
- // 单独对Ipsec的EncryptMode字段校验
- if (!StringUtil.isBlank(l2tpLog.getSearchEncryptMode())) {
- Integer.parseInt(l2tpLog.getSearchEncryptMode());
- }
- if (Constants.ONLY_SEL_FROM_HIVE || (HiveSqlService.ifTimeGreaterThan48(l2tpLog.getSearchFoundStartTime(),
- l2tpLog.getSearchFoundEndTime()) && Constants.SEL_FROM_HIVE)) {// 从数据库中查询数据
- logSource = "1";
- // 神通数据库查询打开 && (( A版&&查询开始时间>神通数据库数据A最早时间)||(
- // B版&&查询开始时间>神通数据库数据B最早时间))
- if (Constants.IS_SELECT_CLUSTER && ((Constants.ACTIVESYS_A.equals(searchActiveSys)
- && (l2tpLog.getSearchFoundStartTimeCluster()*1000) > Constants.CLUSTER_A_START_TIME)
- || (Constants.ACTIVESYS_B.equals(searchActiveSys)
- && (l2tpLog.getSearchFoundStartTimeCluster()*1000) > Constants.CLUSTER_B_START_TIME))) {
- // 打开神通数据库
- logger.info("开启神通数据库---" + System.currentTimeMillis());
- CustomerContextHolder.setCustomerType(CustomerContextHolder.DATA_SOURCE_H);// 开启数据源F
- // 拼凑rediskeyA、B版
- if (Constants.ACTIVESYS_A.equals(searchActiveSys)) {
- whichHive = "&CLUSTERA";
- // A版表名动态设置
- l2tpLog.setTableName(Configurations.getStringProperty(DfL2tpLog.class.getSimpleName() + "A",
- "t_xa_df_ip_port_log_hit_mpp").trim());
- } else if (Constants.ACTIVESYS_B.equals(searchActiveSys)) {
- whichHive = "&CLUSTERB";
- // B版表名动态设置
- l2tpLog.setTableName(Configurations
- .getStringProperty(DfL2tpLog.class.getSimpleName() + "B", "t_xa_df_l2tp_log_mpp")
- .trim());
- }
-
- if (Constants.IS_OPEN_REDIS) {
- // 根据查询条件获取key
- key = dfLogService.getJedisKey(request, false);
- // 加上AB版的条件
- key = key + whichHive;
- // 判断key是否存在
- keyExists = redisDao.exists(key);
- }
- // 存在则直接从redis中查询
- if (keyExists) {
- long startTime = System.currentTimeMillis();
- logger1.info("Redis查询开始:" + startTime);
-
- l2tpLogPage = (Page) JsonMapper.fromJsonString(redisDao.getString(key),
- Page.class);
-
- long endTime = System.currentTimeMillis();
- logger1.info(l2tpLogPage.getList().size());
- } else {
- // 不存在则查询数据库并保存查询结果到redis中
- long startTime = System.currentTimeMillis();
- l2tpLogPage = dfLogService.findL2tpPageCluster(
- new Page(request, response, DfL2tpLog.class), l2tpLog,
- searchActiveSys);
- long endTime = System.currentTimeMillis();
- if (Constants.IS_OPEN_REDIS) {
- new SaveRedisThread(key, l2tpLogPage, Constants.ORACLE_EXPIRE).start();
- }
- }
-
- } else {
- int startNum = (page.getPageNo() - 1) * page.getPageSize();
- int endNum = startNum + page.getPageSize() - 1;
- if (Constants.IS_OPEN_REDIS && Constants.DATACENTER_OPEN_REDIS) {
- // 根据查询条件获取key
- key = dfLogService.getJedisKey(request, true) + whichHive;
- // 判断key是否存在
- keyExists = redisDao.exists(key);
- }
- if (keyExists) {
- List list = new ArrayList();
- l2tpLogPage = new Page();
- List strList = redisDao.getList(key, startNum, endNum);
- if (null != strList && strList.size() > 0) {
- for (String str : strList) {
- DfL2tpLog bean = (DfL2tpLog) JsonMapper.fromJsonString(str, DfL2tpLog.class);
- list.add(bean);
- }
-
- l2tpLogPage.setList(list);
-
- } else {
- l2tpLogPage.setList(new ArrayList());
- }
- } else {
-
- l2tpLogPage = new Page();
- String orderBy = "";
- if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
- orderBy = page.getOrderBySql(DfL2tpLog.class.getSimpleName(), page.getOrderBy());
- } else {
- orderBy = "found_Time";
- }
- ResultSet rs = HiveSqlService.getResultSet(page, l2tpLog, "DF_L2TP_LOG", getCol2Col(), orderBy,
- searchActiveSys);
- Map tableMapping = HiveJDBC.tableMapping(page, key, rs, DfL2tpLog.class, "foundTime",
- "recvTime");
- if (tableMapping == null) {
- l2tpLogPage.setList(new ArrayList());
- } else {
- List strList = new ArrayList();
- if (tableMapping.get("obj").size() > page.getPageSize()) {
- strList = tableMapping.get("obj").subList(0, page.getPageSize());
- } else {
- strList = tableMapping.get("obj").subList(0, tableMapping.get("obj").size());
- }
- if (strList.size() > 0) {
- String jsonString = JsonMapper.toJsonString(strList);
- List List = (java.util.List) JsonMapper.fromJsonList(jsonString,
- DfL2tpLog.class);
- l2tpLogPage.setList(List);
- } else {
- l2tpLogPage.setList(new ArrayList());
- }
- }
- }
- if (Constants.IS_OPEN_REDIS && Constants.IS_GET_HIVECOUNT) {
- countKey = dfLogService.getJedisKey(request, true) + "&pageCount" + whichHive;
- // 判断key是否存在
- boolean countKeyExists = redisDao.exists(countKey);
- if (countKeyExists) {
- String count = redisDao.getString(countKey);
- l2tpLogPage.setCount(Long.valueOf(count));
- } else {
- Long hivePageCount = HiveSqlService.getHivePageCount(l2tpLog, countKey, "DF_L2TP_LOG",
- getCol2Col(), searchActiveSys);
- l2tpLogPage.setCount(Long.valueOf(hivePageCount));
- }
- } else {
- Long hivePageCount = HiveSqlService.getHivePageCount(l2tpLog, countKey, "DF_L2TP_LOG", getCol2Col(),
- searchActiveSys);
- l2tpLogPage.setCount(Long.valueOf(hivePageCount));
- }
- l2tpLogPage.setPageNo(page.getPageNo());
- l2tpLogPage.setPageSize(page.getPageSize());
- }
- } else {// 从oracle中查询数据
- if (Constants.IS_OPEN_REDIS) {
- // 根据查询条件获取key
- key = dfLogService.getJedisKey(request, false);
- // 判断key是否存在
- keyExists = redisDao.exists(key);
- }
- // 存在则直接从redis中查询
- if (keyExists) {
- l2tpLogPage = (Page) JsonMapper.fromJsonString(redisDao.getString(key), Page.class);
- } else {
- // 不存在则查询数据库并保存查询结果到redis中
- l2tpLogPage = dfLogService.findL2tpPage(new Page(request, response, DfL2tpLog.class),
- l2tpLog, searchActiveSys);
-
- if (Constants.IS_OPEN_REDIS)
- new SaveRedisThread(key, l2tpLogPage, Constants.ORACLE_EXPIRE).start();
- }
- }
- } catch (NumberFormatException e) {
- thread.setExceptionInfo(e.getMessage() + " " + e.getCause());
- e.printStackTrace();
- logger1.error(e);
- RestServiceException restE = new RestServiceException(thread, System.currentTimeMillis() - start,
- "EncryptMode参数格式错误", RestBusinessCode.param_formate_error.getValue());
- restE.setActiveSys(searchActiveSys);
- restE.setLogSource(logSource);
- throw restE;
- } catch (Exception e) {
- thread.setExceptionInfo(e.getMessage() + " " + e.getCause());
- e.printStackTrace();
- logger1.error(e);
- if (!(e instanceof RestServiceException)) {
- e = new RestServiceException(thread, System.currentTimeMillis() - start, "L2TP日志检索失败");
- }
- ((RestServiceException) e).setActiveSys(searchActiveSys);
- ((RestServiceException) e).setLogSource(logSource);
- throw ((RestServiceException) e);
- }
-
- return serviceResponse(thread, System.currentTimeMillis() - start, request, response, "L2TP日志检索成功", l2tpLogPage,
- searchActiveSys, logSource);
- }
-
- @RequestMapping(value = "/dfIpsecLogs", method = RequestMethod.GET)
- @ApiOperation(value = "IPSEC封堵日志获取", httpMethod = "GET", notes = "get log list")
- public Map dfIpsecLogList(
- @RequestParam(value = "searchActiveSys", required = false, defaultValue = Constants.ACTIVESYS_B) String searchActiveSys,
- Page page, DfIpsecLog ipsecLog, HttpServletRequest request, HttpServletResponse response, Model model) {
- String whichHive = "&HIVEB";
- if (!Constants.ACTIVESYS_A.equals(searchActiveSys) && !Constants.ACTIVESYS_C.equals(searchActiveSys)) {
- searchActiveSys = Constants.ACTIVESYS_B;
- whichHive = "&HIVEB";
- } else {
- whichHive = "&HIVEA";
- }
-
- long start = System.currentTimeMillis();
- SaveRequestLogThread thread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
- null);
- boolean keyExists = false;
- String key = "";
- String countKey = "";
- Page ipsecLogPage = new Page();
-
- try {
- resetTime(ipsecLog);
- // 请求参数校验
- dfLogService.queryConditionCheck(thread, start, ipsecLog, DfIpsecLog.class, page);
-
- // 单独对Ipsec的exProtocol字段校验
- if (!StringUtil.isBlank(ipsecLog.getSearchExProtocol())) {
- Integer.parseInt(ipsecLog.getSearchExProtocol());
- }
- if (Constants.ONLY_SEL_FROM_HIVE || (HiveSqlService.ifTimeGreaterThan48(ipsecLog.getSearchFoundStartTime(),
- ipsecLog.getSearchFoundEndTime()) && Constants.SEL_FROM_HIVE)) {// 从数据库中查询数据
- logSource = "1";
- // 神通数据库查询打开 && (( A版&&查询开始时间>神通数据库数据A最早时间)||(
- // B版&&查询开始时间>神通数据库数据B最早时间))
- if (Constants.IS_SELECT_CLUSTER && ((Constants.ACTIVESYS_A.equals(searchActiveSys)
- && (ipsecLog.getSearchFoundStartTimeCluster()*1000) > Constants.CLUSTER_A_START_TIME)
- || (Constants.ACTIVESYS_B.equals(searchActiveSys)
- && (ipsecLog.getSearchFoundStartTimeCluster()*1000) > Constants.CLUSTER_B_START_TIME))) {
- // 打开神通数据库
- logger.info("开启神通数据库---" + System.currentTimeMillis());
- CustomerContextHolder.setCustomerType(CustomerContextHolder.DATA_SOURCE_H);// 开启数据源F
- // 拼凑rediskeyA、B版
- if (Constants.ACTIVESYS_A.equals(searchActiveSys)) {
- whichHive = "&CLUSTERA";
- // A版表名动态设置
- ipsecLog.setTableName(Configurations.getStringProperty(DfIpsecLog.class.getSimpleName() + "A",
- "t_xa_df_ip_port_log_hit_mpp").trim());
- } else if (Constants.ACTIVESYS_B.equals(searchActiveSys)) {
- whichHive = "&CLUSTERB";
- // B版表名动态设置
- ipsecLog.setTableName(Configurations
- .getStringProperty(DfIpsecLog.class.getSimpleName() + "B", "t_xa_df_ipsec_log_mpp")
- .trim());
- }
-
- if (Constants.IS_OPEN_REDIS) {
- // 根据查询条件获取key
- key = dfLogService.getJedisKey(request, false);
- // 加上AB版的条件
- key = key + whichHive;
- // 判断key是否存在
- keyExists = redisDao.exists(key);
- }
- // 存在则直接从redis中查询
- if (keyExists) {
- long startTime = System.currentTimeMillis();
- logger1.info("Redis查询开始:" + startTime);
-
- ipsecLogPage = (Page) JsonMapper.fromJsonString(redisDao.getString(key),
- Page.class);
-
- long endTime = System.currentTimeMillis();
- logger1.info(ipsecLogPage.getList().size());
- } else {
- // 不存在则查询数据库并保存查询结果到redis中
- long startTime = System.currentTimeMillis();
- ipsecLogPage = dfLogService.findIpsecPageCluster(
- new Page(request, response, DfIpsecLog.class), ipsecLog,
- searchActiveSys);
- long endTime = System.currentTimeMillis();
- if (Constants.IS_OPEN_REDIS) {
- new SaveRedisThread(key, ipsecLogPage, Constants.ORACLE_EXPIRE).start();
- }
- }
-
- } else {
- int startNum = (page.getPageNo() - 1) * page.getPageSize();
- int endNum = startNum + page.getPageSize() - 1;
- if (Constants.IS_OPEN_REDIS && Constants.DATACENTER_OPEN_REDIS) {
- // 根据查询条件获取key
- key = dfLogService.getJedisKey(request, true) + whichHive;
- // 判断key是否存在
- keyExists = redisDao.exists(key);
- }
- if (keyExists) {
- List list = new ArrayList();
- ipsecLogPage = new Page();
- List strList = redisDao.getList(key, startNum, endNum);
- if (null != strList && strList.size() > 0) {
- for (String str : strList) {
- DfIpsecLog bean = (DfIpsecLog) JsonMapper.fromJsonString(str, DfIpsecLog.class);
- list.add(bean);
- }
-
- ipsecLogPage.setList(list);
-
- } else {
- ipsecLogPage.setList(new ArrayList());
- }
- } else {
-
- ipsecLogPage = new Page();
- String orderBy = "";
- if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
- orderBy = page.getOrderBySql(DfIpsecLog.class.getSimpleName(), page.getOrderBy());
- } else {
- orderBy = "found_Time";
- }
- ResultSet rs = HiveSqlService.getResultSet(page, ipsecLog, "DF_IPSEC_LOG", getCol2Col(), orderBy,
- searchActiveSys);
- Map tableMapping = HiveJDBC.tableMapping(page, key, rs, DfIpsecLog.class, "foundTime",
- "recvTime");
- if (tableMapping == null) {
- ipsecLogPage.setList(new ArrayList());
- } else {
- List strList = new ArrayList();
- if (tableMapping.get("obj").size() > page.getPageSize()) {
- strList = tableMapping.get("obj").subList(0, page.getPageSize());
- } else {
- strList = tableMapping.get("obj").subList(0, tableMapping.get("obj").size());
- }
- if (strList.size() > 0) {
- String jsonString = JsonMapper.toJsonString(strList);
- List List = (java.util.List) JsonMapper.fromJsonList(jsonString,
- DfIpsecLog.class);
- ipsecLogPage.setList(List);
-
- } else {
- ipsecLogPage.setList(new ArrayList());
- }
- }
- }
- if (Constants.IS_OPEN_REDIS && Constants.IS_GET_HIVECOUNT) {
- countKey = dfLogService.getJedisKey(request, true) + "&pageCount" + whichHive;
- // 判断key是否存在
- boolean countKeyExists = redisDao.exists(countKey);
- if (countKeyExists) {
- String count = redisDao.getString(countKey);
- ipsecLogPage.setCount(Long.valueOf(count));
- } else {
- Long hivePageCount = HiveSqlService.getHivePageCount(ipsecLog, countKey, "DF_IPSEC_LOG",
- getCol2Col(), searchActiveSys);
- ipsecLogPage.setCount(Long.valueOf(hivePageCount));
- }
- } else {
- Long hivePageCount = HiveSqlService.getHivePageCount(ipsecLog, countKey, "DF_IPSEC_LOG",
- getCol2Col(), searchActiveSys);
- ipsecLogPage.setCount(Long.valueOf(hivePageCount));
- }
- ipsecLogPage.setPageNo(page.getPageNo());
- ipsecLogPage.setPageSize(page.getPageSize());
- }
- } else {// 从oracle中查询数据
- if (Constants.IS_OPEN_REDIS) {
- // 根据查询条件获取key
- key = dfLogService.getJedisKey(request, false);
- // 判断key是否存在
- keyExists = redisDao.exists(key);
- }
- // 存在则直接从redis中查询
- if (keyExists) {
- ipsecLogPage = (Page) JsonMapper.fromJsonString(redisDao.getString(key), Page.class);
- } else {
- // 不存在则查询数据库并保存查询结果到redis中
- ipsecLogPage = dfLogService.findIpsecPage(new Page(request, response, DfIpsecLog.class),
- ipsecLog, searchActiveSys);
-
- if (Constants.IS_OPEN_REDIS)
- new SaveRedisThread(key, ipsecLogPage, Constants.ORACLE_EXPIRE).start();
- }
- }
- } catch (NumberFormatException e) {
- thread.setExceptionInfo(e.getMessage() + " " + e.getCause());
- e.printStackTrace();
- logger1.error(e);
- RestServiceException restE = new RestServiceException(thread, System.currentTimeMillis() - start,
- "exProtocol参数格式错误", RestBusinessCode.param_formate_error.getValue());
- restE.setActiveSys(searchActiveSys);
- restE.setLogSource(logSource);
- throw restE;
- } catch (Exception e) {
- thread.setExceptionInfo(e.getMessage() + " " + e.getCause());
- e.printStackTrace();
- logger1.error(e);
- if (!(e instanceof RestServiceException)) {
- e = new RestServiceException(thread, System.currentTimeMillis() - start, "IPSEC日志检索失败");
- }
- ((RestServiceException) e).setActiveSys(searchActiveSys);
- ((RestServiceException) e).setLogSource(logSource);
- throw ((RestServiceException) e);
- }
-
- return serviceResponse(thread, System.currentTimeMillis() - start, request, response, "IPSEC日志检索成功",
- ipsecLogPage, searchActiveSys, logSource);
- }
-
- @RequestMapping(value = "/dfOpenVpnLogs", method = RequestMethod.GET)
- @ApiOperation(value = "OPENVPN封堵日志获取", httpMethod = "GET", notes = "get log list")
- public Map dfOpenvpnLogList(
- @RequestParam(value = "searchActiveSys", required = false, defaultValue = Constants.ACTIVESYS_B) String searchActiveSys,
- Page page, DfOpenvpnLog openvpnLog, HttpServletRequest request, HttpServletResponse response, Model model) {
- String whichHive = "&HIVEB";
- if (!Constants.ACTIVESYS_A.equals(searchActiveSys) && !Constants.ACTIVESYS_C.equals(searchActiveSys)) {
- searchActiveSys = Constants.ACTIVESYS_B;
- whichHive = "&HIVEB";
- } else {
- whichHive = "&HIVEA";
- }
-
- long start = System.currentTimeMillis();
- SaveRequestLogThread thread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
- null);
- boolean keyExists = false;
- String key = "";
- String countKey = "";
- Page openvpnLogPage = new Page();
-
- try {
- resetTime(openvpnLog);
- // 请求参数校验
- dfLogService.queryConditionCheck(thread, start, openvpnLog, DfOpenvpnLog.class, page);
- if (Constants.ONLY_SEL_FROM_HIVE
- || (HiveSqlService.ifTimeGreaterThan48(openvpnLog.getSearchFoundStartTime(),
- openvpnLog.getSearchFoundEndTime()) && Constants.SEL_FROM_HIVE)) {// 从数据库中查询数据
- // 神通数据库查询打开 && (( A版&&查询开始时间>神通数据库数据A最早时间)||(
- // B版&&查询开始时间>神通数据库数据B最早时间))
- if (Constants.IS_SELECT_CLUSTER && ((Constants.ACTIVESYS_A.equals(searchActiveSys)
- && (openvpnLog.getSearchFoundStartTimeCluster()*1000) > Constants.CLUSTER_A_START_TIME)
- || (Constants.ACTIVESYS_B.equals(searchActiveSys)
- && (openvpnLog.getSearchFoundStartTimeCluster()*1000) > Constants.CLUSTER_B_START_TIME))) {
- // 打开神通数据库
- logger.info("开启神通数据库---" + System.currentTimeMillis());
- CustomerContextHolder.setCustomerType(CustomerContextHolder.DATA_SOURCE_H);// 开启数据源F
- // 拼凑rediskeyA、B版
- if (Constants.ACTIVESYS_A.equals(searchActiveSys)) {
- whichHive = "&CLUSTERA";
- // A版表名动态设置
- openvpnLog.setTableName(Configurations.getStringProperty(DfOpenvpnLog.class.getSimpleName() + "A",
- "t_xa_df_ip_port_log_hit_mpp").trim());
- } else if (Constants.ACTIVESYS_B.equals(searchActiveSys)) {
- whichHive = "&CLUSTERB";
- // B版表名动态设置
- openvpnLog.setTableName(Configurations
- .getStringProperty(DfOpenvpnLog.class.getSimpleName() + "B", "t_xa_df_openvpn_log_mpp")
- .trim());
- }
-
- if (Constants.IS_OPEN_REDIS) {
- // 根据查询条件获取key
- key = dfLogService.getJedisKey(request, false);
- // 加上AB版的条件
- key = key + whichHive;
- // 判断key是否存在
- keyExists = redisDao.exists(key);
- }
- // 存在则直接从redis中查询
- if (keyExists) {
- long startTime = System.currentTimeMillis();
- logger1.info("Redis查询开始:" + startTime);
-
- openvpnLogPage = (Page) JsonMapper.fromJsonString(redisDao.getString(key),
- Page.class);
-
- long endTime = System.currentTimeMillis();
- logger1.info(openvpnLogPage.getList().size());
- } else {
- // 不存在则查询数据库并保存查询结果到redis中
- long startTime = System.currentTimeMillis();
- openvpnLogPage = dfLogService.findOpenvpnPageCluster(
- new Page(request, response, DfOpenvpnLog.class), openvpnLog,
- searchActiveSys);
- long endTime = System.currentTimeMillis();
- if (Constants.IS_OPEN_REDIS) {
- new SaveRedisThread(key, openvpnLogPage, Constants.ORACLE_EXPIRE).start();
- }
- }
-
- } else {
- int startNum = (page.getPageNo() - 1) * page.getPageSize();
- int endNum = startNum + page.getPageSize() - 1;
- if (Constants.IS_OPEN_REDIS && Constants.DATACENTER_OPEN_REDIS) {
- // 根据查询条件获取key
- key = dfLogService.getJedisKey(request, true) + whichHive;
- // 判断key是否存在
- keyExists = redisDao.exists(key);
- }
- if (keyExists) {
- List list = new ArrayList();
- openvpnLogPage = new Page();
- List strList = redisDao.getList(key, startNum, endNum);
- if (null != strList && strList.size() > 0) {
- for (String str : strList) {
- DfOpenvpnLog bean = (DfOpenvpnLog) JsonMapper.fromJsonString(str, DfOpenvpnLog.class);
- list.add(bean);
- }
-
- openvpnLogPage.setList(list);
-
- } else {
- openvpnLogPage.setList(new ArrayList());
- }
- } else {
-
- openvpnLogPage = new Page();
- String orderBy = "";
- if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
- orderBy = page.getOrderBySql(DfOpenvpnLog.class.getSimpleName(), page.getOrderBy());
- } else {
- orderBy = "found_Time";
- }
- ResultSet rs = HiveSqlService.getResultSet(page, openvpnLog, "DF_OPENVPN_LOG", getCol2Col(),
- orderBy, searchActiveSys);
- Map tableMapping = HiveJDBC.tableMapping(page, key, rs, DfOpenvpnLog.class,
- "foundTime", "recvTime");
- if (tableMapping == null) {
- openvpnLogPage.setList(new ArrayList());
- } else {
- List strList = new ArrayList();
- if (tableMapping.get("obj").size() > page.getPageSize()) {
- strList = tableMapping.get("obj").subList(0, page.getPageSize());
- } else {
- strList = tableMapping.get("obj").subList(0, tableMapping.get("obj").size());
- }
- if (strList.size() > 0) {
- String jsonString = JsonMapper.toJsonString(strList);
- List List = (java.util.List) JsonMapper.fromJsonList(jsonString,
- DfOpenvpnLog.class);
- openvpnLogPage.setList(List);
-
- } else {
-
- openvpnLogPage.setList(new ArrayList());
- }
- }
- }
- if (Constants.IS_OPEN_REDIS && Constants.IS_GET_HIVECOUNT) {
- countKey = dfLogService.getJedisKey(request, true) + "&pageCount" + whichHive;
- // 判断key是否存在
- boolean countKeyExists = redisDao.exists(countKey);
- if (countKeyExists) {
- String count = redisDao.getString(countKey);
- openvpnLogPage.setCount(Long.valueOf(count));
- } else {
- Long hivePageCount = HiveSqlService.getHivePageCount(openvpnLog, countKey, "DF_OPENVPN_LOG",
- getCol2Col(), searchActiveSys);
- openvpnLogPage.setCount(Long.valueOf(hivePageCount));
- }
- } else {
- Long hivePageCount = HiveSqlService.getHivePageCount(openvpnLog, countKey, "DF_OPENVPN_LOG",
- getCol2Col(), searchActiveSys);
- openvpnLogPage.setCount(Long.valueOf(hivePageCount));
- }
- openvpnLogPage.setPageNo(page.getPageNo());
- openvpnLogPage.setPageSize(page.getPageSize());
- }
- } else {// 从oracle中查询数据
- if (Constants.IS_OPEN_REDIS) {
- // 根据查询条件获取key
- key = dfLogService.getJedisKey(request, false);
- // 判断key是否存在
- keyExists = redisDao.exists(key);
- }
-
- if (keyExists) {
- openvpnLogPage = (Page) JsonMapper.fromJsonString(redisDao.getString(key),
- Page.class);
- } else {
- // 不存在则查询数据库并保存查询结果到redis中
- openvpnLogPage = dfLogService.findOpenvpnPage(
- new Page(request, response, DfOpenvpnLog.class), openvpnLog, searchActiveSys);
-
- if (Constants.IS_OPEN_REDIS)
- new SaveRedisThread(key, openvpnLogPage, Constants.ORACLE_EXPIRE).start();
- }
- }
-
- } catch (Exception e) {
- thread.setExceptionInfo(e.getMessage() + " " + e.getCause());
- e.printStackTrace();
- logger1.error(e);
- if (!(e instanceof RestServiceException)) {
- e = new RestServiceException(thread, System.currentTimeMillis() - start, "OPENVPN日志检索失败");
- }
- ((RestServiceException) e).setActiveSys(searchActiveSys);
- ((RestServiceException) e).setLogSource(logSource);
- throw ((RestServiceException) e);
- }
-
- return serviceResponse(thread, System.currentTimeMillis() - start, request, response, "OPENVPN日志检索成功",
- openvpnLogPage, searchActiveSys, logSource);
- }
-
- @RequestMapping(value = "/dfSSHLogs", method = RequestMethod.GET)
- @ApiOperation(value = "SSH封堵日志获取", httpMethod = "GET", notes = "get log list")
- public Map dfSshLogList(
- @RequestParam(value = "searchActiveSys", required = false, defaultValue = Constants.ACTIVESYS_B) String searchActiveSys,
- Page page, DfSshLog sshLog, HttpServletRequest request, HttpServletResponse response, Model model) {
- String whichHive = "&HIVEB";
- if (!Constants.ACTIVESYS_A.equals(searchActiveSys) && !Constants.ACTIVESYS_C.equals(searchActiveSys)) {
- searchActiveSys = Constants.ACTIVESYS_B;
- whichHive = "&HIVEB";
- } else {
- whichHive = "&HIVEA";
- }
-
- long start = System.currentTimeMillis();
- SaveRequestLogThread thread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
- null);
- boolean keyExists = false;
- String key = "";
- String countKey = "";
- Page sshLogPage = new Page();
-
- try {
- resetTime(sshLog);
- // 请求参数校验
- dfLogService.queryConditionCheck(thread, start, sshLog, DfSshLog.class, page);
-
- // 单独对SSH的EncryptMode字段校验
- if (!StringUtil.isBlank(sshLog.getSearchEncryptMode())) {
- Long.parseLong(sshLog.getSearchEncryptMode());
- }
- if (Constants.ONLY_SEL_FROM_HIVE || (HiveSqlService.ifTimeGreaterThan48(sshLog.getSearchFoundStartTime(),
- sshLog.getSearchFoundEndTime()) && Constants.SEL_FROM_HIVE)) {// 从数据库中查询数据
- logSource = "1";
- // 神通数据库查询打开 && (( A版&&查询开始时间>神通数据库数据A最早时间)||(
- // B版&&查询开始时间>神通数据库数据B最早时间))
- if (Constants.IS_SELECT_CLUSTER && ((Constants.ACTIVESYS_A.equals(searchActiveSys)
- && (sshLog.getSearchFoundStartTimeCluster()*1000) > Constants.CLUSTER_A_START_TIME)
- || (Constants.ACTIVESYS_B.equals(searchActiveSys)
- && (sshLog.getSearchFoundStartTimeCluster()*1000) > Constants.CLUSTER_B_START_TIME))) {
- // 打开神通数据库
- logger.info("开启神通数据库---" + System.currentTimeMillis());
- CustomerContextHolder.setCustomerType(CustomerContextHolder.DATA_SOURCE_H);// 开启数据源F
- // 拼凑rediskeyA、B版
- if (Constants.ACTIVESYS_A.equals(searchActiveSys)) {
- whichHive = "&CLUSTERA";
- // A版表名动态设置
- sshLog.setTableName(Configurations.getStringProperty(DfSshLog.class.getSimpleName() + "A",
- "t_xa_df_ip_port_log_hit_mpp").trim());
- } else if (Constants.ACTIVESYS_B.equals(searchActiveSys)) {
- whichHive = "&CLUSTERB";
- // B版表名动态设置
- sshLog.setTableName(Configurations
- .getStringProperty(DfSshLog.class.getSimpleName() + "B", "t_xa_df_ssh_log_mpp")
- .trim());
- }
-
- if (Constants.IS_OPEN_REDIS) {
- // 根据查询条件获取key
- key = dfLogService.getJedisKey(request, false);
- // 加上AB版的条件
- key = key + whichHive;
- // 判断key是否存在
- keyExists = redisDao.exists(key);
- }
- // 存在则直接从redis中查询
- if (keyExists) {
- long startTime = System.currentTimeMillis();
- logger1.info("Redis查询开始:" + startTime);
-
- sshLogPage = (Page) JsonMapper.fromJsonString(redisDao.getString(key),
- Page.class);
-
- long endTime = System.currentTimeMillis();
- logger1.info(sshLogPage.getList().size());
- } else {
- // 不存在则查询数据库并保存查询结果到redis中
- long startTime = System.currentTimeMillis();
- sshLogPage = dfLogService.findSshPageCluster(
- new Page(request, response, DfSshLog.class), sshLog,
- searchActiveSys);
- long endTime = System.currentTimeMillis();
- if (Constants.IS_OPEN_REDIS) {
- new SaveRedisThread(key, sshLogPage, Constants.ORACLE_EXPIRE).start();
- }
- }
-
- } else {
- int startNum = (page.getPageNo() - 1) * page.getPageSize();
- int endNum = startNum + page.getPageSize() - 1;
- if (Constants.IS_OPEN_REDIS && Constants.DATACENTER_OPEN_REDIS) {
- // 根据查询条件获取key
- key = dfLogService.getJedisKey(request, true) + whichHive;
- // 判断key是否存在
- keyExists = redisDao.exists(key);
- }
- if (keyExists) {
- List list = new ArrayList();
- sshLogPage = new Page();
- List strList = redisDao.getList(key, startNum, endNum);
- if (null != strList && strList.size() > 0) {
- for (String str : strList) {
- DfSshLog bean = (DfSshLog) JsonMapper.fromJsonString(str, DfSshLog.class);
- list.add(bean);
- }
-
- sshLogPage.setList(list);
-
- } else {
- sshLogPage.setList(new ArrayList());
- }
- } else {
-
- sshLogPage = new Page();
- String orderBy = "";
- if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
- orderBy = page.getOrderBySql(DfSshLog.class.getSimpleName(), page.getOrderBy());
- } else {
- orderBy = "found_Time";
- }
- ResultSet rs = HiveSqlService.getResultSet(page, sshLog, "DF_SSH_LOG", getCol2Col(), orderBy,
- searchActiveSys);
- Map tableMapping = HiveJDBC.tableMapping(page, key, rs, DfSshLog.class, "foundTime",
- "recvTime");
- if (tableMapping == null) {
- sshLogPage.setList(new ArrayList());
- } else {
- List strList = new ArrayList();
- if (tableMapping.get("obj").size() > page.getPageSize()) {
- strList = tableMapping.get("obj").subList(0, page.getPageSize());
- } else {
- strList = tableMapping.get("obj").subList(0, tableMapping.get("obj").size());
- }
- if (strList.size() > 0) {
- String jsonString = JsonMapper.toJsonString(strList);
- List List = (java.util.List) JsonMapper.fromJsonList(jsonString,
- DfSshLog.class);
- sshLogPage.setList(List);
- } else {
- sshLogPage.setList(new ArrayList());
- }
- }
- }
- if (Constants.IS_OPEN_REDIS && Constants.IS_GET_HIVECOUNT) {
- countKey = dfLogService.getJedisKey(request, true) + "&pageCount" + whichHive;
- // 判断key是否存在
- boolean countKeyExists = redisDao.exists(countKey);
- if (countKeyExists) {
- String count = redisDao.getString(countKey);
- sshLogPage.setCount(Long.valueOf(count));
- } else {
- Long hivePageCount = HiveSqlService.getHivePageCount(sshLog, countKey, "DF_SSH_LOG",
- getCol2Col(), searchActiveSys);
- sshLogPage.setCount(Long.valueOf(hivePageCount));
- }
- } else {
- Long hivePageCount = HiveSqlService.getHivePageCount(sshLog, countKey, "DF_SSH_LOG", getCol2Col(),
- searchActiveSys);
- sshLogPage.setCount(Long.valueOf(hivePageCount));
- }
- sshLogPage.setPageNo(page.getPageNo());
- sshLogPage.setPageSize(page.getPageSize());
- }
- } else {// 从oracle中查询数据
- if (Constants.IS_OPEN_REDIS) {
- // 根据查询条件获取key
- key = dfLogService.getJedisKey(request, false);
- // 判断key是否存在
- keyExists = redisDao.exists(key);
- }
- // 存在则直接从redis中查询
- if (keyExists) {
- sshLogPage = (Page) JsonMapper.fromJsonString(redisDao.getString(key), Page.class);
- } else {
- // 不存在则查询数据库并保存查询结果到redis中
- sshLogPage = dfLogService.findSshPage(new Page(request, response, DfSshLog.class), sshLog,
- searchActiveSys);
-
- if (Constants.IS_OPEN_REDIS)
- new SaveRedisThread(key, sshLogPage, Constants.ORACLE_EXPIRE).start();
- }
- }
- } catch (NumberFormatException e) {
- thread.setExceptionInfo(e.getMessage() + " " + e.getCause());
- e.printStackTrace();
- logger1.error(e);
- RestServiceException restE = new RestServiceException(thread, System.currentTimeMillis() - start,
- "searchEncryptMode参数格式错误", RestBusinessCode.param_formate_error.getValue());
- restE.setActiveSys(searchActiveSys);
- restE.setLogSource(logSource);
- throw restE;
- } catch (Exception e) {
- thread.setExceptionInfo(e.getMessage() + " " + e.getCause());
- e.printStackTrace();
- logger1.error(e);
- if (!(e instanceof RestServiceException)) {
- e = new RestServiceException(thread, System.currentTimeMillis() - start, "SSH日志检索失败");
- }
- ((RestServiceException) e).setActiveSys(searchActiveSys);
- ((RestServiceException) e).setLogSource(logSource);
- throw ((RestServiceException) e);
- }
-
- return serviceResponse(thread, System.currentTimeMillis() - start, request, response, "SSH日志检索成功", sshLogPage,
- searchActiveSys, logSource);
- }
-
- @RequestMapping(value = "/dfSSLLogs", method = RequestMethod.GET)
- @ApiOperation(value = "SSL封堵日志获取", httpMethod = "GET", notes = "get log list")
- public Map dfSslLogList(
- @RequestParam(value = "searchActiveSys", required = false, defaultValue = Constants.ACTIVESYS_B) String searchActiveSys,
- Page page, DfSslLog sslLog, HttpServletRequest request, HttpServletResponse response, Model model) {
- String whichHive = "&HIVEB";
- if (!Constants.ACTIVESYS_A.equals(searchActiveSys) && !Constants.ACTIVESYS_C.equals(searchActiveSys)) {
- searchActiveSys = Constants.ACTIVESYS_B;
- whichHive = "&HIVEB";
- } else {
- whichHive = "&HIVEA";
- }
-
- long start = System.currentTimeMillis();
- SaveRequestLogThread thread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
- null);
- boolean keyExists = false;
- String key = "";
- String countKey = "";
- Page sslLogPage = new Page();
- try {
- resetTime(sslLog);
- // 请求参数校验
- dfLogService.queryConditionCheck(thread, start, sslLog, DfSslLog.class, page);
- if (Constants.ONLY_SEL_FROM_HIVE || (HiveSqlService.ifTimeGreaterThan48(sslLog.getSearchFoundStartTime(),
- sslLog.getSearchFoundEndTime()) && Constants.SEL_FROM_HIVE)) {// 从数据库中查询数据
- logSource = "1";
- // 神通数据库查询打开 && (( A版&&查询开始时间>神通数据库数据A最早时间)||(
- // B版&&查询开始时间>神通数据库数据B最早时间))
- if (Constants.IS_SELECT_CLUSTER && ((Constants.ACTIVESYS_A.equals(searchActiveSys)
- && (sslLog.getSearchFoundStartTimeCluster()*1000) > Constants.CLUSTER_A_START_TIME)
- || (Constants.ACTIVESYS_B.equals(searchActiveSys)
- && (sslLog.getSearchFoundStartTimeCluster()*1000) > Constants.CLUSTER_B_START_TIME))) {
- // 打开神通数据库
- logger.info("开启神通数据库---" + System.currentTimeMillis());
- CustomerContextHolder.setCustomerType(CustomerContextHolder.DATA_SOURCE_H);// 开启数据源F
- // 拼凑rediskeyA、B版
- if (Constants.ACTIVESYS_A.equals(searchActiveSys)) {
- whichHive = "&CLUSTERA";
- // A版表名动态设置
- sslLog.setTableName(Configurations.getStringProperty(DfSslLog.class.getSimpleName() + "A",
- "t_xa_df_ip_port_log_hit_mpp").trim());
- } else if (Constants.ACTIVESYS_B.equals(searchActiveSys)) {
- whichHive = "&CLUSTERB";
- // B版表名动态设置
- sslLog.setTableName(Configurations
- .getStringProperty(DfSslLog.class.getSimpleName() + "B", "t_xa_df_ssl_log_mpp")
- .trim());
- }
-
- if (Constants.IS_OPEN_REDIS) {
- // 根据查询条件获取key
- key = dfLogService.getJedisKey(request, false);
- // 加上AB版的条件
- key = key + whichHive;
- // 判断key是否存在
- keyExists = redisDao.exists(key);
- }
- // 存在则直接从redis中查询
- if (keyExists) {
- long startTime = System.currentTimeMillis();
- logger1.info("Redis查询开始:" + startTime);
-
- sslLogPage = (Page) JsonMapper.fromJsonString(redisDao.getString(key),
- Page.class);
-
- long endTime = System.currentTimeMillis();
- logger1.info(sslLogPage.getList().size());
- } else {
- // 不存在则查询数据库并保存查询结果到redis中
- long startTime = System.currentTimeMillis();
- sslLogPage = dfLogService.findSslPageCluster(
- new Page(request, response, DfSslLog.class), sslLog,
- searchActiveSys);
- long endTime = System.currentTimeMillis();
- if (Constants.IS_OPEN_REDIS) {
- new SaveRedisThread(key, sslLogPage, Constants.ORACLE_EXPIRE).start();
- }
- }
-
- } else {
- int startNum = (page.getPageNo() - 1) * page.getPageSize();
- int endNum = startNum + page.getPageSize() - 1;
- if (Constants.IS_OPEN_REDIS && Constants.DATACENTER_OPEN_REDIS) {
- // 根据查询条件获取key
- key = dfLogService.getJedisKey(request, true) + whichHive;
- // 判断key是否存在
- keyExists = redisDao.exists(key);
- }
- if (keyExists) {
- List list = new ArrayList();
- sslLogPage = new Page();
- List strList = redisDao.getList(key, startNum, endNum);
- if (null != strList && strList.size() > 0) {
- for (String str : strList) {
- DfSslLog bean = (DfSslLog) JsonMapper.fromJsonString(str, DfSslLog.class);
- list.add(bean);
- }
-
- sslLogPage.setList(list);
-
- } else {
- sslLogPage.setList(new ArrayList());
- }
- } else {
-
- sslLogPage = new Page();
- String orderBy = "";
- if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
- orderBy = page.getOrderBySql(DfSslLog.class.getSimpleName(), page.getOrderBy());
- } else {
- orderBy = "found_Time";
- }
- ResultSet rs = HiveSqlService.getResultSet(page, sslLog, "DF_SSL_LOG", getCol2Col(), orderBy,
- searchActiveSys);
- Map tableMapping = HiveJDBC.tableMapping(page, key, rs, DfSslLog.class, "foundTime",
- "recvTime");
- if (tableMapping == null) {
- sslLogPage.setList(new ArrayList());
- } else {
- List strList = new ArrayList();
- if (tableMapping.get("obj").size() > page.getPageSize()) {
- strList = tableMapping.get("obj").subList(0, page.getPageSize());
- } else {
- strList = tableMapping.get("obj").subList(0, tableMapping.get("obj").size());
- }
- if (strList.size() > 0) {
- String jsonString = JsonMapper.toJsonString(strList);
- List List = (java.util.List) JsonMapper.fromJsonList(jsonString,
- DfSslLog.class);
- sslLogPage.setList(List);
- } else {
- sslLogPage.setList(new ArrayList());
- }
- }
- }
- if (Constants.IS_OPEN_REDIS && Constants.IS_GET_HIVECOUNT) {
- countKey = dfLogService.getJedisKey(request, true) + "&pageCount" + whichHive;
- // 判断key是否存在
- boolean countKeyExists = redisDao.exists(countKey);
- if (countKeyExists) {
- String count = redisDao.getString(countKey);
- sslLogPage.setCount(Long.valueOf(count));
- } else {
- Long hivePageCount = HiveSqlService.getHivePageCount(sslLog, countKey, "DF_SSL_LOG",
- getCol2Col(), searchActiveSys);
- sslLogPage.setCount(Long.valueOf(hivePageCount));
- }
- } else {
- Long hivePageCount = HiveSqlService.getHivePageCount(sslLog, countKey, "DF_SSL_LOG", getCol2Col(),
- searchActiveSys);
- sslLogPage.setCount(Long.valueOf(hivePageCount));
- }
- sslLogPage.setPageNo(page.getPageNo());
- sslLogPage.setPageSize(page.getPageSize());
- }
- } else {// 从oracle中查询数据
- if (Constants.IS_OPEN_REDIS) {
- // 根据查询条件获取key
- key = dfLogService.getJedisKey(request, false);
- // 判断key是否存在
- keyExists = redisDao.exists(key);
- }
- // 存在则直接从redis中查询
- if (keyExists) {
- sslLogPage = (Page) JsonMapper.fromJsonString(redisDao.getString(key), Page.class);
- } else {
- // 不存在则查询数据库并保存查询结果到redis中
- sslLogPage = dfLogService.findSslPage(new Page(request, response, DfSslLog.class), sslLog,
- searchActiveSys);
-
- if (Constants.IS_OPEN_REDIS)
- new SaveRedisThread(key, sslLogPage, Constants.ORACLE_EXPIRE).start();
- }
- }
- } catch (Exception e) {
- thread.setExceptionInfo(e.getMessage() + " " + e.getCause());
- e.printStackTrace();
- logger1.error(e);
- if (!(e instanceof RestServiceException)) {
- e = new RestServiceException(thread, System.currentTimeMillis() - start, "SSL日志检索失败");
- }
- ((RestServiceException) e).setActiveSys(searchActiveSys);
- ((RestServiceException) e).setLogSource(logSource);
- throw ((RestServiceException) e);
- }
-
- return serviceResponse(thread, System.currentTimeMillis() - start, request, response, "SSL日志检索成功", sslLogPage,
- searchActiveSys, logSource);
- }
-
- @RequestMapping(value = "/dfTunnelRandomLogs", method = RequestMethod.GET)
- @ApiOperation(value = "隧道协议随机封堵日志获取", httpMethod = "GET", notes = "get log list")
- public Map dfTunnelRandomLogs(
- @RequestParam(value = "searchActiveSys", required = false, defaultValue = Constants.ACTIVESYS_B) String searchActiveSys,
- Page page, DfTunnelRandomLog tunnelRandomLog, HttpServletRequest request, HttpServletResponse response,
- Model model) {
- String whichHive = "&HIVEB";
- if (!Constants.ACTIVESYS_A.equals(searchActiveSys) && !Constants.ACTIVESYS_C.equals(searchActiveSys)) {
- searchActiveSys = Constants.ACTIVESYS_B;
- whichHive = "&HIVEB";
- } else {
- whichHive = "&HIVEA";
- }
-
- long start = System.currentTimeMillis();
- SaveRequestLogThread thread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
- null);
- boolean keyExists = false;
- String key = "";
- String countKey = "";
- Page tunnelRandomLogPage = new Page();
- try {
- resetTime(tunnelRandomLog);
- // 请求参数校验
- dfLogService.queryConditionCheck(thread, start, tunnelRandomLog, DfTunnelRandomLog.class, page);
- if (Constants.ONLY_SEL_FROM_HIVE
- || (HiveSqlService.ifTimeGreaterThan48(tunnelRandomLog.getSearchFoundStartTime(),
- tunnelRandomLog.getSearchFoundEndTime()) && Constants.SEL_FROM_HIVE)) {// 从数据库中查询数据
- logSource = "1";
- // 神通数据库查询打开 && (( A版&&查询开始时间>神通数据库数据A最早时间)||(
- // B版&&查询开始时间>神通数据库数据B最早时间))
- if (Constants.IS_SELECT_CLUSTER && ((Constants.ACTIVESYS_A.equals(searchActiveSys)
- && (tunnelRandomLog.getSearchFoundStartTimeCluster()*1000) > Constants.CLUSTER_A_START_TIME)
- || (Constants.ACTIVESYS_B.equals(searchActiveSys)
- && (tunnelRandomLog.getSearchFoundStartTimeCluster()*1000) > Constants.CLUSTER_B_START_TIME))) {
- // 打开神通数据库
- logger.info("开启神通数据库---" + System.currentTimeMillis());
- CustomerContextHolder.setCustomerType(CustomerContextHolder.DATA_SOURCE_H);// 开启数据源F
- // 拼凑rediskeyA、B版
- if (Constants.ACTIVESYS_A.equals(searchActiveSys)) {
- whichHive = "&CLUSTERA";
- // A版表名动态设置
- tunnelRandomLog.setTableName(Configurations.getStringProperty(DfTunnelRandomLog.class.getSimpleName() + "A",
- "t_xa_df_ip_port_log_hit_mpp").trim());
- } else if (Constants.ACTIVESYS_B.equals(searchActiveSys)) {
- whichHive = "&CLUSTERB";
- // B版表名动态设置
- tunnelRandomLog.setTableName(Configurations
- .getStringProperty(DfTunnelRandomLog.class.getSimpleName() + "B", "t_xa_df_tunnel_random_log_mpp")
- .trim());
- }
-
- if (Constants.IS_OPEN_REDIS) {
- // 根据查询条件获取key
- key = dfLogService.getJedisKey(request, false);
- // 加上AB版的条件
- key = key + whichHive;
- // 判断key是否存在
- keyExists = redisDao.exists(key);
- }
- // 存在则直接从redis中查询
- if (keyExists) {
- long startTime = System.currentTimeMillis();
- logger1.info("Redis查询开始:" + startTime);
-
- tunnelRandomLogPage = (Page) JsonMapper.fromJsonString(redisDao.getString(key),
- Page.class);
-
- long endTime = System.currentTimeMillis();
- logger1.info(tunnelRandomLogPage.getList().size());
- } else {
- // 不存在则查询数据库并保存查询结果到redis中
- long startTime = System.currentTimeMillis();
- tunnelRandomLogPage = dfLogService.findTunnelRandomPageCluster(
- new Page(request, response, DfTunnelRandomLog.class), tunnelRandomLog,
- searchActiveSys);
- long endTime = System.currentTimeMillis();
- if (Constants.IS_OPEN_REDIS) {
- new SaveRedisThread(key, tunnelRandomLogPage, Constants.ORACLE_EXPIRE).start();
- }
- }
-
- } else {
- int startNum = (page.getPageNo() - 1) * page.getPageSize();
- int endNum = startNum + page.getPageSize() - 1;
- if (Constants.IS_OPEN_REDIS && Constants.DATACENTER_OPEN_REDIS) {
- // 根据查询条件获取key
- key = dfLogService.getJedisKey(request, true) + whichHive;
- // 判断key是否存在
- keyExists = redisDao.exists(key);
- }
- if (keyExists) {
- List list = new ArrayList();
- tunnelRandomLogPage = new Page();
- List strList = redisDao.getList(key, startNum, endNum);
- if (null != strList && strList.size() > 0) {
- for (String str : strList) {
- DfTunnelRandomLog bean = (DfTunnelRandomLog) JsonMapper.fromJsonString(str,
- DfTunnelRandomLog.class);
- list.add(bean);
- }
-
- tunnelRandomLogPage.setList(list);
-
- } else {
- tunnelRandomLogPage.setList(new ArrayList());
- }
- } else {
-
- tunnelRandomLogPage = new Page();
- String orderBy = "";
- if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
- orderBy = page.getOrderBySql(DfSslLog.class.getSimpleName(), page.getOrderBy());
- } else {
- orderBy = "found_Time";
- }
- ResultSet rs = HiveSqlService.getResultSet(page, tunnelRandomLog, "DF_TUNNEL_RANDOM_LOG",
- getCol2Col(), orderBy, searchActiveSys);
- Map tableMapping = HiveJDBC.tableMapping(page, key, rs, DfSslLog.class, "foundTime",
- "recvTime");
- if (tableMapping == null) {
- tunnelRandomLogPage.setList(new ArrayList());
- } else {
- List strList = new ArrayList();
- if (tableMapping.get("obj").size() > page.getPageSize()) {
- strList = tableMapping.get("obj").subList(0, page.getPageSize());
- } else {
- strList = tableMapping.get("obj").subList(0, tableMapping.get("obj").size());
- }
- if (strList.size() > 0) {
- String jsonString = JsonMapper.toJsonString(strList);
- List List = (java.util.List) JsonMapper
- .fromJsonList(jsonString, DfTunnelRandomLog.class);
- tunnelRandomLogPage.setList(List);
- } else {
- tunnelRandomLogPage.setList(new ArrayList());
- }
- }
- }
- if (Constants.IS_OPEN_REDIS && Constants.IS_GET_HIVECOUNT) {
- countKey = dfLogService.getJedisKey(request, true) + "&pageCount" + whichHive;
- // 判断key是否存在
- boolean countKeyExists = redisDao.exists(countKey);
- if (countKeyExists) {
- String count = redisDao.getString(countKey);
- tunnelRandomLogPage.setCount(Long.valueOf(count));
- } else {
- Long hivePageCount = HiveSqlService.getHivePageCount(tunnelRandomLog, countKey,
- "DF_TUNNEL_RANDOM_LOG", getCol2Col(), searchActiveSys);
- tunnelRandomLogPage.setCount(Long.valueOf(hivePageCount));
- }
- } else {
- Long hivePageCount = HiveSqlService.getHivePageCount(tunnelRandomLog, countKey,
- "DF_TUNNEL_RANDOM_LOG", getCol2Col(), searchActiveSys);
- tunnelRandomLogPage.setCount(Long.valueOf(hivePageCount));
- }
- tunnelRandomLogPage.setPageNo(page.getPageNo());
- tunnelRandomLogPage.setPageSize(page.getPageSize());
- }
- } else {// 从oracle中查询数据
- if (Constants.IS_OPEN_REDIS) {
- // 根据查询条件获取key
- key = dfLogService.getJedisKey(request, false);
- // 判断key是否存在
- keyExists = redisDao.exists(key);
- }
- // 存在则直接从redis中查询
- if (keyExists) {
- tunnelRandomLogPage = (Page) JsonMapper.fromJsonString(redisDao.getString(key),
- Page.class);
- } else {
- // 不存在则查询数据库并保存查询结果到redis中
- tunnelRandomLogPage = dfLogService.findTunnelRandomPage(
- new Page(request, response, DfTunnelRandomLog.class), tunnelRandomLog,
- searchActiveSys);
-
- if (Constants.IS_OPEN_REDIS)
- new SaveRedisThread(key, tunnelRandomLogPage, Constants.ORACLE_EXPIRE).start();
- }
- }
- } catch (Exception e) {
- thread.setExceptionInfo(e.getMessage() + " " + e.getCause());
- e.printStackTrace();
- logger1.error(e);
- if (!(e instanceof RestServiceException)) {
- e = new RestServiceException(thread, System.currentTimeMillis() - start, "隧道协议随机封堵日志检索失败");
- }
- ((RestServiceException) e).setActiveSys(searchActiveSys);
- ((RestServiceException) e).setLogSource(logSource);
- throw ((RestServiceException) e);
- }
-
- return serviceResponse(thread, System.currentTimeMillis() - start, request, response, "隧道协议随机封堵日志检索成功",
- tunnelRandomLogPage, searchActiveSys, logSource);
- }
-
- public Map> getCol2Col() {
- Map> col2col = new HashMap>();
- Map startMap = new HashMap();
- startMap.put("start", "foundTime");
- col2col.put("searchFoundStartTime", startMap);
- Map endMap = new HashMap();
- endMap.put("end", "foundTime");
- col2col.put("searchFoundEndTime", endMap);
- return col2col;
- }
-
- /**
- * @Title: resetTime
- * @Description: TODO(日志开始结束时间为空则默认为本地存储时间)
- * @param @param
- * entity
- * @return Map 返回类型
- * @author (DDM)
- * @version V1.0
- */
- public void resetTime(LogEntity> entity) throws Exception {
- SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
- Map map = DateUtils.getLocalTime(entity.getSearchFoundStartTime(),
- entity.getSearchFoundEndTime(), Constants.LOG_LOCAL_TIME, "log");
- entity.setSearchFoundStartTime(map.get("startTime"));
- entity.setSearchFoundEndTime(map.get("endTime"));
- entity.setSearchFoundStartTimeCluster(sdf.parse(map.get("startTime")).getTime() / 1000);
- entity.setSearchFoundEndTimeCluster(sdf.parse(map.get("endTime")).getTime() / 1000);
- }
-}
diff --git a/src/main/java/com/nis/web/controller/restful/DjLogSearchController.java b/src/main/java/com/nis/web/controller/restful/DjLogSearchController.java
deleted file mode 100644
index 6299d5a..0000000
--- a/src/main/java/com/nis/web/controller/restful/DjLogSearchController.java
+++ /dev/null
@@ -1,2304 +0,0 @@
-/**
-* @Title: DjLogSearchController.java
-* @Package com.nis.web.controller.restful
-* @Description: TODO(用一句话描述该文件做什么)
-* @author (zbc)
-* @date 2016年9月7日上午8:45:58
-* @version V1.0
-*/
-package com.nis.web.controller.restful;
-
-import java.sql.ResultSet;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import org.apache.cxf.common.util.StringUtils;
-import org.apache.log4j.Logger;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.ui.Model;
-import org.springframework.web.bind.annotation.RequestMapping;
-import org.springframework.web.bind.annotation.RequestMethod;
-import org.springframework.web.bind.annotation.RequestParam;
-import org.springframework.web.bind.annotation.RestController;
-
-import com.nis.domain.LogEntity;
-import com.nis.domain.Page;
-import com.nis.domain.restful.DjDnsLog;
-import com.nis.domain.restful.DjFtpLog;
-import com.nis.domain.restful.DjHttpKeywordLog;
-import com.nis.domain.restful.DjHttpReqLog;
-import com.nis.domain.restful.DjHttpResLog;
-import com.nis.domain.restful.DjIpPortLog;
-import com.nis.domain.restful.DjIpsecLog;
-import com.nis.domain.restful.DjL2tpLog;
-import com.nis.domain.restful.DjMailLog;
-import com.nis.domain.restful.DjOpenvpnLog;
-import com.nis.domain.restful.DjPptpLog;
-import com.nis.domain.restful.DjSshLog;
-import com.nis.domain.restful.DjSslLog;
-import com.nis.restful.RestBusinessCode;
-import com.nis.restful.RestServiceException;
-import com.nis.util.Constants;
-import com.nis.util.DateUtils;
-import com.nis.util.HiveJDBC;
-import com.nis.util.JsonMapper;
-import com.nis.util.StringUtil;
-import com.nis.util.redis.RedisDao;
-import com.nis.util.redis.SaveRedisThread;
-import com.nis.web.controller.BaseRestController;
-import com.nis.web.service.HiveSqlService;
-import com.nis.web.service.SaveRequestLogThread;
-import com.nis.web.service.ServicesRequestLogService;
-import com.nis.web.service.restful.DjLogSearchService;
-import com.wordnik.swagger.annotations.ApiOperation;
-
-/**
- *
- * @ClassName: DjLogSearchController
- * @Description: TODO(这里用一句话描述这个类的作用)
- * @author (zbc)
- * @date 2016年9月7日上午8:45:58
- * @version V1.0
- */
-@RestController
-//@RequestMapping("${servicePath}/log/v1")
-@SuppressWarnings({ "rawtypes", "unchecked" })
-public class DjLogSearchController extends BaseRestController {
- protected final Logger logger1 = Logger.getLogger(this.getClass());
- protected String logSource = "0";
- @Autowired
- protected DjLogSearchService djLogService;
- @Autowired
- protected RedisDao redisDao;
- @Autowired
- protected ServicesRequestLogService servicesRequestLogService;
-
- @RequestMapping(value = "/djIpPortLogs", method = RequestMethod.GET)
- @ApiOperation(value = "端口监测分页获取", httpMethod = "GET", notes = "get log list")
- public Map djIpPortLogList(
- @RequestParam(value = "searchActiveSys", required = false, defaultValue = Constants.ACTIVESYS_B) String searchActiveSys,
- Page page, DjIpPortLog ipPortLog, Model model, HttpServletRequest request, HttpServletResponse response) {
- String whichHive = "&HIVEB";
- if (!Constants.ACTIVESYS_A.equals(searchActiveSys)
- && !Constants.ACTIVESYS_C.equals(searchActiveSys) ) {
- searchActiveSys = Constants.ACTIVESYS_B;
- whichHive = "&HIVEB";
- } else {
- whichHive = "&HIVEA";
- }
-
- long start = System.currentTimeMillis();
- SaveRequestLogThread thread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
- null);
- boolean keyExists = false;
- String key = "";
- String countKey = "";
- Page ipPortLogPage = null;
-
- try {
- resetTime(ipPortLog);
- // 请求参数校验
- djLogService.queryConditionCheck(thread, start, ipPortLog, DjIpPortLog.class, page);
- if (Constants.ONLY_SEL_FROM_HIVE || (HiveSqlService.ifTimeGreaterThan48(ipPortLog.getSearchFoundStartTime(),
- ipPortLog.getSearchFoundEndTime()) && Constants.SEL_FROM_HIVE)) {// 从数据库中查询数据
- logSource = "1";
- int startNum = (page.getPageNo() - 1) * page.getPageSize();
- int endNum = startNum + page.getPageSize() - 1;
-
- if (Constants.IS_OPEN_REDIS && Constants.DATACENTER_OPEN_REDIS) {
- // 根据查询条件获取key
- key = djLogService.getJedisKey(request, true) + whichHive;
- // 判断key是否存在
- keyExists = redisDao.exists(key);
- }
- if (keyExists) {
- List ipPortList = new ArrayList();
- ipPortLogPage = new Page();
- List strlist = redisDao.getList(key, startNum, endNum);
- if (null != strlist && strlist.size() > 0) {
- if (null != strlist && strlist.size() > 0) {
- for (String str : strlist) {
- DjIpPortLog ipPort = (DjIpPortLog) JsonMapper.fromJsonString(str, DjIpPortLog.class);
- ipPortList.add(ipPort);
- }
- }
- ipPortLogPage.setList(ipPortList);
- } else {
- ipPortLogPage.setList(new ArrayList());
- }
- } else {
- ipPortLogPage = new Page();
- String orderBy = "";
- if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
- orderBy = page.getOrderBySql(DjIpPortLog.class.getSimpleName(), page.getOrderBy());
- } else {
- orderBy = "found_Time";
- }
- ResultSet rs = HiveSqlService.getResultSet(page, ipPortLog, "DJ_IP_PORT_LOG", getCol2Col(), orderBy,
- searchActiveSys);
- Map tableMapping = HiveJDBC.tableMapping(page, key, rs, DjIpPortLog.class,
- "foundTime", "recvTime");
- if (tableMapping == null) {
- ipPortLogPage.setList(new ArrayList());
- } else {
- List list = new ArrayList();
- if (tableMapping.get("obj").size() > page.getPageSize()) {
- list = tableMapping.get("obj").subList(0, page.getPageSize());
- } else {
- list = tableMapping.get("obj").subList(0, tableMapping.get("obj").size());
- }
- if (list.size() > 0) {
- String jsonString = JsonMapper.toJsonString(list);
- List List = (java.util.List