1:为日志从hive查询添加分页和排序

2:为日志查询添加总页数和总条数功能
This commit is contained in:
renkaige
2018-10-30 11:13:45 +08:00
parent c0325878e5
commit d144de73b5
5 changed files with 528 additions and 313 deletions

View File

@@ -641,6 +641,15 @@ public class Page<T> {
public int getMaxResults(){ public int getMaxResults(){
return getPageSize(); return getPageSize();
} }
/**
 * Sets the last page number (i.e. the total page count) for this result page.
 * Added alongside the Hive-log pagination support so callers can record the
 * computed total-page value on the {@code Page} object.
 *
 * @param last the last page number (total number of pages)
 */
public void setLast(int last) {
this.last = last;
}
/** /**
* @Title: getFiledsSql * @Title: getFiledsSql
* @Description: 将fields的属性名称替换为字段名称 * @Description: 将fields的属性名称替换为字段名称

View File

@@ -29,9 +29,16 @@ import com.nis.web.service.SpringContextHolder;
/** /**
* *
* <p>Title: HiveJDBCByDruid</p> * <p>
* <p>Description: 使用druid连接池对hive进行查询并解析结果</p> * Title: HiveJDBCByDruid
* <p>Company: IIE</p> * </p>
* <p>
* Description: 使用druid连接池对hive进行查询并解析结果
* </p>
* <p>
* Company: IIE
* </p>
*
* @author rkg * @author rkg
* @date 2018年8月20日 * @date 2018年8月20日
* *
@@ -169,6 +176,7 @@ public class LogJDBCByDruid {
/** /**
* 反射获取类中date类型的字段名称 * 反射获取类中date类型的字段名称
*
* @param type * @param type
* @return * @return
* @throws Exception * @throws Exception
@@ -240,6 +248,7 @@ public class LogJDBCByDruid {
return map; return map;
} }
public ResultSet query(String sql) throws Exception { public ResultSet query(String sql) throws Exception {
conn = getConnection(); conn = getConnection();
logger.info("连接数据中心日志库成功--------------------------"); logger.info("连接数据中心日志库成功--------------------------");
@@ -255,4 +264,25 @@ public class LogJDBCByDruid {
} }
/**
 * Runs a COUNT-style query against the data-center log store and returns the
 * value of the first column of the first row as a {@code long}.
 *
 * @param sql a query whose first row / first column holds the total count
 * @return the parsed count, or {@code 0L} when the query returns no row or a
 *         null/blank value
 * @throws Exception if obtaining the connection, executing the query, or
 *         parsing the result fails
 */
public long getCount(String sql) throws Exception {
try {
conn = getConnection();
st = conn.createStatement();
rs = st.executeQuery(sql);
String countStr = null;
// Only the first row is meaningful for a COUNT query; the original
// while/break loop is equivalent to a single rs.next() check.
if (rs.next()) {
// getString returns null for SQL NULL, avoiding the NPE that
// rs.getObject(1).toString() would throw on a null cell.
countStr = rs.getString(1);
}
if (countStr == null || countStr.trim().isEmpty()) {
logger.info("获取数据中心日志总条数成功总共===================0条配置");
return 0L;
}
// Trim before parsing: the blank check above trims, so parse the same
// trimmed text (Long.valueOf on an untrimmed padded string would throw).
return Long.parseLong(countStr.trim());
} finally {
// Always release the pooled connection/statement/result set.
closeConn();
}
}
} }

View File

@@ -140,6 +140,7 @@ public class BaseRestController {
/** /**
* 不将日志插入到数据库中,只返回到前台 * 不将日志插入到数据库中,只返回到前台
*
* @param time * @param time
* @param request * @param request
* @param response * @param response
@@ -223,6 +224,7 @@ public class BaseRestController {
/** /**
* 日志结果响应格式规范 * 日志结果响应格式规范
*
* @param auditLogThread * @param auditLogThread
* @param executedTime * @param executedTime
* @param request * @param request
@@ -408,4 +410,11 @@ public class BaseRestController {
return Configurations.getStringProperty(key, defaultTableName); return Configurations.getStringProperty(key, defaultTableName);
} }
/**
 * Computes the total number of pages required to hold {@code totalCount}
 * records at {@code pageSize} records per page (ceiling division).
 *
 * @param totalCount total number of records
 * @param pageSize   records per page (assumed positive — TODO confirm callers
 *                   never pass 0, which would throw ArithmeticException)
 * @return the last page number, i.e. ceil(totalCount / pageSize)
 */
protected int getLastPageNum(int totalCount, int pageSize) {
int fullPages = totalCount / pageSize;
int remainder = totalCount % pageSize;
return remainder == 0 ? fullPages : fullPages + 1;
}
} }

View File

@@ -16,6 +16,7 @@ import org.springframework.web.bind.annotation.RestController;
import com.nis.domain.LogEntity; import com.nis.domain.LogEntity;
import com.nis.domain.Page; import com.nis.domain.Page;
import com.nis.domain.restful.DkBehaviorLog;
import com.nis.domain.restful.NtcAppLog; import com.nis.domain.restful.NtcAppLog;
import com.nis.domain.restful.NtcBgpLog; import com.nis.domain.restful.NtcBgpLog;
import com.nis.domain.restful.NtcDdosLog; import com.nis.domain.restful.NtcDdosLog;
@@ -70,8 +71,8 @@ public class NtcLogSearchController extends BaseRestController {
public Map<String, ?> ntcIpLogs(Page page, NtcIpLog ntcIpLog, Model model, HttpServletRequest request, public Map<String, ?> ntcIpLogs(Page page, NtcIpLog ntcIpLog, Model model, HttpServletRequest request,
HttpServletResponse response) { HttpServletResponse response) {
long start = System.currentTimeMillis(); long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
request, null); null);
Page<NtcIpLog> ntcIpLogPage = null; Page<NtcIpLog> ntcIpLogPage = null;
try { try {
resetTime(ntcIpLog); resetTime(ntcIpLog);
@@ -88,13 +89,14 @@ public class NtcLogSearchController extends BaseRestController {
// ResultSet rs = HiveSqlService.getResultSet(page, ntcIpLog, // ResultSet rs = HiveSqlService.getResultSet(page, ntcIpLog,
// getTableName(NtcIpLog.class.getSimpleName() + "HiveTable", "ntc_ip_log"), // getTableName(NtcIpLog.class.getSimpleName() + "HiveTable", "ntc_ip_log"),
// getCol2Col(), orderBy, null); // getCol2Col(), orderBy, null);
// Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcIpLog.class, "foundTime", // Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs,
// NtcIpLog.class, "foundTime",
// "recvTime"); // "recvTime");
String sql = HiveSqlService.getSql(page, ntcIpLog, String sql = HiveSqlService.getSql(page, ntcIpLog,
getTableName(NtcIpLog.class.getSimpleName() + "HiveTable", "ntc_ip_log"), getTableName(NtcIpLog.class.getSimpleName() + "HiveTable", "ntc_ip_log"), getCol2Col(), orderBy,
getCol2Col(), orderBy, null); null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null,sql , NtcIpLog.class, "foundTime", Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcIpLog.class,
"recvTime"); "foundTime", "recvTime");
if (tableMapping == null) { if (tableMapping == null) {
ntcIpLogPage.setList(new ArrayList<NtcIpLog>()); ntcIpLogPage.setList(new ArrayList<NtcIpLog>());
} else { } else {
@@ -140,8 +142,8 @@ public class NtcLogSearchController extends BaseRestController {
public Map<String, ?> ntcHttpLogs(Page page, NtcHttpLog ntcHttpLog, Model model, HttpServletRequest request, public Map<String, ?> ntcHttpLogs(Page page, NtcHttpLog ntcHttpLog, Model model, HttpServletRequest request,
HttpServletResponse response) { HttpServletResponse response) {
long start = System.currentTimeMillis(); long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
request, null); null);
Page<NtcHttpLog> ntcHttpLogPage = null; Page<NtcHttpLog> ntcHttpLogPage = null;
try { try {
@@ -157,10 +159,10 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time"; orderBy = "found_Time";
} }
String sql = HiveSqlService.getSql(page, ntcHttpLog, String sql = HiveSqlService.getSql(page, ntcHttpLog,
getTableName(NtcHttpLog.class.getSimpleName() + "HiveTable", "ntc_http_log"), getTableName(NtcHttpLog.class.getSimpleName() + "HiveTable", "ntc_http_log"), getCol2Col(), orderBy,
getCol2Col(), orderBy, null); null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcHttpLog.class, "foundTime", Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql,
"recvTime"); NtcHttpLog.class, "foundTime", "recvTime");
if (tableMapping == null) { if (tableMapping == null) {
ntcHttpLogPage.setList(new ArrayList()); ntcHttpLogPage.setList(new ArrayList());
} else { } else {
@@ -200,8 +202,8 @@ public class NtcLogSearchController extends BaseRestController {
public Map<String, ?> ntcDnsLogs(Page page, NtcDnsLog ntcDnsLog, Model model, HttpServletRequest request, public Map<String, ?> ntcDnsLogs(Page page, NtcDnsLog ntcDnsLog, Model model, HttpServletRequest request,
HttpServletResponse response) { HttpServletResponse response) {
long start = System.currentTimeMillis(); long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
request, null); null);
Page<NtcDnsLog> ntcDnsLogPage = null; Page<NtcDnsLog> ntcDnsLogPage = null;
try { try {
@@ -217,10 +219,10 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time"; orderBy = "found_Time";
} }
String sql = HiveSqlService.getSql(page, ntcDnsLog, String sql = HiveSqlService.getSql(page, ntcDnsLog,
getTableName(NtcDnsLog.class.getSimpleName() + "HiveTable", "ntc_dns_log"), getTableName(NtcDnsLog.class.getSimpleName() + "HiveTable", "ntc_dns_log"), getCol2Col(), orderBy,
getCol2Col(), orderBy, null); null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcDnsLog.class, "foundTime", Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcDnsLog.class,
"recvTime"); "foundTime", "recvTime");
if (tableMapping == null) { if (tableMapping == null) {
ntcDnsLogPage.setList(new ArrayList()); ntcDnsLogPage.setList(new ArrayList());
} else { } else {
@@ -260,8 +262,8 @@ public class NtcLogSearchController extends BaseRestController {
public Map<String, ?> ntcMailLogs(Page page, NtcMailLog ntcMailLog, Model model, HttpServletRequest request, public Map<String, ?> ntcMailLogs(Page page, NtcMailLog ntcMailLog, Model model, HttpServletRequest request,
HttpServletResponse response) { HttpServletResponse response) {
long start = System.currentTimeMillis(); long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
request, null); null);
Page<NtcMailLog> ntcMailLogPage = null; Page<NtcMailLog> ntcMailLogPage = null;
try { try {
@@ -277,10 +279,10 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time"; orderBy = "found_Time";
} }
String sql = HiveSqlService.getSql(page, ntcMailLog, String sql = HiveSqlService.getSql(page, ntcMailLog,
getTableName(NtcMailLog.class.getSimpleName() + "HiveTable", "ntc_mail_log"), getTableName(NtcMailLog.class.getSimpleName() + "HiveTable", "ntc_mail_log"), getCol2Col(), orderBy,
getCol2Col(), orderBy, null); null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcMailLog.class, "foundTime", Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql,
"recvTime"); NtcMailLog.class, "foundTime", "recvTime");
if (tableMapping == null) { if (tableMapping == null) {
ntcMailLogPage.setList(new ArrayList()); ntcMailLogPage.setList(new ArrayList());
} else { } else {
@@ -320,8 +322,8 @@ public class NtcLogSearchController extends BaseRestController {
public Map<String, ?> ntcSslLogs(Page page, NtcSslLog ntcSslLog, Model model, HttpServletRequest request, public Map<String, ?> ntcSslLogs(Page page, NtcSslLog ntcSslLog, Model model, HttpServletRequest request,
HttpServletResponse response) { HttpServletResponse response) {
long start = System.currentTimeMillis(); long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
request, null); null);
Page<NtcSslLog> ntcSslLogPage = null; Page<NtcSslLog> ntcSslLogPage = null;
try { try {
@@ -337,10 +339,10 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time"; orderBy = "found_Time";
} }
String sql = HiveSqlService.getSql(page, ntcSslLog, String sql = HiveSqlService.getSql(page, ntcSslLog,
getTableName(NtcSslLog.class.getSimpleName() + "HiveTable", "ntc_ssl_log"), getTableName(NtcSslLog.class.getSimpleName() + "HiveTable", "ntc_ssl_log"), getCol2Col(), orderBy,
getCol2Col(), orderBy, null); null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcSslLog.class, "foundTime", Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcSslLog.class,
"recvTime"); "foundTime", "recvTime");
if (tableMapping == null) { if (tableMapping == null) {
ntcSslLogPage.setList(new ArrayList()); ntcSslLogPage.setList(new ArrayList());
} else { } else {
@@ -380,8 +382,8 @@ public class NtcLogSearchController extends BaseRestController {
public Map<String, ?> ntcPptpLogs(Page page, NtcPptpLog ntcPptpLog, Model model, HttpServletRequest request, public Map<String, ?> ntcPptpLogs(Page page, NtcPptpLog ntcPptpLog, Model model, HttpServletRequest request,
HttpServletResponse response) { HttpServletResponse response) {
long start = System.currentTimeMillis(); long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
request, null); null);
Page<NtcPptpLog> ntcPptpLogPage = null; Page<NtcPptpLog> ntcPptpLogPage = null;
try { try {
@@ -397,10 +399,10 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time"; orderBy = "found_Time";
} }
String sql = HiveSqlService.getSql(page, ntcPptpLog, String sql = HiveSqlService.getSql(page, ntcPptpLog,
getTableName(NtcPptpLog.class.getSimpleName() + "HiveTable", "ntc_pptp_log"), getTableName(NtcPptpLog.class.getSimpleName() + "HiveTable", "ntc_pptp_log"), getCol2Col(), orderBy,
getCol2Col(), orderBy, null); null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcPptpLog.class, "foundTime", Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql,
"recvTime"); NtcPptpLog.class, "foundTime", "recvTime");
if (tableMapping == null) { if (tableMapping == null) {
ntcPptpLogPage.setList(new ArrayList()); ntcPptpLogPage.setList(new ArrayList());
} else { } else {
@@ -439,8 +441,8 @@ public class NtcLogSearchController extends BaseRestController {
public Map<String, ?> ntcL2tpLogs(Page page, NtcL2tpLog ntcL2tpLog, Model model, HttpServletRequest request, public Map<String, ?> ntcL2tpLogs(Page page, NtcL2tpLog ntcL2tpLog, Model model, HttpServletRequest request,
HttpServletResponse response) { HttpServletResponse response) {
long start = System.currentTimeMillis(); long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
request, null); null);
Page<NtcL2tpLog> ntcL2tpLogPage = null; Page<NtcL2tpLog> ntcL2tpLogPage = null;
try { try {
@@ -456,10 +458,10 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time"; orderBy = "found_Time";
} }
String sql = HiveSqlService.getSql(page, ntcL2tpLog, String sql = HiveSqlService.getSql(page, ntcL2tpLog,
getTableName(NtcL2tpLog.class.getSimpleName() + "HiveTable", "ntc_l2tp_log"), getTableName(NtcL2tpLog.class.getSimpleName() + "HiveTable", "ntc_l2tp_log"), getCol2Col(), orderBy,
getCol2Col(), orderBy, null); null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcL2tpLog.class, "foundTime", Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql,
"recvTime"); NtcL2tpLog.class, "foundTime", "recvTime");
if (tableMapping == null) { if (tableMapping == null) {
ntcL2tpLogPage.setList(new ArrayList()); ntcL2tpLogPage.setList(new ArrayList());
} else { } else {
@@ -499,8 +501,8 @@ public class NtcLogSearchController extends BaseRestController {
public Map<String, ?> ntcOpenvpnLogs(Page page, NtcOpenvpnLog ntcOpenvpnLog, Model model, public Map<String, ?> ntcOpenvpnLogs(Page page, NtcOpenvpnLog ntcOpenvpnLog, Model model,
HttpServletRequest request, HttpServletResponse response) { HttpServletRequest request, HttpServletResponse response) {
long start = System.currentTimeMillis(); long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
request, null); null);
Page<NtcOpenvpnLog> ntcOpenvpnLogPage = null; Page<NtcOpenvpnLog> ntcOpenvpnLogPage = null;
try { try {
@@ -516,10 +518,10 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time"; orderBy = "found_Time";
} }
String sql = HiveSqlService.getSql(page, ntcOpenvpnLog, String sql = HiveSqlService.getSql(page, ntcOpenvpnLog,
getTableName(NtcOpenvpnLog.class.getSimpleName() + "HiveTable", "ntc_openvpn_log"), getTableName(NtcOpenvpnLog.class.getSimpleName() + "HiveTable", "ntc_openvpn_log"), getCol2Col(),
getCol2Col(), orderBy, null); orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcOpenvpnLog.class, "foundTime", Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql,
"recvTime"); NtcOpenvpnLog.class, "foundTime", "recvTime");
if (tableMapping == null) { if (tableMapping == null) {
ntcOpenvpnLogPage.setList(new ArrayList()); ntcOpenvpnLogPage.setList(new ArrayList());
} else { } else {
@@ -559,8 +561,8 @@ public class NtcLogSearchController extends BaseRestController {
public Map<String, ?> ntcIpsecLogs(Page page, NtcIpsecLog ntcIpsecLog, Model model, HttpServletRequest request, public Map<String, ?> ntcIpsecLogs(Page page, NtcIpsecLog ntcIpsecLog, Model model, HttpServletRequest request,
HttpServletResponse response) { HttpServletResponse response) {
long start = System.currentTimeMillis(); long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
request, null); null);
Page<NtcIpsecLog> ntcIpsecLogPage = null; Page<NtcIpsecLog> ntcIpsecLogPage = null;
try { try {
@@ -576,10 +578,10 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time"; orderBy = "found_Time";
} }
String sql = HiveSqlService.getSql(page, ntcIpsecLog, String sql = HiveSqlService.getSql(page, ntcIpsecLog,
getTableName(NtcIpsecLog.class.getSimpleName() + "HiveTable", "ntc_ipsec_log"), getTableName(NtcIpsecLog.class.getSimpleName() + "HiveTable", "ntc_ipsec_log"), getCol2Col(),
getCol2Col(), orderBy, null); orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcIpsecLog.class, "foundTime", Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql,
"recvTime"); NtcIpsecLog.class, "foundTime", "recvTime");
if (tableMapping == null) { if (tableMapping == null) {
ntcIpsecLogPage.setList(new ArrayList()); ntcIpsecLogPage.setList(new ArrayList());
} else { } else {
@@ -619,8 +621,8 @@ public class NtcLogSearchController extends BaseRestController {
public Map<String, ?> ntcSshLogs(Page page, NtcSshLog ntcSshLog, Model model, HttpServletRequest request, public Map<String, ?> ntcSshLogs(Page page, NtcSshLog ntcSshLog, Model model, HttpServletRequest request,
HttpServletResponse response) { HttpServletResponse response) {
long start = System.currentTimeMillis(); long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
request, null); null);
Page<NtcSshLog> ntcSshLogPage = null; Page<NtcSshLog> ntcSshLogPage = null;
try { try {
@@ -636,10 +638,10 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time"; orderBy = "found_Time";
} }
String sql = HiveSqlService.getSql(page, ntcSshLog, String sql = HiveSqlService.getSql(page, ntcSshLog,
getTableName(NtcSshLog.class.getSimpleName() + "HiveTable", "ntc_ssh_log"), getTableName(NtcSshLog.class.getSimpleName() + "HiveTable", "ntc_ssh_log"), getCol2Col(), orderBy,
getCol2Col(), orderBy, null); null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcSshLog.class, "foundTime", Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcSshLog.class,
"recvTime"); "foundTime", "recvTime");
if (tableMapping == null) { if (tableMapping == null) {
ntcSshLogPage.setList(new ArrayList()); ntcSshLogPage.setList(new ArrayList());
} else { } else {
@@ -679,8 +681,8 @@ public class NtcLogSearchController extends BaseRestController {
public Map<String, ?> ntcFtpLogs(Page page, NtcFtpLog ntcFtpLog, Model model, HttpServletRequest request, public Map<String, ?> ntcFtpLogs(Page page, NtcFtpLog ntcFtpLog, Model model, HttpServletRequest request,
HttpServletResponse response) { HttpServletResponse response) {
long start = System.currentTimeMillis(); long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
request, null); null);
Page<NtcFtpLog> ntcFtpLogPage = null; Page<NtcFtpLog> ntcFtpLogPage = null;
try { try {
@@ -696,10 +698,10 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time"; orderBy = "found_Time";
} }
String sql = HiveSqlService.getSql(page, ntcFtpLog, String sql = HiveSqlService.getSql(page, ntcFtpLog,
getTableName(NtcFtpLog.class.getSimpleName() + "HiveTable", "ntc_ftp_log"), getTableName(NtcFtpLog.class.getSimpleName() + "HiveTable", "ntc_ftp_log"), getCol2Col(), orderBy,
getCol2Col(), orderBy, null); null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcFtpLog.class, "foundTime", Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcFtpLog.class,
"recvTime"); "foundTime", "recvTime");
if (tableMapping == null) { if (tableMapping == null) {
ntcFtpLogPage.setList(new ArrayList()); ntcFtpLogPage.setList(new ArrayList());
} else { } else {
@@ -732,13 +734,14 @@ public class NtcLogSearchController extends BaseRestController {
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "FTP日志检索成功", return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "FTP日志检索成功",
ntcFtpLogPage, 0); ntcFtpLogPage, 0);
} }
@RequestMapping(value = "/ntcAppLogs", method = RequestMethod.GET) @RequestMapping(value = "/ntcAppLogs", method = RequestMethod.GET)
@ApiOperation(value = "App日志查询", httpMethod = "GET", notes = "对应配置为“App管理”存储动作为阻断与监测的命中日志。对日志功能“APP策略日志”提供数据基础查询服务") @ApiOperation(value = "App日志查询", httpMethod = "GET", notes = "对应配置为“App管理”存储动作为阻断与监测的命中日志。对日志功能“APP策略日志”提供数据基础查询服务")
public Map<String, ?> ntcAppLogs(Page page, NtcAppLog ntcAppLog, Model model, HttpServletRequest request, public Map<String, ?> ntcAppLogs(Page page, NtcAppLog ntcAppLog, Model model, HttpServletRequest request,
HttpServletResponse response) { HttpServletResponse response) {
long start = System.currentTimeMillis(); long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
request, null); null);
Page<NtcAppLog> ntcAppLogPage = null; Page<NtcAppLog> ntcAppLogPage = null;
try { try {
@@ -754,10 +757,10 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time"; orderBy = "found_Time";
} }
String sql = HiveSqlService.getSql(page, ntcAppLog, String sql = HiveSqlService.getSql(page, ntcAppLog,
getTableName(NtcAppLog.class.getSimpleName() + "HiveTable", "ntc_app_log"), getTableName(NtcAppLog.class.getSimpleName() + "HiveTable", "ntc_app_log"), getCol2Col(), orderBy,
getCol2Col(), orderBy, null); null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcAppLog.class, "foundTime", Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcAppLog.class,
"recvTime"); "foundTime", "recvTime");
if (tableMapping == null) { if (tableMapping == null) {
ntcAppLogPage.setList(new ArrayList()); ntcAppLogPage.setList(new ArrayList());
} else { } else {
@@ -790,13 +793,14 @@ public class NtcLogSearchController extends BaseRestController {
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "App日志检索成功", return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "App日志检索成功",
ntcAppLogPage, 0); ntcAppLogPage, 0);
} }
@RequestMapping(value = "/ntcDdosLogs", method = RequestMethod.GET) @RequestMapping(value = "/ntcDdosLogs", method = RequestMethod.GET)
@ApiOperation(value = "DDos日志查询", httpMethod = "GET", notes = "对应配置为“DDOS日志监控”存储动作为丢弃的命中日志。对日志功能“DDOS日志监控”提供数据基础查询服务") @ApiOperation(value = "DDos日志查询", httpMethod = "GET", notes = "对应配置为“DDOS日志监控”存储动作为丢弃的命中日志。对日志功能“DDOS日志监控”提供数据基础查询服务")
public Map<String, ?> ntcDdosLogs(Page page, NtcDdosLog ntcDdosLog, Model model, HttpServletRequest request, public Map<String, ?> ntcDdosLogs(Page page, NtcDdosLog ntcDdosLog, Model model, HttpServletRequest request,
HttpServletResponse response) { HttpServletResponse response) {
long start = System.currentTimeMillis(); long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
request, null); null);
Page<NtcDdosLog> ntcDdosLogPage = null; Page<NtcDdosLog> ntcDdosLogPage = null;
try { try {
@@ -812,10 +816,10 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time"; orderBy = "found_Time";
} }
String sql = HiveSqlService.getSql(page, ntcDdosLog, String sql = HiveSqlService.getSql(page, ntcDdosLog,
getTableName(NtcDdosLog.class.getSimpleName() + "HiveTable", "ntc_ddos_log"), getTableName(NtcDdosLog.class.getSimpleName() + "HiveTable", "ntc_ddos_log"), getCol2Col(), orderBy,
getCol2Col(), orderBy, null); null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcDdosLog.class, "foundTime", Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql,
"recvTime"); NtcDdosLog.class, "foundTime", "recvTime");
if (tableMapping == null) { if (tableMapping == null) {
ntcDdosLogPage.setList(new ArrayList()); ntcDdosLogPage.setList(new ArrayList());
} else { } else {
@@ -854,8 +858,8 @@ public class NtcLogSearchController extends BaseRestController {
public Map<String, ?> ntcP2pLogs(Page page, NtcP2pLog ntcP2pLog, Model model, HttpServletRequest request, public Map<String, ?> ntcP2pLogs(Page page, NtcP2pLog ntcP2pLog, Model model, HttpServletRequest request,
HttpServletResponse response) { HttpServletResponse response) {
long start = System.currentTimeMillis(); long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
request, null); null);
Page<NtcP2pLog> ntcP2pLogPage = null; Page<NtcP2pLog> ntcP2pLogPage = null;
try { try {
@@ -872,11 +876,11 @@ public class NtcLogSearchController extends BaseRestController {
} }
String sql = HiveSqlService.getSql(page, ntcP2pLog, String sql = HiveSqlService.getSql(page, ntcP2pLog,
getTableName(NtcP2pLog.class.getSimpleName() + "HiveTable", "ntc_p2p_log"), getTableName(NtcP2pLog.class.getSimpleName() + "HiveTable", "ntc_p2p_log"), getCol2Col(), orderBy,
getCol2Col(), orderBy, null); null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcP2pLog.class, "foundTime", Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcP2pLog.class,
"recvTime"); "foundTime", "recvTime");
if (tableMapping == null) { if (tableMapping == null) {
ntcP2pLogPage.setList(new ArrayList()); ntcP2pLogPage.setList(new ArrayList());
} else { } else {
@@ -915,8 +919,8 @@ public class NtcLogSearchController extends BaseRestController {
public Map<String, ?> ntcBgpLogs(Page page, NtcBgpLog ntcBgpLog, Model model, HttpServletRequest request, public Map<String, ?> ntcBgpLogs(Page page, NtcBgpLog ntcBgpLog, Model model, HttpServletRequest request,
HttpServletResponse response) { HttpServletResponse response) {
long start = System.currentTimeMillis(); long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
request, null); null);
Page<NtcBgpLog> ntcBgpLogPage = null; Page<NtcBgpLog> ntcBgpLogPage = null;
try { try {
@@ -933,11 +937,11 @@ public class NtcLogSearchController extends BaseRestController {
} }
String sql = HiveSqlService.getSql(page, ntcBgpLog, String sql = HiveSqlService.getSql(page, ntcBgpLog,
getTableName(NtcBgpLog.class.getSimpleName() + "HiveTable", "ntc_bgp_log"), getTableName(NtcBgpLog.class.getSimpleName() + "HiveTable", "ntc_bgp_log"), getCol2Col(), orderBy,
getCol2Col(), orderBy, null); null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcBgpLog.class, "foundTime", Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcBgpLog.class,
"recvTime"); "foundTime", "recvTime");
if (tableMapping == null) { if (tableMapping == null) {
ntcBgpLogPage.setList(new ArrayList()); ntcBgpLogPage.setList(new ArrayList());
} else { } else {
@@ -976,8 +980,8 @@ public class NtcLogSearchController extends BaseRestController {
public Map<String, ?> ntcVoipLogs(Page page, NtcVoipLog ntcVoipLog, Model model, HttpServletRequest request, public Map<String, ?> ntcVoipLogs(Page page, NtcVoipLog ntcVoipLog, Model model, HttpServletRequest request,
HttpServletResponse response) { HttpServletResponse response) {
long start = System.currentTimeMillis(); long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
request, null); null);
Page<NtcVoipLog> logPage = null; Page<NtcVoipLog> logPage = null;
try { try {
@@ -993,10 +997,10 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time"; orderBy = "found_Time";
} }
String sql = HiveSqlService.getSql(page, ntcVoipLog, String sql = HiveSqlService.getSql(page, ntcVoipLog,
getTableName(NtcVoipLog.class.getSimpleName() + "HiveTable", "NTC_VOIP_LOG"), getTableName(NtcVoipLog.class.getSimpleName() + "HiveTable", "NTC_VOIP_LOG"), getCol2Col(), orderBy,
getCol2Col(), orderBy, null); null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcVoipLog.class, "foundTime", Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql,
"recvTime"); NtcVoipLog.class, "foundTime", "recvTime");
if (tableMapping == null) { if (tableMapping == null) {
logPage.setList(new ArrayList()); logPage.setList(new ArrayList());
} else { } else {
@@ -1027,16 +1031,17 @@ public class NtcLogSearchController extends BaseRestController {
@RequestMapping(value = "/ntcStreamMediaLogs", method = RequestMethod.GET) @RequestMapping(value = "/ntcStreamMediaLogs", method = RequestMethod.GET)
@ApiOperation(value = "流媒体协议日志查询", httpMethod = "GET", notes = "对日志功能“流媒体协议日志”提供数据基础查询服务") @ApiOperation(value = "流媒体协议日志查询", httpMethod = "GET", notes = "对日志功能“流媒体协议日志”提供数据基础查询服务")
public Map<String, ?> ntcStreamMediaLogs(Page page, NtcStreamingMediaLog ntcStreamMediaLog, Model model, HttpServletRequest request, public Map<String, ?> ntcStreamMediaLogs(Page page, NtcStreamingMediaLog ntcStreamMediaLog, Model model,
HttpServletResponse response) { HttpServletRequest request, HttpServletResponse response) {
long start = System.currentTimeMillis(); long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
request, null); null);
Page<NtcStreamingMediaLog> logPage = null; Page<NtcStreamingMediaLog> logPage = null;
try { try {
resetTime(ntcStreamMediaLog); resetTime(ntcStreamMediaLog);
ntcLogService.queryConditionCheck(auditLogThread, start, ntcStreamMediaLog, NtcStreamingMediaLog.class, page); ntcLogService.queryConditionCheck(auditLogThread, start, ntcStreamMediaLog, NtcStreamingMediaLog.class,
page);
logPage = new Page<NtcStreamingMediaLog>(); logPage = new Page<NtcStreamingMediaLog>();
logPage.setPageNo(page.getPageNo()); logPage.setPageNo(page.getPageNo());
logPage.setPageSize(page.getPageSize()); logPage.setPageSize(page.getPageSize());
@@ -1049,16 +1054,16 @@ public class NtcLogSearchController extends BaseRestController {
String sql = HiveSqlService.getSql(page, ntcStreamMediaLog, String sql = HiveSqlService.getSql(page, ntcStreamMediaLog,
getTableName(NtcStreamingMediaLog.class.getSimpleName() + "HiveTable", "NTC_STREAMING_MEDIA_LOG"), getTableName(NtcStreamingMediaLog.class.getSimpleName() + "HiveTable", "NTC_STREAMING_MEDIA_LOG"),
getCol2Col(), orderBy, null); getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcStreamingMediaLog.class, "foundTime", Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql,
"recvTime"); NtcStreamingMediaLog.class, "foundTime", "recvTime");
if (tableMapping == null) { if (tableMapping == null) {
logPage.setList(new ArrayList()); logPage.setList(new ArrayList());
} else { } else {
List list = tableMapping.get("obj"); List list = tableMapping.get("obj");
if (list.size() > 0) { if (list.size() > 0) {
String jsonString = JsonMapper.toJsonString(list); String jsonString = JsonMapper.toJsonString(list);
List<NtcStreamingMediaLog> List = (java.util.List<NtcStreamingMediaLog>) JsonMapper.fromJsonList(jsonString, List<NtcStreamingMediaLog> List = (java.util.List<NtcStreamingMediaLog>) JsonMapper
NtcStreamingMediaLog.class); .fromJsonList(jsonString, NtcStreamingMediaLog.class);
logPage.setList(List); logPage.setList(List);
logPage.setCount(List.size()); logPage.setCount(List.size());
@@ -1075,17 +1080,17 @@ public class NtcLogSearchController extends BaseRestController {
} }
throw ((RestServiceException) e); throw ((RestServiceException) e);
} }
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "流媒体协议日志检索成功", return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "流媒体协议日志检索成功", logPage,
logPage, 0); 0);
} }
@RequestMapping(value = "/ntcKeywordsUrlLogs", method = RequestMethod.GET) @RequestMapping(value = "/ntcKeywordsUrlLogs", method = RequestMethod.GET)
@ApiOperation(value = "NTC关键字转URL日志查询", httpMethod = "GET", notes = "对日志功能“NTC关键字转URL日志”提供数据基础查询服务") @ApiOperation(value = "NTC关键字转URL日志查询", httpMethod = "GET", notes = "对日志功能“NTC关键字转URL日志”提供数据基础查询服务")
public Map<String, ?> ntcKeywordsUrlLogs(Page page, NtcKeywordsUrlLog ntcKeywordsUrlLog, Model model, HttpServletRequest request, public Map<String, ?> ntcKeywordsUrlLogs(Page page, NtcKeywordsUrlLog ntcKeywordsUrlLog, Model model,
HttpServletResponse response) { HttpServletRequest request, HttpServletResponse response) {
long start = System.currentTimeMillis(); long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
request, null); null);
Page<NtcKeywordsUrlLog> logPage = null; Page<NtcKeywordsUrlLog> logPage = null;
try { try {
@@ -1103,19 +1108,23 @@ public class NtcLogSearchController extends BaseRestController {
String sql = HiveSqlService.getSql(page, ntcKeywordsUrlLog, String sql = HiveSqlService.getSql(page, ntcKeywordsUrlLog,
getTableName(NtcKeywordsUrlLog.class.getSimpleName() + "HiveTable", "NTC_KEYWORDS_URL_LOG"), getTableName(NtcKeywordsUrlLog.class.getSimpleName() + "HiveTable", "NTC_KEYWORDS_URL_LOG"),
getCol2Col(), orderBy, null); getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcKeywordsUrlLog.class, "foundTime", Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql,
"recvTime"); NtcKeywordsUrlLog.class, "foundTime", "recvTime");
if (tableMapping == null) { if (tableMapping == null) {
logPage.setList(new ArrayList()); logPage.setList(new ArrayList());
} else { } else {
List list = tableMapping.get("obj"); List list = tableMapping.get("obj");
if (list.size() > 0) { if (list.size() > 0) {
String jsonString = JsonMapper.toJsonString(list); String jsonString = JsonMapper.toJsonString(list);
List<NtcKeywordsUrlLog> List = (java.util.List<NtcKeywordsUrlLog>) JsonMapper.fromJsonList(jsonString, List<NtcKeywordsUrlLog> List = (java.util.List<NtcKeywordsUrlLog>) JsonMapper
NtcKeywordsUrlLog.class); .fromJsonList(jsonString, NtcKeywordsUrlLog.class);
logPage.setList(List); logPage.setList(List);
logPage.setCount(List.size()); Long hivePageCount = HiveSqlService.getLogCount(ntcKeywordsUrlLog,
getTableName(NtcKeywordsUrlLog.class.getSimpleName() + "HiveTable", "NTC_KEYWORDS_URL_LOG"),
getCol2Col());
logPage.setLast(getLastPageNum(hivePageCount.intValue(), page.getPageSize()));
logPage.setCount(hivePageCount);
} else { } else {
logPage.setList(new ArrayList()); logPage.setList(new ArrayList());
} }
@@ -1135,11 +1144,13 @@ public class NtcLogSearchController extends BaseRestController {
/** /**
* 判断开始和结束时间是否为null,如果为null则初始化时间 * 判断开始和结束时间是否为null,如果为null则初始化时间
*
* @param entity * @param entity
* @throws Exception * @throws Exception
*/ */
public void resetTime(LogEntity<?> entity) throws Exception { public void resetTime(LogEntity<?> entity) throws Exception {
if (StringUtil.isEmpty(entity.getSearchFoundStartTime())&& StringUtil.isEmpty(entity.getSearchFoundEndTime())) { if (StringUtil.isEmpty(entity.getSearchFoundStartTime())
&& StringUtil.isEmpty(entity.getSearchFoundEndTime())) {
Map<String, String> map = DateUtils.getLocalTime(entity.getSearchFoundStartTime(), Map<String, String> map = DateUtils.getLocalTime(entity.getSearchFoundStartTime(),
entity.getSearchFoundEndTime(), Constants.LOG_LOCAL_TIME, "minute"); entity.getSearchFoundEndTime(), Constants.LOG_LOCAL_TIME, "minute");
entity.setSearchFoundStartTime(map.get("startTime")); entity.setSearchFoundStartTime(map.get("startTime"));
@@ -1149,6 +1160,7 @@ public class NtcLogSearchController extends BaseRestController {
/** /**
* 将searchFoundStartTime,searchFoundEndTime与foundTime进行关联 * 将searchFoundStartTime,searchFoundEndTime与foundTime进行关联
*
* @return * @return
*/ */
public Map<String, Map<String, String>> getCol2Col() { public Map<String, Map<String, String>> getCol2Col() {

View File

@@ -50,6 +50,15 @@ public class HiveSqlService {
public static String getSql(Page page, Object bean, String tableName, Map<String, Map<String, String>> col2col, public static String getSql(Page page, Object bean, String tableName, Map<String, Map<String, String>> col2col,
String orderBy, String searchActiveSys) throws Exception { String orderBy, String searchActiveSys) throws Exception {
if (Constants.ISUSECLICKHOUSE) {
return getSqlByClickHouse(page, bean, tableName, col2col, orderBy, searchActiveSys);
} else {
return getSqlByHive(page, bean, tableName, col2col, orderBy, searchActiveSys);
}
}
public static String getSqlByClickHouse(Page page, Object bean, String tableName,
Map<String, Map<String, String>> col2col, String orderBy, String searchActiveSys) throws Exception {
tableName = tableName.toLowerCase(); tableName = tableName.toLowerCase();
String showColmun = getFiledsSql(bean.getClass().getSimpleName(), page.getFields()); String showColmun = getFiledsSql(bean.getClass().getSimpleName(), page.getFields());
StringBuffer sql = new StringBuffer(); StringBuffer sql = new StringBuffer();
@@ -73,9 +82,107 @@ public class HiveSqlService {
if (!StringUtil.isEmpty(bean)) { if (!StringUtil.isEmpty(bean)) {
Class<?> clazz = bean.getClass(); Class<?> clazz = bean.getClass();
Map<String, String> filedsType = null; Map<String, String> filedsType = null;
if (Constants.ISUSECLICKHOUSE) {
filedsType = getFiledsType(bean); filedsType = getFiledsType(bean);
for (; clazz != Object.class; clazz = clazz.getSuperclass()) {
Field[] fields = clazz.getDeclaredFields();
for (int i = 0; i < fields.length; i++) {
// 现在gwall日志表结构中只有数值和字符串两种类型,数值都是int类型没有bigint所以不需要加L,
Field f = fields[i];
String key = f.getName();// 获取字段名
if (f.getType().getName().equals("java.lang.String") && key.startsWith("search")) {
Object value = getFieldValue(bean, key);
if (!StringUtil.isEmpty(value)) {
setFieldValue(bean, key, value.toString().trim());
if (key.endsWith("Time")) {// 日期开始或结束的字段
if (col2col.containsKey(key)) {
Long partition = Long.parseLong(sdf2.format(sdf.parse(value.toString().trim())));
value = sdf.parse(value.toString().trim()).getTime() / 1000;
if (col2col.get(key).get("start") != null) {
whereSB.append(" and "
+ filedAndColumnMap.get(col2col.get(key).get("start")).toLowerCase()
+ ">=" + value);
} else {
whereSB.append(" and "
+ filedAndColumnMap.get(col2col.get(key).get("end")).toLowerCase() + "<"
+ value);
} }
}
} else {
if (key.toLowerCase().startsWith("search")) {
key = key.replace("search", "");
key = key.substring(0, 1).toLowerCase() + key.substring(1);
}
// clickhouse写法
String type = filedsType.get(key).trim();
if (type.equals("java.lang.String")) {
String field = filedAndColumnMap.get(key).toLowerCase();
if (field.equals("url")) {
whereSB.append(" and " + field + " like '" + value.toString().trim() + "%'");
} else {
whereSB.append(" and " + field + "='" + value.toString().trim() + "'");
}
} else if (type.equals("java.lang.Integer") || type.equals("int")
|| type.equals("java.lang.Long") || type.equals("long")) {
whereSB.append(" and " + filedAndColumnMap.get(key).toLowerCase() + "="
+ value.toString().trim());
}
}
}
}
}
}
}
if (whereSB.length() > 0) {
int indexOf = whereSB.indexOf("and") + "and".length();
sql.append(" where " + whereSB.substring(indexOf));
}
Integer startNum = (page.getPageNo() - 1) * page.getPageSize();
if (orderBy.toLowerCase().contains("asc") || orderBy.toLowerCase().contains("desc")) {
sql.append(" order by " + orderBy.toLowerCase());
} else {
sql.append(" order by " + orderBy.toLowerCase() + " desc");
}
sql.append(" limit " + startNum + "," + page.getPageSize());// clickhouse的分页与mysql相同
return sql.toString();
}
public static String getSqlByHive(Page page, Object bean, String tableName,
Map<String, Map<String, String>> col2col, String orderBy, String searchActiveSys) throws Exception {
tableName = tableName.toLowerCase();
String showColmun = getFiledsSql(bean.getClass().getSimpleName(), page.getFields());
StringBuffer sql = new StringBuffer();
Map<String, String> filedAndColumnMap = getFiledAndColumnMap(bean.getClass());
if (null == showColmun || showColmun.equals("")) {
for (String key : filedAndColumnMap.keySet()) {
if (!filedAndColumnMap.get(key).toLowerCase().equals("id")) {
sql.append(filedAndColumnMap.get(key) + ",");
}
}
} else {
sql.append(showColmun);
}
String sqlTrim = sql.toString().trim();
if (sqlTrim.endsWith(",")) {
sqlTrim = sqlTrim.substring(0, sqlTrim.length() - 1);
}
if (orderBy.toLowerCase().contains("asc") || orderBy.toLowerCase().contains("desc")) {
orderBy = " order by " + orderBy;
} else {
orderBy = " order by " + orderBy + " desc ";
}
sql.setLength(0);
sql.append(" select " + sqlTrim.toLowerCase() + " from (select " + sqlTrim.toLowerCase()
+ ",row_number() over(partition by found_time_partition " + orderBy + ") as row_num from "
+ tableName.toLowerCase() + " ");
StringBuffer whereSB = new StringBuffer();
if (!StringUtil.isEmpty(bean)) {
Class<?> clazz = bean.getClass();
Map<String, String> filedsType = null;
for (; clazz != Object.class; clazz = clazz.getSuperclass()) { for (; clazz != Object.class; clazz = clazz.getSuperclass()) {
// 获取所有的字段包括public,private,protected,private // 获取所有的字段包括public,private,protected,private
// Field[] fields = bean.getClass().getDeclaredFields(); // Field[] fields = bean.getClass().getDeclaredFields();
@@ -126,12 +233,11 @@ public class HiveSqlService {
key = key.replace("search", ""); key = key.replace("search", "");
key = key.substring(0, 1).toLowerCase() + key.substring(1); key = key.substring(0, 1).toLowerCase() + key.substring(1);
} }
if (!Constants.ISUSECLICKHOUSE) {// hive写法
if (typeName.equals("java.lang.String")) { if (typeName.equals("java.lang.String")) {
String field = filedAndColumnMap.get(key); String field = filedAndColumnMap.get(key);
if (field.equals("url")) { if (field.equals("url")) {
whereSB.append( whereSB.append(" and " + field + " like '" + value.toString().trim() + "%'");
" and " + field + " like '" + value.toString().trim() + "%'");
} else { } else {
whereSB.append(" and " + field + "='" + value.toString().trim() + "'"); whereSB.append(" and " + field + "='" + value.toString().trim() + "'");
} }
@@ -140,31 +246,15 @@ public class HiveSqlService {
" and " + filedAndColumnMap.get(key) + "=" + value.toString().trim()); " and " + filedAndColumnMap.get(key) + "=" + value.toString().trim());
} else if (typeName.equals("java.lang.Long") || typeName.equals("long")) { } else if (typeName.equals("java.lang.Long") || typeName.equals("long")) {
whereSB.append(" and " + filedAndColumnMap.get(key) + "="
+ value.toString().trim() + "L");
}
} else {// clickhouse写法
String type = filedsType.get(key).trim();
if (type.equals("java.lang.String")) {
String field = filedAndColumnMap.get(key).toLowerCase();
if (field.equals("url")) {
whereSB.append( whereSB.append(
" and " + field + " like '" + value.toString().trim() + "%'"); " and " + filedAndColumnMap.get(key) + "=" + value.toString().trim() + "L");
} else {
whereSB.append(" and " + field + "='" + value.toString().trim() + "'");
}
} else if (type.equals("java.lang.Integer") || type.equals("int")
|| type.equals("java.lang.Long") || type.equals("long")) {
whereSB.append(" and " + filedAndColumnMap.get(key).toLowerCase() + "="
+ value.toString().trim());
}
} }
} }
} }
} }
} }
if (!Constants.ISUSECLICKHOUSE) {// hive需要加这个字段
if (null != foundTimePartStart) { if (null != foundTimePartStart) {
// sql.append(" and found_time_partition>=" + foundTimePartStart + "L"); // sql.append(" and found_time_partition>=" + foundTimePartStart + "L");
whereSB.append(" and found_time_partition>=" + foundTimePartStart); whereSB.append(" and found_time_partition>=" + foundTimePartStart);
@@ -173,7 +263,6 @@ public class HiveSqlService {
// sql.append(" and found_time_partition<" + foundTimePartEnd + "L"); // sql.append(" and found_time_partition<" + foundTimePartEnd + "L");
whereSB.append(" and found_time_partition<=" + foundTimePartEnd); whereSB.append(" and found_time_partition<=" + foundTimePartEnd);
} }
}
} }
} }
@@ -182,32 +271,101 @@ public class HiveSqlService {
int indexOf = whereSB.indexOf("and") + "and".length(); int indexOf = whereSB.indexOf("and") + "and".length();
sql.append(" where " + whereSB.substring(indexOf)); sql.append(" where " + whereSB.substring(indexOf));
} }
if (Constants.ISUSECLICKHOUSE) { Integer startNum = (page.getPageNo() - 1) * page.getPageSize() + 1;
// Integer startNum = (page.getPageNo() - 1) * page.getPageSize() + 1; Integer endNum = startNum - 1 + page.getPageSize();
Integer startNum = (page.getPageNo() - 1) * page.getPageSize(); sql.append(" ) t where row_Num between " + startNum + " and " + endNum);
// Integer endNum = startNum - 1 + page.getPageSize();
if (orderBy.toLowerCase().contains("asc") || orderBy.toLowerCase().contains("desc")) {
sql.append(" order by " + orderBy.toLowerCase());
} else {
sql.append(" order by " + orderBy.toLowerCase() + " desc");
}
sql.append(" limit " + startNum + "," + page.getPageSize());// clickhouse的分页与mysql相同
} else {
// sql.append(" order by " + orderBy + " limit 10000) t1) t2 where
// row_Num between " + startNum + " and " + endNum);
sql.append(" limit " + Constants.EVERY_GETHIVEDATANUM);
logger.info("获取数据中心日志sql==================={}", sql); logger.info("获取数据中心日志sql==================={}", sql);
}
return sql.toString(); return sql.toString();
} }
public static Long getHivePageCount(Object bean, String countKey, String tableName, public static Long getLogCount(Object bean, String tableName, Map<String, Map<String, String>> col2col) throws Exception {
Map<String, Map<String, String>> col2col, String searchActiveSys) throws Exception { if (Constants.ISUSECLICKHOUSE) {
return getLogCountFromClickHouse(bean, tableName, col2col);
} else {
return getLogCountFromHive(bean, tableName, col2col);
}
}
public static Long getLogCountFromClickHouse(Object bean, String tableName,
Map<String, Map<String, String>> col2col) throws Exception {
tableName = tableName.toLowerCase(); tableName = tableName.toLowerCase();
StringBuffer sql = new StringBuffer(); StringBuffer sql = new StringBuffer();
Map<String, String> filedAndColumnMap = getFiledAndColumnMap(bean.getClass()); Map<String, String> filedAndColumnMap = getFiledAndColumnMap(bean.getClass());
sql.append("select count(1) from " + tableName + " where 1=1 "); sql.append("select count(1) from " + tableName + " ");
StringBuffer whereSB = new StringBuffer();
if (!StringUtil.isEmpty(bean)) {
Class<?> clazz = bean.getClass();
Map<String, String> filedsType = null;
filedsType = getFiledsType(bean);
for (; clazz != Object.class; clazz = clazz.getSuperclass()) {
Field[] fields = clazz.getDeclaredFields();
for (int i = 0; i < fields.length; i++) {
// 现在gwall日志表结构中只有数值和字符串两种类型,数值都是int类型没有bigint所以不需要加L,
Field f = fields[i];
String key = f.getName();// 获取字段名
if (f.getType().getName().equals("java.lang.String") && key.startsWith("search")) {
Object value = getFieldValue(bean, key);
if (!StringUtil.isEmpty(value)) {
setFieldValue(bean, key, value.toString().trim());
if (key.endsWith("Time")) {// 日期开始或结束的字段
if (col2col.containsKey(key)) {
Long partition = Long.parseLong(sdf2.format(sdf.parse(value.toString().trim())));
value = sdf.parse(value.toString().trim()).getTime() / 1000;
if (col2col.get(key).get("start") != null) {
whereSB.append(" and "
+ filedAndColumnMap.get(col2col.get(key).get("start")).toLowerCase()
+ ">=" + value);
} else {
whereSB.append(" and "
+ filedAndColumnMap.get(col2col.get(key).get("end")).toLowerCase() + "<"
+ value);
}
}
} else {
if (key.toLowerCase().startsWith("search")) {
key = key.replace("search", "");
key = key.substring(0, 1).toLowerCase() + key.substring(1);
}
// clickhouse写法
String type = filedsType.get(key).trim();
if (type.equals("java.lang.String")) {
String field = filedAndColumnMap.get(key).toLowerCase();
if (field.equals("url")) {
whereSB.append(" and " + field + " like '" + value.toString().trim() + "%'");
} else {
whereSB.append(" and " + field + "='" + value.toString().trim() + "'");
}
} else if (type.equals("java.lang.Integer") || type.equals("int")
|| type.equals("java.lang.Long") || type.equals("long")) {
whereSB.append(" and " + filedAndColumnMap.get(key).toLowerCase() + "="
+ value.toString().trim());
}
}
}
}
}
}
}
if (whereSB.length() > 0) {
int indexOf = whereSB.indexOf("and") + "and".length();
sql.append(" where " + whereSB.substring(indexOf));
}
logger.info("获取数据中心日志总条数sql==================" + sql.toString());
Long count = new LogJDBCByDruid().getCount(sql.toString());
return count;
}
public static Long getLogCountFromHive(Object bean, String tableName, Map<String, Map<String, String>> col2col)
throws Exception {
tableName = tableName.toLowerCase();
StringBuffer sql = new StringBuffer();
Map<String, String> filedAndColumnMap = getFiledAndColumnMap(bean.getClass());
sql.append("select count(1) from " + tableName + " ");
StringBuffer whereSB = new StringBuffer();
if (bean != null) { if (bean != null) {
Class<?> clazz = bean.getClass(); Class<?> clazz = bean.getClass();
for (; clazz != Object.class; clazz = clazz.getSuperclass()) { for (; clazz != Object.class; clazz = clazz.getSuperclass()) {
@@ -227,12 +385,13 @@ public class HiveSqlService {
setFieldValue(bean, key, value.toString().trim()); setFieldValue(bean, key, value.toString().trim());
if (key.endsWith("Time")) {// 日期开始或结束的字段 if (key.endsWith("Time")) {// 日期开始或结束的字段
if (col2col.containsKey(key)) { if (col2col.containsKey(key)) {
Long partition = Long.parseLong(sdf2.format(sdf.parse(value.toString().trim())));
value = sdf.parse(value.toString().trim()).getTime() / 1000; value = sdf.parse(value.toString().trim()).getTime() / 1000;
if (key.toLowerCase().equals("searchfoundstarttime")) { if (key.toLowerCase().equals("searchfoundstarttime")) {
foundTimePartStart = Long.parseLong(value.toString()) / 3600L / 24L; foundTimePartStart = partition;
} }
if (key.toLowerCase().equals("searchfoundendtime")) { if (key.toLowerCase().equals("searchfoundendtime")) {
foundTimePartEnd = Long.parseLong(value.toString()) / 3600L / 24L; foundTimePartEnd = partition;
} }
if (col2col.get(key).get("start") != null) { if (col2col.get(key).get("start") != null) {
// sql.append(" and " + // sql.append(" and " +
@@ -240,15 +399,17 @@ public class HiveSqlService {
// + ">=to_date('" + // + ">=to_date('" +
// value.toString().trim() // value.toString().trim()
// + "','yyyy-mm-dd HH24:mi:ss')"); // + "','yyyy-mm-dd HH24:mi:ss')");
sql.append(" and " + filedAndColumnMap.get(col2col.get(key).get("start")) + ">=" whereSB.append(" and "
+ value); + filedAndColumnMap.get(col2col.get(key).get("start")).toLowerCase()
+ ">=" + value);
} else { } else {
// sql.append(" and " + // sql.append(" and " +
// filedAndColumnMap.get(col2col.get(key).get("end")) // filedAndColumnMap.get(col2col.get(key).get("end"))
// + "<=to_date('" + // + "<=to_date('" +
// value.toString().trim() // value.toString().trim()
// + "','yyyy-mm-dd HH24:mi:ss')"); // + "','yyyy-mm-dd HH24:mi:ss')");
sql.append(" and " + filedAndColumnMap.get(col2col.get(key).get("end")) + "<" whereSB.append(" and "
+ filedAndColumnMap.get(col2col.get(key).get("end")).toLowerCase() + "<"
+ value); + value);
} }
} }
@@ -259,15 +420,21 @@ public class HiveSqlService {
} }
if (typeName.equals("java.lang.String")) { if (typeName.equals("java.lang.String")) {
sql.append(" and " + filedAndColumnMap.get(key) + "='" + value.toString().trim() String field = filedAndColumnMap.get(key);
+ "'"); if (field.equals("url")) {
whereSB.append(" and " + field + " like '" + value.toString().trim() + "%'");
} else {
whereSB.append(" and " + field + "='" + value.toString().trim() + "'");
}
} else if (typeName.equals("java.lang.Integer") || typeName.equals("int")) { } else if (typeName.equals("java.lang.Integer") || typeName.equals("int")) {
sql.append(" and " + filedAndColumnMap.get(key) + "=" + value.toString().trim()); whereSB.append(
" and " + filedAndColumnMap.get(key) + "=" + value.toString().trim());
} else if (typeName.equals("java.lang.Long") || typeName.equals("long")) { } else if (typeName.equals("java.lang.Long") || typeName.equals("long")) {
sql.append( whereSB.append(
" and " + filedAndColumnMap.get(key) + "=" + value.toString().trim() + "L"); " and " + filedAndColumnMap.get(key) + "=" + value.toString().trim() + "L");
} }
} }
} }
@@ -275,33 +442,21 @@ public class HiveSqlService {
} }
if (null != foundTimePartStart) { if (null != foundTimePartStart) {
// sql.append(" and found_time_partition>=" + foundTimePartStart + "L"); // sql.append(" and found_time_partition>=" + foundTimePartStart + "L");
sql.append(" and found_time_partition>=" + foundTimePartStart); whereSB.append(" and found_time_partition>=" + foundTimePartStart);
} }
if (null != foundTimePartEnd) { if (null != foundTimePartEnd) {
// sql.append(" and found_time_partition<" + foundTimePartEnd + "L"); // sql.append(" and found_time_partition<" + foundTimePartEnd + "L");
sql.append(" and found_time_partition<" + foundTimePartEnd); whereSB.append(" and found_time_partition<=" + foundTimePartEnd);
} }
} }
} }
if (whereSB.length() > 0) {
int indexOf = whereSB.indexOf("and") + "and".length();
sql.append(" where " + whereSB.substring(indexOf));
}
logger.info("获取数据中心日志总条数sql==================" + sql.toString()); logger.info("获取数据中心日志总条数sql==================" + sql.toString());
// ResultSet countRs = HiveJDBC.query(countSql.toString()); Long count = new LogJDBCByDruid().getCount(sql.toString());
ResultSet countRs = new LogJDBCByDruid().query(sql.toString());
String countStr = null;
while (countRs.next()) {
countStr = countRs.getObject(1).toString();
break;
}
if (countStr == null || countStr.equals("")) {
logger.info("获取数据中心日志总条数成功总共===================0条配置");
return 0l;
}
Long count = Long.valueOf(countStr);
logger.info("获取数据中心日志总条数成功总共===================" + count + "条配置");
// HiveJDBC.closeConn();
if (Constants.IS_OPEN_REDIS && Constants.DATACENTER_OPEN_REDIS) {
new SaveRedisThread(countKey, count, Constants.HIVE_EXPIRE).start();
}
return count; return count;
} }