1: Add pagination and sorting for log queries from Hive

2: Add total page count and total record count to log queries
renkaige
2018-10-30 11:13:45 +08:00
parent c0325878e5
commit d144de73b5
5 changed files with 528 additions and 313 deletions


@@ -641,6 +641,15 @@ public class Page<T> {
public int getMaxResults(){
return getPageSize();
}
public void setLast(int last) {
this.last = last;
}
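A hedged usage sketch for the new setter (the counts are illustrative; getLastPageNum is added to BaseRestController later in this commit):

Page<NtcIpLog> page = new Page<NtcIpLog>();
// Illustrative only: 101 matching rows at a page size of 20 -> 6 pages
page.setCount(101);
page.setLast(6); // total page count, i.e. ceil(101 / 20)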
/**
* @Title: getFiledsSql
* @Description: replace the property names in fields with their column names


@@ -29,9 +29,16 @@ import com.nis.web.service.SpringContextHolder;
/**
*
* <p>Title: HiveJDBCByDruid</p>
* <p>Description: Query Hive through the Druid connection pool and parse the results</p>
* <p>Company: IIE</p>
* <p>
* Title: HiveJDBCByDruid
* </p>
* <p>
* Description: Query Hive through the Druid connection pool and parse the results
* </p>
* <p>
* Company: IIE
* </p>
*
* @author rkg
@date 2018-08-20
*
@@ -169,6 +176,7 @@ public class LogJDBCByDruid {
/**
* Use reflection to get the names of Date-typed fields in the class
*
* @param type
* @return
* @throws Exception
@@ -240,6 +248,7 @@ public class LogJDBCByDruid {
return map;
}
public ResultSet query(String sql) throws Exception {
conn = getConnection();
logger.info("连接数据中心日志库成功--------------------------");
@@ -255,4 +264,25 @@ public class LogJDBCByDruid {
}
public long getCount(String sql) throws Exception {
try {
conn = getConnection();
st = conn.createStatement();
rs = st.executeQuery(sql);
String countStr = null;
while (rs.next()) {
countStr = rs.getObject(1).toString();
break;
}
if (countStr == null || countStr.trim().equals("")) {
logger.info("Fetched total data center log count===================0 records");
return 0L;
} else {
return Long.valueOf(countStr);
}
} finally {
closeConn();
}
}
}
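A minimal usage sketch for the new getCount helper (the count SQL below is illustrative; in this commit it is produced by HiveSqlService.getLogCount):

// Exceptions propagate to the caller; the connection is closed in the finally block
String countSql = "select count(1) from ntc_ip_log where found_time >= 1540861200";
long total = new LogJDBCByDruid().getCount(countSql); // returns 0 for an empty result set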


@@ -140,6 +140,7 @@ public class BaseRestController {
/**
* Do not insert the log into the database; only return it to the front end
*
* @param time
* @param request
* @param response
@@ -223,6 +224,7 @@ public class BaseRestController {
/**
* Standard format for log result responses
*
* @param auditLogThread
* @param executedTime
* @param request
@@ -408,4 +410,11 @@ public class BaseRestController {
return Configurations.getStringProperty(key, defaultTableName);
}
protected int getLastPageNum(int totalCount, int pageSize) {
int pageNum = totalCount / pageSize;
if (totalCount % pageSize > 0) {
pageNum++;
}
return pageNum;
}
}
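getLastPageNum is plain ceiling division: 101 records at a page size of 20 give 101 / 20 = 5 full pages plus one partial page, so 6 in total. For comparison only, the same result in one expression:

int pageNum = (totalCount + pageSize - 1) / pageSize; // equivalent for positive pageSize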


@@ -16,6 +16,7 @@ import org.springframework.web.bind.annotation.RestController;
import com.nis.domain.LogEntity;
import com.nis.domain.Page;
import com.nis.domain.restful.DkBehaviorLog;
import com.nis.domain.restful.NtcAppLog;
import com.nis.domain.restful.NtcBgpLog;
import com.nis.domain.restful.NtcDdosLog;
@@ -70,8 +71,8 @@ public class NtcLogSearchController extends BaseRestController {
public Map<String, ?> ntcIpLogs(Page page, NtcIpLog ntcIpLog, Model model, HttpServletRequest request,
HttpServletResponse response) {
long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET,
request, null);
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
null);
Page<NtcIpLog> ntcIpLogPage = null;
try {
resetTime(ntcIpLog);
@@ -88,13 +89,14 @@ public class NtcLogSearchController extends BaseRestController {
// ResultSet rs = HiveSqlService.getResultSet(page, ntcIpLog,
// getTableName(NtcIpLog.class.getSimpleName() + "HiveTable", "ntc_ip_log"),
// getCol2Col(), orderBy, null);
// Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs, NtcIpLog.class, "foundTime",
// Map<String, List> tableMapping = HiveJDBC.tableMapping(page, null, rs,
// NtcIpLog.class, "foundTime",
// "recvTime");
String sql = HiveSqlService.getSql(page, ntcIpLog,
getTableName(NtcIpLog.class.getSimpleName() + "HiveTable", "ntc_ip_log"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null,sql , NtcIpLog.class, "foundTime",
"recvTime");
getTableName(NtcIpLog.class.getSimpleName() + "HiveTable", "ntc_ip_log"), getCol2Col(), orderBy,
null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcIpLog.class,
"foundTime", "recvTime");
if (tableMapping == null) {
ntcIpLogPage.setList(new ArrayList<NtcIpLog>());
} else {
@@ -140,8 +142,8 @@ public class NtcLogSearchController extends BaseRestController {
public Map<String, ?> ntcHttpLogs(Page page, NtcHttpLog ntcHttpLog, Model model, HttpServletRequest request,
HttpServletResponse response) {
long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET,
request, null);
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
null);
Page<NtcHttpLog> ntcHttpLogPage = null;
try {
@@ -157,10 +159,10 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, ntcHttpLog,
getTableName(NtcHttpLog.class.getSimpleName() + "HiveTable", "ntc_http_log"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcHttpLog.class, "foundTime",
"recvTime");
getTableName(NtcHttpLog.class.getSimpleName() + "HiveTable", "ntc_http_log"), getCol2Col(), orderBy,
null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql,
NtcHttpLog.class, "foundTime", "recvTime");
if (tableMapping == null) {
ntcHttpLogPage.setList(new ArrayList());
} else {
@@ -200,8 +202,8 @@ public class NtcLogSearchController extends BaseRestController {
public Map<String, ?> ntcDnsLogs(Page page, NtcDnsLog ntcDnsLog, Model model, HttpServletRequest request,
HttpServletResponse response) {
long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET,
request, null);
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
null);
Page<NtcDnsLog> ntcDnsLogPage = null;
try {
@@ -217,10 +219,10 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, ntcDnsLog,
getTableName(NtcDnsLog.class.getSimpleName() + "HiveTable", "ntc_dns_log"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcDnsLog.class, "foundTime",
"recvTime");
getTableName(NtcDnsLog.class.getSimpleName() + "HiveTable", "ntc_dns_log"), getCol2Col(), orderBy,
null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcDnsLog.class,
"foundTime", "recvTime");
if (tableMapping == null) {
ntcDnsLogPage.setList(new ArrayList());
} else {
@@ -260,8 +262,8 @@ public class NtcLogSearchController extends BaseRestController {
public Map<String, ?> ntcMailLogs(Page page, NtcMailLog ntcMailLog, Model model, HttpServletRequest request,
HttpServletResponse response) {
long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET,
request, null);
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
null);
Page<NtcMailLog> ntcMailLogPage = null;
try {
@@ -277,10 +279,10 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, ntcMailLog,
getTableName(NtcMailLog.class.getSimpleName() + "HiveTable", "ntc_mail_log"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcMailLog.class, "foundTime",
"recvTime");
getTableName(NtcMailLog.class.getSimpleName() + "HiveTable", "ntc_mail_log"), getCol2Col(), orderBy,
null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql,
NtcMailLog.class, "foundTime", "recvTime");
if (tableMapping == null) {
ntcMailLogPage.setList(new ArrayList());
} else {
@@ -320,8 +322,8 @@ public class NtcLogSearchController extends BaseRestController {
public Map<String, ?> ntcSslLogs(Page page, NtcSslLog ntcSslLog, Model model, HttpServletRequest request,
HttpServletResponse response) {
long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET,
request, null);
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
null);
Page<NtcSslLog> ntcSslLogPage = null;
try {
@@ -337,10 +339,10 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, ntcSslLog,
getTableName(NtcSslLog.class.getSimpleName() + "HiveTable", "ntc_ssl_log"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcSslLog.class, "foundTime",
"recvTime");
getTableName(NtcSslLog.class.getSimpleName() + "HiveTable", "ntc_ssl_log"), getCol2Col(), orderBy,
null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcSslLog.class,
"foundTime", "recvTime");
if (tableMapping == null) {
ntcSslLogPage.setList(new ArrayList());
} else {
@@ -380,8 +382,8 @@ public class NtcLogSearchController extends BaseRestController {
public Map<String, ?> ntcPptpLogs(Page page, NtcPptpLog ntcPptpLog, Model model, HttpServletRequest request,
HttpServletResponse response) {
long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET,
request, null);
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
null);
Page<NtcPptpLog> ntcPptpLogPage = null;
try {
@@ -397,10 +399,10 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, ntcPptpLog,
getTableName(NtcPptpLog.class.getSimpleName() + "HiveTable", "ntc_pptp_log"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcPptpLog.class, "foundTime",
"recvTime");
getTableName(NtcPptpLog.class.getSimpleName() + "HiveTable", "ntc_pptp_log"), getCol2Col(), orderBy,
null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql,
NtcPptpLog.class, "foundTime", "recvTime");
if (tableMapping == null) {
ntcPptpLogPage.setList(new ArrayList());
} else {
@@ -439,8 +441,8 @@ public class NtcLogSearchController extends BaseRestController {
public Map<String, ?> ntcL2tpLogs(Page page, NtcL2tpLog ntcL2tpLog, Model model, HttpServletRequest request,
HttpServletResponse response) {
long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET,
request, null);
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
null);
Page<NtcL2tpLog> ntcL2tpLogPage = null;
try {
@@ -456,10 +458,10 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, ntcL2tpLog,
getTableName(NtcL2tpLog.class.getSimpleName() + "HiveTable", "ntc_l2tp_log"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcL2tpLog.class, "foundTime",
"recvTime");
getTableName(NtcL2tpLog.class.getSimpleName() + "HiveTable", "ntc_l2tp_log"), getCol2Col(), orderBy,
null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql,
NtcL2tpLog.class, "foundTime", "recvTime");
if (tableMapping == null) {
ntcL2tpLogPage.setList(new ArrayList());
} else {
@@ -499,8 +501,8 @@ public class NtcLogSearchController extends BaseRestController {
public Map<String, ?> ntcOpenvpnLogs(Page page, NtcOpenvpnLog ntcOpenvpnLog, Model model,
HttpServletRequest request, HttpServletResponse response) {
long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET,
request, null);
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
null);
Page<NtcOpenvpnLog> ntcOpenvpnLogPage = null;
try {
@@ -516,10 +518,10 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, ntcOpenvpnLog,
getTableName(NtcOpenvpnLog.class.getSimpleName() + "HiveTable", "ntc_openvpn_log"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcOpenvpnLog.class, "foundTime",
"recvTime");
getTableName(NtcOpenvpnLog.class.getSimpleName() + "HiveTable", "ntc_openvpn_log"), getCol2Col(),
orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql,
NtcOpenvpnLog.class, "foundTime", "recvTime");
if (tableMapping == null) {
ntcOpenvpnLogPage.setList(new ArrayList());
} else {
@@ -559,8 +561,8 @@ public class NtcLogSearchController extends BaseRestController {
public Map<String, ?> ntcIpsecLogs(Page page, NtcIpsecLog ntcIpsecLog, Model model, HttpServletRequest request,
HttpServletResponse response) {
long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET,
request, null);
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
null);
Page<NtcIpsecLog> ntcIpsecLogPage = null;
try {
@@ -576,10 +578,10 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, ntcIpsecLog,
getTableName(NtcIpsecLog.class.getSimpleName() + "HiveTable", "ntc_ipsec_log"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcIpsecLog.class, "foundTime",
"recvTime");
getTableName(NtcIpsecLog.class.getSimpleName() + "HiveTable", "ntc_ipsec_log"), getCol2Col(),
orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql,
NtcIpsecLog.class, "foundTime", "recvTime");
if (tableMapping == null) {
ntcIpsecLogPage.setList(new ArrayList());
} else {
@@ -619,8 +621,8 @@ public class NtcLogSearchController extends BaseRestController {
public Map<String, ?> ntcSshLogs(Page page, NtcSshLog ntcSshLog, Model model, HttpServletRequest request,
HttpServletResponse response) {
long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET,
request, null);
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
null);
Page<NtcSshLog> ntcSshLogPage = null;
try {
@@ -636,10 +638,10 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, ntcSshLog,
getTableName(NtcSshLog.class.getSimpleName() + "HiveTable", "ntc_ssh_log"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcSshLog.class, "foundTime",
"recvTime");
getTableName(NtcSshLog.class.getSimpleName() + "HiveTable", "ntc_ssh_log"), getCol2Col(), orderBy,
null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcSshLog.class,
"foundTime", "recvTime");
if (tableMapping == null) {
ntcSshLogPage.setList(new ArrayList());
} else {
@@ -679,8 +681,8 @@ public class NtcLogSearchController extends BaseRestController {
public Map<String, ?> ntcFtpLogs(Page page, NtcFtpLog ntcFtpLog, Model model, HttpServletRequest request,
HttpServletResponse response) {
long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET,
request, null);
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
null);
Page<NtcFtpLog> ntcFtpLogPage = null;
try {
@@ -696,10 +698,10 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, ntcFtpLog,
getTableName(NtcFtpLog.class.getSimpleName() + "HiveTable", "ntc_ftp_log"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcFtpLog.class, "foundTime",
"recvTime");
getTableName(NtcFtpLog.class.getSimpleName() + "HiveTable", "ntc_ftp_log"), getCol2Col(), orderBy,
null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcFtpLog.class,
"foundTime", "recvTime");
if (tableMapping == null) {
ntcFtpLogPage.setList(new ArrayList());
} else {
@@ -732,13 +734,14 @@ public class NtcLogSearchController extends BaseRestController {
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "FTP log search succeeded",
ntcFtpLogPage, 0);
}
@RequestMapping(value = "/ntcAppLogs", method = RequestMethod.GET)
@ApiOperation(value = "App日志查询", httpMethod = "GET", notes = "对应配置为“App管理”存储动作为阻断与监测的命中日志。对日志功能“APP策略日志”提供数据基础查询服务")
public Map<String, ?> ntcAppLogs(Page page, NtcAppLog ntcAppLog, Model model, HttpServletRequest request,
HttpServletResponse response) {
long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET,
request, null);
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
null);
Page<NtcAppLog> ntcAppLogPage = null;
try {
@@ -754,10 +757,10 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, ntcAppLog,
getTableName(NtcAppLog.class.getSimpleName() + "HiveTable", "ntc_app_log"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcAppLog.class, "foundTime",
"recvTime");
getTableName(NtcAppLog.class.getSimpleName() + "HiveTable", "ntc_app_log"), getCol2Col(), orderBy,
null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcAppLog.class,
"foundTime", "recvTime");
if (tableMapping == null) {
ntcAppLogPage.setList(new ArrayList());
} else {
@@ -790,13 +793,14 @@ public class NtcLogSearchController extends BaseRestController {
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "App log search succeeded",
ntcAppLogPage, 0);
}
@RequestMapping(value = "/ntcDdosLogs", method = RequestMethod.GET)
@ApiOperation(value = "DDos日志查询", httpMethod = "GET", notes = "对应配置为“DDOS日志监控”存储动作为丢弃的命中日志。对日志功能“DDOS日志监控”提供数据基础查询服务")
public Map<String, ?> ntcDdosLogs(Page page, NtcDdosLog ntcDdosLog, Model model, HttpServletRequest request,
HttpServletResponse response) {
long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET,
request, null);
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
null);
Page<NtcDdosLog> ntcDdosLogPage = null;
try {
@@ -812,10 +816,10 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, ntcDdosLog,
getTableName(NtcDdosLog.class.getSimpleName() + "HiveTable", "ntc_ddos_log"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcDdosLog.class, "foundTime",
"recvTime");
getTableName(NtcDdosLog.class.getSimpleName() + "HiveTable", "ntc_ddos_log"), getCol2Col(), orderBy,
null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql,
NtcDdosLog.class, "foundTime", "recvTime");
if (tableMapping == null) {
ntcDdosLogPage.setList(new ArrayList());
} else {
@@ -854,8 +858,8 @@ public class NtcLogSearchController extends BaseRestController {
public Map<String, ?> ntcP2pLogs(Page page, NtcP2pLog ntcP2pLog, Model model, HttpServletRequest request,
HttpServletResponse response) {
long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET,
request, null);
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
null);
Page<NtcP2pLog> ntcP2pLogPage = null;
try {
@@ -872,11 +876,11 @@ public class NtcLogSearchController extends BaseRestController {
}
String sql = HiveSqlService.getSql(page, ntcP2pLog,
getTableName(NtcP2pLog.class.getSimpleName() + "HiveTable", "ntc_p2p_log"),
getCol2Col(), orderBy, null);
getTableName(NtcP2pLog.class.getSimpleName() + "HiveTable", "ntc_p2p_log"), getCol2Col(), orderBy,
null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcP2pLog.class, "foundTime",
"recvTime");
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcP2pLog.class,
"foundTime", "recvTime");
if (tableMapping == null) {
ntcP2pLogPage.setList(new ArrayList());
} else {
@@ -915,8 +919,8 @@ public class NtcLogSearchController extends BaseRestController {
public Map<String, ?> ntcBgpLogs(Page page, NtcBgpLog ntcBgpLog, Model model, HttpServletRequest request,
HttpServletResponse response) {
long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET,
request, null);
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
null);
Page<NtcBgpLog> ntcBgpLogPage = null;
try {
@@ -933,11 +937,11 @@ public class NtcLogSearchController extends BaseRestController {
}
String sql = HiveSqlService.getSql(page, ntcBgpLog,
getTableName(NtcBgpLog.class.getSimpleName() + "HiveTable", "ntc_bgp_log"),
getCol2Col(), orderBy, null);
getTableName(NtcBgpLog.class.getSimpleName() + "HiveTable", "ntc_bgp_log"), getCol2Col(), orderBy,
null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcBgpLog.class, "foundTime",
"recvTime");
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcBgpLog.class,
"foundTime", "recvTime");
if (tableMapping == null) {
ntcBgpLogPage.setList(new ArrayList());
} else {
@@ -976,8 +980,8 @@ public class NtcLogSearchController extends BaseRestController {
public Map<String, ?> ntcVoipLogs(Page page, NtcVoipLog ntcVoipLog, Model model, HttpServletRequest request,
HttpServletResponse response) {
long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET,
request, null);
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
null);
Page<NtcVoipLog> logPage = null;
try {
@@ -993,10 +997,10 @@ public class NtcLogSearchController extends BaseRestController {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, ntcVoipLog,
getTableName(NtcVoipLog.class.getSimpleName() + "HiveTable", "NTC_VOIP_LOG"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcVoipLog.class, "foundTime",
"recvTime");
getTableName(NtcVoipLog.class.getSimpleName() + "HiveTable", "NTC_VOIP_LOG"), getCol2Col(), orderBy,
null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql,
NtcVoipLog.class, "foundTime", "recvTime");
if (tableMapping == null) {
logPage.setList(new ArrayList());
} else {
@@ -1027,16 +1031,17 @@ public class NtcLogSearchController extends BaseRestController {
@RequestMapping(value = "/ntcStreamMediaLogs", method = RequestMethod.GET)
@ApiOperation(value = "流媒体协议日志查询", httpMethod = "GET", notes = "对日志功能“流媒体协议日志”提供数据基础查询服务")
public Map<String, ?> ntcStreamMediaLogs(Page page, NtcStreamingMediaLog ntcStreamMediaLog, Model model, HttpServletRequest request,
HttpServletResponse response) {
public Map<String, ?> ntcStreamMediaLogs(Page page, NtcStreamingMediaLog ntcStreamMediaLog, Model model,
HttpServletRequest request, HttpServletResponse response) {
long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET,
request, null);
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
null);
Page<NtcStreamingMediaLog> logPage = null;
try {
resetTime(ntcStreamMediaLog);
ntcLogService.queryConditionCheck(auditLogThread, start, ntcStreamMediaLog, NtcStreamingMediaLog.class, page);
ntcLogService.queryConditionCheck(auditLogThread, start, ntcStreamMediaLog, NtcStreamingMediaLog.class,
page);
logPage = new Page<NtcStreamingMediaLog>();
logPage.setPageNo(page.getPageNo());
logPage.setPageSize(page.getPageSize());
@@ -1049,16 +1054,16 @@ public class NtcLogSearchController extends BaseRestController {
String sql = HiveSqlService.getSql(page, ntcStreamMediaLog,
getTableName(NtcStreamingMediaLog.class.getSimpleName() + "HiveTable", "NTC_STREAMING_MEDIA_LOG"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcStreamingMediaLog.class, "foundTime",
"recvTime");
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql,
NtcStreamingMediaLog.class, "foundTime", "recvTime");
if (tableMapping == null) {
logPage.setList(new ArrayList());
} else {
List list = tableMapping.get("obj");
if (list.size() > 0) {
String jsonString = JsonMapper.toJsonString(list);
List<NtcStreamingMediaLog> List = (java.util.List<NtcStreamingMediaLog>) JsonMapper.fromJsonList(jsonString,
NtcStreamingMediaLog.class);
List<NtcStreamingMediaLog> List = (java.util.List<NtcStreamingMediaLog>) JsonMapper
.fromJsonList(jsonString, NtcStreamingMediaLog.class);
logPage.setList(List);
logPage.setCount(List.size());
@@ -1075,17 +1080,17 @@ public class NtcLogSearchController extends BaseRestController {
}
throw ((RestServiceException) e);
}
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "Streaming media protocol log search succeeded",
logPage, 0);
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "Streaming media protocol log search succeeded", logPage,
0);
}
@RequestMapping(value = "/ntcKeywordsUrlLogs", method = RequestMethod.GET)
@ApiOperation(value = "NTC关键字转URL日志查询", httpMethod = "GET", notes = "对日志功能“NTC关键字转URL日志”提供数据基础查询服务")
public Map<String, ?> ntcKeywordsUrlLogs(Page page, NtcKeywordsUrlLog ntcKeywordsUrlLog, Model model, HttpServletRequest request,
HttpServletResponse response) {
public Map<String, ?> ntcKeywordsUrlLogs(Page page, NtcKeywordsUrlLog ntcKeywordsUrlLog, Model model,
HttpServletRequest request, HttpServletResponse response) {
long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET,
request, null);
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
null);
Page<NtcKeywordsUrlLog> logPage = null;
try {
@@ -1103,19 +1108,23 @@ public class NtcLogSearchController extends BaseRestController {
String sql = HiveSqlService.getSql(page, ntcKeywordsUrlLog,
getTableName(NtcKeywordsUrlLog.class.getSimpleName() + "HiveTable", "NTC_KEYWORDS_URL_LOG"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, NtcKeywordsUrlLog.class, "foundTime",
"recvTime");
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql,
NtcKeywordsUrlLog.class, "foundTime", "recvTime");
if (tableMapping == null) {
logPage.setList(new ArrayList());
} else {
List list = tableMapping.get("obj");
if (list.size() > 0) {
String jsonString = JsonMapper.toJsonString(list);
List<NtcKeywordsUrlLog> List = (java.util.List<NtcKeywordsUrlLog>) JsonMapper.fromJsonList(jsonString,
NtcKeywordsUrlLog.class);
List<NtcKeywordsUrlLog> List = (java.util.List<NtcKeywordsUrlLog>) JsonMapper
.fromJsonList(jsonString, NtcKeywordsUrlLog.class);
logPage.setList(List);
logPage.setCount(List.size());
Long hivePageCount = HiveSqlService.getLogCount(ntcKeywordsUrlLog,
getTableName(NtcKeywordsUrlLog.class.getSimpleName() + "HiveTable", "NTC_KEYWORDS_URL_LOG"),
getCol2Col());
logPage.setLast(getLastPageNum(hivePageCount.intValue(), page.getPageSize()));
logPage.setCount(hivePageCount);
} else {
logPage.setList(new ArrayList());
}
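Condensed, the new wiring above runs a second count query so the page object carries both the true total and the last page number; a sketch using this commit's names (exception handling elided):

Long hivePageCount = HiveSqlService.getLogCount(ntcKeywordsUrlLog,
getTableName(NtcKeywordsUrlLog.class.getSimpleName() + "HiveTable", "NTC_KEYWORDS_URL_LOG"), getCol2Col());
logPage.setCount(hivePageCount); // total record count
logPage.setLast(getLastPageNum(hivePageCount.intValue(), page.getPageSize())); // total page count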
@@ -1135,11 +1144,13 @@ public class NtcLogSearchController extends BaseRestController {
/**
* Check whether the start and end times are null; if so, initialize them
*
* @param entity
* @throws Exception
*/
public void resetTime(LogEntity<?> entity) throws Exception {
if (StringUtil.isEmpty(entity.getSearchFoundStartTime())&& StringUtil.isEmpty(entity.getSearchFoundEndTime())) {
if (StringUtil.isEmpty(entity.getSearchFoundStartTime())
&& StringUtil.isEmpty(entity.getSearchFoundEndTime())) {
Map<String, String> map = DateUtils.getLocalTime(entity.getSearchFoundStartTime(),
entity.getSearchFoundEndTime(), Constants.LOG_LOCAL_TIME, "minute");
entity.setSearchFoundStartTime(map.get("startTime"));
@@ -1149,6 +1160,7 @@ public class NtcLogSearchController extends BaseRestController {
/**
* Map searchFoundStartTime and searchFoundEndTime onto foundTime
*
* @return
*/
public Map<String, Map<String, String>> getCol2Col() {


@@ -50,6 +50,15 @@ public class HiveSqlService {
public static String getSql(Page page, Object bean, String tableName, Map<String, Map<String, String>> col2col,
String orderBy, String searchActiveSys) throws Exception {
if (Constants.ISUSECLICKHOUSE) {
return getSqlByClickHouse(page, bean, tableName, col2col, orderBy, searchActiveSys);
} else {
return getSqlByHive(page, bean, tableName, col2col, orderBy, searchActiveSys);
}
}
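A hedged usage sketch of the dispatcher (the bean, table name, and order column are illustrative):

// Routes to the ClickHouse or Hive SQL builder based on Constants.ISUSECLICKHOUSE
String sql = HiveSqlService.getSql(page, ntcIpLog, "ntc_ip_log", getCol2Col(), "found_time", null);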
public static String getSqlByClickHouse(Page page, Object bean, String tableName,
Map<String, Map<String, String>> col2col, String orderBy, String searchActiveSys) throws Exception {
tableName = tableName.toLowerCase();
String showColmun = getFiledsSql(bean.getClass().getSimpleName(), page.getFields());
StringBuffer sql = new StringBuffer();
@@ -73,9 +82,107 @@ public class HiveSqlService {
if (!StringUtil.isEmpty(bean)) {
Class<?> clazz = bean.getClass();
Map<String, String> filedsType = null;
if (Constants.ISUSECLICKHOUSE) {
filedsType = getFiledsType(bean);
for (; clazz != Object.class; clazz = clazz.getSuperclass()) {
Field[] fields = clazz.getDeclaredFields();
for (int i = 0; i < fields.length; i++) {
// The gwall log tables currently contain only numeric and string columns; numerics are all int (no bigint), so no L suffix is needed
Field f = fields[i];
String key = f.getName();// get the field name
if (f.getType().getName().equals("java.lang.String") && key.startsWith("search")) {
Object value = getFieldValue(bean, key);
if (!StringUtil.isEmpty(value)) {
setFieldValue(bean, key, value.toString().trim());
if (key.endsWith("Time")) {// 日期开始或结束的字段
if (col2col.containsKey(key)) {
Long partition = Long.parseLong(sdf2.format(sdf.parse(value.toString().trim())));
value = sdf.parse(value.toString().trim()).getTime() / 1000;
if (col2col.get(key).get("start") != null) {
whereSB.append(" and "
+ filedAndColumnMap.get(col2col.get(key).get("start")).toLowerCase()
+ ">=" + value);
} else {
whereSB.append(" and "
+ filedAndColumnMap.get(col2col.get(key).get("end")).toLowerCase() + "<"
+ value);
}
}
} else {
if (key.toLowerCase().startsWith("search")) {
key = key.replace("search", "");
key = key.substring(0, 1).toLowerCase() + key.substring(1);
}
// ClickHouse syntax
String type = filedsType.get(key).trim();
if (type.equals("java.lang.String")) {
String field = filedAndColumnMap.get(key).toLowerCase();
if (field.equals("url")) {
whereSB.append(" and " + field + " like '" + value.toString().trim() + "%'");
} else {
whereSB.append(" and " + field + "='" + value.toString().trim() + "'");
}
} else if (type.equals("java.lang.Integer") || type.equals("int")
|| type.equals("java.lang.Long") || type.equals("long")) {
whereSB.append(" and " + filedAndColumnMap.get(key).toLowerCase() + "="
+ value.toString().trim());
}
}
}
}
}
}
}
if (whereSB.length() > 0) {
int indexOf = whereSB.indexOf("and") + "and".length();
sql.append(" where " + whereSB.substring(indexOf));
}
Integer startNum = (page.getPageNo() - 1) * page.getPageSize();
if (orderBy.toLowerCase().contains("asc") || orderBy.toLowerCase().contains("desc")) {
sql.append(" order by " + orderBy.toLowerCase());
} else {
sql.append(" order by " + orderBy.toLowerCase() + " desc");
}
sql.append(" limit " + startNum + "," + page.getPageSize());// clickhouse的分页与mysql相同
return sql.toString();
}
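For a bean whose only search criterion is a found-time range, getSqlByClickHouse emits SQL of roughly this shape (columns and epoch values are illustrative; page 2 at 20 rows per page):

select found_time, recv_time, src_ip, dst_ip from ntc_ip_log
 where found_time >= 1540861200 and found_time < 1540864800
 order by found_time desc limit 20,20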
public static String getSqlByHive(Page page, Object bean, String tableName,
Map<String, Map<String, String>> col2col, String orderBy, String searchActiveSys) throws Exception {
tableName = tableName.toLowerCase();
String showColmun = getFiledsSql(bean.getClass().getSimpleName(), page.getFields());
StringBuffer sql = new StringBuffer();
Map<String, String> filedAndColumnMap = getFiledAndColumnMap(bean.getClass());
if (null == showColmun || showColmun.equals("")) {
for (String key : filedAndColumnMap.keySet()) {
if (!filedAndColumnMap.get(key).toLowerCase().equals("id")) {
sql.append(filedAndColumnMap.get(key) + ",");
}
}
} else {
sql.append(showColmun);
}
String sqlTrim = sql.toString().trim();
if (sqlTrim.endsWith(",")) {
sqlTrim = sqlTrim.substring(0, sqlTrim.length() - 1);
}
if (orderBy.toLowerCase().contains("asc") || orderBy.toLowerCase().contains("desc")) {
orderBy = " order by " + orderBy;
} else {
orderBy = " order by " + orderBy + " desc ";
}
sql.setLength(0);
sql.append(" select " + sqlTrim.toLowerCase() + " from (select " + sqlTrim.toLowerCase()
+ ",row_number() over(partition by found_time_partition " + orderBy + ") as row_num from "
+ tableName.toLowerCase() + " ");
StringBuffer whereSB = new StringBuffer();
if (!StringUtil.isEmpty(bean)) {
Class<?> clazz = bean.getClass();
Map<String, String> filedsType = null;
for (; clazz != Object.class; clazz = clazz.getSuperclass()) {
// Get all fields, including public, private, protected, and package-private
// Field[] fields = bean.getClass().getDeclaredFields();
@@ -126,12 +233,11 @@ public class HiveSqlService {
key = key.replace("search", "");
key = key.substring(0, 1).toLowerCase() + key.substring(1);
}
if (!Constants.ISUSECLICKHOUSE) {// Hive syntax
if (typeName.equals("java.lang.String")) {
String field = filedAndColumnMap.get(key);
if (field.equals("url")) {
whereSB.append(
" and " + field + " like '" + value.toString().trim() + "%'");
whereSB.append(" and " + field + " like '" + value.toString().trim() + "%'");
} else {
whereSB.append(" and " + field + "='" + value.toString().trim() + "'");
}
@@ -140,31 +246,15 @@ public class HiveSqlService {
" and " + filedAndColumnMap.get(key) + "=" + value.toString().trim());
} else if (typeName.equals("java.lang.Long") || typeName.equals("long")) {
whereSB.append(" and " + filedAndColumnMap.get(key) + "="
+ value.toString().trim() + "L");
}
} else {// ClickHouse syntax
String type = filedsType.get(key).trim();
if (type.equals("java.lang.String")) {
String field = filedAndColumnMap.get(key).toLowerCase();
if (field.equals("url")) {
whereSB.append(
" and " + field + " like '" + value.toString().trim() + "%'");
} else {
whereSB.append(" and " + field + "='" + value.toString().trim() + "'");
}
} else if (type.equals("java.lang.Integer") || type.equals("int")
|| type.equals("java.lang.Long") || type.equals("long")) {
whereSB.append(" and " + filedAndColumnMap.get(key).toLowerCase() + "="
+ value.toString().trim());
}
" and " + filedAndColumnMap.get(key) + "=" + value.toString().trim() + "L");
}
}
}
}
}
if (!Constants.ISUSECLICKHOUSE) {// Hive needs this partition column
if (null != foundTimePartStart) {
// sql.append(" and found_time_partition>=" + foundTimePartStart + "L");
whereSB.append(" and found_time_partition>=" + foundTimePartStart);
@@ -173,7 +263,6 @@ public class HiveSqlService {
// sql.append(" and found_time_partition<" + foundTimePartEnd + "L");
whereSB.append(" and found_time_partition<=" + foundTimePartEnd);
}
}
}
}
@@ -182,32 +271,101 @@ public class HiveSqlService {
int indexOf = whereSB.indexOf("and") + "and".length();
sql.append(" where " + whereSB.substring(indexOf));
}
if (Constants.ISUSECLICKHOUSE) {
// Integer startNum = (page.getPageNo() - 1) * page.getPageSize() + 1;
Integer startNum = (page.getPageNo() - 1) * page.getPageSize();
// Integer endNum = startNum - 1 + page.getPageSize();
if (orderBy.toLowerCase().contains("asc") || orderBy.toLowerCase().contains("desc")) {
sql.append(" order by " + orderBy.toLowerCase());
} else {
sql.append(" order by " + orderBy.toLowerCase() + " desc");
}
sql.append(" limit " + startNum + "," + page.getPageSize());// clickhouse的分页与mysql相同
} else {
// sql.append(" order by " + orderBy + " limit 10000) t1) t2 where
// row_Num between " + startNum + " and " + endNum);
sql.append(" limit " + Constants.EVERY_GETHIVEDATANUM);
Integer startNum = (page.getPageNo() - 1) * page.getPageSize() + 1;
Integer endNum = startNum - 1 + page.getPageSize();
sql.append(" ) t where row_Num between " + startNum + " and " + endNum);
logger.info("获取数据中心日志sql==================={}", sql);
}
return sql.toString();
}
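For the same illustrative filter, getSqlByHive wraps the projection in a row_number() window keyed to found_time_partition and cuts the requested page out of the numbered rows, roughly as follows (100000 stands in for Constants.EVERY_GETHIVEDATANUM, whose value is not shown here, and the partition values assume yyyyMMdd day keys):

select found_time, recv_time from (select found_time, recv_time,
 row_number() over(partition by found_time_partition order by found_time desc) as row_num
 from ntc_ip_log where found_time >= 1540861200 and found_time < 1540864800
 and found_time_partition >= 20181030 and found_time_partition <= 20181030
 limit 100000) t where row_Num between 21 and 40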
public static Long getHivePageCount(Object bean, String countKey, String tableName,
Map<String, Map<String, String>> col2col, String searchActiveSys) throws Exception {
public static Long getLogCount(Object bean, String tableName, Map<String, Map<String, String>> col2col) throws Exception {
if (Constants.ISUSECLICKHOUSE) {
return getLogCountFromClickHouse(bean, tableName, col2col);
} else {
return getLogCountFromHive(bean, tableName, col2col);
}
}
public static Long getLogCountFromClickHouse(Object bean, String tableName,
Map<String, Map<String, String>> col2col) throws Exception {
tableName = tableName.toLowerCase();
StringBuffer sql = new StringBuffer();
Map<String, String> filedAndColumnMap = getFiledAndColumnMap(bean.getClass());
sql.append("select count(1) from " + tableName + " where 1=1 ");
sql.append("select count(1) from " + tableName + " ");
StringBuffer whereSB = new StringBuffer();
if (!StringUtil.isEmpty(bean)) {
Class<?> clazz = bean.getClass();
Map<String, String> filedsType = null;
filedsType = getFiledsType(bean);
for (; clazz != Object.class; clazz = clazz.getSuperclass()) {
Field[] fields = clazz.getDeclaredFields();
for (int i = 0; i < fields.length; i++) {
// The gwall log tables currently contain only numeric and string columns; numerics are all int (no bigint), so no L suffix is needed
Field f = fields[i];
String key = f.getName();// get the field name
if (f.getType().getName().equals("java.lang.String") && key.startsWith("search")) {
Object value = getFieldValue(bean, key);
if (!StringUtil.isEmpty(value)) {
setFieldValue(bean, key, value.toString().trim());
if (key.endsWith("Time")) {// 日期开始或结束的字段
if (col2col.containsKey(key)) {
Long partition = Long.parseLong(sdf2.format(sdf.parse(value.toString().trim())));
value = sdf.parse(value.toString().trim()).getTime() / 1000;
if (col2col.get(key).get("start") != null) {
whereSB.append(" and "
+ filedAndColumnMap.get(col2col.get(key).get("start")).toLowerCase()
+ ">=" + value);
} else {
whereSB.append(" and "
+ filedAndColumnMap.get(col2col.get(key).get("end")).toLowerCase() + "<"
+ value);
}
}
} else {
if (key.toLowerCase().startsWith("search")) {
key = key.replace("search", "");
key = key.substring(0, 1).toLowerCase() + key.substring(1);
}
// ClickHouse syntax
String type = filedsType.get(key).trim();
if (type.equals("java.lang.String")) {
String field = filedAndColumnMap.get(key).toLowerCase();
if (field.equals("url")) {
whereSB.append(" and " + field + " like '" + value.toString().trim() + "%'");
} else {
whereSB.append(" and " + field + "='" + value.toString().trim() + "'");
}
} else if (type.equals("java.lang.Integer") || type.equals("int")
|| type.equals("java.lang.Long") || type.equals("long")) {
whereSB.append(" and " + filedAndColumnMap.get(key).toLowerCase() + "="
+ value.toString().trim());
}
}
}
}
}
}
}
if (whereSB.length() > 0) {
int indexOf = whereSB.indexOf("and") + "and".length();
sql.append(" where " + whereSB.substring(indexOf));
}
logger.info("获取数据中心日志总条数sql==================" + sql.toString());
Long count = new LogJDBCByDruid().getCount(sql.toString());
return count;
}
public static Long getLogCountFromHive(Object bean, String tableName, Map<String, Map<String, String>> col2col)
throws Exception {
tableName = tableName.toLowerCase();
StringBuffer sql = new StringBuffer();
Map<String, String> filedAndColumnMap = getFiledAndColumnMap(bean.getClass());
sql.append("select count(1) from " + tableName + " ");
StringBuffer whereSB = new StringBuffer();
if (bean != null) {
Class<?> clazz = bean.getClass();
for (; clazz != Object.class; clazz = clazz.getSuperclass()) {
@@ -227,12 +385,13 @@ public class HiveSqlService {
setFieldValue(bean, key, value.toString().trim());
if (key.endsWith("Time")) {// 日期开始或结束的字段
if (col2col.containsKey(key)) {
Long partition = Long.parseLong(sdf2.format(sdf.parse(value.toString().trim())));
value = sdf.parse(value.toString().trim()).getTime() / 1000;
if (key.toLowerCase().equals("searchfoundstarttime")) {
foundTimePartStart = Long.parseLong(value.toString()) / 3600L / 24L;
foundTimePartStart = partition;
}
if (key.toLowerCase().equals("searchfoundendtime")) {
foundTimePartEnd = Long.parseLong(value.toString()) / 3600L / 24L;
foundTimePartEnd = partition;
}
if (col2col.get(key).get("start") != null) {
// sql.append(" and " +
@@ -240,15 +399,17 @@ public class HiveSqlService {
// + ">=to_date('" +
// value.toString().trim()
// + "','yyyy-mm-dd HH24:mi:ss')");
sql.append(" and " + filedAndColumnMap.get(col2col.get(key).get("start")) + ">="
+ value);
whereSB.append(" and "
+ filedAndColumnMap.get(col2col.get(key).get("start")).toLowerCase()
+ ">=" + value);
} else {
// sql.append(" and " +
// filedAndColumnMap.get(col2col.get(key).get("end"))
// + "<=to_date('" +
// value.toString().trim()
// + "','yyyy-mm-dd HH24:mi:ss')");
sql.append(" and " + filedAndColumnMap.get(col2col.get(key).get("end")) + "<"
whereSB.append(" and "
+ filedAndColumnMap.get(col2col.get(key).get("end")).toLowerCase() + "<"
+ value);
}
}
@@ -259,15 +420,21 @@ public class HiveSqlService {
}
if (typeName.equals("java.lang.String")) {
sql.append(" and " + filedAndColumnMap.get(key) + "='" + value.toString().trim()
+ "'");
String field = filedAndColumnMap.get(key);
if (field.equals("url")) {
whereSB.append(" and " + field + " like '" + value.toString().trim() + "%'");
} else {
whereSB.append(" and " + field + "='" + value.toString().trim() + "'");
}
} else if (typeName.equals("java.lang.Integer") || typeName.equals("int")) {
sql.append(" and " + filedAndColumnMap.get(key) + "=" + value.toString().trim());
whereSB.append(
" and " + filedAndColumnMap.get(key) + "=" + value.toString().trim());
} else if (typeName.equals("java.lang.Long") || typeName.equals("long")) {
sql.append(
whereSB.append(
" and " + filedAndColumnMap.get(key) + "=" + value.toString().trim() + "L");
}
}
}
@@ -275,33 +442,21 @@ public class HiveSqlService {
}
if (null != foundTimePartStart) {
// sql.append(" and found_time_partition>=" + foundTimePartStart + "L");
sql.append(" and found_time_partition>=" + foundTimePartStart);
whereSB.append(" and found_time_partition>=" + foundTimePartStart);
}
if (null != foundTimePartEnd) {
// sql.append(" and found_time_partition<" + foundTimePartEnd + "L");
sql.append(" and found_time_partition<" + foundTimePartEnd);
whereSB.append(" and found_time_partition<=" + foundTimePartEnd);
}
}
}
if (whereSB.length() > 0) {
int indexOf = whereSB.indexOf("and") + "and".length();
sql.append(" where " + whereSB.substring(indexOf));
}
logger.info("获取数据中心日志总条数sql==================" + sql.toString());
// ResultSet countRs = HiveJDBC.query(countSql.toString());
ResultSet countRs = new LogJDBCByDruid().query(sql.toString());
String countStr = null;
while (countRs.next()) {
countStr = countRs.getObject(1).toString();
break;
}
if (countStr == null || countStr.equals("")) {
logger.info("获取数据中心日志总条数成功总共===================0条配置");
return 0l;
}
Long count = Long.valueOf(countStr);
logger.info("获取数据中心日志总条数成功总共===================" + count + "条配置");
// HiveJDBC.closeConn();
if (Constants.IS_OPEN_REDIS && Constants.DATACENTER_OPEN_REDIS) {
new SaveRedisThread(countKey, count, Constants.HIVE_EXPIRE).start();
}
Long count = new LogJDBCByDruid().getCount(sql.toString());
return count;
}
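For the same illustrative range, the Hive count query assembled above looks roughly like the following; its result feeds Page.setCount and, through getLastPageNum, Page.setLast:

select count(1) from ntc_ip_log
 where found_time >= 1540861200 and found_time < 1540864800
 and found_time_partition >= 20181030 and found_time_partition <= 20181030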