Optimize the log query code: remove redundant code and improve how the queries are composed, to make later extension and overall changes to the log handling easier.
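The shape of the refactor, as the hunks below show it: the old LogJDBCByDruid.tableMapping(Page<?>, String, String, Class<?>, Object...) returned a Map<String, List<Object>> that each controller then re-serialized through JsonMapper into its own logPage, while the new generic getTableData(Page<T>, String, Class<?>) and getCount(Page<T>, String) write the result list, total count, and last-page number straight onto the Page the controller passed in. A minimal, self-contained sketch of that pattern follows; PageStub and the rows parameter are illustrative stand-ins, not the project's real Page or query classes, which this diff only shows in fragments.

import java.util.ArrayList;
import java.util.List;

// Stand-in for com.nis.domain.Page, for illustration only.
class PageStub<T> {
    private List<T> list = new ArrayList<T>();
    private long count;
    private int last;
    private int pageSize = 30;
    public void setList(List<T> list) { this.list = list; }
    public void setCount(long count) { this.count = count; }
    public void setLast(int last) { this.last = last; }
    public int getPageSize() { return pageSize; }
}

class PageFillerSketch {
    // New style: populate the caller's page in place instead of returning an intermediate map.
    static <T> void getTableData(PageStub<T> page, List<T> rows) {
        page.setList(rows == null ? new ArrayList<T>() : rows);
    }

    static <T> void getCount(PageStub<T> page, long total) {
        page.setCount(total);
        if (total == 0) {
            page.setLast(1);
        } else {
            int pageSize = page.getPageSize();
            page.setLast((int) (total / pageSize) + (total % pageSize > 0 ? 1 : 0));
        }
    }
}

Because the controllers now read everything back from the same page object, the per-endpoint logPage copies and the JSON round-trip in the old code become dead weight, which is what most of the removed lines below are.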
@@ -15,7 +15,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.zdjizhi.utils.StringUtil;
import org.apache.ibatis.mapping.ResultMap;
import org.apache.ibatis.mapping.ResultMapping;
import org.apache.ibatis.session.SqlSessionFactory;
@@ -23,9 +22,9 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.alibaba.druid.pool.DruidDataSource;
import com.jolbox.bonecp.BoneCPDataSource;
import com.nis.domain.Page;
import com.nis.web.service.SpringContextHolder;
import com.zdjizhi.utils.StringUtil;

/**
*
@@ -62,25 +61,17 @@ public class LogJDBCByDruid {
}

/**
* 将结果利用反射映射成对象集合
* 根据sql从数据中心中获取日志并set到list中
*
* @param rs
* resultSet
* @param page
* @param sql
* @param entityClass
* 实体类
* @param obj
* 那些字段需要转换为date类型(由于数据中心表结构中没有date类型数据,其日期用long型表示,界面中需要显示yyyy-MM-dd
* hh:mm:ss形式,所以需要将long转换为date)
* @return
* @throws Exception
*/
public Map<String, List<Object>> tableMapping(Page<?> page, String redisKey, String sql, Class<?> entityClass,
Object... obj) throws Exception {
Map<String, List<Object>> mapList = new HashMap<String, List<Object>>();
public <T> void getTableData(Page<T> page, String sql, Class<?> entityClass) throws Exception {
List<T> listObject = new ArrayList<T>();
try {
Map<String, String> filedAndColumnMap = getColumn2FiledMap(entityClass);
List<Object> listString = new ArrayList<Object>();
List<Object> listObject = new ArrayList<Object>();
// 不从Object... obj中获取需要date类型的字段了,调用的时候容易漏写,改为反射获取date类型的字段
List<String> columnList = getDateColumn(entityClass);
conn = getConnection();
@@ -100,8 +91,6 @@ public class LogJDBCByDruid {
long time = 0L;
time = Long.parseLong(value.toString());
map.put(filedName, time == 0L ? null : new Date(time * 1000));
// map.put(filedName, new
// Date(Long.parseLong("1476583810000")));
} else {
map.put(filedName, value);
}
@@ -109,48 +98,16 @@ public class LogJDBCByDruid {
map.put(filedName, null);
}
}
listString.add(JsonMapper.toJsonString(map2Obj(map, entityClass)));
listObject.add(map2Obj(map, entityClass));
listObject.add((T) map2Obj(map, entityClass));
}
if (null == listString || listString.size() == 0 || null == listObject || listObject.size() == 0) {
return null;
if (null == listObject || listObject.size() == 0) {
page.setList(new ArrayList());
} else {
// 暂时没有往缓存写的操作
// if (Constants.IS_OPEN_REDIS && Constants.DATACENTER_OPEN_REDIS) {
// new SaveRedisListThread(redisKey, listString, Constants.HIVE_EXPIRE).start();
// }
}
if (Constants.ISUSECLICKHOUSE) {// sql查询时已经分页了
mapList.put("str", listString);
mapList.put("obj", listObject);
} else {
// sublist包前不包后,0-30实际获取的是0-29的数据
Integer startNum = (page.getPageNo() - 1) * page.getPageSize();
Integer endNum = startNum - 1 + page.getPageSize() + 1;
if (listString.size() >= startNum) {
if (listString.size() >= endNum) {
mapList.put("str", listString.subList(startNum, endNum));
} else {
mapList.put("str", listString.subList(startNum, listString.size()));
}

} else {
mapList.put("str", new ArrayList<Object>());
}
if (listObject.size() >= startNum) {
if (listObject.size() >= endNum) {
mapList.put("obj", listObject.subList(startNum, endNum));
} else {
mapList.put("obj", listObject.subList(startNum, listObject.size()));
}
} else {
mapList.put("obj", new ArrayList<Object>());
}
page.setList(listObject);
}
} finally {
closeConn();
}
return mapList;
}
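Two details in the method above are easy to misread, so here is a short standalone illustration using only java.util (no project classes): the data center stores times as epoch seconds, hence the * 1000 before constructing a Date, and List.subList(from, to) includes from but excludes to, which is what the startNum/endNum arithmetic is compensating for.

import java.util.Arrays;
import java.util.Date;
import java.util.List;

public class PagingAndDateSketch {
    public static void main(String[] args) {
        // Epoch seconds from the data center -> java.util.Date, which expects milliseconds.
        long foundTime = 1476583810L;                      // the commented-out sample above, divided by 1000
        Date asDate = foundTime == 0L ? null : new Date(foundTime * 1000);
        System.out.println(asDate);                        // a date in October 2016, in the local time zone

        // subList(from, to): page 1 with pageSize 30 is indices 0..29, i.e. subList(0, 30).
        int pageNo = 2, pageSize = 3;
        List<String> rows = Arrays.asList("a", "b", "c", "d", "e", "f", "g");
        int startNum = (pageNo - 1) * pageSize;                   // 3
        int endNum = Math.min(startNum + pageSize, rows.size());  // 6, clamped like the size checks above
        System.out.println(rows.subList(startNum, endNum));       // [d, e, f]
    }
}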

public void closeConn() {
@@ -249,22 +206,7 @@ public class LogJDBCByDruid {

}

public ResultSet query(String sql) throws Exception {
conn = getConnection();
logger.info("连接数据中心日志库成功--------------------------");
st = conn.createStatement();
// logger.info("开始选择{}数据库--------------------------", Constants.HIVEDBNAME);
// String hiveAName = "use " + Constants.HIVEDBNAME;
// st.execute(hiveAName);
// logger.info("选择数据库{}成功,开始执行查询", Constants.HIVEDBNAME);
// logger.info("选择数据库{}成功,开始执行查询", Constants.HIVEDBNAME);
rs = st.executeQuery(sql);
logger.info("执行查询语句成功sql={}", sql);
return rs;

}

public long getCount(String sql) throws Exception {
public <T> void getCount(Page<T> page, String sql) throws Exception {
try {
conn = getConnection();
st = conn.createStatement();
@@ -276,13 +218,24 @@ public class LogJDBCByDruid {
}
if (countStr == null || countStr.trim().equals("")) {
logger.info("获取数据中心日志总条数成功总共===================0条配置");
return 0l;
page.setCount(0l);
page.setLast(1);
} else {
return Long.valueOf(countStr);
Long count = Long.valueOf(countStr);
page.setCount(count);
page.setLast(getLastPageNum(count.intValue(), page.getPageSize()));
}
} finally {
closeConn();
}
}

private int getLastPageNum(int totalCount, int pageSize) {
int pageNum = totalCount / pageSize;
if (totalCount % pageSize > 0) {
pageNum++;
}
return pageNum;
}
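getLastPageNum above is plain ceiling division: 95 records at 30 per page give 95 / 30 = 3 with a remainder of 5, so the last page is 4. A branch-free equivalent, shown only as a cross-check and not part of the commit:

// Same result as getLastPageNum for non-negative counts and a positive page size.
static int lastPage(int totalCount, int pageSize) {
    return (totalCount + pageSize - 1) / pageSize;  // lastPage(95, 30) == 4, lastPage(60, 30) == 2, lastPage(0, 30) == 0
}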

}

@@ -24,16 +24,17 @@ import org.springframework.web.bind.WebDataBinder;
import org.springframework.web.bind.annotation.InitBinder;
import org.springframework.web.bind.annotation.RequestMethod;

import com.nis.domain.LogEntity;
import com.nis.domain.restful.ConfigCommonSource;
import com.nis.restful.RestBusinessCode;
import com.nis.restful.RestConstants;
import com.nis.restful.RestResult;
import com.nis.restful.RestServiceException;
import com.nis.util.Configurations;
import com.nis.util.Constants;
import com.nis.util.DateUtils;
import com.nis.web.service.AuditLogThread;
import com.nis.web.service.ServicesRequestLogService;
import com.zdjizhi.utils.StringUtil;

/**
* @ClassName: BaseRestController
@@ -402,19 +403,19 @@ public class BaseRestController {
return thread;

}

protected String getTableName(String key, String defaultTableName) {
if (Constants.ISUSECLICKHOUSE) {
key = key.replace("HiveTable", "ClickHouseTable");
}
return Configurations.getStringProperty(key, defaultTableName);
}

protected int getLastPageNum(int totalCount, int pageSize) {
int pageNum = totalCount / pageSize;
if (totalCount % pageSize > 0) {
pageNum++;
}
return pageNum;
/**
* 判断开始和结束时间是否为null,如果为null则初始化时间
*
* @param entity
* @throws Exception
*/
protected void resetTime(LogEntity<?> entity) throws Exception {
if (StringUtil.isEmpty(entity.getSearchFoundStartTime())
&& StringUtil.isEmpty(entity.getSearchFoundEndTime())) {
Map<String, String> map = DateUtils.getLocalTime(entity.getSearchFoundStartTime(),
entity.getSearchFoundEndTime(), Constants.LOG_LOCAL_TIME, "minute");
entity.setSearchFoundStartTime(map.get("startTime"));
entity.setSearchFoundEndTime(map.get("endTime"));
}
}
}

@@ -1,33 +1,24 @@
package com.nis.web.controller.restful;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import com.zdjizhi.utils.StringUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;

import com.nis.domain.LogEntity;
import com.nis.domain.Page;
import com.nis.domain.restful.DkBehaviorLog;
import com.nis.domain.restful.PxyHttpLog;
import com.nis.restful.RestServiceException;
import com.nis.util.Configurations;
import com.nis.util.Constants;
import com.nis.util.DateUtils;
import com.nis.util.LogJDBCByDruid;
import com.nis.util.JsonMapper;
import com.nis.web.controller.BaseRestController;
import com.nis.web.service.AuditLogThread;
import com.nis.web.service.HiveSqlService;
import com.nis.web.service.LogDataService;
import com.nis.web.service.ServicesRequestLogService;
import com.nis.web.service.restful.NtcLogService;
import com.wordnik.swagger.annotations.Api;
@@ -35,9 +26,16 @@ import com.wordnik.swagger.annotations.ApiOperation;

/**
*
* <p>Title: LogController</p>
* <p>Description: 日志查询controller</p>
* <p>Company: IIE</p>
* <p>
* Title: LogController
* </p>
* <p>
* Description: 日志查询controller
* </p>
* <p>
* Company: IIE
* </p>
*
* @author rkg
* @date 2018年7月2日
*
@@ -51,48 +49,20 @@ public class LogController extends BaseRestController {

@Autowired
protected NtcLogService ntcLogService;
@Autowired
private LogDataService logDataService;

@RequestMapping(value = "/dkBehaviorLogs", method = RequestMethod.GET)
@ApiOperation(value = "行为识别日志查询", httpMethod = "GET", notes = "对日志功能“行为识别”提供数据基础查询服务")
public Map<String, ?> dkBehaviorLogs(Page page, DkBehaviorLog dkBehaviorLog, Model model, HttpServletRequest request,
HttpServletResponse response) {
public Map<String, ?> dkBehaviorLogs(Page page, DkBehaviorLog dkBehaviorLog, Model model,
HttpServletRequest request, HttpServletResponse response) {
long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET,
request, null);

Page<DkBehaviorLog> logPage = null;
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
null);
try {
resetTime(dkBehaviorLog);
ntcLogService.queryConditionCheck(auditLogThread, start, dkBehaviorLog, DkBehaviorLog.class, page);
logPage = new Page<DkBehaviorLog>();
logPage.setPageNo(page.getPageNo());
logPage.setPageSize(page.getPageSize());
String orderBy = "";
if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
orderBy = Page.getOrderBySql(DkBehaviorLog.class.getSimpleName(), page.getOrderBy());
} else {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, dkBehaviorLog,
getTableName(DkBehaviorLog.class.getSimpleName() + "HiveTable", "DK_BEHAVIOR_LOG"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, DkBehaviorLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
logPage.setList(new ArrayList());
} else {
List list = tableMapping.get("obj");
if (list.size() > 0) {
String jsonString = JsonMapper.toJsonString(list);
List<DkBehaviorLog> List = (java.util.List<DkBehaviorLog>) JsonMapper.fromJsonList(jsonString,
DkBehaviorLog.class);
logPage.setList(List);
logPage.setCount(List.size());

} else {
logPage.setList(new ArrayList());
}
}
logDataService.getData(page, dkBehaviorLog);
} catch (Exception e) {
e.printStackTrace();
auditLogThread.setExceptionInfo(e.getMessage() + " " + e.getCause());
@@ -102,50 +72,21 @@ public class LogController extends BaseRestController {
}
throw ((RestServiceException) e);
}
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "行为识别日志检索成功",
logPage, 0);
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "行为识别日志检索成功", page, 0);
}

@RequestMapping(value = "/pxyHttpLogs", method = RequestMethod.GET)
@ApiOperation(value = "PXY HTTP日志查询", httpMethod = "GET", notes = "对日志功能“控制策略”-“HTTP日志”提供数据基础查询服务")
public Map<String, ?> pxyHttpLogs(Page page, PxyHttpLog pxyHttpLog, Model model, HttpServletRequest request,
HttpServletResponse response) {
long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET,
request, null);
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
null);

Page<PxyHttpLog> logPage = null;
try {
resetTime(pxyHttpLog);
ntcLogService.queryConditionCheck(auditLogThread, start, pxyHttpLog, PxyHttpLog.class, page);
logPage = new Page<PxyHttpLog>();
logPage.setPageNo(page.getPageNo());
logPage.setPageSize(page.getPageSize());
String orderBy = "";
if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
orderBy = Page.getOrderBySql(PxyHttpLog.class.getSimpleName(), page.getOrderBy());
} else {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, pxyHttpLog,
getTableName(PxyHttpLog.class.getSimpleName() + "HiveTable", "PXY_HTTP_LOG"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, PxyHttpLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
logPage.setList(new ArrayList());
} else {
List list = tableMapping.get("obj");
if (list.size() > 0) {
String jsonString = JsonMapper.toJsonString(list);
List<PxyHttpLog> List = (java.util.List<PxyHttpLog>) JsonMapper.fromJsonList(jsonString,
PxyHttpLog.class);
logPage.setList(List);
logPage.setCount(List.size());

} else {
logPage.setList(new ArrayList());
}
}
logDataService.getData(page, pxyHttpLog);
} catch (Exception e) {
e.printStackTrace();
auditLogThread.setExceptionInfo(e.getMessage() + " " + e.getCause());
@@ -155,40 +96,8 @@ public class LogController extends BaseRestController {
}
throw ((RestServiceException) e);
}
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "PXY HTTP日志检索成功",
logPage, 0);
}




/**
*判断开始和结束时间是否为null,如果为null则初始化时间
* @param entity
* @throws Exception
*/
public void resetTime(LogEntity<?> entity) throws Exception {
if (StringUtil.isEmpty(entity.getSearchFoundStartTime())&& StringUtil.isEmpty(entity.getSearchFoundEndTime())) {
Map<String, String> map = DateUtils.getLocalTime(entity.getSearchFoundStartTime(),
entity.getSearchFoundEndTime(), Constants.LOG_LOCAL_TIME, "minute");
entity.setSearchFoundStartTime(map.get("startTime"));
entity.setSearchFoundEndTime(map.get("endTime"));
}
}

/**
* 将searchFoundStartTime,searchFoundEndTime与foundTime进行关联
* @return
*/
public Map<String, Map<String, String>> getCol2Col() {
Map<String, Map<String, String>> col2col = new HashMap<String, Map<String, String>>();
Map<String, String> startMap = new HashMap<String, String>();
startMap.put("start", "foundTime");
col2col.put("searchFoundStartTime", startMap);
Map<String, String> endMap = new HashMap<String, String>();
endMap.put("end", "foundTime");
col2col.put("searchFoundEndTime", endMap);
return col2col;
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "PXY HTTP日志检索成功", page,
0);
}

}

@@ -1,22 +1,16 @@
package com.nis.web.controller.restful;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import com.zdjizhi.utils.StringUtil;
import org.apache.ibatis.annotations.Delete;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;

import com.nis.domain.LogEntity;
import com.nis.domain.Page;
import com.nis.domain.restful.MmAvIpLog;
import com.nis.domain.restful.MmAvUrlLog;
@@ -36,12 +30,9 @@ import com.nis.domain.restful.MmVoipAccountLog;
import com.nis.domain.restful.MmVoipIpLog;
import com.nis.restful.RestServiceException;
import com.nis.util.Constants;
import com.nis.util.DateUtils;
import com.nis.util.JsonMapper;
import com.nis.util.LogJDBCByDruid;
import com.nis.web.controller.BaseRestController;
import com.nis.web.service.AuditLogThread;
import com.nis.web.service.HiveSqlService;
import com.nis.web.service.LogDataService;
import com.nis.web.service.ServicesRequestLogService;
import com.nis.web.service.restful.NtcLogService;
import com.wordnik.swagger.annotations.Api;
@@ -64,48 +55,21 @@ public class MmLogSearchController extends BaseRestController {

@Autowired
protected NtcLogService ntcLogService;
@Autowired
private LogDataService logDataService;

@RequestMapping(value = "/mmAvIpLogs", method = RequestMethod.GET)
@ApiOperation(value = "音视频IP日志查询", httpMethod = "GET", notes = "对日志功能“音视频IP日志”提供数据基础查询服务")
public Map<String, ?> mmAvIpLogs(Page page, MmAvIpLog mmAvIpLog, Model model, HttpServletRequest request,
HttpServletResponse response) {
long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET,
request, null);
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
null);

Page<MmAvIpLog> logPage = null;
try {
resetTime(mmAvIpLog);
ntcLogService.queryConditionCheck(auditLogThread, start, mmAvIpLog, MmAvIpLog.class, page);
logPage = new Page<MmAvIpLog>();
logPage.setPageNo(page.getPageNo());
logPage.setPageSize(page.getPageSize());
String orderBy = "";
if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
orderBy = Page.getOrderBySql(MmAvIpLog.class.getSimpleName(), page.getOrderBy());
} else {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, mmAvIpLog,
getTableName(MmAvIpLog.class.getSimpleName() + "HiveTable", "MM_AV_IP_LOG"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmAvIpLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
logPage.setList(new ArrayList());
} else {
List list = tableMapping.get("obj");
if (list.size() > 0) {
String jsonString = JsonMapper.toJsonString(list);
List<MmAvIpLog> List = (java.util.List<MmAvIpLog>) JsonMapper.fromJsonList(jsonString,
MmAvIpLog.class);
logPage.setList(List);
logPage.setCount(List.size());

} else {
logPage.setList(new ArrayList());
}
}
logDataService.getData(page, mmAvIpLog);
} catch (Exception e) {
e.printStackTrace();
auditLogThread.setExceptionInfo(e.getMessage() + " " + e.getCause());
@@ -115,50 +79,21 @@ public class MmLogSearchController extends BaseRestController {
}
throw ((RestServiceException) e);
}
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "音视频IP日志检索成功",
logPage, 0);
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "音视频IP日志检索成功", page, 0);
}

@RequestMapping(value = "/mmAvUrlLogs", method = RequestMethod.GET)
@ApiOperation(value = "音视频URL日志查询", httpMethod = "GET", notes = "对日志功能“音视频URL日志”提供数据基础查询服务")
public Map<String, ?> mmAvUrlLogs(Page page, MmAvUrlLog mmAvUrlLog, Model model, HttpServletRequest request,
HttpServletResponse response) {
long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET,
request, null);
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
null);

Page<MmAvUrlLog> logPage = null;
try {
resetTime(mmAvUrlLog);
ntcLogService.queryConditionCheck(auditLogThread, start, mmAvUrlLog, MmAvUrlLog.class, page);
logPage = new Page<MmAvUrlLog>();
logPage.setPageNo(page.getPageNo());
logPage.setPageSize(page.getPageSize());
String orderBy = "";
if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
orderBy = Page.getOrderBySql(MmAvUrlLog.class.getSimpleName(), page.getOrderBy());
} else {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, mmAvUrlLog,
getTableName(MmAvUrlLog.class.getSimpleName() + "HiveTable", "MM_AV_URL_LOG"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmAvUrlLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
logPage.setList(new ArrayList());
} else {
List list = tableMapping.get("obj");
if (list.size() > 0) {
String jsonString = JsonMapper.toJsonString(list);
List<MmAvUrlLog> List = (java.util.List<MmAvUrlLog>) JsonMapper.fromJsonList(jsonString,
MmAvUrlLog.class);
logPage.setList(List);
logPage.setCount(List.size());

} else {
logPage.setList(new ArrayList());
}
}
logDataService.getData(page, mmAvUrlLog);
} catch (Exception e) {
e.printStackTrace();
auditLogThread.setExceptionInfo(e.getMessage() + " " + e.getCause());
@@ -168,50 +103,21 @@ public class MmLogSearchController extends BaseRestController {
}
throw ((RestServiceException) e);
}
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "音视频URL日志检索成功",
logPage, 0);
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "音视频URL日志检索成功", page, 0);
}

@RequestMapping(value = "/mmPicIpLogs", method = RequestMethod.GET)
@ApiOperation(value = "图片IP日志查询", httpMethod = "GET", notes = "对日志功能“图片IP日志”提供数据基础查询服务")
public Map<String, ?> mmPicIpLogs(Page page, MmPicIpLog mmPicIpLog, Model model, HttpServletRequest request,
HttpServletResponse response) {
long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET,
request, null);
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
null);

Page<MmPicIpLog> logPage = null;
try {
resetTime(mmPicIpLog);
ntcLogService.queryConditionCheck(auditLogThread, start, mmPicIpLog, MmPicIpLog.class, page);
logPage = new Page<MmPicIpLog>();
logPage.setPageNo(page.getPageNo());
logPage.setPageSize(page.getPageSize());
String orderBy = "";
if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
orderBy = Page.getOrderBySql(MmPicIpLog.class.getSimpleName(), page.getOrderBy());
} else {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, mmPicIpLog,
getTableName(MmPicIpLog.class.getSimpleName() + "HiveTable", "MM_PIC_IP_LOG"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmPicIpLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
logPage.setList(new ArrayList());
} else {
List list = tableMapping.get("obj");
if (list.size() > 0) {
String jsonString = JsonMapper.toJsonString(list);
List<MmPicIpLog> List = (java.util.List<MmPicIpLog>) JsonMapper.fromJsonList(jsonString,
MmPicIpLog.class);
logPage.setList(List);
logPage.setCount(List.size());

} else {
logPage.setList(new ArrayList());
}
}
logDataService.getData(page, mmPicIpLog);
} catch (Exception e) {
e.printStackTrace();
auditLogThread.setExceptionInfo(e.getMessage() + " " + e.getCause());
@@ -221,50 +127,21 @@ public class MmLogSearchController extends BaseRestController {
}
throw ((RestServiceException) e);
}
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "图片IP日志检索成功",
logPage, 0);
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "图片IP日志检索成功", page, 0);
}

@RequestMapping(value = "/mmPicUrlLogs", method = RequestMethod.GET)
@ApiOperation(value = "图片URL日志查询", httpMethod = "GET", notes = "对日志功能“图片URL日志”提供数据基础查询服务")
public Map<String, ?> mmPicUrlLogs(Page page, MmPicUrlLog mmPicUrlLog, Model model, HttpServletRequest request,
HttpServletResponse response) {
long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET,
request, null);
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
null);

Page<MmPicUrlLog> logPage = null;
try {
resetTime(mmPicUrlLog);
ntcLogService.queryConditionCheck(auditLogThread, start, mmPicUrlLog, MmPicUrlLog.class, page);
logPage = new Page<MmPicUrlLog>();
logPage.setPageNo(page.getPageNo());
logPage.setPageSize(page.getPageSize());
String orderBy = "";
if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
orderBy = Page.getOrderBySql(MmPicUrlLog.class.getSimpleName(), page.getOrderBy());
} else {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, mmPicUrlLog,
getTableName(MmPicUrlLog.class.getSimpleName() + "HiveTable", "MM_PIC_URL_LOG"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmPicUrlLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
logPage.setList(new ArrayList());
} else {
List list = tableMapping.get("obj");
if (list.size() > 0) {
String jsonString = JsonMapper.toJsonString(list);
List<MmPicUrlLog> List = (java.util.List<MmPicUrlLog>) JsonMapper.fromJsonList(jsonString,
MmPicUrlLog.class);
logPage.setList(List);
logPage.setCount(List.size());

} else {
logPage.setList(new ArrayList());
}
}
logDataService.getData(page, mmPicUrlLog);
} catch (Exception e) {
e.printStackTrace();
auditLogThread.setExceptionInfo(e.getMessage() + " " + e.getCause());
@@ -274,51 +151,23 @@ public class MmLogSearchController extends BaseRestController {
}
throw ((RestServiceException) e);
}
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "图片URL日志检索成功",
logPage, 0);
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "图片URL日志检索成功", page, 0);
}

@Deprecated
@RequestMapping(value = "/mmVoipIpLogs", method = RequestMethod.GET)
@ApiOperation(value = "VoIP IP日志查询", httpMethod = "GET", notes = "对日志功能“VoIP IP日志”提供数据基础查询服务")
public Map<String, ?> mmVoipIpLogs(Page page, MmVoipIpLog mmVoipIpLog, Model model, HttpServletRequest request,
HttpServletResponse response) {
long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET,
request, null);
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
null);

Page<MmVoipIpLog> logPage = null;
try {
resetTime(mmVoipIpLog);
ntcLogService.queryConditionCheck(auditLogThread, start, mmVoipIpLog, MmVoipIpLog.class, page);
logPage = new Page<MmVoipIpLog>();
logPage.setPageNo(page.getPageNo());
logPage.setPageSize(page.getPageSize());
String orderBy = "";
if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
orderBy = Page.getOrderBySql(MmVoipIpLog.class.getSimpleName(), page.getOrderBy());
} else {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, mmVoipIpLog,
getTableName(MmVoipIpLog.class.getSimpleName() + "HiveTable", "MM_VOIP_IP_LOG"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmVoipIpLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
logPage.setList(new ArrayList());
} else {
List list = tableMapping.get("obj");
if (list.size() > 0) {
String jsonString = JsonMapper.toJsonString(list);
List<MmVoipIpLog> List = (java.util.List<MmVoipIpLog>) JsonMapper.fromJsonList(jsonString,
MmVoipIpLog.class);
logPage.setList(List);
logPage.setCount(List.size());
logDataService.getData(page, mmVoipIpLog);

} else {
logPage.setList(new ArrayList());
}
}
} catch (Exception e) {
e.printStackTrace();
auditLogThread.setExceptionInfo(e.getMessage() + " " + e.getCause());
@@ -328,51 +177,23 @@ public class MmLogSearchController extends BaseRestController {
}
throw ((RestServiceException) e);
}
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "VoIP IP日志检索成功",
logPage, 0);
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "VoIP IP日志检索成功", page,
0);
}

@Deprecated
@RequestMapping(value = "/mmVoipAccountLogs", method = RequestMethod.GET)
@ApiOperation(value = "VoIP Account日志查询", httpMethod = "GET", notes = "对日志功能“VoIP Account日志”提供数据基础查询服务")
public Map<String, ?> mmVoipAccountLogs(Page page, MmVoipAccountLog mmVoipLog, Model model, HttpServletRequest request,
HttpServletResponse response) {
public Map<String, ?> mmVoipAccountLogs(Page page, MmVoipAccountLog mmVoipLog, Model model,
HttpServletRequest request, HttpServletResponse response) {
long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET,
request, null);
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
null);

Page<MmVoipAccountLog> logPage = null;
try {
resetTime(mmVoipLog);
ntcLogService.queryConditionCheck(auditLogThread, start, mmVoipLog, MmVoipAccountLog.class, page);
logPage = new Page<MmVoipAccountLog>();
logPage.setPageNo(page.getPageNo());
logPage.setPageSize(page.getPageSize());
String orderBy = "";
if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
orderBy = Page.getOrderBySql(MmVoipAccountLog.class.getSimpleName(), page.getOrderBy());
} else {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, mmVoipLog,
getTableName(MmVoipAccountLog.class.getSimpleName() + "HiveTable", "MM_VOIP_ACCOUNT_LOG"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmVoipAccountLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
logPage.setList(new ArrayList());
} else {
List list = tableMapping.get("obj");
if (list.size() > 0) {
String jsonString = JsonMapper.toJsonString(list);
List<MmVoipAccountLog> List = (java.util.List<MmVoipAccountLog>) JsonMapper.fromJsonList(jsonString,
MmVoipAccountLog.class);
logPage.setList(List);
logPage.setCount(List.size());

} else {
logPage.setList(new ArrayList());
}
}
logDataService.getData(page, mmVoipLog);
} catch (Exception e) {
e.printStackTrace();
auditLogThread.setExceptionInfo(e.getMessage() + " " + e.getCause());
@@ -383,50 +204,21 @@ public class MmLogSearchController extends BaseRestController {
throw ((RestServiceException) e);
}
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "VoIP Account日志检索成功",
logPage, 0);
page, 0);
}

@RequestMapping(value = "/mmSampleAudioLogs", method = RequestMethod.GET)
@ApiOperation(value = "音频样例日志查询", httpMethod = "GET", notes = "对日志功能“音频样例日志”提供数据基础查询服务")
public Map<String, ?> mmSampleAudioLogs(Page page, MmSampleAudioLog mmSampleAudioLog, Model model, HttpServletRequest request,
HttpServletResponse response) {
public Map<String, ?> mmSampleAudioLogs(Page page, MmSampleAudioLog mmSampleAudioLog, Model model,
HttpServletRequest request, HttpServletResponse response) {
long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET,
request, null);
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
null);

Page<MmSampleAudioLog> logPage = null;
try {
resetTime(mmSampleAudioLog);
ntcLogService.queryConditionCheck(auditLogThread, start, mmSampleAudioLog, MmSampleAudioLog.class, page);
logPage = new Page<MmSampleAudioLog>();
logPage.setPageNo(page.getPageNo());
logPage.setPageSize(page.getPageSize());
String orderBy = "";
if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
orderBy = Page.getOrderBySql(MmSampleAudioLog.class.getSimpleName(), page.getOrderBy());
} else {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, mmSampleAudioLog,
getTableName(MmSampleAudioLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_AUDIO_LOG"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmSampleAudioLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
logPage.setList(new ArrayList());
} else {
List list = tableMapping.get("obj");
if (list.size() > 0) {
String jsonString = JsonMapper.toJsonString(list);
List<MmSampleAudioLog> List = (java.util.List<MmSampleAudioLog>) JsonMapper.fromJsonList(jsonString,
MmSampleAudioLog.class);
logPage.setList(List);
logPage.setCount(List.size());

} else {
logPage.setList(new ArrayList());
}
}
logDataService.getData(page, mmSampleAudioLog);
} catch (Exception e) {
e.printStackTrace();
auditLogThread.setExceptionInfo(e.getMessage() + " " + e.getCause());
@@ -436,50 +228,21 @@ public class MmLogSearchController extends BaseRestController {
}
throw ((RestServiceException) e);
}
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "音频样例日志检索成功",
logPage, 0);
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "音频样例日志检索成功", page, 0);
}

@RequestMapping(value = "/mmSampleVideoLogs", method = RequestMethod.GET)
@ApiOperation(value = "视频样例日志查询", httpMethod = "GET", notes = "对日志功能“视频样例日志”提供数据基础查询服务")
public Map<String, ?> mmSampleVideoLogs(Page page, MmSampleVideoLog mmSampleVideoLog, Model model, HttpServletRequest request,
HttpServletResponse response) {
public Map<String, ?> mmSampleVideoLogs(Page page, MmSampleVideoLog mmSampleVideoLog, Model model,
HttpServletRequest request, HttpServletResponse response) {
long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET,
request, null);
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
null);

Page<MmSampleVideoLog> logPage = null;
try {
resetTime(mmSampleVideoLog);
ntcLogService.queryConditionCheck(auditLogThread, start, mmSampleVideoLog, MmSampleVideoLog.class, page);
logPage = new Page<MmSampleVideoLog>();
logPage.setPageNo(page.getPageNo());
logPage.setPageSize(page.getPageSize());
String orderBy = "";
if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
orderBy = Page.getOrderBySql(MmSampleVideoLog.class.getSimpleName(), page.getOrderBy());
} else {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, mmSampleVideoLog,
getTableName(MmSampleVideoLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_VIDEO_LOG"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmSampleVideoLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
logPage.setList(new ArrayList());
} else {
List list = tableMapping.get("obj");
if (list.size() > 0) {
String jsonString = JsonMapper.toJsonString(list);
List<MmSampleVideoLog> List = (java.util.List<MmSampleVideoLog>) JsonMapper.fromJsonList(jsonString,
MmSampleVideoLog.class);
logPage.setList(List);
logPage.setCount(List.size());

} else {
logPage.setList(new ArrayList());
}
}
logDataService.getData(page, mmSampleVideoLog);
} catch (Exception e) {
e.printStackTrace();
auditLogThread.setExceptionInfo(e.getMessage() + " " + e.getCause());
@@ -489,50 +252,22 @@ public class MmLogSearchController extends BaseRestController {
}
throw ((RestServiceException) e);
}
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "视频样例日志检索成功",
logPage, 0);
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "视频样例日志检索成功", page, 0);
}

@RequestMapping(value = "/mmPornAudioLevelLogs", method = RequestMethod.GET)
@ApiOperation(value = "音频色情日志查询", httpMethod = "GET", notes = "对日志功能“音频色情日志”提供数据基础查询服务")
public Map<String, ?> mmPornAudioLevelLogs(Page page, MmPornAudioLevelLog mmPornAudioLevelLog, Model model, HttpServletRequest request,
HttpServletResponse response) {
public Map<String, ?> mmPornAudioLevelLogs(Page page, MmPornAudioLevelLog mmPornAudioLevelLog, Model model,
HttpServletRequest request, HttpServletResponse response) {
long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET,
request, null);
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
null);

Page<MmPornAudioLevelLog> logPage = null;
try {
resetTime(mmPornAudioLevelLog);
ntcLogService.queryConditionCheck(auditLogThread, start, mmPornAudioLevelLog, MmPornAudioLevelLog.class, page);
logPage = new Page<MmPornAudioLevelLog>();
logPage.setPageNo(page.getPageNo());
logPage.setPageSize(page.getPageSize());
String orderBy = "";
if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
orderBy = Page.getOrderBySql(MmPornAudioLevelLog.class.getSimpleName(), page.getOrderBy());
} else {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, mmPornAudioLevelLog,
getTableName(MmPornAudioLevelLog.class.getSimpleName() + "HiveTable", "MM_PORN_AUDIO_LEVEL_LOG"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmPornAudioLevelLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
logPage.setList(new ArrayList());
} else {
List list = tableMapping.get("obj");
if (list.size() > 0) {
String jsonString = JsonMapper.toJsonString(list);
List<MmPornAudioLevelLog> List = (java.util.List<MmPornAudioLevelLog>) JsonMapper.fromJsonList(jsonString,
MmPornAudioLevelLog.class);
logPage.setList(List);
logPage.setCount(List.size());

} else {
logPage.setList(new ArrayList());
}
}
ntcLogService.queryConditionCheck(auditLogThread, start, mmPornAudioLevelLog, MmPornAudioLevelLog.class,
page);
logDataService.getData(page, mmPornAudioLevelLog);
} catch (Exception e) {
e.printStackTrace();
auditLogThread.setExceptionInfo(e.getMessage() + " " + e.getCause());
@@ -542,50 +277,22 @@ public class MmLogSearchController extends BaseRestController {
}
throw ((RestServiceException) e);
}
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "音频色情日志检索成功",
logPage, 0);
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "音频色情日志检索成功", page, 0);
}
|
||||
|
||||
@RequestMapping(value = "/mmPornVideoLevelLogs", method = RequestMethod.GET)
|
||||
@ApiOperation(value = "视频色情日志查询", httpMethod = "GET", notes = "对日志功能“视频色情日志”提供数据基础查询服务")
|
||||
public Map<String, ?> mmPornVideoLevelLogs(Page page, MmPornVideoLevelLog mmPornVideoLevelLog, Model model, HttpServletRequest request,
|
||||
HttpServletResponse response) {
|
||||
public Map<String, ?> mmPornVideoLevelLogs(Page page, MmPornVideoLevelLog mmPornVideoLevelLog, Model model,
|
||||
HttpServletRequest request, HttpServletResponse response) {
|
||||
long start = System.currentTimeMillis();
|
||||
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET,
|
||||
request, null);
|
||||
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
|
||||
null);
|
||||
|
||||
Page<MmPornVideoLevelLog> logPage = null;
|
||||
try {
|
||||
resetTime(mmPornVideoLevelLog);
|
||||
ntcLogService.queryConditionCheck(auditLogThread, start, mmPornVideoLevelLog, MmPornVideoLevelLog.class, page);
|
||||
logPage = new Page<MmPornVideoLevelLog>();
|
||||
logPage.setPageNo(page.getPageNo());
|
||||
logPage.setPageSize(page.getPageSize());
|
||||
String orderBy = "";
|
||||
if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
|
||||
orderBy = Page.getOrderBySql(MmPornVideoLevelLog.class.getSimpleName(), page.getOrderBy());
|
||||
} else {
|
||||
orderBy = "found_Time";
|
||||
}
|
||||
String sql = HiveSqlService.getSql(page, mmPornVideoLevelLog,
|
||||
getTableName(MmPornVideoLevelLog.class.getSimpleName() + "HiveTable", "MM_PRON_VIDEO_LOG"),
|
||||
getCol2Col(), orderBy, null);
|
||||
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmPornVideoLevelLog.class, "foundTime",
|
||||
"recvTime");
|
||||
if (tableMapping == null) {
|
||||
logPage.setList(new ArrayList());
|
||||
} else {
|
||||
List list = tableMapping.get("obj");
|
||||
if (list.size() > 0) {
|
||||
String jsonString = JsonMapper.toJsonString(list);
|
||||
List<MmPornVideoLevelLog> List = (java.util.List<MmPornVideoLevelLog>) JsonMapper.fromJsonList(jsonString,
|
||||
MmPornVideoLevelLog.class);
|
||||
logPage.setList(List);
|
||||
logPage.setCount(List.size());
|
||||
|
||||
} else {
|
||||
logPage.setList(new ArrayList());
|
||||
}
|
||||
}
|
||||
ntcLogService.queryConditionCheck(auditLogThread, start, mmPornVideoLevelLog, MmPornVideoLevelLog.class,
|
||||
page);
|
||||
logDataService.getData(page, mmPornVideoLevelLog);
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
auditLogThread.setExceptionInfo(e.getMessage() + " " + e.getCause());
|
||||
@@ -595,51 +302,21 @@ public class MmLogSearchController extends BaseRestController {
|
||||
}
|
||||
throw ((RestServiceException) e);
|
||||
}
|
||||
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "视频色情日志检索成功",
|
||||
logPage, 0);
|
||||
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "视频色情日志检索成功", page, 0);
|
||||
}
|
||||
|
||||
@RequestMapping(value = "/mmSamplePicLogs", method = RequestMethod.GET)
|
||||
@ApiOperation(value = "图片样例日志查询", httpMethod = "GET", notes = "对日志功能“图片样例日志”提供数据基础查询服务")
|
||||
public Map<String, ?> mmSamplePicLogs(Page page, MmSamplePicLog mmSamplePicLog, Model model, HttpServletRequest request,
|
||||
HttpServletResponse response) {
|
||||
public Map<String, ?> mmSamplePicLogs(Page page, MmSamplePicLog mmSamplePicLog, Model model,
|
||||
HttpServletRequest request, HttpServletResponse response) {
|
||||
long start = System.currentTimeMillis();
|
||||
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET,
|
||||
request, null);
|
||||
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
|
||||
null);
|
||||
|
||||
Page<MmSamplePicLog> logPage = null;
|
||||
try {
|
||||
resetTime(mmSamplePicLog);
|
||||
ntcLogService.queryConditionCheck(auditLogThread, start, mmSamplePicLog, MmSamplePicLog.class, page);
|
||||
logPage = new Page<MmSamplePicLog>();
|
||||
logPage.setPageNo(page.getPageNo());
|
||||
logPage.setPageSize(page.getPageSize());
|
||||
String orderBy = "";
|
||||
if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
|
||||
orderBy = Page.getOrderBySql(MmSamplePicLog.class.getSimpleName(), page.getOrderBy());
|
||||
} else {
|
||||
orderBy = "found_Time";
|
||||
}
|
||||
String sql = HiveSqlService.getSql(page, mmSamplePicLog,
|
||||
getTableName(MmSamplePicLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_PIC_LOG"),
|
||||
getCol2Col(), orderBy, null);
|
||||
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmSamplePicLog.class, "foundTime",
|
||||
"recvTime");
|
||||
if (tableMapping == null) {
|
||||
logPage.setList(new ArrayList());
|
||||
} else {
|
||||
List list = tableMapping.get("obj");
|
||||
if (list.size() > 0) {
|
||||
String jsonString = JsonMapper.toJsonString(list);
|
||||
List<MmSamplePicLog> List = (java.util.List<MmSamplePicLog>) JsonMapper.fromJsonList(jsonString,
|
||||
MmSamplePicLog.class);
|
||||
logPage.setList(List);
|
||||
logPage.setCount(List.size());
|
||||
|
||||
} else {
|
||||
logPage.setList(new ArrayList());
|
||||
}
|
||||
}
|
||||
logDataService.getData(page, mmSamplePicLog);
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
auditLogThread.setExceptionInfo(e.getMessage() + " " + e.getCause());
|
||||
@@ -649,50 +326,21 @@ public class MmLogSearchController extends BaseRestController {
|
||||
}
|
||||
throw ((RestServiceException) e);
|
||||
}
|
||||
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "图片样例日志检索成功",
|
||||
logPage, 0);
|
||||
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "图片样例日志检索成功", page, 0);
|
||||
}
|
||||
|
||||
@RequestMapping(value = "/mmSampleVoipLogs", method = RequestMethod.GET)
|
||||
@ApiOperation(value = "VOIP样例日志查询", httpMethod = "GET", notes = "对日志功能“VOIP样例日志”提供数据基础查询服务")
|
||||
public Map<String, ?> mmSampleVoipLogs(Page page, MmSampleVoipLog mmSampleVoipLog, Model model, HttpServletRequest request,
|
||||
HttpServletResponse response) {
|
||||
public Map<String, ?> mmSampleVoipLogs(Page page, MmSampleVoipLog mmSampleVoipLog, Model model,
|
||||
HttpServletRequest request, HttpServletResponse response) {
|
||||
long start = System.currentTimeMillis();
|
||||
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET,
|
||||
request, null);
|
||||
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
|
||||
null);
|
||||
|
||||
Page<MmSampleVoipLog> logPage = null;
|
||||
try {
|
||||
resetTime(mmSampleVoipLog);
|
||||
ntcLogService.queryConditionCheck(auditLogThread, start, mmSampleVoipLog, MmSampleVoipLog.class, page);
|
||||
logPage = new Page<MmSampleVoipLog>();
|
||||
logPage.setPageNo(page.getPageNo());
|
||||
logPage.setPageSize(page.getPageSize());
|
||||
String orderBy = "";
|
||||
if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
|
||||
orderBy = Page.getOrderBySql(MmSampleVoipLog.class.getSimpleName(), page.getOrderBy());
|
||||
} else {
|
||||
orderBy = "found_Time";
|
||||
}
|
||||
String sql = HiveSqlService.getSql(page, mmSampleVoipLog,
|
||||
getTableName(MmSampleVoipLog.class.getSimpleName() + "HiveTable", "MM_SAMPLE_VOIP_LOG"),
|
||||
getCol2Col(), orderBy, null);
|
||||
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmSampleVoipLog.class, "foundTime",
|
||||
"recvTime");
|
||||
if (tableMapping == null) {
|
||||
logPage.setList(new ArrayList());
|
||||
} else {
|
||||
List list = tableMapping.get("obj");
|
||||
if (list.size() > 0) {
|
||||
String jsonString = JsonMapper.toJsonString(list);
|
||||
List<MmSampleVoipLog> List = (java.util.List<MmSampleVoipLog>) JsonMapper.fromJsonList(jsonString,
|
||||
MmSampleVoipLog.class);
|
||||
logPage.setList(List);
|
||||
logPage.setCount(List.size());
|
||||
|
||||
} else {
|
||||
logPage.setList(new ArrayList());
|
||||
}
|
||||
}
|
||||
logDataService.getData(page, mmSampleVoipLog);
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
auditLogThread.setExceptionInfo(e.getMessage() + " " + e.getCause());
|
||||
@@ -702,51 +350,22 @@ public class MmLogSearchController extends BaseRestController {
|
||||
}
|
||||
throw ((RestServiceException) e);
|
||||
}
|
||||
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "VOIP样例日志检索成功",
|
||||
logPage, 0);
|
||||
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "VOIP样例日志检索成功", page, 0);
|
||||
}
|
||||
|
||||
@RequestMapping(value = "/mmSpeakerRecognizationLogs", method = RequestMethod.GET)
|
||||
@ApiOperation(value = "说话人识别日志查询", httpMethod = "GET", notes = "对日志功能“说话人识别日志”提供数据基础查询服务")
|
||||
public Map<String, ?> mmSpeakerRecognizationLogs(Page page, MmSpeakerRecognizationLog mmSpeakerRecognizationLog, Model model, HttpServletRequest request,
|
||||
HttpServletResponse response) {
|
||||
public Map<String, ?> mmSpeakerRecognizationLogs(Page page, MmSpeakerRecognizationLog mmSpeakerRecognizationLog,
|
||||
Model model, HttpServletRequest request, HttpServletResponse response) {
|
||||
long start = System.currentTimeMillis();
|
||||
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET,
|
||||
request, null);
|
||||
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
|
||||
null);
|
||||
|
||||
Page<MmSpeakerRecognizationLog> logPage = null;
|
||||
try {
|
||||
resetTime(mmSpeakerRecognizationLog);
|
||||
ntcLogService.queryConditionCheck(auditLogThread, start, mmSpeakerRecognizationLog, MmSpeakerRecognizationLog.class, page);
|
||||
logPage = new Page<MmSpeakerRecognizationLog>();
|
||||
logPage.setPageNo(page.getPageNo());
|
||||
logPage.setPageSize(page.getPageSize());
|
||||
String orderBy = "";
|
||||
if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
|
||||
orderBy = Page.getOrderBySql(MmSpeakerRecognizationLog.class.getSimpleName(), page.getOrderBy());
|
||||
} else {
|
||||
orderBy = "found_Time";
|
||||
}
|
||||
String sql = HiveSqlService.getSql(page, mmSpeakerRecognizationLog,
|
||||
getTableName(MmSpeakerRecognizationLog.class.getSimpleName() + "HiveTable", "MM_SPEAKER_RECOGNIZATION_LOG"),
|
||||
getCol2Col(), orderBy, null);
|
||||
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmSpeakerRecognizationLog.class, "foundTime",
|
||||
"recvTime");
|
||||
if (tableMapping == null) {
|
||||
logPage.setList(new ArrayList());
|
||||
} else {
|
||||
List list = tableMapping.get("obj");
|
||||
if (list.size() > 0) {
|
||||
String jsonString = JsonMapper.toJsonString(list);
|
||||
List<MmSpeakerRecognizationLog> List = (java.util.List<MmSpeakerRecognizationLog>) JsonMapper.fromJsonList(jsonString,
|
||||
MmSpeakerRecognizationLog.class);
|
||||
logPage.setList(List);
|
||||
logPage.setCount(List.size());
|
||||
|
||||
} else {
|
||||
logPage.setList(new ArrayList());
|
||||
}
|
||||
}
|
||||
ntcLogService.queryConditionCheck(auditLogThread, start, mmSpeakerRecognizationLog,
|
||||
MmSpeakerRecognizationLog.class, page);
|
||||
logDataService.getData(page, mmSpeakerRecognizationLog);
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
auditLogThread.setExceptionInfo(e.getMessage() + " " + e.getCause());
|
||||
@@ -756,51 +375,22 @@ public class MmLogSearchController extends BaseRestController {
|
||||
}
|
||||
throw ((RestServiceException) e);
|
||||
}
|
||||
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "说话人识别日志检索成功",
|
||||
logPage, 0);
|
||||
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "说话人识别日志检索成功", page, 0);
|
||||
}

@RequestMapping(value = "/mmLogoDetectionLogs", method = RequestMethod.GET)
@ApiOperation(value = "台标识别日志查询", httpMethod = "GET", notes = "对日志功能“台标识别IP日志”提供数据基础查询服务")
public Map<String, ?> mmLogoDetectionLogs(Page page, MmLogoDetectionLog mmLogoDetectionLog, Model model, HttpServletRequest request,
HttpServletResponse response) {
public Map<String, ?> mmLogoDetectionLogs(Page page, MmLogoDetectionLog mmLogoDetectionLog, Model model,
HttpServletRequest request, HttpServletResponse response) {
long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET,
request, null);
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
null);

Page<MmLogoDetectionLog> logPage = null;
try {
resetTime(mmLogoDetectionLog);
ntcLogService.queryConditionCheck(auditLogThread, start, mmLogoDetectionLog, MmLogoDetectionLog.class, page);
logPage = new Page<MmLogoDetectionLog>();
logPage.setPageNo(page.getPageNo());
logPage.setPageSize(page.getPageSize());
String orderBy = "";
if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
orderBy = Page.getOrderBySql(MmLogoDetectionLog.class.getSimpleName(), page.getOrderBy());
} else {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, mmLogoDetectionLog,
getTableName(MmLogoDetectionLog.class.getSimpleName() + "HiveTable", "MM_LOGO_DETECTION_LOG"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmLogoDetectionLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
logPage.setList(new ArrayList());
} else {
List list = tableMapping.get("obj");
if (list.size() > 0) {
String jsonString = JsonMapper.toJsonString(list);
List<MmLogoDetectionLog> List = (java.util.List<MmLogoDetectionLog>) JsonMapper.fromJsonList(jsonString,
MmLogoDetectionLog.class);
logPage.setList(List);
logPage.setCount(List.size());

} else {
logPage.setList(new ArrayList());
}
}
ntcLogService.queryConditionCheck(auditLogThread, start, mmLogoDetectionLog, MmLogoDetectionLog.class,
page);
logDataService.getData(page, mmLogoDetectionLog);
} catch (Exception e) {
e.printStackTrace();
auditLogThread.setExceptionInfo(e.getMessage() + " " + e.getCause());
@@ -810,51 +400,22 @@ public class MmLogSearchController extends BaseRestController {
}
throw ((RestServiceException) e);
}
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "台标识别日志检索成功",
logPage, 0);
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "台标识别日志检索成功", page, 0);
}

@RequestMapping(value = "/mmFaceRecognizationLogs", method = RequestMethod.GET)
@ApiOperation(value = "人脸识别日志查询", httpMethod = "GET", notes = "对日志功能“人脸识别日志”提供数据基础查询服务")
public Map<String, ?> mmFaceRecognizationLogs(Page page, MmFaceRecognizationLog mmFaceRecognizationLog, Model model, HttpServletRequest request,
HttpServletResponse response) {
public Map<String, ?> mmFaceRecognizationLogs(Page page, MmFaceRecognizationLog mmFaceRecognizationLog, Model model,
HttpServletRequest request, HttpServletResponse response) {
long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET,
request, null);
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
null);

Page<MmFaceRecognizationLog> logPage = null;
try {
resetTime(mmFaceRecognizationLog);
ntcLogService.queryConditionCheck(auditLogThread, start, mmFaceRecognizationLog, MmFaceRecognizationLog.class, page);
logPage = new Page<MmFaceRecognizationLog>();
logPage.setPageNo(page.getPageNo());
logPage.setPageSize(page.getPageSize());
String orderBy = "";
if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
orderBy = Page.getOrderBySql(MmFaceRecognizationLog.class.getSimpleName(), page.getOrderBy());
} else {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, mmFaceRecognizationLog,
getTableName(MmFaceRecognizationLog.class.getSimpleName() + "HiveTable", "MM_FACE_RECOGNIZATION_LOG"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmFaceRecognizationLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
logPage.setList(new ArrayList());
} else {
List list = tableMapping.get("obj");
if (list.size() > 0) {
String jsonString = JsonMapper.toJsonString(list);
List<MmFaceRecognizationLog> List = (java.util.List<MmFaceRecognizationLog>) JsonMapper.fromJsonList(jsonString,
MmFaceRecognizationLog.class);
logPage.setList(List);
logPage.setCount(List.size());

} else {
logPage.setList(new ArrayList());
}
}
ntcLogService.queryConditionCheck(auditLogThread, start, mmFaceRecognizationLog,
MmFaceRecognizationLog.class, page);
logDataService.getData(page, mmFaceRecognizationLog);
} catch (Exception e) {
e.printStackTrace();
auditLogThread.setExceptionInfo(e.getMessage() + " " + e.getCause());
@@ -864,51 +425,21 @@ public class MmLogSearchController extends BaseRestController {
}
throw ((RestServiceException) e);
}
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "人脸识别日志检索成功",
logPage, 0);
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "人脸识别日志检索成功", page, 0);
}

@RequestMapping(value = "/mmFileDigestLogs", method = RequestMethod.GET)
@ApiOperation(value = "文件摘要日志查询", httpMethod = "GET", notes = "对日志功能“文件摘要日志”提供数据基础查询服务")
public Map<String, ?> mmFileDigestLogs(Page page, MmFileDigestLog mmFileDigestLog, Model model, HttpServletRequest request,
HttpServletResponse response) {
public Map<String, ?> mmFileDigestLogs(Page page, MmFileDigestLog mmFileDigestLog, Model model,
HttpServletRequest request, HttpServletResponse response) {
long start = System.currentTimeMillis();
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET,
request, null);
AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request,
null);

Page<MmFileDigestLog> logPage = null;
try {
resetTime(mmFileDigestLog);
ntcLogService.queryConditionCheck(auditLogThread, start, mmFileDigestLog, MmFileDigestLog.class, page);
logPage = new Page<MmFileDigestLog>();
logPage.setPageNo(page.getPageNo());
logPage.setPageSize(page.getPageSize());
String orderBy = "";
if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
orderBy = Page.getOrderBySql(MmFileDigestLog.class.getSimpleName(), page.getOrderBy());
} else {
orderBy = "found_Time";
}
String sql = HiveSqlService.getSql(page, mmFileDigestLog,
getTableName(MmFileDigestLog.class.getSimpleName() + "HiveTable", "MM_FILE_DIGEST_LOG"),
getCol2Col(), orderBy, null);
Map<String, List<Object>> tableMapping = new LogJDBCByDruid().tableMapping(page, null, sql, MmFileDigestLog.class, "foundTime",
"recvTime");
if (tableMapping == null) {
logPage.setList(new ArrayList());
} else {
List list = tableMapping.get("obj");
if (list.size() > 0) {
String jsonString = JsonMapper.toJsonString(list);
List<MmFileDigestLog> List = (java.util.List<MmFileDigestLog>) JsonMapper.fromJsonList(jsonString,
MmFileDigestLog.class);
logPage.setList(List);
logPage.setCount(List.size());

} else {
logPage.setList(new ArrayList());
}
}
logDataService.getData(page, mmFileDigestLog);
} catch (Exception e) {
e.printStackTrace();
auditLogThread.setExceptionInfo(e.getMessage() + " " + e.getCause());
@@ -918,37 +449,9 @@ public class MmLogSearchController extends BaseRestController {
}
throw ((RestServiceException) e);
}
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "文件摘要日志检索成功",
logPage, 0);
return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "文件摘要日志检索成功", page, 0);
}
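
The four MM log query methods above all collapse to the same post-refactor flow. A minimal sketch of that flow, assembled only from the added lines in this diff (resetTime, ntcLogService.queryConditionCheck, logDataService.getData, serviceLogResponse); error handling that the truncated hunks elide is marked in comments:

    @RequestMapping(value = "/mmFileDigestLogs", method = RequestMethod.GET)
    public Map<String, ?> mmFileDigestLogs(Page page, MmFileDigestLog mmFileDigestLog, Model model,
            HttpServletRequest request, HttpServletResponse response) {
        long start = System.currentTimeMillis();
        AuditLogThread auditLogThread = super.saveRequestLog(servicesRequestLogService, Constants.OPACTION_GET, request, null);
        try {
            // default the search time window when none was supplied
            resetTime(mmFileDigestLog);
            // validate the query conditions before hitting the data center
            ntcLogService.queryConditionCheck(auditLogThread, start, mmFileDigestLog, MmFileDigestLog.class, page);
            // LogDataService builds the SQL, runs it and fills page.list / page.count
            logDataService.getData(page, mmFileDigestLog);
        } catch (Exception e) {
            e.printStackTrace();
            auditLogThread.setExceptionInfo(e.getMessage() + " " + e.getCause());
            // additional exception handling exists here but is cut off by the diff hunk
            throw ((RestServiceException) e);
        }
        return serviceLogResponse(auditLogThread, System.currentTimeMillis() - start, request, "文件摘要日志检索成功", page, 0);
    }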

/**
* 判断开始和结束时间是否为null,如果为null则初始化时间
* @param entity
* @throws Exception
*/
public void resetTime(LogEntity<?> entity) throws Exception {
if (StringUtil.isEmpty(entity.getSearchFoundStartTime()) && StringUtil.isEmpty(entity.getSearchFoundEndTime())) {
Map<String, String> map = DateUtils.getLocalTime(entity.getSearchFoundStartTime(),
entity.getSearchFoundEndTime(), Constants.LOG_LOCAL_TIME, "minute");
entity.setSearchFoundStartTime(map.get("startTime"));
entity.setSearchFoundEndTime(map.get("endTime"));
}
}

/**
* 将searchFoundStartTime,searchFoundEndTime与foundTime进行关联
* @return
*/
public Map<String, Map<String, String>> getCol2Col() {
Map<String, Map<String, String>> col2col = new HashMap<String, Map<String, String>>();
Map<String, String> startMap = new HashMap<String, String>();
startMap.put("start", "foundTime");
col2col.put("searchFoundStartTime", startMap);
Map<String, String> endMap = new HashMap<String, String>();
endMap.put("end", "foundTime");
col2col.put("searchFoundEndTime", endMap);
return col2col;
}

}
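
The col2col map above (duplicated as a static initializer inside LogDataService later in this diff) tells the SQL builders which entity field each search bound constrains, and whether it is a lower ("start") or upper ("end") bound. A hedged sketch of the WHERE fragment that results, mirroring the whereSB logic shown further down; the filedAndColumnMap, sdf and whereSB locals are assumed from that code, and the sample dates are illustrative only:

    String searchFoundStartTime = "2016-10-16 00:00:00";
    String searchFoundEndTime = "2016-10-17 00:00:00";
    // "searchFoundStartTime" -> {"start" -> "foundTime"} -> the found_time column
    String column = filedAndColumnMap.get(col2col.get("searchFoundStartTime").get("start")).toLowerCase();
    long startSec = sdf.parse(searchFoundStartTime).getTime() / 1000; // yyyy-MM-dd HH:mm:ss -> epoch seconds
    long endSec = sdf.parse(searchFoundEndTime).getTime() / 1000;
    whereSB.append(" and " + column + ">=" + startSec); // lower bound from the "start" entry
    whereSB.append(" and " + column + "<" + endSec);    // upper bound from the "end" entry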
File diff suppressed because it is too large
@@ -18,16 +18,17 @@ import org.apache.ibatis.mapping.ResultMapping;
import org.apache.ibatis.session.SqlSessionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;

import com.alibaba.druid.pool.DruidDataSource;
import com.nis.domain.Page;
import com.nis.util.Configurations;
import com.nis.util.Constants;
import com.nis.util.LogJDBCByDruid;
import com.zdjizhi.utils.StringUtil;
import com.nis.util.redis.SaveRedisThread;

public class HiveSqlService {
private final static Logger logger = LoggerFactory.getLogger(HiveSqlService.class);
@Service
public class LogDataService {
private final static Logger logger = LoggerFactory.getLogger(LogDataService.class);
static DruidDataSource datasource = null;
Connection conn = null;
ResultSet rs = null;
@@ -35,32 +36,47 @@ public class HiveSqlService {
private static SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
private static SimpleDateFormat sdf2 = new SimpleDateFormat("yyyyMMdd");

private static Map<String, String> getFiledsType(Object o) {
Field[] fields = o.getClass().getSuperclass().getDeclaredFields();
Field[] superfields = o.getClass().getDeclaredFields();
Map<String, String> infoMap = new HashMap<String, String>();
for (int i = 0; i < fields.length; i++) {
infoMap.put(fields[i].getName(), fields[i].getType().toString().replace("class", ""));
}
for (int i = 0; i < superfields.length; i++) {
infoMap.put(superfields[i].getName(), superfields[i].getType().toString().replace("class", ""));
}
return infoMap;
private static Map<String, Map<String, String>> col2col = new HashMap<String, Map<String, String>>();
static {
Map<String, String> startMap = new HashMap<String, String>();
startMap.put("start", "foundTime");
col2col.put("searchFoundStartTime", startMap);
Map<String, String> endMap = new HashMap<String, String>();
endMap.put("end", "foundTime");
col2col.put("searchFoundEndTime", endMap);
}

public static String getSql(Page page, Object bean, String tableName, Map<String, Map<String, String>> col2col,
String orderBy, String searchActiveSys) throws Exception {
private String getTableName(String key, String defaultTableName) {
if (Constants.ISUSECLICKHOUSE) {
return getSqlByClickHouse(page, bean, tableName, col2col, orderBy, searchActiveSys);
} else {
return getSqlByHive(page, bean, tableName, col2col, orderBy, searchActiveSys);
key = key.replace("HiveTable", "ClickHouseTable");
}
return Configurations.getStringProperty(key, defaultTableName);
}
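
getTableName resolves the physical table name from the properties file, keyed by the entity's simple class name. A hedged usage sketch; the key and the MM_FILE_DIGEST_LOG default are taken from the controller calls earlier in this diff, the behavior described in the comments is a restatement of the method body above:

    // Constants.ISUSECLICKHOUSE == false: reads property "MmFileDigestLogHiveTable",
    //   falling back to "MM_FILE_DIGEST_LOG" when the key is not configured
    // Constants.ISUSECLICKHOUSE == true: the key is first rewritten to
    //   "MmFileDigestLogClickHouseTable" before the lookup
    String table = getTableName(MmFileDigestLog.class.getSimpleName() + "HiveTable", "MM_FILE_DIGEST_LOG");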

public static String getSqlByClickHouse(Page page, Object bean, String tableName,
Map<String, Map<String, String>> col2col, String orderBy, String searchActiveSys) throws Exception {
public <T> void getData(Page<T> page, Object obj) throws Exception {
String className = obj.getClass().getSimpleName();
String tableName = getTableName(className + "HiveTable", "");
if (tableName == null || tableName.trim().equals("")) {
throw new RuntimeException("日志类" + className + "对应的表名为空,请检查配置文件");
}
String orderBy = "";
if (null != page.getOrderBy() && !page.getOrderBy().equals("")) {
orderBy = Page.getOrderBySql(className, page.getOrderBy());
} else {
orderBy = " found_Time desc";
}
if (Constants.ISUSECLICKHOUSE) {
getDataFromClickHouse(page, obj, tableName, className, orderBy);
} else {
getDataFromHive(page, obj, tableName, className, orderBy);
}

}

public <T> void getDataFromClickHouse(Page<T> page, Object bean, String tableName, String className, String orderBy)
throws Exception {
tableName = tableName.toLowerCase();
String showColmun = getFiledsSql(bean.getClass().getSimpleName(), page.getFields());
String showColmun = getFiledsSql(className, page.getFields());
StringBuffer sql = new StringBuffer();
Map<String, String> filedAndColumnMap = getFiledAndColumnMap(bean.getClass());
if (null == showColmun || showColmun.equals("")) {
@@ -78,6 +94,10 @@ public class HiveSqlService {
}
sql.setLength(0);
sql.append(" select " + sqlTrim.toLowerCase() + " from " + tableName.toLowerCase() + " t ");

StringBuffer countSql = new StringBuffer();
countSql.append("select count(1) from " + tableName + " ");

StringBuffer whereSB = new StringBuffer();
if (!StringUtil.isEmpty(bean)) {
Class<?> clazz = bean.getClass();
@@ -95,7 +115,6 @@ public class HiveSqlService {
setFieldValue(bean, key, value.toString().trim());
if (key.endsWith("Time")) {// 日期开始或结束的字段
if (col2col.containsKey(key)) {
Long partition = Long.parseLong(sdf2.format(sdf.parse(value.toString().trim())));
value = sdf.parse(value.toString().trim()).getTime() / 1000;
if (col2col.get(key).get("start") != null) {
whereSB.append(" and "
@@ -112,7 +131,6 @@ public class HiveSqlService {
key = key.replace("search", "");
key = key.substring(0, 1).toLowerCase() + key.substring(1);
}

// clickhouse写法
String type = filedsType.get(key).trim();
if (type.equals("java.lang.String")) {
@@ -139,7 +157,10 @@ public class HiveSqlService {
if (whereSB.length() > 0) {
int indexOf = whereSB.indexOf("and") + "and".length();
sql.append(" where " + whereSB.substring(indexOf));
countSql.append(" where " + whereSB.substring(indexOf));

}

Integer startNum = (page.getPageNo() - 1) * page.getPageSize();
if (orderBy.toLowerCase().contains("asc") || orderBy.toLowerCase().contains("desc")) {
sql.append(" order by " + orderBy.toLowerCase());
@@ -147,14 +168,13 @@ public class HiveSqlService {
sql.append(" order by " + orderBy.toLowerCase() + " desc");
}
sql.append(" limit " + startNum + "," + page.getPageSize());// clickhouse的分页与mysql相同

return sql.toString();
searchFromDataCenter(page, bean, sql, countSql);
}
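
The ClickHouse branch pages directly in SQL with a MySQL-style "limit offset,size" clause. A hedged illustration of the offset arithmetic from the lines above; the page values are only an example:

    // page 3 with pageSize 30:
    Integer startNum = (page.getPageNo() - 1) * page.getPageSize(); // (3 - 1) * 30 = 60
    // appended clause: " limit 60,30" -> skips the first 60 rows and returns rows 61..90
    sql.append(" limit " + startNum + "," + page.getPageSize());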

public static String getSqlByHive(Page page, Object bean, String tableName,
Map<String, Map<String, String>> col2col, String orderBy, String searchActiveSys) throws Exception {
public <T> void getDataFromHive(Page<T> page, Object bean, String tableName, String className, String orderBy)
throws Exception {
tableName = tableName.toLowerCase();
String showColmun = getFiledsSql(bean.getClass().getSimpleName(), page.getFields());
String showColmun = getFiledsSql(className, page.getFields());
StringBuffer sql = new StringBuffer();
Map<String, String> filedAndColumnMap = getFiledAndColumnMap(bean.getClass());
if (null == showColmun || showColmun.equals("")) {
@@ -179,10 +199,12 @@ public class HiveSqlService {
sql.append(" select " + sqlTrim.toLowerCase() + " from (select " + sqlTrim.toLowerCase()
+ ",row_number() over(partition by found_time_partition " + orderBy + ") as row_num from "
+ tableName.toLowerCase() + " ");
StringBuffer countSql = new StringBuffer();
countSql.append("select count(1) from " + tableName + " ");

StringBuffer whereSB = new StringBuffer();
if (!StringUtil.isEmpty(bean)) {
Class<?> clazz = bean.getClass();
Map<String, String> filedsType = null;
for (; clazz != Object.class; clazz = clazz.getSuperclass()) {
// 获取所有的字段包括public,private,protected,private
// Field[] fields = bean.getClass().getDeclaredFields();
@@ -266,198 +288,38 @@ public class HiveSqlService {

}
}

if (whereSB.length() > 0) {
int indexOf = whereSB.indexOf("and") + "and".length();
sql.append(" where " + whereSB.substring(indexOf));
countSql.append(" where " + whereSB.substring(indexOf));

}
Integer startNum = (page.getPageNo() - 1) * page.getPageSize() + 1;
Integer endNum = startNum - 1 + page.getPageSize();
sql.append(" ) t where row_Num between " + startNum + " and " + endNum);
logger.info("获取数据中心日志sql==================={}", sql);
return sql.toString();
searchFromDataCenter(page, bean, sql, countSql);
}
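
The Hive branch cannot rely on a LIMIT offset, so it wraps the query in row_number() over found_time_partition and filters on the resulting row window. A hedged sketch of the generated SQL shape for page 3 with pageSize 30, with the column list and order-by abbreviated as placeholders:

    Integer startNum = (page.getPageNo() - 1) * page.getPageSize() + 1; // (3 - 1) * 30 + 1 = 61
    Integer endNum = startNum - 1 + page.getPageSize();                 // 61 - 1 + 30 = 90
    // select <cols> from (select <cols>,
    //     row_number() over(partition by found_time_partition <orderBy>) as row_num
    //     from <table> where <conditions>) t
    // where row_Num between 61 and 90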

public static Long getLogCount(Object bean, String tableName, Map<String, Map<String, String>> col2col) throws Exception {
if (Constants.ISUSECLICKHOUSE) {
return getLogCountFromClickHouse(bean, tableName, col2col);
} else {
return getLogCountFromHive(bean, tableName, col2col);
}
}

public static Long getLogCountFromClickHouse(Object bean, String tableName,
Map<String, Map<String, String>> col2col) throws Exception {
tableName = tableName.toLowerCase();
StringBuffer sql = new StringBuffer();
Map<String, String> filedAndColumnMap = getFiledAndColumnMap(bean.getClass());
sql.append("select count(1) from " + tableName + " ");
StringBuffer whereSB = new StringBuffer();
if (!StringUtil.isEmpty(bean)) {
Class<?> clazz = bean.getClass();
Map<String, String> filedsType = null;
filedsType = getFiledsType(bean);
for (; clazz != Object.class; clazz = clazz.getSuperclass()) {
Field[] fields = clazz.getDeclaredFields();
for (int i = 0; i < fields.length; i++) {
// 现在gwall日志表结构中只有数值和字符串两种类型,数值都是int类型没有bigint所以不需要加L,
Field f = fields[i];
String key = f.getName();// 获取字段名
if (f.getType().getName().equals("java.lang.String") && key.startsWith("search")) {
Object value = getFieldValue(bean, key);
if (!StringUtil.isEmpty(value)) {
setFieldValue(bean, key, value.toString().trim());
if (key.endsWith("Time")) {// 日期开始或结束的字段
if (col2col.containsKey(key)) {
Long partition = Long.parseLong(sdf2.format(sdf.parse(value.toString().trim())));
value = sdf.parse(value.toString().trim()).getTime() / 1000;
if (col2col.get(key).get("start") != null) {
whereSB.append(" and "
+ filedAndColumnMap.get(col2col.get(key).get("start")).toLowerCase()
+ ">=" + value);
} else {
whereSB.append(" and "
+ filedAndColumnMap.get(col2col.get(key).get("end")).toLowerCase() + "<"
+ value);
}
}
} else {
if (key.toLowerCase().startsWith("search")) {
key = key.replace("search", "");
key = key.substring(0, 1).toLowerCase() + key.substring(1);
}

// clickhouse写法
String type = filedsType.get(key).trim();
if (type.equals("java.lang.String")) {
String field = filedAndColumnMap.get(key).toLowerCase();
if (field.equals("url")) {
whereSB.append(" and " + field + " like '" + value.toString().trim() + "%'");
} else {
whereSB.append(" and " + field + "='" + value.toString().trim() + "'");
}
} else if (type.equals("java.lang.Integer") || type.equals("int")
|| type.equals("java.lang.Long") || type.equals("long")) {
whereSB.append(" and " + filedAndColumnMap.get(key).toLowerCase() + "="
+ value.toString().trim());
}

}
}

}
}

}
}
if (whereSB.length() > 0) {
int indexOf = whereSB.indexOf("and") + "and".length();
sql.append(" where " + whereSB.substring(indexOf));
}
logger.info("获取数据中心日志总条数sql==================" + sql.toString());
Long count = new LogJDBCByDruid().getCount(sql.toString());
return count;
}

public static Long getLogCountFromHive(Object bean, String tableName, Map<String, Map<String, String>> col2col)
private <T> void searchFromDataCenter(Page<T> page, Object bean, StringBuffer selSql, StringBuffer countSql)
throws Exception {
tableName = tableName.toLowerCase();
StringBuffer sql = new StringBuffer();
Map<String, String> filedAndColumnMap = getFiledAndColumnMap(bean.getClass());
sql.append("select count(1) from " + tableName + " ");
StringBuffer whereSB = new StringBuffer();
if (bean != null) {
Class<?> clazz = bean.getClass();
for (; clazz != Object.class; clazz = clazz.getSuperclass()) {
// 获取所有的字段包括public,private,protected,private
// Field[] fields = bean.getClass().getDeclaredFields();
Field[] fields = clazz.getDeclaredFields();
Long foundTimePartStart = null;
Long foundTimePartEnd = null;
new LogJDBCByDruid().getTableData(page, selSql.toString(), bean.getClass());
if (page.getList() != null && page.getList().size() > 0) {
new LogJDBCByDruid().getCount(page, countSql.toString());
}

}
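
searchFromDataCenter is the single exit point to the data center in the new LogDataService: it runs the paged select first and only issues the count query when the page actually returned rows. A hedged restatement of that flow with the design intent spelled out in comments; the getTableData/getCount signatures are the ones shown in this diff:

    // fill page.list from the paged select built by the ClickHouse/Hive branch
    new LogJDBCByDruid().getTableData(page, selSql.toString(), bean.getClass());
    // skip the count(1) round trip entirely when the current page is already empty
    if (page.getList() != null && page.getList().size() > 0) {
        new LogJDBCByDruid().getCount(page, countSql.toString());
    }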

private static Map<String, String> getFiledsType(Object o) {
Field[] fields = o.getClass().getSuperclass().getDeclaredFields();
Field[] superfields = o.getClass().getDeclaredFields();
Map<String, String> infoMap = new HashMap<String, String>();
for (int i = 0; i < fields.length; i++) {
// 现在gwall日志表结构中只有数值和字符串两种类型,数值都是int类型没有bigint所以不需要加L,
Field f = fields[i];
String key = f.getName();// 获取字段名
String typeName = f.getType().getName();
if (f.getType().getName().equals("java.lang.String") && key.startsWith("search")) {
Object value = getFieldValue(bean, key);
if (!StringUtil.isEmpty(value)) {
setFieldValue(bean, key, value.toString().trim());
if (key.endsWith("Time")) {// 日期开始或结束的字段
if (col2col.containsKey(key)) {
Long partition = Long.parseLong(sdf2.format(sdf.parse(value.toString().trim())));
value = sdf.parse(value.toString().trim()).getTime() / 1000;
if (key.toLowerCase().equals("searchfoundstarttime")) {
foundTimePartStart = partition;
infoMap.put(fields[i].getName(), fields[i].getType().toString().replace("class", ""));
}
if (key.toLowerCase().equals("searchfoundendtime")) {
foundTimePartEnd = partition;
for (int i = 0; i < superfields.length; i++) {
infoMap.put(superfields[i].getName(), superfields[i].getType().toString().replace("class", ""));
}
if (col2col.get(key).get("start") != null) {
// sql.append(" and " +
// filedAndColumnMap.get(col2col.get(key).get("start"))
// + ">=to_date('" +
// value.toString().trim()
// + "','yyyy-mm-dd HH24:mi:ss')");
whereSB.append(" and "
+ filedAndColumnMap.get(col2col.get(key).get("start")).toLowerCase()
+ ">=" + value);
} else {
// sql.append(" and " +
// filedAndColumnMap.get(col2col.get(key).get("end"))
// + "<=to_date('" +
// value.toString().trim()
// + "','yyyy-mm-dd HH24:mi:ss')");
whereSB.append(" and "
+ filedAndColumnMap.get(col2col.get(key).get("end")).toLowerCase() + "<"
+ value);
}
}
} else {
if (key.toLowerCase().startsWith("search")) {
key = key.replace("search", "");
key = key.substring(0, 1).toLowerCase() + key.substring(1);
}

if (typeName.equals("java.lang.String")) {
String field = filedAndColumnMap.get(key);
if (field.equals("url")) {
whereSB.append(" and " + field + " like '" + value.toString().trim() + "%'");
} else {
whereSB.append(" and " + field + "='" + value.toString().trim() + "'");
}
} else if (typeName.equals("java.lang.Integer") || typeName.equals("int")) {
whereSB.append(
" and " + filedAndColumnMap.get(key) + "=" + value.toString().trim());

} else if (typeName.equals("java.lang.Long") || typeName.equals("long")) {
whereSB.append(
" and " + filedAndColumnMap.get(key) + "=" + value.toString().trim() + "L");
}

}
}

}
}
if (null != foundTimePartStart) {
// sql.append(" and found_time_partition>=" + foundTimePartStart + "L");
whereSB.append(" and found_time_partition>=" + foundTimePartStart);
}
if (null != foundTimePartEnd) {
// sql.append(" and found_time_partition<" + foundTimePartEnd + "L");
whereSB.append(" and found_time_partition<=" + foundTimePartEnd);
}

}
}
if (whereSB.length() > 0) {
int indexOf = whereSB.indexOf("and") + "and".length();
sql.append(" where " + whereSB.substring(indexOf));
}
logger.info("获取数据中心日志总条数sql==================" + sql.toString());
Long count = new LogJDBCByDruid().getCount(sql.toString());
return count;
return infoMap;
}

public static String getFiledsSql(String mapName, String fileds) throws Exception {
@@ -172,7 +172,7 @@ serviceValidate=true
maxPageSize=100000

#是否开启Debug模式
isDebug=true
isDebug=false