feat: ASW-10 add pcap-related APIs

This commit is contained in:
shizhendong
2024-07-23 11:37:47 +08:00
parent 5e52c711bc
commit 4f8084eaf8
14 changed files with 1767 additions and 20 deletions

View File

@@ -0,0 +1,41 @@
package net.geedge.asw.common.config;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.stereotype.Component;
/**
* Spring Context utility class
*/
@Component
public class SpringContextUtils implements ApplicationContextAware {
private static ApplicationContext applicationContext;
@Override
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
SpringContextUtils.applicationContext = applicationContext;
}
public static Object getBean(String name) {
return applicationContext.getBean(name);
}
public static <T> T getBean(Class<T> requiredType) {
return applicationContext.getBean(requiredType);
}
public static String getProperty(String key, String defaultValue) {
return applicationContext.getEnvironment().getProperty(key, defaultValue);
}
public static <T> T getBean(String name, Class<T> requiredType) {
return applicationContext.getBean(name, requiredType);
}
public static Class<? extends Object> getType(String name) {
return applicationContext.getType(name);
}
}
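The utility above is what lets plain objects created with `new` reach Spring-managed beans and configuration, which is exactly how PcapParserThread and SignatureExtract use it later in this commit. A minimal usage sketch follows; the class name and the count() call are illustrative assumptions, and it presumes the application context has already been initialised.

import net.geedge.asw.common.config.SpringContextUtils;
import net.geedge.asw.module.runner.service.IPcapService;

public class PcapServiceLookupExample {

    // Hedged sketch: resolve Spring-managed beans from code that cannot use @Autowired.
    public long countPcaps() {
        IPcapService pcapService = SpringContextUtils.getBean(IPcapService.class);
        String tsharkPath = SpringContextUtils.getProperty("tshark.path", "/usr/bin/tshark");
        System.out.println("tshark binary: " + tsharkPath);
        return pcapService.count();   // count() comes from the MyBatis-Plus IService base interface
    }
}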

View File

@@ -0,0 +1,58 @@
package net.geedge.asw.module.runner.controller;
import cn.hutool.log.Log;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import net.geedge.asw.common.util.R;
import net.geedge.asw.common.util.RCode;
import net.geedge.asw.common.util.T;
import net.geedge.asw.module.runner.entity.PcapEntity;
import net.geedge.asw.module.runner.service.IPcapService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import java.io.IOException;
import java.util.Map;
@RestController
@RequestMapping("/api/v1/pcap")
public class PcapController {
private static final Log log = Log.get();
@Autowired
private IPcapService pcapService;
@GetMapping("/{id}")
public R detail(@PathVariable("id") String id) {
PcapEntity pcapEntity = pcapService.queryInfo(id);
return R.ok().putData("record", pcapEntity);
}
@GetMapping
public R list(@RequestParam Map<String, Object> params) {
T.VerifyUtil.is(params).notNull()
.and(T.MapUtil.getStr(params, "workspaceId")).notEmpty(RCode.WORKSPACE_ID_CANNOT_EMPTY);
Page page = pcapService.queryList(params);
return R.ok(page);
}
@PostMapping
public R add(@RequestParam(value = "file", required = true) MultipartFile file,
@RequestParam(required = false) String tags,
@RequestParam(required = false) String workbookId,
@RequestParam(required = false) String workspaceId) throws IOException {
T.VerifyUtil.is(workspaceId).notEmpty(RCode.WORKSPACE_ID_CANNOT_EMPTY);
PcapEntity pcapEntity = pcapService.savePcap(file.getResource(), tags, workbookId, workspaceId);
return R.ok().putData("id", pcapEntity.getId());
}
@DeleteMapping("/{id}")
public R delete(@PathVariable("id") String id) {
pcapService.deletePcap(id);
return R.ok();
}
}
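For reference, a hedged client-side sketch of the upload endpoint above, using Spring's RestTemplate. The host, port, file path, and ids are placeholders; only the multipart field names follow the controller's @RequestParam names.

import org.springframework.core.io.FileSystemResource;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.client.RestTemplate;

public class PcapUploadClientExample {

    public static void main(String[] args) {
        RestTemplate rest = new RestTemplate();

        // Multipart body matching POST /api/v1/pcap: "file" is required, "workspaceId"
        // is enforced by the controller's VerifyUtil check, "tags"/"workbookId" are optional.
        MultiValueMap<String, Object> body = new LinkedMultiValueMap<>();
        body.add("file", new FileSystemResource("/tmp/sample.pcap"));   // placeholder path
        body.add("workspaceId", "ws-001");                              // placeholder id
        body.add("tags", "demo");

        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.MULTIPART_FORM_DATA);

        // Base URL is an assumption; adjust to the actual deployment.
        ResponseEntity<String> resp = rest.postForEntity(
                "http://localhost:8080/api/v1/pcap",
                new HttpEntity<>(body, headers),
                String.class);
        System.out.println(resp.getBody());   // R envelope containing the new pcap id
    }
}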

View File

@@ -1,10 +1,16 @@
package net.geedge.asw.module.runner.dao;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import net.geedge.asw.module.runner.entity.PcapEntity;
import org.apache.ibatis.annotations.Mapper;
import java.util.List;
import java.util.Map;
@Mapper
public interface PcapDao extends BaseMapper<PcapEntity>{
List<PcapEntity> queryList(Page page, Map<String, Object> params);
}

View File

@@ -1,9 +1,13 @@
package net.geedge.asw.module.runner.entity;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
import net.geedge.asw.module.app.entity.ApplicationEntity;
import net.geedge.asw.module.app.entity.PackageEntity;
@Data
@TableName("pcap")
@@ -27,4 +31,20 @@ public class PcapEntity {
private String createUserId;
private String workspaceId;
@TableField(exist = false)
private String jobId;
@TableField(exist = false)
private ApplicationEntity application;
@TableField(exist = false)
@JsonProperty(value = "package")
private PackageEntity pkg;
@TableField(exist = false)
private RunnerEntity runner;
@TableField(exist = false)
private PlaybookEntity playbook;
}

View File

@@ -5,7 +5,6 @@ import com.baomidou.mybatisplus.extension.service.IService;
import net.geedge.asw.module.runner.entity.JobEntity;
import org.springframework.web.multipart.MultipartFile;
import java.io.IOException;
import java.util.List;
import java.util.Map;
@@ -23,6 +22,6 @@ public interface IJobService extends IService<JobEntity>{
void appendTraceLogStrToFile(String jobId, String content) throws RuntimeException;
void updateJobResult(String jobId, String state, MultipartFile pcapFile) throws IOException;
void updateJobResult(String jobId, String state, MultipartFile pcapFile);
}

View File

@@ -1,12 +1,22 @@
package net.geedge.asw.module.runner.service;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.IService;
import net.geedge.asw.module.runner.entity.PcapEntity;
import org.springframework.core.io.Resource;
import java.io.InputStream;
import java.util.Map;
public interface IPcapService extends IService<PcapEntity>{
void savePcap(String jobId, InputStream inputStream);
PcapEntity queryInfo(String id);
Page queryList(Map<String, Object> params);
PcapEntity savePcap(String jobId, Resource fileResource);
PcapEntity savePcap(Resource fileResource, String... params);
void deletePcap(String id);
}
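The varargs overload is positional; based on the PcapServiceImpl implementation further down in this commit, the order is tags, workbookId, workspaceId, createUserId. A hedged call sketch follows (class name, file path, and ids are placeholders; assumes a running Spring context).

import net.geedge.asw.common.config.SpringContextUtils;
import net.geedge.asw.module.runner.entity.PcapEntity;
import net.geedge.asw.module.runner.service.IPcapService;
import org.springframework.core.io.FileSystemResource;

public class SavePcapExample {

    public static String uploadLocalPcap() {
        IPcapService pcapService = SpringContextUtils.getBean(IPcapService.class);

        // Positional varargs: tags, workbookId, workspaceId, createUserId.
        // Trailing arguments may be omitted; createUserId then falls back to the
        // current login user inside PcapServiceImpl#savePcap.
        PcapEntity saved = pcapService.savePcap(
                new FileSystemResource("/tmp/sample.pcap"),   // placeholder pcap file
                "demo,regression",                            // tags
                "wb-001",                                     // workbookId (placeholder)
                "ws-001");                                    // workspaceId (placeholder)
        return saved.getId();
    }
}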

View File

@@ -14,6 +14,7 @@ import net.geedge.asw.module.app.service.IApplicationService;
import net.geedge.asw.module.app.service.IPackageService;
import net.geedge.asw.module.runner.dao.JobDao;
import net.geedge.asw.module.runner.entity.JobEntity;
import net.geedge.asw.module.runner.entity.PcapEntity;
import net.geedge.asw.module.runner.entity.PlaybookEntity;
import net.geedge.asw.module.runner.entity.RunnerEntity;
import net.geedge.asw.module.runner.service.IJobService;
@@ -29,7 +30,6 @@ import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;
import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.Map;
@@ -163,19 +163,22 @@ public class JobServiceImpl extends ServiceImpl<JobDao, JobEntity> implements IJ
@Override
@Transactional(rollbackFor = Exception.class)
public void updateJobResult(String jobId, String state, MultipartFile pcapFile) throws IOException {
// update job status
public void updateJobResult(String jobId, String state, MultipartFile pcapFile) {
String pcapId = T.StrUtil.EMPTY;
// save pcap file
if (T.ObjectUtil.isNotNull(pcapFile)) {
PcapEntity pcapEntity = pcapService.savePcap(jobId, pcapFile.getResource());
pcapId = pcapEntity.getId();
}
// update job status & pcap_id
state = T.StrUtil.equals("success", state) ? RunnerConstant.JobStatus.PASSED.getValue() : state;
this.update(new LambdaUpdateWrapper<JobEntity>()
.set(JobEntity::getStatus, state)
.set(T.StrUtil.isNotEmpty(pcapId), JobEntity::getPcapId, pcapId)
.set(JobEntity::getEndTimestamp, System.currentTimeMillis())
.eq(JobEntity::getId, jobId)
);
// save pcap file
if (T.ObjectUtil.isNotNull(pcapFile)) {
pcapService.savePcap(jobId, pcapFile.getInputStream());
}
}
}

View File

@@ -1,21 +1,172 @@
package net.geedge.asw.module.runner.service.impl;
import cn.dev33.satoken.stp.StpUtil;
import cn.hutool.log.Log;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.conditions.update.LambdaUpdateWrapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import net.geedge.asw.common.util.RCode;
import net.geedge.asw.common.util.T;
import net.geedge.asw.module.app.entity.ApplicationEntity;
import net.geedge.asw.module.app.entity.PackageEntity;
import net.geedge.asw.module.app.service.IApplicationService;
import net.geedge.asw.module.app.service.IPackageService;
import net.geedge.asw.module.runner.dao.PcapDao;
import net.geedge.asw.module.runner.entity.JobEntity;
import net.geedge.asw.module.runner.entity.PcapEntity;
import net.geedge.asw.module.runner.entity.PlaybookEntity;
import net.geedge.asw.module.runner.entity.RunnerEntity;
import net.geedge.asw.module.runner.service.IJobService;
import net.geedge.asw.module.runner.service.IPcapService;
import net.geedge.asw.module.runner.service.IPlaybookService;
import net.geedge.asw.module.runner.service.IRunnerService;
import net.geedge.asw.module.runner.util.PcapParserThread;
import net.geedge.asw.module.runner.util.RunnerConstant;
import net.geedge.asw.module.workbook.service.IWorkbookResourceService;
import net.geedge.asw.module.workbook.util.WorkbookConstant;
import org.apache.commons.io.FileUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.Resource;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.io.InputStream;
import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.Map;
@Service
public class PcapServiceImpl extends ServiceImpl<PcapDao, PcapEntity> implements IPcapService {
private static final Log log = Log.get();
@Autowired
private IJobService jobService;
@Autowired
private IRunnerService runnerService;
@Autowired
private IPlaybookService playbookService;
@Autowired
private IPackageService packageService;
@Autowired
private IApplicationService applicationService;
@Autowired
private IWorkbookResourceService workbookResourceService;
@Override
public PcapEntity queryInfo(String id) {
PcapEntity pcap = this.getById(id);
T.VerifyUtil.is(pcap).notNull(RCode.SYS_RECORD_NOT_FOUND);
JobEntity job = jobService.getOne(new LambdaQueryWrapper<JobEntity>().eq(JobEntity::getPcapId, pcap.getId()));
if (T.ObjectUtil.isNotNull(job)) {
pcap.setJobId(job.getId());
RunnerEntity runner = runnerService.getById(job.getRunnerId());
pcap.setRunner(runner);
PackageEntity pkg = packageService.getById(job.getPackageId());
pcap.setPkg(pkg);
PlaybookEntity playbook = playbookService.getById(job.getPlaybookId());
pcap.setPlaybook(playbook);
if (T.ObjectUtil.isNotNull(playbook)) {
ApplicationEntity application = applicationService.getById(playbook.getAppId());
pcap.setApplication(application);
}
}
return pcap;
}
@Override
public Page queryList(Map<String, Object> params) {
Page page = T.PageUtil.getPage(params);
List<PcapEntity> pcapList = this.getBaseMapper().queryList(page, params);
page.setRecords(pcapList);
return page;
}
@Override
public void savePcap(String jobId, InputStream inputStream) {
// TODO
public PcapEntity savePcap(String jobId, Resource fileResource) {
JobEntity job = jobService.getById(jobId);
return this.savePcap(fileResource, job.getTags(), job.getWorkbookId(), job.getWorkspaceId(), job.getCreateUserId());
}
}
@Override
public PcapEntity savePcap(Resource fileResource, String... params) {
String tags = T.ArrayUtil.get(params, 0);
String workbookId = T.ArrayUtil.get(params, 1);
String workspaceId = T.ArrayUtil.get(params, 2);
String createUserId = T.StrUtil.emptyToDefault(T.ArrayUtil.get(params, 3), StpUtil.getLoginIdAsString());
PcapEntity entity = new PcapEntity();
try {
entity.setName(fileResource.getFilename());
entity.setTags(T.StrUtil.emptyToDefault(tags, ""));
byte[] bytes = fileResource.getInputStream().readAllBytes();
entity.setSize((long) bytes.length);
entity.setStatus(RunnerConstant.PcapStatus.UPLOADED.getValue());
entity.setCreateTimestamp(System.currentTimeMillis());
entity.setCreateUserId(createUserId);
entity.setWorkspaceId(workspaceId);
// path
File destination = T.FileUtil.file(T.WebPathUtil.getRootPath(), workspaceId, fileResource.getFilename());
FileUtils.copyInputStreamToFile(fileResource.getInputStream(), destination);
entity.setPath(destination.getPath());
// md5
String md5Hex = T.DigestUtil.md5Hex(destination);
entity.setMd5(md5Hex);
// If a pcap with the same file MD5 already exists, return that existing entity
PcapEntity findPcapByMd5 = this.getOne(new LambdaQueryWrapper<PcapEntity>().eq(PcapEntity::getMd5, md5Hex));
if (T.ObjectUtil.isNotNull(findPcapByMd5)) {
// delete the file just written for this upload
T.FileUtil.del(destination);
return findPcapByMd5;
}
// save
this.save(entity);
// workbook resource
workbookResourceService.saveResource(workbookId, entity.getId(), WorkbookConstant.ResourceType.PCAP.getValue());
// parser
PcapParserThread pcapParserThread = new PcapParserThread();
pcapParserThread.setPcapEntity(entity);
T.ThreadUtil.execAsync(pcapParserThread);
} catch (IOException e) {
log.error(e, "[savePcap] [error] [workspaceId: {}]", workspaceId);
}
return entity;
}
@Override
@Transactional(rollbackFor = Exception.class)
public void deletePcap(String pcapId) {
PcapEntity pcap = this.getById(pcapId);
// remove file
T.FileUtil.del(pcap.getPath());
// remove
this.removeById(pcapId);
// update job pcap_id
jobService.update(new LambdaUpdateWrapper<JobEntity>()
.set(JobEntity::getPcapId, "")
.eq(JobEntity::getPcapId, pcapId)
);
}
}

View File

@@ -0,0 +1,80 @@
package net.geedge.asw.module.runner.util;
import cn.hutool.log.Log;
import com.baomidou.mybatisplus.core.conditions.update.LambdaUpdateWrapper;
import lombok.Data;
import net.geedge.asw.common.config.SpringContextUtils;
import net.geedge.asw.common.util.T;
import net.geedge.asw.module.runner.entity.PcapEntity;
import net.geedge.asw.module.runner.service.IPcapService;
import java.io.File;
@Data
public class PcapParserThread implements Runnable {
private Log log = Log.get();
private PcapEntity pcapEntity;
private IPcapService pcapService;
private void init() {
pcapService = SpringContextUtils.getBean(IPcapService.class);
// analyzing
this.updatePcapStatus(RunnerConstant.PcapStatus.ANALYZING.getValue());
}
@Override
public void run() {
Thread.currentThread().setName("pcap-parser-thread-" + pcapEntity.getId());
log.info("job pcap parser start");
if (log.isDebugEnabled()) {
log.debug("pcapInfo: {}", T.JSONUtil.toJsonStr(pcapEntity));
}
try {
log.info("job pcap parser run start");
// init
this.init();
// parser
this.parser();
log.info("job pcap parser run end");
} catch (Exception e) {
log.error(e, "job pcap parser error, pcap: {}", pcapEntity.getId());
} finally {
// completed
this.updatePcapStatus(RunnerConstant.PcapStatus.COMPLETED.getValue());
log.info("job pcap parser end");
}
}
/**
* parser
*/
private void parser() {
String id = pcapEntity.getId();
String path = pcapEntity.getPath();
SignatureExtract signatureExtract = new SignatureExtract(id, path);
// signature
String signature = signatureExtract.signature();
// Save the result in the same directory as the pcap file, named <pcap_id>_signature.json
String parentPath = T.FileUtil.getParent(path, 1);
File signatureFile = T.FileUtil.file(parentPath, id + "_signature.json");
T.FileUtil.del(signatureFile);
T.FileUtil.writeUtf8String(signature, signatureFile);
// TODO
}
/**
* update pcap status
*
* @param status
*/
private void updatePcapStatus(String status) {
pcapService.update(new LambdaUpdateWrapper<PcapEntity>()
.set(PcapEntity::getStatus, status)
.eq(PcapEntity::getId, pcapEntity.getId())
);
}
}
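The parser writes the extractor output next to the pcap as <id>_signature.json. A hedged sketch of reading that file back with Hutool follows; the T helper in this codebase appears to wrap Hutool, and the class and method names below are illustrative.

import cn.hutool.core.io.FileUtil;
import cn.hutool.json.JSONArray;
import cn.hutool.json.JSONUtil;

import java.io.File;

public class SignatureResultReaderExample {

    // Reads the JSON array written by PcapParserThread#parser(): one object per TCP/UDP stream.
    public static JSONArray readSignature(String pcapId, String pcapPath) {
        String parentPath = FileUtil.getParent(pcapPath, 1);   // same directory as the pcap
        File signatureFile = FileUtil.file(parentPath, pcapId + "_signature.json");
        return JSONUtil.parseArray(FileUtil.readUtf8String(signatureFile));
    }
}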

View File

@@ -0,0 +1,179 @@
package net.geedge.asw.module.runner.util;
import cn.hutool.json.JSONArray;
import cn.hutool.json.JSONConfig;
import cn.hutool.log.Log;
import lombok.Data;
import lombok.NoArgsConstructor;
import net.geedge.asw.common.config.SpringContextUtils;
import net.geedge.asw.common.util.ASWException;
import net.geedge.asw.common.util.T;
import org.apache.commons.lang3.time.StopWatch;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
@Data
@NoArgsConstructor
public class SignatureExtract {
private static final Log log = Log.get();
private String id;
private String path;
private String tsharkPath = "/usr/bin/tshark";
public SignatureExtract(String id, String path) {
this.id = id;
this.path = path;
}
/**
* signature
*
* @return
*/
public String signature() {
log.info("[signature] [begin] [id: {}] [path: {}]", id, path);
StopWatch sw = new StopWatch();
sw.start();
try {
this.tsharkPath = SpringContextUtils.getProperty("tshark.path", "/usr/bin/tshark");
log.info("[signature] [tshark path: {}]", this.tsharkPath);
SignatureUtil signatureObject = new SignatureUtil(this.tsharkPath);
signatureObject.getStreamSignatureFromTshrak(path);
List<Map<String, String>> allFrameSignatureDictList = signatureObject.getOutputDictList();
// Get basic information of TCP data streams
List<Map<String, Object>> tcpStreamBasicInfoList = SignatureUtil.getTCPStreamBaseInfo(allFrameSignatureDictList);
List<Map<String, Object>> tcpStreamAllInfoList = T.ListUtil.list(false, tcpStreamBasicInfoList);
// Get other information of TCP data streams
// Processing data stream by stream
for (int i = 0; i < tcpStreamAllInfoList.size(); i++) {
String streamID = T.MapUtil.getStr(tcpStreamAllInfoList.get(i), "StreamID");
// Get all the Frame IDs of the data stream
List<Map<String, String>> tcpFrameSignatureList = signatureObject.getOneTcpFrameSignatureList(streamID);
// Merge signature information from all Frame IDs
// TCP data flow analysis
this.tcpDataFlowAnalysis(signatureObject, tcpStreamAllInfoList.get(i), tcpFrameSignatureList);
// General data flow analysis (common, ip, dns, http, ssl)
this.generalDataFlowAnalysis(signatureObject, tcpStreamAllInfoList.get(i), tcpFrameSignatureList);
}
// Get basic information of UDP data streams
List<Map<String, Object>> udpStreamBaseInfo = SignatureUtil.getUDPStreamBaseInfo(allFrameSignatureDictList);
List<Map<String, Object>> udpStreamAllInfoList = T.ListUtil.list(false, udpStreamBaseInfo);
// Get other information of UDP data streams
// Processing data stream by stream
for (int i = 0; i < udpStreamAllInfoList.size(); i++) {
String streamID = T.MapUtil.getStr(udpStreamAllInfoList.get(i), "StreamID");
// Get all the Frame IDs of the data stream
List<Map<String, String>> udpFrameSignatureList = signatureObject.getOneUdpFrameSignatureList(streamID);
// Merge signature information from all Frame IDs
// UDP data flow analysis
this.udpDataFlowAnalysis(signatureObject, udpStreamAllInfoList.get(i), udpFrameSignatureList);
// General data flow analysis (common, ip, dns, http, ssl)
this.generalDataFlowAnalysis(signatureObject, udpStreamAllInfoList.get(i), udpFrameSignatureList);
}
// result
List<Object> resultOutputDict = T.ListUtil.list(true);
resultOutputDict.addAll(tcpStreamAllInfoList);
resultOutputDict.addAll(udpStreamAllInfoList);
JSONConfig jsonConfig = new JSONConfig();
jsonConfig.setKeyComparator(Comparator.comparing(String::toString));
JSONArray jsonArray = new JSONArray(resultOutputDict, jsonConfig);
return jsonArray.toJSONString(0);
} catch (Exception e) {
log.error(e, "[signature] [error] [id: {}] [path: {}]", id, path);
throw new ASWException("pcap file parse error. pcap id: " + id);
} finally {
sw.stop();
log.info("[signature] [finshed] [id: {}] [Run Time: {}]", id, sw.toString());
}
}
/**
* data field
*
* @param signatureObject
* @param streamDict
* @param frameSignatureList
*/
private void generalDataFlowAnalysis(SignatureUtil signatureObject, Map<String, Object> streamDict, List<Map<String, String>> frameSignatureList) {
// common
streamDict.put("common.server_fqdn", signatureObject.ssl_extensions_server_name(frameSignatureList));
streamDict.put("common.app_id", new String[]{"unknow"});
if (T.MapUtil.getStr(frameSignatureList.get(0), "ip.proto").equals("6")) {
streamDict.put("srcport", signatureObject.tcp_srcport(frameSignatureList));
streamDict.put("dstport", signatureObject.tcp_dstport(frameSignatureList));
} else {
streamDict.put("srcport", signatureObject.udp_srcport(frameSignatureList));
streamDict.put("dstport", signatureObject.udp_dstport(frameSignatureList));
}
// ip
streamDict.put("ip.src", signatureObject.ip_src(frameSignatureList));
streamDict.put("ip.dst", signatureObject.ip_dst(frameSignatureList));
streamDict.put("ip.proto", signatureObject.ip_proto(frameSignatureList));
streamDict.put("heartbeat_flag", signatureObject.heartbeat_flag(frameSignatureList));
// dns
streamDict.put("dns.qry.name", signatureObject.dns_qry_name(frameSignatureList));
// http
streamDict.put("http.request.full_uri", signatureObject.http_request_full_uri(frameSignatureList));
streamDict.put("http.request.header", signatureObject.http_request_header(frameSignatureList));
streamDict.put("http.response.header", signatureObject.http_response_header(frameSignatureList));
// ssl
streamDict.put("ssl.handshake.certificate.algorithm_identifier", signatureObject.ssl_algorithm_identifier(frameSignatureList));
streamDict.put("ssl.handshake.certificate.serial_number", signatureObject.ssl_serial_number(frameSignatureList));
streamDict.put("ssl.handshake.certificate.issuer_common_name", signatureObject.ssl_issuer_common_name(frameSignatureList));
streamDict.put("ssl.handshake.certificate.issuer_organization_name", signatureObject.ssl_issuer_organization_name(frameSignatureList));
streamDict.put("ssl.handshake.certificate.issuer_country_name", signatureObject.ssl_issuer_country_name(frameSignatureList));
streamDict.put("ssl.handshake.certificate.subject_common_name", signatureObject.ssl_subject_common_name(frameSignatureList));
streamDict.put("ssl.handshake.certificate.subject_organization_name", signatureObject.ssl_subject_organization_name(frameSignatureList));
streamDict.put("ssl.handshake.certificate.subject_country_name", signatureObject.ssl_subject_country_name(frameSignatureList));
streamDict.put("ssl.handshake.certificate.not_valid_before", signatureObject.ssl_not_valid_before(frameSignatureList));
streamDict.put("ssl.handshake.certificate.not_valid_after", signatureObject.ssl_not_valid_after(frameSignatureList));
streamDict.put("ssl.handshake.certificate.algorithm_id", signatureObject.ssl_algorithm_id(frameSignatureList));
streamDict.put("ssl.analysis.ja3", signatureObject.ssl_ja3(frameSignatureList));
streamDict.put("ssl.analysis.sni_absent", signatureObject.ssl_sni_absent(frameSignatureList));
streamDict.put("ssl.analysis.ech_enabled", signatureObject.ssl_ech_enabled(frameSignatureList));
streamDict.put("ssl.analysis.esni_enabled", signatureObject.ssl_analysis_esni_enabled(frameSignatureList));
}
/**
* udp
*
* @param signatureObject
* @param streamDict
* @param frameSignatureList
*/
private void udpDataFlowAnalysis(SignatureUtil signatureObject, Map<String, Object> streamDict, List<Map<String, String>> frameSignatureList) {
streamDict.put("udp.payload.c2s_first_data", signatureObject.udp_c2s_first_data(frameSignatureList));
streamDict.put("udp.payload.s2c_first_data", signatureObject.udp_s2c_first_data(frameSignatureList));
streamDict.put("udp.payload.c2s_first_data_len", signatureObject.udp_c2s_first_data_len(frameSignatureList));
streamDict.put("udp.payload.s2c_first_data_len", signatureObject.udp_s2c_first_data_len(frameSignatureList));
streamDict.put("udp.payload", signatureObject.udp_get_payload(frameSignatureList));
}
/**
* tcp
*
* @param signatureObject
* @param streamDict
* @param frameSignatureList
*/
private void tcpDataFlowAnalysis(SignatureUtil signatureObject, Map<String, Object> streamDict, List<Map<String, String>> frameSignatureList) {
streamDict.put("tcp.payload.c2s_first_data", signatureObject.tcp_c2s_first_data(frameSignatureList));
streamDict.put("tcp.payload.s2c_first_data", signatureObject.tcp_s2c_first_data(frameSignatureList));
streamDict.put("tcp.payload.c2s_first_data_len", signatureObject.tcp_c2s_first_data_len(frameSignatureList));
streamDict.put("tcp.payload.s2c_first_data_len", signatureObject.tcp_s2c_first_data_len(frameSignatureList));
streamDict.put("tcp.payload", signatureObject.tcp_get_payload(frameSignatureList));
}
}
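SignatureExtract can also be driven directly, outside the parser thread. A minimal hedged sketch is shown below; it assumes tshark is installed, that a Spring context is available so the tshark.path property lookup inside signature() works, and uses placeholder id and path values.

import net.geedge.asw.module.runner.util.SignatureExtract;

public class SignatureExtractExample {

    public static void printSignature() {
        // The tshark path is resolved from the `tshark.path` property, falling back to /usr/bin/tshark.
        SignatureExtract extract = new SignatureExtract("pcap-001", "/tmp/sample.pcap");
        String json = extract.signature();   // JSON array with one object per TCP/UDP stream
        System.out.println(json);
    }
}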

File diff suppressed because it is too large

View File

@@ -5,7 +5,6 @@
<resultMap type="net.geedge.asw.module.runner.entity.JobEntity" id="jobResultMap">
<id property="id" column="id"/>
<result property="id" column="id"/>
<result property="playbookId" column="playbook_id"/>
<result property="packageId" column="package_id"/>
<result property="runnerId" column="runner_id"/>

View File

@@ -0,0 +1,126 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="net.geedge.asw.module.runner.dao.PcapDao">
<resultMap type="net.geedge.asw.module.runner.entity.PcapEntity" id="pcapResultMap">
<id property="id" column="id"/>
<result property="name" column="name"/>
<result property="tags" column="tags"/>
<result property="description" column="description"/>
<result property="path" column="path"/>
<result property="size" column="size"/>
<result property="connections" column="connections"/>
<result property="hosts" column="hosts"/>
<result property="md5" column="md5"/>
<result property="connectionTimeFirst" column="connection_time_first"/>
<result property="connectionTimeLast" column="connection_time_last"/>
<result property="protocols" column="protocols"/>
<result property="status" column="status"/>
<result property="createTimestamp" column="create_timestamp"/>
<result property="createUserId" column="create_user_id"/>
<result property="workspaceId" column="workspace_id"/>
<result property="jobId" column="jobId"/>
<association property="application" columnPrefix="app_" javaType="net.geedge.asw.module.app.entity.ApplicationEntity">
<id property="id" column="id"/>
<result property="name" column="name"/>
</association>
<association property="pkg" columnPrefix="pkg_" javaType="net.geedge.asw.module.app.entity.PackageEntity">
<id property="id" column="id"/>
<result property="platform" column="platform"/>
<result property="identifier" column="identifier"/>
<result property="version" column="version"/>
<result property="logo" column="logo"/>
</association>
<association property="runner" columnPrefix="run_" javaType="net.geedge.asw.module.runner.entity.RunnerEntity">
<id property="id" column="id"/>
<result property="name" column="name"/>
</association>
<association property="playbook" columnPrefix="pb_" javaType="net.geedge.asw.module.runner.entity.PlaybookEntity">
<id property="id" column="id"/>
<result property="name" column="name"/>
</association>
</resultMap>
<select id="queryList" resultMap="pcapResultMap">
SELECT
pcap.*,
job.id AS jobId,
app.id AS app_id,
app.name AS app_name,
pkg.id AS pkg_id,
pkg.platform AS pkg_platform,
pkg.version AS pkg_version,
pkg.logo AS pkg_logo,
pkg.identifier AS pkg_identifier,
run.id AS run_id,
run.name AS run_name,
pb.id AS pb_id,
pb.name AS pb_name
FROM
pcap pcap
left join job job on pcap.id = job.pcap_id
LEFT JOIN runner run ON job.runner_id = run.id
LEFT JOIN package pkg ON job.package_id = pkg.id
LEFT JOIN playbook pb ON job.playbook_id = pb.id
LEFT JOIN application app ON pb.app_id = app.id
LEFT JOIN workbook_resource wr ON pcap.id = wr.resource_id AND wr.resource_type = 'pcap'
<where>
<if test="params.ids != null and params.ids != ''">
pcap.id in
<foreach item="id" collection="params.ids.split(',')" separator="," open="(" close=")">#{id}</foreach>
</if>
<if test="params.jobIds != null and params.jobIds != ''">
AND job.id in
<foreach item="id" collection="params.jobIds.split(',')" separator="," open="(" close=")">#{id}</foreach>
</if>
<if test="params.appIds != null and params.appIds != ''">
AND app.id in
<foreach item="id" collection="params.appIds.split(',')" separator="," open="(" close=")">#{id}</foreach>
</if>
<if test="params.packageIds != null and params.packageIds != ''">
AND pkg.id in
<foreach item="id" collection="params.packageIds.split(',')" separator="," open="(" close=")">#{id}</foreach>
</if>
<if test="params.runnerIds != null and params.runnerIds != ''">
AND run.id in
<foreach item="id" collection="params.runnerIds.split(',')" separator="," open="(" close=")">#{id}</foreach>
</if>
<if test="params.playbooks != null and params.playbooks != ''">
AND pb.id in
<foreach item="id" collection="params.playbooks.split(',')" separator="," open="(" close=")">#{id}</foreach>
</if>
<if test="params.workbookId != null and params.workbookId != ''">
AND wr.workbook_id = #{params.workbookId}
</if>
<if test="params.workspaceId != null and params.workspaceId != ''">
AND pcap.workspace_id = #{params.workspaceId}
</if>
</where>
GROUP BY
pcap.id
<if test="params.orderBy == null or params.orderBy == ''">
ORDER BY pcap.id
</if>
</select>
</mapper>

View File

@@ -224,8 +224,8 @@ CREATE TABLE `pcap` (
`connections` bigint(20) NOT NULL DEFAULT 0 COMMENT 'number of connections',
`hosts` bigint(20) NOT NULL DEFAULT 0 COMMENT 'number of hosts (IP addresses)',
`md5` varchar(64) NOT NULL DEFAULT '' COMMENT 'digest; the file MD5 is used to detect an already-uploaded pcap, in which case the existing id is returned',
`connection_time_first` bigint(20) NOT NULL DEFAULT (UNIX_TIMESTAMP(NOW()) * 1000) COMMENT 'first connection time',
`connection_time_last` bigint(20) NOT NULL DEFAULT (UNIX_TIMESTAMP(NOW()) * 1000) COMMENT 'last connection time',
`connection_time_first` bigint(20) NOT NULL DEFAULT -1 COMMENT 'first connection time',
`connection_time_last` bigint(20) NOT NULL DEFAULT -1 COMMENT 'last connection time',
`protocols` varchar(64) NOT NULL DEFAULT '' COMMENT 'protocols contained, comma-separated',
`status` varchar(64) NOT NULL DEFAULT '' COMMENT 'status; allowed values: Uploaded, Analyzing, Completed',
`create_timestamp` bigint(20) NOT NULL COMMENT 'creation timestamp',