feat: adjust pcap CRUD endpoints

shizhendong
2024-07-30 13:53:59 +08:00
parent 4f8084eaf8
commit 4a2d7f9adf
14 changed files with 90 additions and 117 deletions

View File

@@ -1,6 +1,7 @@
package net.geedge.asw.module.runner.controller;
import cn.hutool.log.Log;
import com.baomidou.mybatisplus.core.conditions.update.LambdaUpdateWrapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import net.geedge.asw.common.util.R;
import net.geedge.asw.common.util.RCode;
@@ -8,10 +9,12 @@ import net.geedge.asw.common.util.T;
import net.geedge.asw.module.runner.entity.PcapEntity;
import net.geedge.asw.module.runner.service.IPcapService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import java.io.IOException;
import java.util.List;
import java.util.Map;
@RestController
@@ -39,20 +42,63 @@ public class PcapController {
}
@PostMapping
public R add(@RequestParam(value = "file", required = true) MultipartFile file,
@RequestParam(required = false) String tags,
@Transactional(rollbackFor = Exception.class)
public R add(@RequestParam(value = "files", required = true) List<MultipartFile> fileList,
@RequestParam(value = "descriptions", required = false) List<String> descriptionList,
@RequestParam(required = false) String workbookId,
@RequestParam(required = false) String workspaceId) throws IOException {
T.VerifyUtil.is(workspaceId).notEmpty(RCode.WORKSPACE_ID_CANNOT_EMPTY);
PcapEntity pcapEntity = pcapService.savePcap(file.getResource(), tags, workbookId, workspaceId);
return R.ok().putData("id", pcapEntity.getId());
List<Object> recordList = T.ListUtil.list(true);
for (int i = 0; i < fileList.size(); i++) {
MultipartFile file = fileList.get(i);
String description = T.StrUtil.emptyToDefault(T.CollUtil.get(descriptionList, i), "");
PcapEntity pcapEntity = pcapService.savePcap(file.getResource(), description, workbookId, workspaceId);
recordList.add(
T.MapUtil.builder()
.put("id", pcapEntity.getId())
.build()
);
}
return R.ok().putData("records", recordList);
}
@DeleteMapping("/{id}")
public R delete(@PathVariable("id") String id) {
pcapService.deletePcap(id);
@PutMapping
@Transactional(rollbackFor = Exception.class)
public R update(@RequestBody List<Map<String, String>> body) {
List<Object> recordList = T.ListUtil.list(true);
for (Map<String, String> map : body) {
String id = T.MapUtil.getStr(map, "id", "");
if (T.StrUtil.isEmpty(id)) {
continue;
}
String description = T.MapUtil.getStr(map, "description", "");
pcapService.update(new LambdaUpdateWrapper<PcapEntity>()
.eq(PcapEntity::getId, id)
.set(PcapEntity::getDescription, description)
);
recordList.add(
T.MapUtil.builder()
.put("id", id)
.build()
);
}
return R.ok().putData("records", recordList);
}
@DeleteMapping
public R delete(String[] ids) {
T.VerifyUtil.is(ids).notEmpty();
pcapService.deletePcap(ids);
return R.ok();
}
@PutMapping("/parse2session")
public R parse2session(String[] ids) {
T.VerifyUtil.is(ids).notEmpty();
// pcapService.parse2session(ids);
return R.ok();
}
}
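
The reworked endpoints all operate on batches: POST takes one or more multipart "files" parts with optional per-file "descriptions", PUT takes a JSON array of {id, description} objects, and DELETE takes the ids as a query parameter. Below is a minimal client sketch, not part of the commit; the host, port, and the /pcap request path are assumptions, since the controller's @RequestMapping prefix is outside this hunk. Only the JSON-based PUT and the query-parameter DELETE are exercised.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class PcapClientSketch {
    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        String base = "http://localhost:8080/pcap"; // assumed mapping prefix

        // PUT: batch update of descriptions; the response echoes the updated ids
        // under "records", mirroring the controller's recordList.
        HttpRequest update = HttpRequest.newBuilder()
                .uri(URI.create(base))
                .header("Content-Type", "application/json")
                .PUT(HttpRequest.BodyPublishers.ofString(
                        "[{\"id\":\"p1\",\"description\":\"captured on eth0\"}]"))
                .build();
        System.out.println(client.send(update, HttpResponse.BodyHandlers.ofString()).body());

        // DELETE: Spring binds the comma-separated ids query parameter to String[] ids.
        HttpRequest delete = HttpRequest.newBuilder()
                .uri(URI.create(base + "?ids=p1,p2"))
                .DELETE()
                .build();
        System.out.println(client.send(delete, HttpResponse.BodyHandlers.ofString()).body());
    }
}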

View File

@@ -1,10 +0,0 @@
package net.geedge.asw.module.runner.dao;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import net.geedge.asw.module.runner.entity.DecodeRecordEntity;
import org.apache.ibatis.annotations.Mapper;
@Mapper
public interface DecodeRecordDao extends BaseMapper<DecodeRecordEntity>{
}

View File

@@ -1,19 +0,0 @@
package net.geedge.asw.module.runner.entity;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;
@Data
@TableName("decode_record")
public class DecodeRecordEntity {
@TableId(type = IdType.ASSIGN_UUID)
private String id;
private String pcapId;
private Long streamId;
private String streamAttributes;
private String workspaceId;
}

View File

@@ -16,16 +16,10 @@ public class PcapEntity {
@TableId(type = IdType.ASSIGN_UUID)
private String id;
private String name;
private String tags;
private String description;
private String path;
private Long size;
private Long connections;
private Long hosts;
private String md5;
private Long connectionTimeFirst;
private Long connectionTimeLast;
private String protocols;
private String status;
private Long createTimestamp;
private String createUserId;

View File

@@ -1,8 +0,0 @@
package net.geedge.asw.module.runner.service;
import com.baomidou.mybatisplus.extension.service.IService;
import net.geedge.asw.module.runner.entity.DecodeRecordEntity;
public interface IDecodeRecordService extends IService<DecodeRecordEntity>{
}

View File

@@ -17,6 +17,8 @@ public interface IPcapService extends IService<PcapEntity>{
PcapEntity savePcap(Resource fileResource,String... params);
void deletePcap(String id);
void deletePcap(String... ids);
void parse2session(String... ids);
}

View File

@@ -1,13 +0,0 @@
package net.geedge.asw.module.runner.service.impl;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import net.geedge.asw.module.runner.dao.DecodeRecordDao;
import net.geedge.asw.module.runner.entity.DecodeRecordEntity;
import net.geedge.asw.module.runner.service.IDecodeRecordService;
import org.springframework.stereotype.Service;
@Service
public class DecodeRecordServiceImpl extends ServiceImpl<DecodeRecordDao, DecodeRecordEntity> implements IDecodeRecordService {
}

View File

@@ -6,6 +6,7 @@ import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.conditions.update.LambdaUpdateWrapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import net.geedge.asw.common.util.ASWException;
import net.geedge.asw.common.util.RCode;
import net.geedge.asw.common.util.T;
import net.geedge.asw.module.app.entity.ApplicationEntity;
@@ -21,7 +22,6 @@ import net.geedge.asw.module.runner.service.IJobService;
import net.geedge.asw.module.runner.service.IPcapService;
import net.geedge.asw.module.runner.service.IPlaybookService;
import net.geedge.asw.module.runner.service.IRunnerService;
import net.geedge.asw.module.runner.util.PcapParserThread;
import net.geedge.asw.module.runner.util.RunnerConstant;
import net.geedge.asw.module.workbook.service.IWorkbookResourceService;
import net.geedge.asw.module.workbook.util.WorkbookConstant;
@@ -101,7 +101,7 @@ public class PcapServiceImpl extends ServiceImpl<PcapDao, PcapEntity> implements
@Override
public PcapEntity savePcap(Resource fileResource, String... params) {
String tags = T.ArrayUtil.get(params, 0);
String description = T.ArrayUtil.get(params, 0);
String workbookId = T.ArrayUtil.get(params, 1);
String workspaceId = T.ArrayUtil.get(params, 2);
String createUserId = T.StrUtil.emptyToDefault(T.ArrayUtil.get(params, 3), StpUtil.getLoginIdAsString());
@@ -109,7 +109,7 @@ public class PcapServiceImpl extends ServiceImpl<PcapDao, PcapEntity> implements
PcapEntity entity = new PcapEntity();
try {
entity.setName(fileResource.getFilename());
entity.setTags(T.StrUtil.emptyToDefault(tags, ""));
entity.setDescription(description);
byte[] bytes = fileResource.getInputStream().readAllBytes();
entity.setSize((long) bytes.length);
@@ -141,32 +141,35 @@ public class PcapServiceImpl extends ServiceImpl<PcapDao, PcapEntity> implements
// workbook resource
workbookResourceService.saveResource(workbookId, entity.getId(), WorkbookConstant.ResourceType.PCAP.getValue());
// parser
PcapParserThread pcapParserThread = new PcapParserThread();
pcapParserThread.setPcapEntity(entity);
T.ThreadUtil.execAsync(pcapParserThread);
} catch (IOException e) {
log.error(e, "[savePcap] [error] [workspaceId: {}]", workspaceId);
throw new ASWException(RCode.ERROR);
}
return entity;
}
@Override
@Transactional(rollbackFor = Exception.class)
public void deletePcap(String pcapId) {
PcapEntity pcap = this.getById(pcapId);
public void deletePcap(String... ids) {
for (String id : ids) {
PcapEntity pcap = this.getById(id);
// remove file
T.FileUtil.del(pcap.getPath());
// remove
this.removeById(pcapId);
this.removeById(id);
// update job pcap_id
jobService.update(new LambdaUpdateWrapper<JobEntity>()
.set(JobEntity::getPcapId, "")
.eq(JobEntity::getPcapId, pcapId)
.eq(JobEntity::getPcapId, id)
);
}
}
@Override
public void parse2session(String... ids) {
}
}
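
For reference, a minimal caller sketch (not part of this change) of how PcapServiceImpl now positions the varargs of savePcap after the rename from tags to description; the class name and file path below are illustrative only.

import net.geedge.asw.module.runner.entity.PcapEntity;
import net.geedge.asw.module.runner.service.IPcapService;
import org.springframework.core.io.FileSystemResource;
import org.springframework.core.io.Resource;

class SavePcapSketch {
    // params[0] = description, params[1] = workbookId, params[2] = workspaceId,
    // params[3] = createUserId (optional; defaults to the logged-in user).
    private final IPcapService pcapService;

    SavePcapSketch(IPcapService pcapService) {
        this.pcapService = pcapService;
    }

    PcapEntity upload(String workbookId, String workspaceId) {
        // The description string replaces the former tags argument in position 0.
        Resource resource = new FileSystemResource("/tmp/example.pcap"); // hypothetical path
        return pcapService.savePcap(resource, "captured on eth0", workbookId, workspaceId);
    }
}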

View File

@@ -36,9 +36,11 @@ public class RunnerConstant {
public enum PcapStatus {
UPLOADED("Uploaded"),
ANALYZING("Analyzing"),
PARSING("Parsing"),
COMPLETED("Completed");
INDEXED("Indexed"),
ERROR("Error");
private String value;

View File

@@ -15,6 +15,7 @@ import java.util.List;
import java.util.Map;
@Data
@Deprecated
@NoArgsConstructor
public class SignatureExtract {

View File

@@ -23,6 +23,7 @@ import java.util.stream.Collectors;
* Parses pcap files with tshark
*/
@Data
@Deprecated
public class SignatureUtil {
private static final Log log = Log.get();

View File

@@ -12,8 +12,7 @@ public class WorkbookConstant {
PACKAGE("package"),
SIGNATURE("signature"),
JOB("job"),
PCAP("pcap"),
DECODE_RECORD("decode_record");
PCAP("pcap");
private String value;

View File

@@ -6,16 +6,10 @@
<resultMap type="net.geedge.asw.module.runner.entity.PcapEntity" id="pcapResultMap">
<id property="id" column="id"/>
<result property="name" column="name"/>
<result property="tags" column="tags"/>
<result property="description" column="description"/>
<result property="path" column="path"/>
<result property="size" column="size"/>
<result property="connections" column="connections"/>
<result property="hosts" column="hosts"/>
<result property="md5" column="md5"/>
<result property="connectionTimeFirst" column="connection_time_first"/>
<result property="connectionTimeLast" column="connection_time_last"/>
<result property="protocols" column="protocols"/>
<result property="status" column="status"/>
<result property="createTimestamp" column="create_timestamp"/>
<result property="createUserId" column="create_user_id"/>
@@ -81,6 +75,10 @@
<foreach item="id" collection="params.ids.split(',')" separator="," open="(" close=")">#{id}</foreach>
</if>
<if test="params.q != null and params.q != ''">
AND ( locate(#{params.q}, pcap.name) OR locate(#{params.q}, pcap.description) )
</if>
<if test="params.jobIds != null and params.jobIds != ''">
AND job.id in
<foreach item="id" collection="params.jobIds.split(',')" separator="," open="(" close=")">#{id}</foreach>

View File

@@ -217,16 +217,10 @@ DROP TABLE IF EXISTS `pcap`;
CREATE TABLE `pcap` (
`id` varchar(64) NOT NULL COMMENT 'primary key',
`name` varchar(256) NOT NULL DEFAULT '' COMMENT 'file name',
`tags` varchar(256) NOT NULL DEFAULT '' COMMENT 'tags',
`description` text NOT NULL DEFAULT '' COMMENT 'description',
`path` varchar(64) NOT NULL DEFAULT '' COMMENT 'path of the pcap file',
`size` bigint(20) NOT NULL DEFAULT 0 COMMENT 'file size',
`connections` bigint(20) NOT NULL DEFAULT 0 COMMENT 'number of connections',
`hosts` bigint(20) NOT NULL DEFAULT 0 COMMENT 'number of IP hosts',
`md5` varchar(64) NOT NULL DEFAULT '' COMMENT 'digest; if a file with the same md5 already exists, the existing record id is returned',
`connection_time_first` bigint(20) NOT NULL DEFAULT -1 COMMENT 'first connection time',
`connection_time_last` bigint(20) NOT NULL DEFAULT -1 COMMENT 'last connection time',
`protocols` varchar(64) NOT NULL DEFAULT '' COMMENT 'protocols contained in the capture, comma separated',
`status` varchar(64) NOT NULL DEFAULT '' COMMENT 'status; allowed values: Uploaded, Analyzing, Completed',
`create_timestamp` bigint(20) NOT NULL COMMENT 'creation timestamp',
`create_user_id` varchar(64) NOT NULL COMMENT 'creator user id',
@@ -234,23 +228,6 @@ CREATE TABLE `pcap` (
PRIMARY KEY (`id`) USING BTREE,
KEY `idx_name` (`name`) USING BTREE,
KEY `idx_md5` (`md5`) USING BTREE,
KEY `idx_tags` (`tags`) USING BTREE,
KEY `idx_workspace_id` (`workspace_id`) USING BTREE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/**
* Add the decode_record table
*/
DROP TABLE IF EXISTS `decode_record`;
CREATE TABLE `decode_record` (
`id` varchar(64) NOT NULL COMMENT 'primary key',
`pcap_id` varchar(64) NOT NULL DEFAULT '' COMMENT 'pcap file id',
`stream_id` bigint(20) NOT NULL DEFAULT 0 COMMENT 'stream id',
`stream_attributes` text NOT NULL DEFAULT '' COMMENT 'stream attributes',
`workspace_id` varchar(64) NOT NULL DEFAULT '' COMMENT 'workspace id',
PRIMARY KEY (`id`) USING BTREE,
KEY `idx_pcap_id` (`pcap_id`) USING BTREE,
KEY `idx_stream_id` (`stream_id`) USING BTREE,
KEY `idx_workspace_id` (`workspace_id`) USING BTREE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
@@ -359,7 +336,7 @@ DROP TABLE IF EXISTS `workbook_resource`;
CREATE TABLE `workbook_resource` (
`id` varchar(64) NOT NULL COMMENT 'primary key',
`workbook_id` varchar(64) NOT NULL COMMENT 'workbook ID',
`resource_type` varchar(64) NOT NULL COMMENT 'resource type; allowed values: package,signature,job,pcap,decode_record',
`resource_type` varchar(64) NOT NULL COMMENT 'resource type; allowed values: package,signature,job,pcap',
`resource_id` varchar(64) NOT NULL COMMENT 'resource id',
PRIMARY KEY (`id`) USING BTREE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;