8 Commits

Author SHA1 Message Date
shizhendong
04ddd58d02 feat: ASW-205 add application rename API 2024-11-28 15:37:07 +08:00
zhangshuai
3611f9226c feat: ASW-199 adjust cron validation 2024-11-28 15:02:42 +08:00
zhangshuai
0c5c22f567 feat: ASW-199 supplementary rcode commit 2024-11-28 14:55:49 +08:00
zhangshuai
e91b0639ce feat: ASW-199 job API development 2024-11-28 14:33:59 +08:00
shizhendong
feedb7a4b4 feat: add workspace_delete menu button 2024-11-28 14:20:38 +08:00
zhangshuai
28909428fa fix: job not removed from queue after cancellation 2024-11-28 10:30:49 +08:00
zhangshuai
b398e32f2d fix: add job_run and job_edit buttons for job 2024-11-27 15:09:02 +08:00
zhangshuai
7cafa08aee fix: incorrect pcap file name on download 2024-11-26 16:13:47 +08:00
50 changed files with 830 additions and 539 deletions

View File

@@ -14,10 +14,10 @@ import net.geedge.asw.module.environment.entity.EnvironmentEntity;
import net.geedge.asw.module.environment.entity.EnvironmentSessionEntity;
import net.geedge.asw.module.environment.service.IEnvironmentService;
import net.geedge.asw.module.environment.service.IEnvironmentSessionService;
import net.geedge.asw.module.runner.entity.JobEntity;
import net.geedge.asw.module.runner.util.JobQueueManager;
import net.geedge.asw.module.runner.service.IJobService;
import net.geedge.asw.module.runner.util.RunnerConstant;
import net.geedge.asw.module.job.entity.JobEntity;
import net.geedge.asw.module.job.util.JobQueueManager;
import net.geedge.asw.module.job.service.IJobService;
import net.geedge.asw.module.job.util.JobConstant;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.stereotype.Component;
@@ -47,13 +47,13 @@ public class SetupRunner implements CommandLineRunner{
@Override
public void run(String... args) throws Exception {
log.info("Setup inited");
List<JobEntity> pendingJobs = jobService.list(new LambdaQueryWrapper<JobEntity>().eq(JobEntity::getStatus, RunnerConstant.JobStatus.PENDING.getValue()));
List<JobEntity> pendingJobs = jobService.list(new LambdaQueryWrapper<JobEntity>().eq(JobEntity::getStatus, JobConstant.JobStatus.PENDING.getValue()));
pendingJobs.forEach(jobQueueManager::addJob);
log.info("[SetupRunner] [init pending job to JobQueueManager]");
log.info("[SetupRunner] [begin interrupted running job]");
List<JobEntity> runningJobs = jobService.list(new LambdaQueryWrapper<JobEntity>().eq(JobEntity::getStatus, RunnerConstant.JobStatus.RUNNING.getValue()));
List<JobEntity> runningJobs = jobService.list(new LambdaQueryWrapper<JobEntity>().eq(JobEntity::getStatus, JobConstant.JobStatus.RUNNING.getValue()));
for (JobEntity runningJob : runningJobs) {
String id = runningJob.getId();
EnvironmentEntity environment = environmentService.getById(runningJob.getEnvId());
@@ -71,7 +71,7 @@ public class SetupRunner implements CommandLineRunner{
JSONObject result = T.JSONUtil.toBean(body, JSONObject.class);
JSONObject data = result.getJSONObject("data");
String status = data.getStr("status");
if (RunnerConstant.JobStatus.RUNNING.getValue().equals(status)){
if (JobConstant.JobStatus.RUNNING.getValue().equals(status)){
HttpRequest request = T.HttpUtil.createRequest(Method.DELETE, String.format("%s/api/v1/env/playbook/%s", url, runningJob.getId()));
request.header("Authorization", token);
request.execute();

View File

@@ -4,8 +4,8 @@ import cn.hutool.log.Log;
import jakarta.annotation.PostConstruct;
import net.geedge.asw.common.util.T;
import net.geedge.asw.module.environment.job.JobEnvironmentStatusChecker;
import net.geedge.asw.module.runner.job.JobPlaybookExecResultChecker;
import net.geedge.asw.module.runner.job.JobPlaybookExecutor;
import net.geedge.asw.module.job.job.JobPlaybookExecResultChecker;
import net.geedge.asw.module.job.job.JobPlaybookExecutor;
import net.geedge.asw.module.sys.service.ISysConfigService;
import org.quartz.*;
import org.springframework.beans.factory.annotation.Autowired;

View File

@@ -111,6 +111,16 @@ public class Constants {
*/
public static final List<String> ANDROID_PACKAGE_TYPE_LIST = T.ListUtil.of("xapk", "apk");
/**
* job cfg type
*/
public static final List<String> JOB_CFG_TYPE_LIST= T.ListUtil.of("asap", "cron");
/**
* job cfg status
*/
public static final List<String> JOB_CFG_STATUS_LIST= T.ListUtil.of("enabled", "disabled");
public static final String EMPTY_FILE_MD5 = "d41d8cd98f00b204e9800998ecf8427e";
/**

View File

@@ -106,6 +106,18 @@ public enum RCode {
ENVIRONMENT_ID_CANNOT_EMPTY(601005, "environment id cannot be empty"),
//job
JOB_CFG_TYPE_CANNOT_EMPTY(701001, "Job configuration type cannot be empty"),
JOB_CFG_STATUS_CANNOT_EMPTY(701002, "Job configuration status cannot be empty"),
JOB_CFG_STATUS_ERROR(701003, "Job configuration status error"),
JOB_CFG_TYPE_ERROR(701004, "Job configuration type error"),
JOB_CFG_CRON_CANNOT_EMPTY(701005, "Job configuration cron cannot be empty"),
JOB_CFG_CRON_ERROR(701006, "Job configuration cron is not a valid cron expression"),
JOB_CFG_NAME_ALREADY_EXISTS(701007, "Job configuration name already exists"),
JOB_CFG_NOT_EXIST(701008, "Job configuration does not exist"),
SUCCESS(200, "success"); // success

View File

@@ -15,6 +15,7 @@ import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Collection;
import java.util.List;
import java.util.Map;
@@ -57,6 +58,19 @@ public class ApplicationController {
return R.ok();
}
@PostMapping("/{workspaceId}/branch/{branchName}/application/{applicationName}/rename")
public R renameApplication(@PathVariable("workspaceId") String workspaceId,
@PathVariable("branchName") String branchName,
@PathVariable("applicationName") String applicationName,
@RequestBody Map<String, String> body) {
String newName = T.MapUtil.getStr(body, "name");
T.VerifyUtil.is(newName).notEmpty(RCode.PARAM_CANNOT_EMPTY);
// url decode
applicationName = T.URLUtil.decode(applicationName, StandardCharsets.UTF_8);
applicationService.renameApplication(workspaceId, branchName, applicationName, newName);
return R.ok();
}
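For reference, a client call to the new rename endpoint could look like the sketch below. This is illustrative only: the controller-level route prefix for ApplicationController is not shown in this diff, so baseUrl is a placeholder, as are the token, workspace, branch and application names. The body carries the new name under the "name" key, and the application name path variable is URL-encoded because the controller decodes it.

// Hypothetical client sketch (not part of this change set); Hutool request style
// mirrors other call sites in this codebase.
String baseUrl = "<application-controller-prefix>";                 // not shown in this diff
String token = "<auth-token>";
String encodedOldName = T.URLUtil.encode("qq", StandardCharsets.UTF_8);
String url = String.format("%s/%s/branch/%s/application/%s/rename",
        baseUrl, "ws-1", "master", encodedOldName);
HttpRequest request = T.HttpUtil.createRequest(Method.POST, url);
request.header("Authorization", token);
request.body("{\"name\": \"qq-intl\"}");                            // read via T.MapUtil.getStr(body, "name")
HttpResponse response = request.execute();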
@PostMapping("/{workspaceId}/branch/{branchName}/application/commit")
public synchronized R updateApplication(@PathVariable("workspaceId") String workspaceId,

View File

@@ -16,6 +16,8 @@ public interface IApplicationService {
void newApplication(String workspaceId, String branch, String name);
void renameApplication(String workspaceId, String branch, String oldName, String newName);
void deleteApplication(String workspaceId, String branch, String name);
void updateApplication(String workspaceId, String branch, String lastCommitId, String message, List<Map<String, String>> updateContent);

View File

@@ -244,23 +244,21 @@ public class ApplicationServiceImpl implements IApplicationService {
File gitDir = workspaceService.getGitDir(workspaceId);
try (Repository repository = JGitUtils.openRepository(gitDir)) {
Map<String, ObjectId> filePathAndBlobIdMap = T.MapUtil.newHashMap(true);
for (String str : T.ListUtil.of("README.md", "meta.json", "signature.json")) {
String savePath = T.StrUtil.concat(true, "applications/", name, "/", str);
for (String filename : T.ListUtil.of("README.md", "meta.json", "signature.json")) {
String fileContent = T.StrUtil.EMPTY;
if ("meta.json".equals(str)) {
if ("meta.json".equals(filename)) {
JSONObject jsonObject = T.JSONUtil.parseObj(this.metaJsonTemplate);
jsonObject.set("id", T.StrUtil.uuid());
jsonObject.set("name", name);
jsonObject.set("longName", name);
fileContent = T.JSONUtil.parse(jsonObject).toJSONString(2);
}
if ("signature.json".equals(str)) {
if ("signature.json".equals(filename)) {
fileContent = this.signatureJsonTemplate;
}
ObjectId objectId = JGitUtils.insertBlobFileToDatabase(repository, fileContent.getBytes());
filePathAndBlobIdMap.put(savePath, objectId);
filePathAndBlobIdMap.put(JGitUtils.buildFilePath(name, filename), objectId);
}
try (ObjectInserter inserter = repository.getObjectDatabase().newInserter();
@@ -310,6 +308,79 @@ public class ApplicationServiceImpl implements IApplicationService {
}
}
@Override
public void renameApplication(String workspaceId, String branch, String oldName, String newName) {
log.info("[renameApplication] [begin] [workspaceId: {}] [branch: {}] [oldName: {}] [newName: {}]", workspaceId, branch, oldName, newName);
if (T.StrUtil.equals(oldName, newName)) {
log.warn("[renameApplication] [newName has not changed]");
return;
}
File gitDir = workspaceService.getGitDir(workspaceId);
try (Repository repository = JGitUtils.openRepository(gitDir);
TreeWalk treeWalk = new TreeWalk(repository);
RevWalk revWalk = new RevWalk(repository)) {
ObjectId branchRef = repository.resolve(branch);
treeWalk.addTree(revWalk.parseTree(branchRef));
treeWalk.setRecursive(true);
DirCache newTree = DirCache.newInCore();
DirCacheBuilder newTreeBuilder = newTree.builder();
boolean isCommit = false;
String prefix = JGitUtils.buildFilePrefix(oldName);
while (treeWalk.next()) {
String pathString = treeWalk.getPathString();
// rename file
if (pathString.startsWith(prefix)) {
isCommit = true;
String filename = treeWalk.getNameString();
String savePath = JGitUtils.buildFilePath(newName, filename);
if (T.StrUtil.equals("meta.json", filename)) {
// update the name and longName values in meta.json
ObjectLoader loader = repository.open(treeWalk.getObjectId(0));
String metaJsonStr = T.StrUtil.utf8Str(loader.getBytes());
JSONObject jsonObject = T.JSONUtil.parseObj(metaJsonStr);
jsonObject.set("name", newName);
jsonObject.set("longName", newName);
String content = T.JSONUtil.parse(jsonObject).toJSONString(2);
ObjectId blobId = JGitUtils.insertBlobFileToDatabase(repository, content.getBytes());
DirCacheEntry dirCacheEntry = JGitUtils.buildDirCacheEntry(savePath, treeWalk.getFileMode(0), blobId);
newTreeBuilder.add(dirCacheEntry);
} else {
DirCacheEntry dirCacheEntry = JGitUtils.buildDirCacheEntry(savePath, treeWalk.getFileMode(0), treeWalk.getObjectId(0));
newTreeBuilder.add(dirCacheEntry);
}
} else {
// other file
DirCacheEntry entry = JGitUtils.buildDirCacheEntry(pathString, treeWalk.getFileMode(0), treeWalk.getObjectId(0));
newTreeBuilder.add(entry);
}
}
newTreeBuilder.finish();
if (isCommit) {
try (ObjectInserter inserter = repository.getObjectDatabase().newInserter()) {
ObjectId newTreeId = newTree.writeTree(inserter);
String message = String.format("chore: rename application %s to %s", oldName, newName);
SysUserEntity loginUserEntity = userService.getById(StpUtil.getLoginIdAsString());
PersonIdent personIdent = JGitUtils.buildPersonIdent(loginUserEntity.getName());
JGitUtils.createCommit(repository, branch, newTreeId, message, personIdent);
}
} else {
log.warn("[renameApplication] [old application not found] [name: {}]", oldName);
}
} catch (IOException | ConcurrentRefUpdateException e) {
log.error(e, "[renameApplication] [error] [workspaceId: {}] [branch: {}] [oldName: {}] [newName: {}]", workspaceId, branch, oldName, newName);
throw new RuntimeException(e);
}
}
@Override
public void deleteApplication(String workspaceId, String branch, String name) {
File gitDir = workspaceService.getGitDir(workspaceId);
@@ -325,7 +396,7 @@ public class ApplicationServiceImpl implements IApplicationService {
DirCache newTree = DirCache.newInCore();
DirCacheBuilder newTreeBuilder = newTree.builder();
String appFilePrefixStr = T.StrUtil.concat(true, "applications/", name, "/");
String appFilePrefixStr = JGitUtils.buildFilePrefix(name);
while (treeWalk.next()) {
String pathString = treeWalk.getPathString();
if (!pathString.startsWith(appFilePrefixStr)) {
@@ -483,7 +554,7 @@ public class ApplicationServiceImpl implements IApplicationService {
@Override
public Map<Object, Object> infoApplicationFileContent(String workspaceId, String branch, String name, String commitId, String file) {
// applications/qq/meta.json
String path = T.StrUtil.concat(true, "applications/", name, "/", file);
String path = JGitUtils.buildFilePath(name, file);
Map<Object, Object> result = T.MapUtil.builder()
.put("path", path)
@@ -607,8 +678,8 @@ public class ApplicationServiceImpl implements IApplicationService {
// paths of the modified files
List<String> updateFilePath = T.ListUtil.list(true);
updateAppList.parallelStream().forEach(entity -> {
updateFilePath.add(T.StrUtil.concat(true, "applications/", entity.getName(), "/meta.json"));
updateFilePath.add(T.StrUtil.concat(true, "applications/", entity.getName(), "/signature.json"));
updateFilePath.add(JGitUtils.buildFilePath(entity.getName(), "meta.json"));
updateFilePath.add(JGitUtils.buildFilePath(entity.getName(), "signature.json"));
});
// build tree
@@ -643,7 +714,7 @@ public class ApplicationServiceImpl implements IApplicationService {
fileContent = T.JSONUtil.parse(tempJSONObject).toJSONString(2);
}
// save
String filePath = T.StrUtil.concat(true, "applications/", entity.getName(), "/", fileName);
String filePath = JGitUtils.buildFilePath(entity.getName(), fileName);
DirCacheEntry dirCacheEntry = new DirCacheEntry(filePath);
dirCacheEntry.setFileMode(FileMode.REGULAR_FILE);
@@ -669,7 +740,7 @@ public class ApplicationServiceImpl implements IApplicationService {
String fileContent = T.JSONUtil.parse(jsonObject).toJSONString(2);
ObjectId objectId = JGitUtils.insertBlobFileToDatabase(repository, fileContent.getBytes());
DirCacheEntry dirCacheEntry = JGitUtils.buildDirCacheEntry(T.StrUtil.concat(true, "applications/", entity.getName(), "/meta.json"), FileMode.REGULAR_FILE, objectId);
DirCacheEntry dirCacheEntry = JGitUtils.buildDirCacheEntry(JGitUtils.buildFilePath(entity.getName(), "meta.json"), FileMode.REGULAR_FILE, objectId);
newTreeBuilder.add(dirCacheEntry);
// signature.json
@@ -678,7 +749,7 @@ public class ApplicationServiceImpl implements IApplicationService {
String fileContent2 = T.JSONUtil.parse(jsonObject2).toJSONString(2);
ObjectId objectId2 = JGitUtils.insertBlobFileToDatabase(repository, fileContent2.getBytes());
DirCacheEntry dirCacheEntry2 = JGitUtils.buildDirCacheEntry(T.StrUtil.concat(true, "applications/", entity.getName(), "/signature.json"), FileMode.REGULAR_FILE, objectId2);
DirCacheEntry dirCacheEntry2 = JGitUtils.buildDirCacheEntry(JGitUtils.buildFilePath(entity.getName(), "signature.json"), FileMode.REGULAR_FILE, objectId2);
newTreeBuilder.add(dirCacheEntry2);
}
newTreeBuilder.finish();

View File

@@ -14,8 +14,8 @@ import net.geedge.asw.module.app.service.IPackageService;
import net.geedge.asw.module.app.util.ApkInfo;
import net.geedge.asw.module.app.util.ApkUtil;
import net.geedge.asw.module.app.util.PkgConstant;
import net.geedge.asw.module.runner.entity.JobEntity;
import net.geedge.asw.module.runner.service.IJobService;
import net.geedge.asw.module.job.entity.JobEntity;
import net.geedge.asw.module.job.service.IJobService;
import net.geedge.asw.module.sys.entity.SysUserEntity;
import net.geedge.asw.module.sys.service.ISysUserService;
import net.geedge.asw.module.workbook.service.IWorkbookResourceService;

View File

@@ -302,6 +302,27 @@ public class JGitUtils {
return commitList;
}
/**
* application file path prefix
*
* @param applicationName
* @return
*/
public static String buildFilePrefix(String applicationName) {
return T.StrUtil.concat(true, "applications/", applicationName, "/");
}
/**
* git file save path
*
* @param applicationName
* @param fileName
* @return
*/
public static String buildFilePath(String applicationName, String fileName) {
return T.StrUtil.concat(true, buildFilePrefix(applicationName), fileName);
}
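Taken together, the two helpers centralize the applications/<name>/<file> layout that was previously assembled inline with T.StrUtil.concat at every call site. A quick illustration with a placeholder application name:

// buildFilePrefix("qq")             -> "applications/qq/"
// buildFilePath("qq", "meta.json")  -> "applications/qq/meta.json"
String prefix = JGitUtils.buildFilePrefix("qq");
String metaPath = JGitUtils.buildFilePath("qq", "meta.json");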
/**
* build a DirCacheEntry
*
@@ -543,6 +564,14 @@ public class JGitUtils {
break;
}
case RENAME: {
if (0 != addedLines || 0 != deletedLines) {
Map<Object, Object> fileContent = getFileContent(repository, oldPath, diff.getOldId().toObjectId());
oldContent = T.MapUtil.getStr(fileContent, "content", "");
Map<Object, Object> fileContent1 = getFileContent(repository, newPath, diff.getNewId().toObjectId());
encoding = T.MapUtil.getStr(fileContent1, "encoding", "");
newContent = T.MapUtil.getStr(fileContent1, "content", "");
}
break;
}
default:

View File

@@ -18,9 +18,9 @@ import net.geedge.asw.module.environment.entity.EnvironmentSessionEntity;
import net.geedge.asw.module.environment.service.IEnvironmentService;
import net.geedge.asw.module.environment.service.IEnvironmentSessionService;
import net.geedge.asw.module.environment.util.EnvironmentUtil;
import net.geedge.asw.module.runner.entity.PcapEntity;
import net.geedge.asw.module.runner.service.IPcapService;
import net.geedge.asw.module.runner.util.RunnerConstant;
import net.geedge.asw.module.job.entity.PcapEntity;
import net.geedge.asw.module.job.service.IPcapService;
import net.geedge.asw.module.job.util.JobConstant;
import net.geedge.asw.module.sys.service.ISysUserService;
import net.geedge.asw.module.workspace.entity.WorkspaceEntity;
import net.geedge.asw.module.workspace.service.IWorkspaceService;
@@ -28,8 +28,6 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.*;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.socket.CloseStatus;
import org.springframework.web.socket.WebSocketSession;
import java.io.File;
import java.io.FileOutputStream;
@@ -211,7 +209,7 @@ public class EnvironmentController {
entity.setId(pcapId);
entity.setName(destination.getName());
entity.setSize(destination.length());
entity.setStatus(RunnerConstant.PcapStatus.UPLOADED.getValue());
entity.setStatus(JobConstant.PcapStatus.UPLOADED.getValue());
entity.setCreateTimestamp(System.currentTimeMillis());
entity.setCreateUserId(StpUtil.getLoginIdAsString());
entity.setWorkspaceId(workspace.getId());

View File

@@ -1,4 +1,4 @@
package net.geedge.asw.module.runner.controller;
package net.geedge.asw.module.job.controller;
import cn.hutool.http.HttpRequest;
import cn.hutool.http.HttpResponse;
@@ -8,18 +8,17 @@ import cn.hutool.log.Log;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.conditions.update.LambdaUpdateWrapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import net.geedge.asw.common.util.Constants;
import net.geedge.asw.common.util.R;
import net.geedge.asw.common.util.RCode;
import net.geedge.asw.common.util.T;
import net.geedge.asw.common.util.*;
import net.geedge.asw.module.environment.entity.EnvironmentEntity;
import net.geedge.asw.module.environment.entity.EnvironmentSessionEntity;
import net.geedge.asw.module.environment.service.IEnvironmentService;
import net.geedge.asw.module.environment.service.IEnvironmentSessionService;
import net.geedge.asw.module.runner.entity.JobEntity;
import net.geedge.asw.module.runner.util.JobQueueManager;
import net.geedge.asw.module.runner.service.IJobService;
import net.geedge.asw.module.runner.util.RunnerConstant;
import net.geedge.asw.module.job.entity.JobCfgEntity;
import net.geedge.asw.module.job.entity.JobEntity;
import net.geedge.asw.module.job.service.IJobCfgService;
import net.geedge.asw.module.job.util.JobQueueManager;
import net.geedge.asw.module.job.service.IJobService;
import net.geedge.asw.module.job.util.JobConstant;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
@@ -45,6 +44,9 @@ public class JobController {
@Autowired
private JobQueueManager jobQueueManager;
@Autowired
private IJobCfgService jobCfgService;
@GetMapping("/{workspaceId}/job/{id}")
public R detail(@PathVariable("workspaceId") String workspaceId,
@PathVariable("id") String id) {
@@ -61,18 +63,11 @@ public class JobController {
}
@PostMapping("/{workspaceId}/job")
public R add(@PathVariable("workspaceId") String workspaceId,
@RequestBody JobEntity entity) {
T.VerifyUtil.is(entity).notNull()
.and(entity.getEnvironmentId()).notEmpty(RCode.ENVIRONMENT_ID_CANNOT_EMPTY)
.and(entity.getPackageId()).notEmpty(RCode.PACKAGE_ID_CANNOT_EMPTY)
.and(entity.getPlaybookId()).notEmpty(RCode.PLAYBOOK_ID_CANNOT_EMPTY);
public R add(@PathVariable("workspaceId") String workspaceId, String jobCfgId) {
T.VerifyUtil.is(jobCfgId).notNull(RCode.ID_CANNOT_EMPTY);
entity.setWorkspaceId(workspaceId);
entity.setEnvId(entity.getEnvironmentId());
JobEntity jobEntity = jobService.saveJob(entity);
return R.ok().putData("id", jobEntity.getId());
JobEntity jobEntity = jobService.saveJob(workspaceId, jobCfgId);
return R.ok().putData("record", T.MapUtil.of("id", jobEntity.getId()));
}
@DeleteMapping("/{workspaceId}/job")
@@ -107,7 +102,7 @@ public class JobController {
HttpRequest requestStatus = T.HttpUtil.createGet(String.format("%s/api/v1/env/playbook/%s", url, id));
requestStatus.header("Authorization", token);
if (job.getStatus().contains(RunnerConstant.JobStatus.RUNNING.getValue())){
if (job.getStatus().contains(JobConstant.JobStatus.RUNNING.getValue())){
while (true){
HttpResponse response = requestStatus.execute();
if (response.isOk()){
@@ -121,8 +116,8 @@ public class JobController {
log.info("[cancelJob] [request env stop playbook] [status: {}]", response.body());
}
if (job.getStatus().contains(RunnerConstant.JobStatus.PENDING.getValue())){
jobQueueManager.requeueJob(job);
if (job.getStatus().contains(JobConstant.JobStatus.PENDING.getValue())){
jobQueueManager.removeJob(job);
}
Thread runningThread = Constants.RUNNING_JOB_THREAD.get(id);
@@ -159,4 +154,55 @@ public class JobController {
return R.ok().putData("record", result);
}
@GetMapping("/{workspaceId}/job/cfg/{id}")
public R jobCfgInfo(@PathVariable("workspaceId") String workspaceId,
@PathVariable("id") String id) {
JobCfgEntity entity = jobCfgService.info(id);
return R.ok().putData("record", entity);
}
@GetMapping("/{workspaceId}/job/cfg")
public R jobCfgList(@PathVariable("workspaceId") String workspaceId,
@RequestParam Map<String, Object> params) {
Page page = jobCfgService.queryList(params);
return R.ok(page);
}
@PostMapping("/{workspaceId}/job/cfg")
public R saveCfg(@PathVariable("workspaceId") String workspaceId,
@RequestBody JobCfgEntity cfg) {
VerifyUtil.is(cfg).notNull()
.and(cfg.getName()).notEmpty(RCode.NAME_CANNOT_EMPTY)
.and(cfg.getPlaybookId()).notEmpty(RCode.PLAYBOOK_ID_CANNOT_EMPTY)
.and(cfg.getPackageId()).notEmpty(RCode.PACKAGE_ID_CANNOT_EMPTY)
.and(cfg.getType()).notEmpty(RCode.JOB_CFG_TYPE_CANNOT_EMPTY)
.and(cfg.getStatus()).notEmpty(RCode.JOB_CFG_STATUS_CANNOT_EMPTY);
JobCfgEntity jobCfgEntity = jobCfgService.saveJobCfg(workspaceId, cfg);
return R.ok().putData("record", T.MapUtil.of("id", jobCfgEntity.getId()));
}
@PutMapping("/{workspaceId}/job/cfg")
public R updateCfg(@PathVariable("workspaceId") String workspaceId,
@RequestBody JobCfgEntity cfg) {
VerifyUtil.is(cfg).notNull()
.and(cfg.getId()).notEmpty(RCode.ID_CANNOT_EMPTY)
.and(cfg.getName()).notEmpty(RCode.NAME_CANNOT_EMPTY)
.and(cfg.getPlaybookId()).notEmpty(RCode.PLAYBOOK_ID_CANNOT_EMPTY)
.and(cfg.getPackageId()).notEmpty(RCode.PACKAGE_ID_CANNOT_EMPTY)
.and(cfg.getType()).notEmpty(RCode.JOB_CFG_TYPE_CANNOT_EMPTY)
.and(cfg.getStatus()).notEmpty(RCode.JOB_CFG_STATUS_CANNOT_EMPTY);
JobCfgEntity jobCfgEntity = jobCfgService.updateCfg(workspaceId, cfg);
return R.ok().putData("record", T.MapUtil.of("id", jobCfgEntity.getId()));
}
@DeleteMapping("/{workspaceId}/job/cfg")
public R deleteJobCfg(@PathVariable("workspaceId") String workspaceId,
String ids) {
VerifyUtil.is(ids).notEmpty();
List<String> idList = Arrays.asList(ids.split(","));
jobCfgService.removeBatchByIds(idList);
return R.ok();
}
}
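With this change the job creation flow is two-step: instead of posting a full JobEntity, a client first saves a job configuration and then creates a job from its id. A hedged sketch of the new flow is shown below; ids and values are placeholders, the service beans are assumed to be injected, type must be one of "asap"/"cron" and status one of "enabled"/"disabled" per Constants, and cron is required (and validated) when type is "cron".

// Illustrative only; not part of this change set.
JobCfgEntity cfg = new JobCfgEntity();
cfg.setName("nightly-qq-run");
cfg.setPackageId("<packageId>");
cfg.setPlaybookId("<playbookId>");
cfg.setEnvId("<envId>");
cfg.setType("cron");
cfg.setCron("0 0 2 * * ?");   // Quartz expression, checked by validateJobCfgInfo
cfg.setStatus("enabled");
JobCfgEntity saved = jobCfgService.saveJobCfg(workspaceId, cfg);

// POST /{workspaceId}/job now only needs the configuration id; saveJob copies
// package, environment, playbook and parameters from the configuration.
JobEntity job = jobService.saveJob(workspaceId, saved.getId());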

View File

@@ -1,4 +1,4 @@
package net.geedge.asw.module.runner.controller;
package net.geedge.asw.module.job.controller;
import cn.hutool.core.date.DatePattern;
import cn.hutool.core.date.DateUtil;
@@ -13,8 +13,8 @@ import jakarta.servlet.http.HttpServletResponse;
import net.geedge.asw.common.config.SpringContextUtils;
import net.geedge.asw.common.util.*;
import net.geedge.asw.module.feign.client.WebSharkClient;
import net.geedge.asw.module.runner.entity.PcapEntity;
import net.geedge.asw.module.runner.service.IPcapService;
import net.geedge.asw.module.job.entity.PcapEntity;
import net.geedge.asw.module.job.service.IPcapService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.HttpStatus;

View File

@@ -1,4 +1,4 @@
package net.geedge.asw.module.runner.controller;
package net.geedge.asw.module.job.controller;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import jakarta.servlet.http.HttpServletResponse;
@@ -6,8 +6,8 @@ import net.geedge.asw.common.util.R;
import net.geedge.asw.common.util.RCode;
import net.geedge.asw.common.util.ResponseUtil;
import net.geedge.asw.common.util.T;
import net.geedge.asw.module.runner.entity.PlaybookEntity;
import net.geedge.asw.module.runner.service.IPlaybookService;
import net.geedge.asw.module.job.entity.PlaybookEntity;
import net.geedge.asw.module.job.service.IPlaybookService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.*;

View File

@@ -1,16 +1,15 @@
package net.geedge.asw.module.runner.dao;
package net.geedge.asw.module.job.dao;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import net.geedge.asw.module.runner.entity.RunnerEntity;
import net.geedge.asw.module.job.entity.JobCfgEntity;
import org.apache.ibatis.annotations.Mapper;
import java.util.List;
import java.util.Map;
@Mapper
public interface RunnerDao extends BaseMapper<RunnerEntity>{
List<RunnerEntity> queryList(Page page, Map<String, Object> params);
public interface JobCfgDao extends BaseMapper<JobCfgEntity> {
List<JobCfgEntity> queryList(Page page, Map<String, Object> params);
}

View File

@@ -1,10 +1,9 @@
package net.geedge.asw.module.runner.dao;
package net.geedge.asw.module.job.dao;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import net.geedge.asw.module.runner.entity.JobEntity;
import net.geedge.asw.module.job.entity.JobEntity;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;
import java.util.Map;

View File

@@ -1,8 +1,8 @@
package net.geedge.asw.module.runner.dao;
package net.geedge.asw.module.job.dao;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import net.geedge.asw.module.runner.entity.PcapEntity;
import net.geedge.asw.module.job.entity.PcapEntity;
import org.apache.ibatis.annotations.Mapper;
import java.util.List;

View File

@@ -1,8 +1,8 @@
package net.geedge.asw.module.runner.dao;
package net.geedge.asw.module.job.dao;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import net.geedge.asw.module.runner.entity.PlaybookEntity;
import net.geedge.asw.module.job.entity.PlaybookEntity;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;

View File

@@ -0,0 +1,63 @@
package net.geedge.asw.module.job.entity;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
import net.geedge.asw.module.app.entity.PackageEntity;
import net.geedge.asw.module.environment.entity.EnvironmentEntity;
import net.geedge.asw.module.sys.entity.SysUserEntity;
import net.geedge.asw.module.workspace.entity.WorkspaceEntity;
import java.util.List;
@Data
@TableName("job_cfg")
public class JobCfgEntity {
@TableId(type = IdType.ASSIGN_UUID)
private String id;
private String name;
private String description;
private String packageId;
private String envId;
private String playbookId;
private String type;
private String cron;
private String status;
private String parameters;
private Long createTimestamp;
private Long updateTimestamp;
private String createUserId;
private String updateUserId;
private String workspaceId;
@TableField(exist = false)
@JsonProperty(value = "package")
private PackageEntity pkg;
@TableField(exist = false)
private EnvironmentEntity environment;
@TableField(exist = false)
private PlaybookEntity playbook;
@TableField(exist = false)
private List<PcapEntity> pcap;
@TableField(exist = false)
private SysUserEntity createUser;
@TableField(exist = false)
private SysUserEntity updateUser;
@TableField(exist = false)
private WorkspaceEntity workspace;
@TableField(exist = false)
private JobEntity lastJobResult;
}

View File

@@ -1,4 +1,4 @@
package net.geedge.asw.module.runner.entity;
package net.geedge.asw.module.job.entity;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField;
@@ -21,6 +21,7 @@ public class JobEntity {
@TableId(type = IdType.ASSIGN_UUID)
private String id;
private String jobCfgId;
private String packageId;
private String envId;
private String playbookId;
@@ -65,6 +66,6 @@ public class JobEntity {
@TableField(exist = false)
private EnvironmentSessionEntity session;
@TableField(exist = false)
private JobCfgEntity jobCfg;
}

View File

@@ -1,4 +1,4 @@
package net.geedge.asw.module.runner.entity;
package net.geedge.asw.module.job.entity;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField;

View File

@@ -1,4 +1,4 @@
package net.geedge.asw.module.runner.entity;
package net.geedge.asw.module.job.entity;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField;

View File

@@ -1,4 +1,4 @@
package net.geedge.asw.module.runner.job;
package net.geedge.asw.module.job.job;
import cn.hutool.core.net.url.UrlBuilder;
import cn.hutool.http.HttpRequest;
@@ -17,11 +17,11 @@ import net.geedge.asw.module.environment.entity.EnvironmentEntity;
import net.geedge.asw.module.environment.entity.EnvironmentSessionEntity;
import net.geedge.asw.module.environment.service.IEnvironmentService;
import net.geedge.asw.module.environment.service.IEnvironmentSessionService;
import net.geedge.asw.module.runner.entity.JobEntity;
import net.geedge.asw.module.runner.entity.PcapEntity;
import net.geedge.asw.module.runner.service.IJobService;
import net.geedge.asw.module.runner.service.IPcapService;
import net.geedge.asw.module.runner.util.RunnerConstant;
import net.geedge.asw.module.job.entity.JobEntity;
import net.geedge.asw.module.job.entity.PcapEntity;
import net.geedge.asw.module.job.service.IJobService;
import net.geedge.asw.module.job.service.IPcapService;
import net.geedge.asw.module.job.util.JobConstant;
import net.lingala.zip4j.ZipFile;
import net.lingala.zip4j.model.FileHeader;
import net.lingala.zip4j.model.ZipParameters;
@@ -82,7 +82,7 @@ public class JobPlaybookExecResultChecker extends QuartzJobBean {
private void playbookExecResultChecker() {
List<JobEntity> jobList = jobService.list(new LambdaQueryWrapper<JobEntity>().eq(JobEntity::getStatus, RunnerConstant.JobStatus.RUNNING.getValue()));
List<JobEntity> jobList = jobService.list(new LambdaQueryWrapper<JobEntity>().eq(JobEntity::getStatus, JobConstant.JobStatus.RUNNING.getValue()));
if (jobList.isEmpty()) {
return;
}
@@ -116,10 +116,10 @@ public class JobPlaybookExecResultChecker extends QuartzJobBean {
Constants.RUNNING_JOB_THREAD.computeIfAbsent(id, jobId -> startGetJobLogThread(job, environment));
break;
case "error":
Constants.RESULT_JOB_THREAD.computeIfAbsent(id, jobId -> startGetJobResultThread(job, environment,RunnerConstant.JobStatus.FAILED.getValue()));
Constants.RESULT_JOB_THREAD.computeIfAbsent(id, jobId -> startGetJobResultThread(job, environment, JobConstant.JobStatus.FAILED.getValue()));
break;
case "done":
Constants.RESULT_JOB_THREAD.computeIfAbsent(id, jobId -> startGetJobResultThread(job, environment, RunnerConstant.JobStatus.PASSED.getValue()));
Constants.RESULT_JOB_THREAD.computeIfAbsent(id, jobId -> startGetJobResultThread(job, environment, JobConstant.JobStatus.PASSED.getValue()));
break;
}
}
@@ -153,7 +153,7 @@ public class JobPlaybookExecResultChecker extends QuartzJobBean {
// check whether the job's status is running
private boolean isJobInRunningStatus(JobEntity jobEntity) {
JobEntity job = jobService.getById(jobEntity.getId());
return job != null && !T.StrUtil.equalsIgnoreCase(job.getStatus(), RunnerConstant.JobStatus.RUNNING.getValue());
return job != null && !T.StrUtil.equalsIgnoreCase(job.getStatus(), JobConstant.JobStatus.RUNNING.getValue());
}
/**
@@ -251,7 +251,7 @@ public class JobPlaybookExecResultChecker extends QuartzJobBean {
String pcapId = T.StrUtil.uuid();
// upload the pcap file stream
File pcapFile = FileResourceUtil.createFile(resources, job.getWorkspaceId(), Constants.FileTypeEnum.PCAP.getType(), pcapId, pcapId + "pcap");
File pcapFile = FileResourceUtil.createFile(resources, job.getWorkspaceId(), Constants.FileTypeEnum.PCAP.getType(), pcapId, pcapId + ".pcap");
File parentDir = pcapFile.getParentFile();
if (!parentDir.exists()) {
parentDir.mkdirs();
@@ -263,7 +263,7 @@ public class JobPlaybookExecResultChecker extends QuartzJobBean {
entity.setName(fileName);
entity.setSize(pcapFile.length());
entity.setStatus(RunnerConstant.PcapStatus.UPLOADED.getValue());
entity.setStatus(JobConstant.PcapStatus.UPLOADED.getValue());
entity.setCreateTimestamp(System.currentTimeMillis());
entity.setCreateUserId(job.getCreateUserId());
entity.setWorkspaceId(job.getWorkspaceId());

View File

@@ -1,4 +1,4 @@
package net.geedge.asw.module.runner.job;
package net.geedge.asw.module.job.job;
import cn.hutool.http.HttpRequest;
import cn.hutool.http.HttpResponse;
@@ -14,12 +14,12 @@ import net.geedge.asw.module.environment.entity.EnvironmentEntity;
import net.geedge.asw.module.environment.entity.EnvironmentSessionEntity;
import net.geedge.asw.module.environment.service.IEnvironmentService;
import net.geedge.asw.module.environment.service.IEnvironmentSessionService;
import net.geedge.asw.module.runner.entity.JobEntity;
import net.geedge.asw.module.runner.entity.PlaybookEntity;
import net.geedge.asw.module.runner.service.IJobService;
import net.geedge.asw.module.runner.service.IPlaybookService;
import net.geedge.asw.module.runner.util.JobQueueManager;
import net.geedge.asw.module.runner.util.RunnerConstant;
import net.geedge.asw.module.job.entity.JobEntity;
import net.geedge.asw.module.job.entity.PlaybookEntity;
import net.geedge.asw.module.job.service.IJobService;
import net.geedge.asw.module.job.service.IPlaybookService;
import net.geedge.asw.module.job.util.JobQueueManager;
import net.geedge.asw.module.job.util.JobConstant;
import org.apache.commons.lang3.time.StopWatch;
import org.quartz.DisallowConcurrentExecution;
import org.quartz.JobExecutionContext;
@@ -76,7 +76,7 @@ public class JobPlaybookExecutor extends QuartzJobBean {
public void playbookExecutor() {
List<JobEntity> createdJobs = jobService.list(
new LambdaQueryWrapper<JobEntity>()
.eq(JobEntity::getStatus, RunnerConstant.JobStatus.CREATED.getValue())
.eq(JobEntity::getStatus, JobConstant.JobStatus.CREATED.getValue())
.orderByAsc(JobEntity::getCreateTimestamp)
);
@@ -85,7 +85,7 @@ public class JobPlaybookExecutor extends QuartzJobBean {
// enqueue CREATED jobs
createdJobs.forEach(jobQueueManager::addJob);
// update createdJobs status to pending
createdJobs.forEach(x -> x.setStatus(RunnerConstant.JobStatus.PENDING.getValue()));
createdJobs.forEach(x -> x.setStatus(JobConstant.JobStatus.PENDING.getValue()));
jobService.updateBatchById(createdJobs);
}
@@ -114,7 +114,7 @@ public class JobPlaybookExecutor extends QuartzJobBean {
// update job status running
jobService.update(new LambdaUpdateWrapper<JobEntity>()
.set(JobEntity::getStatus, RunnerConstant.JobStatus.RUNNING.getValue())
.set(JobEntity::getStatus, JobConstant.JobStatus.RUNNING.getValue())
.set(JobEntity::getStartTimestamp, System.currentTimeMillis())
.eq(JobEntity::getId, nextJob.getId())
);
@@ -150,7 +150,7 @@ public class JobPlaybookExecutor extends QuartzJobBean {
// update job status, starTime, updateTimestamp
jobService.update(new LambdaUpdateWrapper<JobEntity>()
.set(JobEntity::getStatus, RunnerConstant.JobStatus.FAILED.getValue())
.set(JobEntity::getStatus, JobConstant.JobStatus.FAILED.getValue())
.set(JobEntity::getEndTimestamp, System.currentTimeMillis())
.eq(JobEntity::getId, job.getId()));
@@ -162,7 +162,7 @@ public class JobPlaybookExecutor extends QuartzJobBean {
// update job status, starTime, updateTimestamp
jobService.update(new LambdaUpdateWrapper<JobEntity>()
.set(JobEntity::getStatus, RunnerConstant.JobStatus.FAILED.getValue())
.set(JobEntity::getStatus, JobConstant.JobStatus.FAILED.getValue())
.set(JobEntity::getEndTimestamp, System.currentTimeMillis())
.eq(JobEntity::getId, job.getId()));
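Between JobPlaybookExecutor and JobPlaybookExecResultChecker, a job moves through CREATED (saved), PENDING (queued), RUNNING (dispatched to the environment), and finally PASSED or FAILED. The JobConstant.JobStatus enum itself is not part of this diff (only its rename from RunnerConstant is), so the following is an assumed shape inferred from the call sites above:

// Assumed sketch of JobConstant.JobStatus; the actual enum body and string values
// are not shown in this change set.
public enum JobStatus {
    CREATED("created"),
    PENDING("pending"),
    RUNNING("running"),
    PASSED("passed"),
    FAILED("failed");

    private final String value;

    JobStatus(String value) { this.value = value; }

    public String getValue() { return value; }
}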

View File

@@ -0,0 +1,18 @@
package net.geedge.asw.module.job.service;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.IService;
import net.geedge.asw.module.job.entity.JobCfgEntity;
import java.util.Map;
public interface IJobCfgService extends IService<JobCfgEntity> {
JobCfgEntity info(String id);
Page queryList(Map<String, Object> params);
JobCfgEntity saveJobCfg(String workspaceId, JobCfgEntity cfg);
JobCfgEntity updateCfg(String workspaceId, JobCfgEntity cfg);
}

View File

@@ -1,8 +1,8 @@
package net.geedge.asw.module.runner.service;
package net.geedge.asw.module.job.service;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.IService;
import net.geedge.asw.module.runner.entity.JobEntity;
import net.geedge.asw.module.job.entity.JobEntity;
import java.util.List;
import java.util.Map;
@@ -13,16 +13,9 @@ public interface IJobService extends IService<JobEntity>{
Page queryList(Map<String, Object> params);
JobEntity saveJob(JobEntity entity);
JobEntity saveJob(String workspaceId, String jobCfgId);
void removeJob(List<String> ids);
Map queryJobLog(String id, Integer offset);
// JobEntity assignPendingJob(String id, String platform);
//
// void appendTraceLogStrToFile(String jobId, String content) throws RuntimeException;
//
// void updateJobResult(String jobId, String state, MultipartFile pcapFile);
}

View File

@@ -1,8 +1,8 @@
package net.geedge.asw.module.runner.service;
package net.geedge.asw.module.job.service;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.IService;
import net.geedge.asw.module.runner.entity.PcapEntity;
import net.geedge.asw.module.job.entity.PcapEntity;
import org.springframework.core.io.Resource;
import java.util.Map;

View File

@@ -1,8 +1,8 @@
package net.geedge.asw.module.runner.service;
package net.geedge.asw.module.job.service;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.IService;
import net.geedge.asw.module.runner.entity.PlaybookEntity;
import net.geedge.asw.module.job.entity.PlaybookEntity;
import org.springframework.web.multipart.MultipartFile;
import java.util.Map;

View File

@@ -0,0 +1,159 @@
package net.geedge.asw.module.job.service.impl;
import cn.dev33.satoken.stp.StpUtil;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import net.geedge.asw.common.config.Query;
import net.geedge.asw.common.util.*;
import net.geedge.asw.module.app.entity.PackageEntity;
import net.geedge.asw.module.app.service.IPackageService;
import net.geedge.asw.module.environment.entity.EnvironmentEntity;
import net.geedge.asw.module.environment.service.IEnvironmentService;
import net.geedge.asw.module.job.dao.JobCfgDao;
import net.geedge.asw.module.job.entity.JobCfgEntity;
import net.geedge.asw.module.job.entity.JobEntity;
import net.geedge.asw.module.job.entity.PcapEntity;
import net.geedge.asw.module.job.entity.PlaybookEntity;
import net.geedge.asw.module.job.service.IJobCfgService;
import net.geedge.asw.module.job.service.IJobService;
import net.geedge.asw.module.job.service.IPcapService;
import net.geedge.asw.module.job.service.IPlaybookService;
import net.geedge.asw.module.sys.entity.SysUserEntity;
import net.geedge.asw.module.sys.service.ISysUserService;
import net.geedge.asw.module.workspace.entity.WorkspaceEntity;
import net.geedge.asw.module.workspace.service.IWorkspaceService;
import org.quartz.CronExpression;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.List;
import java.util.Map;
@Service
public class JobCfgServiceImpl extends ServiceImpl<JobCfgDao, JobCfgEntity> implements IJobCfgService {
@Autowired
private IPackageService packageService;
@Autowired
private IPlaybookService playbookService;
@Autowired
private IEnvironmentService environmentService;
@Autowired
private IJobService jobService;
@Autowired
private IPcapService pcapService;
@Autowired
private ISysUserService userService;
@Autowired
private IWorkspaceService workspaceService;
@Override
public JobCfgEntity info(String id) {
JobCfgEntity cfg = this.getById(id);
PackageEntity pkg = packageService.getById(cfg.getPackageId());
if (T.ObjectUtil.isNotEmpty(pkg)) {
cfg.setPkg(pkg);
}
EnvironmentEntity environment = environmentService.getById(cfg.getEnvId());
if (T.ObjectUtil.isNotEmpty(environment)) {
cfg.setEnvironment(environment);
}
PlaybookEntity playbook = playbookService.getById(cfg.getPlaybookId());
if (T.ObjectUtil.isNotEmpty(playbook)) {
cfg.setPlaybook(playbook);
}
SysUserEntity createUser = userService.getById(cfg.getCreateUserId());
if (T.ObjectUtil.isNotEmpty(createUser)) {
cfg.setCreateUser(createUser);
}
SysUserEntity updateUser = userService.getById(cfg.getUpdateUserId());
if (T.ObjectUtil.isNotEmpty(updateUser)) {
cfg.setUpdateUser(updateUser);
}
WorkspaceEntity workspace = workspaceService.getById(cfg.getWorkspaceId());
if (T.ObjectUtil.isNotEmpty(workspace)) {
cfg.setWorkspace(workspace);
}
JobEntity lastJob = jobService.getOne(new LambdaQueryWrapper<JobEntity>()
.eq(JobEntity::getJobCfgId, cfg.getId())
.orderByDesc(JobEntity::getCreateTimestamp)
.last("limit 1"));
if (T.ObjectUtil.isNotEmpty(lastJob)) {
cfg.setLastJobResult(lastJob);
List<PcapEntity> pcapList = pcapService.list(new LambdaQueryWrapper<PcapEntity>().eq(PcapEntity::getJobId, lastJob.getId()));
cfg.setPcap(pcapList);
}
return cfg;
}
@Override
public Page queryList(Map<String, Object> params) {
Page page = new Query(JobCfgEntity.class).getPage(params);
List<JobCfgEntity> jobList = this.getBaseMapper().queryList(page, params);
page.setRecords(jobList);
return page;
}
private void validateJobCfgInfo(JobCfgEntity cfg) {
if (!Constants.JOB_CFG_STATUS_LIST.contains(cfg.getStatus())) {
throw new ASWException(RCode.JOB_CFG_STATUS_ERROR);
}
if (!Constants.JOB_CFG_TYPE_LIST.contains(cfg.getType())) {
throw new ASWException(RCode.JOB_CFG_TYPE_ERROR);
}
if (T.StrUtil.equals(cfg.getType(), "cron") && cfg.getCron() == null) {
throw new ASWException(RCode.JOB_CFG_CRON_CANNOT_EMPTY);
}
if (T.StrUtil.isNotEmpty(cfg.getCron()) && !CronExpression.isValidExpression(cfg.getCron())) {
throw new ASWException(RCode.JOB_CFG_CRON_ERROR);
}
JobCfgEntity entity = this.getOne(new LambdaQueryWrapper<JobCfgEntity>().eq(JobCfgEntity::getName, cfg.getName()));
if (cfg.getId() == null && T.ObjectUtil.isNotEmpty(entity)) {
throw new ASWException(RCode.JOB_CFG_NAME_ALREADY_EXISTS);
}
if (cfg.getId() != null && T.ObjectUtil.isNotEmpty(entity) && !T.StrUtil.equals(cfg.getId(), entity.getId())) {
throw new ASWException(RCode.JOB_CFG_NAME_ALREADY_EXISTS);
}
}
@Override
public JobCfgEntity saveJobCfg(String workspaceId, JobCfgEntity cfg) {
this.validateJobCfgInfo(cfg);
cfg.setCreateTimestamp(System.currentTimeMillis());
cfg.setUpdateTimestamp(System.currentTimeMillis());
cfg.setCreateUserId(StpUtil.getLoginIdAsString());
cfg.setUpdateUserId(StpUtil.getLoginIdAsString());
cfg.setWorkspaceId(workspaceId);
this.save(cfg);
return cfg;
}
@Override
public JobCfgEntity updateCfg(String workspaceId, JobCfgEntity cfg) {
this.validateJobCfgInfo(cfg);
cfg.setUpdateTimestamp(System.currentTimeMillis());
cfg.setUpdateUserId(StpUtil.getLoginIdAsString());
this.updateById(cfg);
return cfg;
}
}
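validateJobCfgInfo delegates cron validation to Quartz's org.quartz.CronExpression, which is what the ASW-199 "adjust cron validation" commit wires in. Quartz expressions use six or seven fields with seconds first, so plain five-field UNIX crontab strings are rejected; for instance:

// Standalone illustration of the same check used in validateJobCfgInfo.
boolean ok  = CronExpression.isValidExpression("0 0/30 * * * ?"); // true: every 30 minutes
boolean bad = CronExpression.isValidExpression("*/5 * * * *");    // false: 5-field UNIX syntax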

View File

@@ -1,4 +1,4 @@
package net.geedge.asw.module.runner.service.impl;
package net.geedge.asw.module.job.service.impl;
import cn.dev33.satoken.stp.StpUtil;
import cn.hutool.log.Log;
@@ -14,14 +14,16 @@ import net.geedge.asw.module.environment.entity.EnvironmentEntity;
import net.geedge.asw.module.environment.entity.EnvironmentSessionEntity;
import net.geedge.asw.module.environment.service.IEnvironmentService;
import net.geedge.asw.module.environment.service.IEnvironmentSessionService;
import net.geedge.asw.module.runner.dao.JobDao;
import net.geedge.asw.module.runner.entity.JobEntity;
import net.geedge.asw.module.runner.entity.PcapEntity;
import net.geedge.asw.module.runner.entity.PlaybookEntity;
import net.geedge.asw.module.runner.service.IJobService;
import net.geedge.asw.module.runner.service.IPcapService;
import net.geedge.asw.module.runner.service.IPlaybookService;
import net.geedge.asw.module.runner.util.RunnerConstant;
import net.geedge.asw.module.job.dao.JobDao;
import net.geedge.asw.module.job.entity.JobCfgEntity;
import net.geedge.asw.module.job.entity.JobEntity;
import net.geedge.asw.module.job.entity.PcapEntity;
import net.geedge.asw.module.job.entity.PlaybookEntity;
import net.geedge.asw.module.job.service.IJobCfgService;
import net.geedge.asw.module.job.service.IJobService;
import net.geedge.asw.module.job.service.IPcapService;
import net.geedge.asw.module.job.service.IPlaybookService;
import net.geedge.asw.module.job.util.JobConstant;
import net.geedge.asw.module.sys.entity.SysUserEntity;
import net.geedge.asw.module.sys.service.ISysUserService;
import net.geedge.asw.module.workspace.entity.WorkspaceEntity;
@@ -63,20 +65,12 @@ public class JobServiceImpl extends ServiceImpl<JobDao, JobEntity> implements IJ
@Autowired
private IEnvironmentSessionService environmentSessionService;
@Autowired
private IJobCfgService jobCfgService;
@Value("${asw.resources.path:resources}")
private String resources;
/**
* rootPath/result/{jobId}
*
* @param jobId
* @return
*/
private String getJobResultPath(String jobId) {
return T.FileUtil.file(T.WebPathUtil.getRootPath(), "job_result", jobId).getPath();
}
@Override
public JobEntity queryInfo(String id) {
JobEntity job = this.getById(id);
@@ -103,6 +97,9 @@ public class JobServiceImpl extends ServiceImpl<JobDao, JobEntity> implements IJ
EnvironmentSessionEntity session = environmentSessionService.getOne(new LambdaQueryWrapper<EnvironmentSessionEntity>().eq(EnvironmentSessionEntity::getJobId, id));
job.setSession(session);
JobCfgEntity cfg = jobCfgService.getById(job.getJobCfgId());
job.setJobCfg(cfg);
if (-1 == job.getStartTimestamp()) {
job.setStartTimestamp(null);
}
@@ -130,23 +127,35 @@ public class JobServiceImpl extends ServiceImpl<JobDao, JobEntity> implements IJ
@Override
@Transactional(rollbackFor = Exception.class)
public JobEntity saveJob(JobEntity entity) {
entity.setCreateTimestamp(System.currentTimeMillis());
entity.setUpdateTimestamp(System.currentTimeMillis());
entity.setCreateUserId(StpUtil.getLoginIdAsString());
entity.setUpdateUserId(StpUtil.getLoginIdAsString());
entity.setStatus(RunnerConstant.JobStatus.CREATED.getValue());
public JobEntity saveJob(String workspaceId, String jobCfgId) {
JobCfgEntity cfg = jobCfgService.getById(jobCfgId);
if (T.ObjectUtil.isEmpty(cfg)){
throw new ASWException(RCode.JOB_CFG_NOT_EXIST);
}
JobEntity job = new JobEntity();
job.setWorkspaceId(workspaceId);
job.setJobCfgId(jobCfgId);
job.setParameters(cfg.getParameters());
job.setEnvId(cfg.getEnvId());
job.setPackageId(cfg.getPackageId());
job.setPlaybookId(cfg.getPlaybookId());
job.setCreateTimestamp(System.currentTimeMillis());
job.setUpdateTimestamp(System.currentTimeMillis());
job.setCreateUserId(StpUtil.getLoginIdAsString());
job.setUpdateUserId(StpUtil.getLoginIdAsString());
job.setStatus(JobConstant.JobStatus.CREATED.getValue());
// save
this.save(entity);
this.save(job);
// trace log file path
String saveFileName = entity.getId() + "." + "log";
File traceLogFile = FileResourceUtil.createFile(resources, entity.getWorkspaceId(), Constants.FileTypeEnum.JOB.getType(), entity.getId(), saveFileName);
String saveFileName = job.getId() + "." + "log";
File traceLogFile = FileResourceUtil.createFile(resources, job.getWorkspaceId(), Constants.FileTypeEnum.JOB.getType(), job.getId(), saveFileName);
this.update(new LambdaUpdateWrapper<JobEntity>()
.set(JobEntity::getLogPath, traceLogFile.getPath())
.eq(JobEntity::getId, entity.getId()));
return entity;
.eq(JobEntity::getId, job.getId()));
return job;
}
@Override
@@ -191,60 +200,4 @@ public class JobServiceImpl extends ServiceImpl<JobDao, JobEntity> implements IJ
return result;
}
// @Override
// public synchronized JobEntity assignPendingJob(String runnerId, String platform) {
// if (T.StrUtil.hasEmpty(runnerId, platform)) {
// return null;
// }
// // query
// JobEntity job = this.getBaseMapper().getPendingJobByPlatform(platform);
// if (T.ObjectUtil.isNotNull(job)) {
// // update
// this.update(new LambdaUpdateWrapper<JobEntity>()
// .set(JobEntity::getRunnerId, runnerId)
// .set(JobEntity::getStatus, RunnerConstant.JobStatus.RUNNING.getValue())
// .set(JobEntity::getStartTimestamp, System.currentTimeMillis())
// .eq(JobEntity::getId, job.getId())
// );
// }
// return job;
// }
//
// @Override
// public void appendTraceLogStrToFile(String jobId, String content) throws RuntimeException {
// try {
// JobEntity job = this.getById(jobId);
// if (T.StrUtil.isEmpty(job.getLogPath())) {
// File traceLogFile = T.FileUtil.file(this.getJobResultPath(jobId), "trace.log");
// job.setLogPath(traceLogFile.getPath());
// }
// // append content
// T.FileUtil.appendString(content, T.FileUtil.file(job.getLogPath()), T.CharsetUtil.CHARSET_UTF_8);
// } catch (IORuntimeException e) {
// log.error(e, "[appendTraceLogStrToFile] [error] [job: {}] [content: {}]", jobId, content);
// throw new RuntimeException(e.getMessage());
// }
// }
//
// @Override
// @Transactional(rollbackFor = Exception.class)
// public void updateJobResult(String jobId, String state, MultipartFile pcapFile) {
// String pcapId = T.StrUtil.EMPTY;
// // save pcap file
// if (T.ObjectUtil.isNotNull(pcapFile)) {
// PcapEntity pcapEntity = pcapService.savePcap(jobId, pcapFile.getResource());
// pcapId = pcapEntity.getId();
// }
//
// // update job status&pcap_id
// state = T.StrUtil.equals("success", state) ? RunnerConstant.JobStatus.PASSED.getValue() : state;
// this.update(new LambdaUpdateWrapper<JobEntity>()
// .set(JobEntity::getStatus, state)
// .set(T.StrUtil.isNotEmpty(pcapId), JobEntity::getPcapId, pcapId)
// .set(JobEntity::getEndTimestamp, System.currentTimeMillis())
// .eq(JobEntity::getId, jobId)
// );
// }
}

View File

@@ -1,4 +1,4 @@
package net.geedge.asw.module.runner.service.impl;
package net.geedge.asw.module.job.service.impl;
import cn.dev33.satoken.stp.SaTokenInfo;
import cn.dev33.satoken.stp.StpUtil;
@@ -20,15 +20,15 @@ import net.geedge.asw.module.environment.entity.EnvironmentEntity;
import net.geedge.asw.module.environment.service.IEnvironmentService;
import net.geedge.asw.module.feign.client.DashboardClient;
import net.geedge.asw.module.feign.client.KibanaClient;
import net.geedge.asw.module.runner.dao.PcapDao;
import net.geedge.asw.module.runner.entity.JobEntity;
import net.geedge.asw.module.runner.entity.PcapEntity;
import net.geedge.asw.module.runner.entity.PlaybookEntity;
import net.geedge.asw.module.runner.service.IJobService;
import net.geedge.asw.module.runner.service.IPcapService;
import net.geedge.asw.module.runner.service.IPlaybookService;
import net.geedge.asw.module.runner.util.PcapParserThread;
import net.geedge.asw.module.runner.util.RunnerConstant;
import net.geedge.asw.module.job.dao.PcapDao;
import net.geedge.asw.module.job.entity.JobEntity;
import net.geedge.asw.module.job.entity.PcapEntity;
import net.geedge.asw.module.job.entity.PlaybookEntity;
import net.geedge.asw.module.job.service.IJobService;
import net.geedge.asw.module.job.service.IPcapService;
import net.geedge.asw.module.job.service.IPlaybookService;
import net.geedge.asw.module.job.util.PcapParserThread;
import net.geedge.asw.module.job.util.JobConstant;
import net.geedge.asw.module.sys.entity.SysUserEntity;
import net.geedge.asw.module.sys.service.ISysConfigService;
import net.geedge.asw.module.sys.service.ISysUserService;
@@ -148,7 +148,7 @@ public class PcapServiceImpl extends ServiceImpl<PcapDao, PcapEntity> implements
byte[] bytes = fileResource.getInputStream().readAllBytes();
entity.setSize((long) bytes.length);
entity.setStatus(RunnerConstant.PcapStatus.UPLOADED.getValue());
entity.setStatus(JobConstant.PcapStatus.UPLOADED.getValue());
entity.setCreateTimestamp(System.currentTimeMillis());
entity.setCreateUserId(createUserId);
entity.setWorkspaceId(workspaceId);
@@ -270,7 +270,7 @@ public class PcapServiceImpl extends ServiceImpl<PcapDao, PcapEntity> implements
log.error("delete openSearch index error index: {}", indexName);
throw new RuntimeException("delete openSearch index error ", e);
}
pcapEntity.setStatus(RunnerConstant.PcapStatus.UPLOADED.getValue());
pcapEntity.setStatus(JobConstant.PcapStatus.UPLOADED.getValue());
// del common pcap file
Map summary = T.JSONUtil.toBean(pcapEntity.getSummary(), Map.class);
T.FileUtil.del(T.MapUtil.getStr(summary, "commentPath"));

View File

@@ -1,4 +1,4 @@
package net.geedge.asw.module.runner.service.impl;
package net.geedge.asw.module.job.service.impl;
import cn.dev33.satoken.stp.StpUtil;
import cn.hutool.log.Log;
@@ -7,9 +7,9 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import net.geedge.asw.common.config.Query;
import net.geedge.asw.common.util.*;
import net.geedge.asw.module.runner.dao.PlaybookDao;
import net.geedge.asw.module.runner.entity.PlaybookEntity;
import net.geedge.asw.module.runner.service.IPlaybookService;
import net.geedge.asw.module.job.dao.PlaybookDao;
import net.geedge.asw.module.job.entity.PlaybookEntity;
import net.geedge.asw.module.job.service.IPlaybookService;
import org.apache.commons.io.FileUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

View File

@@ -1,6 +1,6 @@
package net.geedge.asw.module.runner.util;
package net.geedge.asw.module.job.util;
public class RunnerConstant {
public class JobConstant {
/**

View File

@@ -1,7 +1,7 @@
package net.geedge.asw.module.runner.util;
package net.geedge.asw.module.job.util;
import net.geedge.asw.common.util.T;
import net.geedge.asw.module.runner.entity.JobEntity;
import net.geedge.asw.module.job.entity.JobEntity;
import org.springframework.stereotype.Component;
import java.util.*;

View File

@@ -1,4 +1,4 @@
package net.geedge.asw.module.runner.util;
package net.geedge.asw.module.job.util;
import cn.hutool.log.Log;
import com.alibaba.fastjson2.JSONArray;
@@ -12,8 +12,8 @@ import net.geedge.asw.common.util.T;
import net.geedge.asw.module.feign.client.GeoipClient;
import net.geedge.asw.module.feign.client.PcapCommentClient;
import net.geedge.asw.module.feign.client.ZeekClient;
import net.geedge.asw.module.runner.entity.PcapEntity;
import net.geedge.asw.module.runner.service.IPcapService;
import net.geedge.asw.module.job.entity.PcapEntity;
import net.geedge.asw.module.job.service.IPcapService;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.time.StopWatch;
import org.opensearch.client.opensearch.OpenSearchClient;
@@ -35,7 +35,7 @@ import java.time.format.DateTimeFormatter;
import java.util.*;
import java.util.stream.Collectors;
import static net.geedge.asw.module.runner.util.RunnerConstant.PcapStatus;
import static net.geedge.asw.module.job.util.JobConstant.PcapStatus;
@Data
public class PcapParserThread implements Runnable {

View File

@@ -1,4 +1,4 @@
package net.geedge.asw.module.runner.util;
package net.geedge.asw.module.job.util;
import cn.hutool.json.JSONArray;
import cn.hutool.json.JSONConfig;

View File

@@ -1,4 +1,4 @@
package net.geedge.asw.module.runner.util;
package net.geedge.asw.module.job.util;
import cn.hutool.log.Log;
import lombok.Data;

View File

@@ -1,174 +0,0 @@
//package net.geedge.asw.module.runner.controller;
//
//import cn.dev33.satoken.annotation.SaIgnore;
//import cn.hutool.core.lang.Opt;
//import cn.hutool.log.Log;
//import com.baomidou.mybatisplus.core.conditions.update.LambdaUpdateWrapper;
//import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
//import jakarta.servlet.http.HttpServletResponse;
//import net.geedge.asw.common.util.R;
//import net.geedge.asw.common.util.RCode;
//import net.geedge.asw.common.util.T;
//import net.geedge.asw.module.app.entity.PackageEntity;
//import net.geedge.asw.module.runner.entity.JobEntity;
//import net.geedge.asw.module.runner.entity.PlaybookEntity;
//import net.geedge.asw.module.runner.entity.RunnerEntity;
//import net.geedge.asw.module.runner.service.IJobService;
//import net.geedge.asw.module.runner.service.IRunnerService;
//import org.springframework.beans.factory.annotation.Autowired;
//import org.springframework.web.bind.annotation.*;
//import org.springframework.web.multipart.MultipartFile;
//
//import java.io.IOException;
//import java.util.Map;
//
//@RestController
//@RequestMapping("/api/v1/runner")
//public class RunnerController {
//
// private static final Log log = Log.get();
//
// @Autowired
// private IJobService jobService;
//
// @Autowired
// private IRunnerService runnerService;
//
// @GetMapping("/{id}")
// public R detail(@PathVariable("id") String id) {
// RunnerEntity runnerEntity = runnerService.getById(id);
// return R.ok().putData("record", runnerEntity);
// }
//
// @GetMapping
// public R list(@RequestParam Map<String, Object> params) {
// T.VerifyUtil.is(params).notNull()
// .and(T.MapUtil.getStr(params, "workspaceId")).notEmpty(RCode.WORKSPACE_ID_CANNOT_EMPTY);
//
// Page page = runnerService.queryList(params);
// return R.ok(page);
// }
//
// @PostMapping
// public R add(@RequestBody RunnerEntity entity) {
// T.VerifyUtil.is(entity).notNull()
// .and(entity.getWorkspaceId()).notEmpty(RCode.WORKSPACE_ID_CANNOT_EMPTY);
//
// RunnerEntity runner = runnerService.saveRunner(entity);
// return R.ok().putData("record", runner);
// }
//
// @PutMapping
// public R update(@RequestBody RunnerEntity entity) {
// T.VerifyUtil.is(entity).notNull()
// .and(entity.getId()).notEmpty(RCode.ID_CANNOT_EMPTY)
// .and(entity.getWorkspaceId()).notEmpty(RCode.WORKSPACE_ID_CANNOT_EMPTY);
//
// RunnerEntity runner = runnerService.updateRunner(entity);
// return R.ok().putData("record", runner);
// }
//
// @DeleteMapping("/{id}")
// public R delete(@PathVariable("id") String id) {
// runnerService.removeById(id);
// return R.ok();
// }
//
// @SaIgnore
// @PostMapping("/register")
// public void register(@RequestHeader("Authorization") String token, HttpServletResponse response) throws IOException {
// RunnerEntity runner = runnerService.getOne(new LambdaUpdateWrapper<RunnerEntity>().eq(RunnerEntity::getToken, token));
// String status = Opt.ofNullable(runner).map(RunnerEntity::getStatus).orElseGet(() -> null);
// if (!T.StrUtil.equals("online", status)) {
// log.warn("[register] [runner is offline] [token: {}]", token);
// response.sendError(HttpServletResponse.SC_FORBIDDEN, "Runner is offline");
// }
// }
//
// @SaIgnore
// @PostMapping("/heartbeat")
// public void heartbeat(@RequestHeader("Authorization") String token, @RequestBody Map<String, Integer> platformMap,
// HttpServletResponse response) throws IOException {
// RunnerEntity runner = runnerService.getOne(new LambdaUpdateWrapper<RunnerEntity>().eq(RunnerEntity::getToken, token));
// String status = Opt.ofNullable(runner).map(RunnerEntity::getStatus).orElseGet(() -> null);
// if (!T.StrUtil.equals("online", status)) {
// log.warn("[heartbeat] [runner is offline] [token: {}]", token);
// response.sendError(HttpServletResponse.SC_FORBIDDEN, "Runner is offline");
// return;
// }
//
// // update last_heartbeat_timestamp
// runnerService.update(new LambdaUpdateWrapper<RunnerEntity>()
// .set(RunnerEntity::getLastHeartbeatTimestamp, System.currentTimeMillis())
// .eq(RunnerEntity::getId, runner.getId()));
//
// // find job by platform
// String platform = platformMap.entrySet().stream().filter(entry -> entry.getValue() > 0).findFirst().map(entry -> entry.getKey()).orElseGet(null);
// JobEntity job = jobService.assignPendingJob(runner.getId(), platform);
// if (T.ObjectUtil.isNotNull(job)) {
// // package
// PackageEntity pkg = job.getPkg();
// Map<String, String> pkgInfo = T.MapUtil.builder("id", pkg.getId())
// .put("platform", pkg.getPlatform())
// .put("identifier", pkg.getIdentifier())
// .put("version", pkg.getVersion())
// .build();
//
// // playbook
// PlaybookEntity playbook = job.getPlaybook();
// Map<String, String> pbInfo = T.MapUtil.builder("id", playbook.getId())
// .put("name", playbook.getName())
// .build();
//
// // response job info
// Map<Object, Object> responseData = T.MapUtil.builder()
// .put("id", job.getId())
// .put("pkg", pkgInfo)
// .put("playbook", pbInfo)
// .build();
// response.setCharacterEncoding("UTF-8");
// response.setContentType("text/html; charset=UTF-8");
// response.getWriter().write(T.JSONUtil.toJsonStr(responseData));
// }
// }
//
// @SaIgnore
// @PutMapping("/trace/{jobId}")
// public void trace(@RequestHeader("Authorization") String token, @PathVariable String jobId, @RequestBody byte[] bytes,
// HttpServletResponse response) throws IOException {
// RunnerEntity runner = runnerService.getOne(new LambdaUpdateWrapper<RunnerEntity>().eq(RunnerEntity::getToken, token));
// String status = Opt.ofNullable(runner).map(RunnerEntity::getStatus).orElseGet(() -> null);
// if (!T.StrUtil.equals("online", status)) {
// log.warn("[trace] [runner is offline] [token: {}]", token);
// response.sendError(HttpServletResponse.SC_FORBIDDEN, "Runner is offline");
// return;
// }
//
// try {
// // append the content to the trace log file
// String content = T.StrUtil.str(bytes, T.CharsetUtil.CHARSET_UTF_8);
// jobService.appendTraceLogStrToFile(jobId, content);
// } catch (Exception e) {
// log.error("[trace] [error] [job: {}]", jobId);
// response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
// }
// }
//
// @SaIgnore
// @PutMapping("/jobResult/{jobId}")
// public void jobResult(@RequestHeader("Authorization") String token, @PathVariable String jobId, @RequestParam String state,
// @RequestParam(value = "file", required = false) MultipartFile pcapFile,
// HttpServletResponse response) throws IOException {
// RunnerEntity runner = runnerService.getOne(new LambdaUpdateWrapper<RunnerEntity>().eq(RunnerEntity::getToken, token));
// String status = Opt.ofNullable(runner).map(RunnerEntity::getStatus).orElseGet(() -> null);
// if (!T.StrUtil.equals("online", status)) {
// log.warn("[trace] [runner is offline] [token: {}]", token);
// response.sendError(HttpServletResponse.SC_FORBIDDEN, "Runner is offline");
// return;
// }
//
// // update the job result
// jobService.updateJobResult(jobId, state, pcapFile);
// }
//
//}

View File

@@ -1,30 +0,0 @@
package net.geedge.asw.module.runner.entity;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;
@Data
@TableName("runner")
public class RunnerEntity {
@TableId(type = IdType.ASSIGN_UUID)
private String id;
private String name;
private String token;
private String tags;
private String supportPlatforms;
private Integer shareFlag;
private String description;
private String status;
private Long lastHeartbeatTimestamp;
private Long createTimestamp;
private Long updateTimestamp;
private String createUserId;
private String updateUserId;
private String workspaceId;
}

View File

@@ -1,17 +0,0 @@
//package net.geedge.asw.module.runner.service;
//
//import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
//import com.baomidou.mybatisplus.extension.service.IService;
//import net.geedge.asw.module.runner.entity.RunnerEntity;
//
//import java.util.Map;
//
//public interface IRunnerService extends IService<RunnerEntity>{
//
// Page queryList(Map<String, Object> params);
//
// RunnerEntity saveRunner(RunnerEntity entity);
//
// RunnerEntity updateRunner(RunnerEntity entity);
//
//}

View File

@@ -1,51 +0,0 @@
//package net.geedge.asw.module.runner.service.impl;
//
//import cn.dev33.satoken.stp.StpUtil;
//import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
//import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
//import net.geedge.asw.common.util.T;
//import net.geedge.asw.module.runner.dao.RunnerDao;
//import net.geedge.asw.module.runner.entity.RunnerEntity;
//import net.geedge.asw.module.runner.service.IRunnerService;
//import org.springframework.stereotype.Service;
//
//import java.util.List;
//import java.util.Map;
//
//@Service
//public class RunnerServiceImpl extends ServiceImpl<RunnerDao, RunnerEntity> implements IRunnerService {
//
// @Override
// public Page queryList(Map<String, Object> params) {
// Page page = T.PageUtil.getPage(params);
// List<RunnerEntity> jobList = this.getBaseMapper().queryList(page, params);
// page.setRecords(jobList);
// return page;
// }
//
// @Override
// public RunnerEntity saveRunner(RunnerEntity entity) {
// entity.setCreateTimestamp(System.currentTimeMillis());
// entity.setUpdateTimestamp(System.currentTimeMillis());
// entity.setCreateUserId(StpUtil.getLoginIdAsString());
// entity.setUpdateUserId(StpUtil.getLoginIdAsString());
//
// // token
// entity.setToken(T.IdUtil.fastSimpleUUID());
//
// // save
// this.save(entity);
// return entity;
// }
//
// @Override
// public RunnerEntity updateRunner(RunnerEntity entity) {
// entity.setUpdateTimestamp(System.currentTimeMillis());
// entity.setUpdateUserId(StpUtil.getLoginIdAsString());
//
// // update
// this.updateById(entity);
// return entity;
// }
//
//}

View File

@@ -4,6 +4,7 @@ import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import com.fasterxml.jackson.annotation.JsonIgnore;
import lombok.Data;
import net.geedge.asw.module.workspace.entity.WorkspaceMemberEntity;
@@ -21,6 +22,7 @@ public class SysUserEntity {
@TableField("user_name")
private String userName;
@JsonIgnore
private String pwd;
@TableField(exist = false)

View File

@@ -0,0 +1,142 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd" >
<mapper namespace="net.geedge.asw.module.job.dao.JobCfgDao">
<resultMap type="net.geedge.asw.module.job.entity.JobCfgEntity" id="jobCfgResultMap">
<id property="id" column="id"/>
<result property="name" column="name"/>
<result property="description" column="description"/>
<result property="packageId" column="package_id"/>
<result property="envId" column="env_id"/>
<result property="playbookId" column="playbook_id"/>
<result property="type" column="type"/>
<result property="cron" column="cron"/>
<result property="status" column="status"/>
<result property="parameters" column="parameters"/>
<result property="createTimestamp" column="create_timestamp"/>
<result property="updateTimestamp" column="update_timestamp"/>
<result property="createUserId" column="create_user_id"/>
<result property="updateUserId" column="update_user_id"/>
<result property="workspaceId" column="workspace_id"/>
<association property="pkg" columnPrefix="pkg_" javaType="net.geedge.asw.module.app.entity.PackageEntity">
<id property="id" column="id"/>
<result property="platform" column="platform"/>
<result property="version" column="version"/>
<result property="name" column="name"/>
</association>
<association property="environment" columnPrefix="em_" javaType="net.geedge.asw.module.environment.entity.EnvironmentEntity">
<id property="id" column="id"/>
<result property="name" column="name"/>
</association>
<association property="playbook" columnPrefix="pb_" javaType="net.geedge.asw.module.job.entity.PlaybookEntity">
<id property="id" column="id"/>
<result property="name" column="name"/>
</association>
<association property="lastJobResult" columnPrefix="job_" javaType="net.geedge.asw.module.job.entity.JobEntity">
<id property="id" column="id"/>
<result property="status" column="status"/>
<result property="createTimestamp" column="create_timestamp"/>
<result property="startTimestamp" column="start_timestamp"/>
<result property="endTimestamp" column="end_timestamp"/>
</association>
<association property="createUser" columnPrefix="cu_" javaType="net.geedge.asw.module.sys.entity.SysUserEntity">
<id property="id" column="id"/>
<result property="name" column="name"/>
</association>
<association property="updateUser" columnPrefix="uu_" javaType="net.geedge.asw.module.sys.entity.SysUserEntity">
<id property="id" column="id"/>
<result property="name" column="name"/>
</association>
<association property="workspace" columnPrefix="ws_" javaType="net.geedge.asw.module.workspace.entity.WorkspaceEntity">
<id property="id" column="id"/>
<result property="name" column="name"/>
</association>
<collection property="pcap" columnPrefix="pcap_" javaType="java.util.List" ofType="net.geedge.asw.module.job.entity.PcapEntity">
<id property="id" column="id"/>
<result property="name" column="name"/>
</collection>
</resultMap>
<select id="queryList" resultMap="jobCfgResultMap">
SELECT
jc.*,
pkg.id AS pkg_id,
pkg.platform AS pkg_platform,
pkg.version AS pkg_version,
pkg.name AS pkg_name,
env.id AS em_id,
env.name AS em_name,
pb.id AS pb_id,
pb.name AS pb_name,
job.id AS job_id,
job.status AS job_status,
job.create_timestamp AS job_create_timestamp,
job.start_timestamp AS job_start_timestamp,
job.end_timestamp AS job_end_timestamp,
pcap.id AS pcap_id,
pcap.name AS pcap_name,
cu.id AS cu_id,
cu.name AS cu_name,
uu.id AS uu_id,
uu.name AS uu_name,
ws.id AS ws_id,
ws.name AS ws_name
FROM
job_cfg jc
LEFT JOIN environment env ON jc.env_id = env.id
LEFT JOIN package pkg ON jc.package_id = pkg.id
LEFT JOIN playbook pb ON jc.playbook_id = pb.id
LEFT JOIN sys_user cu ON jc.create_user_id = cu.id
LEFT JOIN sys_user uu ON jc.update_user_id = uu.id
LEFT JOIN job job ON jc.id = job.job_cfg_id AND job.create_timestamp = (SELECT MAX(create_timestamp) FROM job j WHERE j.job_cfg_id = jc.id)
LEFT JOIN pcap pcap ON job.id = pcap.job_id
LEFT JOIN workspace ws ON jc.workspace_id = ws.id
<where>
<if test="params.ids != null and params.ids != ''">
jc.id in
<foreach item="id" collection="params.ids.split(',')" separator="," open="(" close=")">#{id}</foreach>
</if>
<if test="params.packageIds != null and params.packageIds != ''">
AND pkg.id in
<foreach item="id" collection="params.packageIds.split(',')" separator="," open="(" close=")">#{id}</foreach>
</if>
<if test="params.environmentIds != null and params.environmentIds != ''">
AND env.id in
<foreach item="id" collection="params.environmentIds.split(',')" separator="," open="(" close=")">#{id}</foreach>
</if>
<if test="params.playbooks != null and params.playbooks != ''">
AND pb.id in
<foreach item="id" collection="params.playbooks.split(',')" separator="," open="(" close=")">#{id}</foreach>
</if>
<if test="params.workspaceId != null and params.workspaceId != ''">
AND jc.workspace_id = #{params.workspaceId}
</if>
<if test="params.q != null and params.q != ''">
AND ( locate(#{params.q}, pkg.name) OR locate(#{params.q}, env.name) OR locate(#{params.q}, pb.name) )
</if>
</where>
<if test="params.orderBy == null or params.orderBy == ''">
ORDER BY jc.id
</if>
</select>
</mapper>

View File

@@ -1,10 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="net.geedge.asw.module.runner.dao.JobDao">
<mapper namespace="net.geedge.asw.module.job.dao.JobDao">
<resultMap type="net.geedge.asw.module.runner.entity.JobEntity" id="jobResultMap">
<resultMap type="net.geedge.asw.module.job.entity.JobEntity" id="jobResultMap">
<id property="id" column="id"/>
<result property="jobCfgId" column="job_cfg_id"/>
<result property="playbookId" column="playbook_id"/>
<result property="packageId" column="package_id"/>
<result property="envId" column="env_id"/>
@@ -32,7 +33,22 @@
<result property="name" column="name"/>
</association>
<association property="playbook" columnPrefix="pb_" javaType="net.geedge.asw.module.runner.entity.PlaybookEntity">
<association property="playbook" columnPrefix="pb_" javaType="net.geedge.asw.module.job.entity.PlaybookEntity">
<id property="id" column="id"/>
<result property="name" column="name"/>
</association>
<association property="workspace" columnPrefix="ws_" javaType="net.geedge.asw.module.workspace.entity.WorkspaceEntity">
<id property="id" column="id"/>
<result property="name" column="name"/>
</association>
<association property="jobCfg" columnPrefix="jc_" javaType="net.geedge.asw.module.job.entity.JobCfgEntity">
<id property="id" column="id"/>
<result property="name" column="name"/>
</association>
<association property="createUser" columnPrefix="cu_" javaType="net.geedge.asw.module.sys.entity.SysUserEntity">
<id property="id" column="id"/>
<result property="name" column="name"/>
</association>
@@ -51,6 +67,15 @@
env.id AS em_id,
env.name AS em_name,
jc.id AS jc_id,
jc.name AS jc_name,
ws.id AS ws_id,
ws.name AS ws_name,
su.id AS cu_id,
su.name AS cu_name,
pb.id AS pb_id,
pb.name AS pb_name
FROM
@@ -58,6 +83,9 @@
LEFT JOIN environment env ON job.env_id = env.id
LEFT JOIN package pkg ON job.package_id = pkg.id
LEFT JOIN playbook pb ON job.playbook_id = pb.id
LEFT JOIN job_cfg jc ON job.job_cfg_id = jc.id
LEFT JOIN workspace ws ON job.workspace_id = ws.id
LEFT JOIN sys_user su ON job.create_user_id = su.id
<where>
<if test="params.ids != null and params.ids != ''">
job.id in
@@ -74,6 +102,11 @@
<foreach item="id" collection="params.environmentIds.split(',')" separator="," open="(" close=")">#{id}</foreach>
</if>
<if test="params.jobCfgIds != null and params.jobCfgIds != ''">
AND jc.id in
<foreach item="id" collection="params.jobCfgIds.split(',')" separator="," open="(" close=")">#{id}</foreach>
</if>
<if test="params.playbooks != null and params.playbooks != ''">
AND pb.id in
<foreach item="id" collection="params.playbooks.split(',')" separator="," open="(" close=")">#{id}</foreach>

View File

@@ -1,9 +1,9 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="net.geedge.asw.module.runner.dao.PcapDao">
<mapper namespace="net.geedge.asw.module.job.dao.PcapDao">
<resultMap type="net.geedge.asw.module.runner.entity.PcapEntity" id="pcapResultMap">
<resultMap type="net.geedge.asw.module.job.entity.PcapEntity" id="pcapResultMap">
<id property="id" column="id"/>
<result property="name" column="name"/>
<result property="description" column="description"/>
@@ -29,7 +29,7 @@
<result property="name" column="name"/>
</association>
<association property="playbook" columnPrefix="pb_" javaType="net.geedge.asw.module.runner.entity.PlaybookEntity">
<association property="playbook" columnPrefix="pb_" javaType="net.geedge.asw.module.job.entity.PlaybookEntity">
<id property="id" column="id"/>
<result property="name" column="name"/>
</association>

View File

@@ -1,8 +1,8 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd" >
<mapper namespace="net.geedge.asw.module.runner.dao.PlaybookDao">
<mapper namespace="net.geedge.asw.module.job.dao.PlaybookDao">
<resultMap type="net.geedge.asw.module.runner.entity.PlaybookEntity" id="playbook">
<resultMap type="net.geedge.asw.module.job.entity.PlaybookEntity" id="playbook">
<id property="id" column="id"/>
<result property="name" column="name"/>
<result property="type" column="type"/>

View File

@@ -1,32 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="net.geedge.asw.module.runner.dao.RunnerDao">
<select id="queryList" resultType="net.geedge.asw.module.runner.entity.RunnerEntity">
SELECT
*
FROM
runner
<where>
<if test="params.workspaceId != null and params.workspaceId != ''">
workspace_id = #{params.workspaceId}
</if>
<if test="params.q != null and params.q != ''">
AND locate(#{params.q}, description)
</if>
<if test="params.tags != null and params.tags != ''">
AND <foreach item="item" collection="params.tags.split(',')" separator="OR" index="" open="(" close=")">
locate(#{item}, tags)
</foreach>
</if>
</where>
<if test="params.orderBy == null or params.orderBy == ''">
ORDER BY id
</if>
</select>
</mapper>

View File

@@ -163,5 +163,20 @@ INSERT INTO `sys_i18n`(`id`, `name`, `code`, `value`, `lang`, `remark`, `update_
INSERT INTO `sys_i18n`(`id`, `name`, `code`, `value`, `lang`, `remark`, `update_user_id`, `update_timestamp`) VALUES (259, '203008', 'GIT_TAG_NOT_FOUND', 'Tag {0} 不存在', 'zh', '', 'admin', 1724030366000);
INSERT INTO `sys_i18n`(`id`, `name`, `code`, `value`, `lang`, `remark`, `update_user_id`, `update_timestamp`) VALUES (261, '203009', 'GIT_TAG_ALREADY_IN_USE', 'Tag is already in use. Choose another tag.', 'en', '', 'admin', 1724030366000);
INSERT INTO `sys_i18n`(`id`, `name`, `code`, `value`, `lang`, `remark`, `update_user_id`, `update_timestamp`) VALUES (263, '203009', 'GIT_TAG_ALREADY_IN_USE', 'Tag 已在使用中,请选择其他 tag', 'zh', '', 'admin', 1724030366000);
INSERT INTO `sys_i18n`(`id`, `name`, `code`, `value`, `lang`, `remark`, `update_user_id`, `update_timestamp`) VALUES (265, '701001', 'JOB_CFG_TYPE_CANNOT_EMPTY', 'Job configuration type cannot be empty', 'en', '', 'admin', 1724030366000);
INSERT INTO `sys_i18n`(`id`, `name`, `code`, `value`, `lang`, `remark`, `update_user_id`, `update_timestamp`) VALUES (267, '701002', 'JOB_CFG_STATUS_CANNOT_EMPTY', 'Job configuration status cannot be empty', 'en', '', 'admin', 1724030366000);
INSERT INTO `sys_i18n`(`id`, `name`, `code`, `value`, `lang`, `remark`, `update_user_id`, `update_timestamp`) VALUES (269, '701003', 'JOB_CFG_STATUS_ERROR', 'Job configuration status error', 'en', '', 'admin', 1724030366000);
INSERT INTO `sys_i18n`(`id`, `name`, `code`, `value`, `lang`, `remark`, `update_user_id`, `update_timestamp`) VALUES (271, '701004', 'JOB_CFG_TYPE_ERROR', 'Job configuration type error', 'en', '', 'admin', 1724030366000);
INSERT INTO `sys_i18n`(`id`, `name`, `code`, `value`, `lang`, `remark`, `update_user_id`, `update_timestamp`) VALUES (273, '701005', 'JOB_CFG_CRON_CANNOT_EMPTY', 'Job configuration cron cannot be empty', 'en', '', 'admin', 1724030366000);
INSERT INTO `sys_i18n`(`id`, `name`, `code`, `value`, `lang`, `remark`, `update_user_id`, `update_timestamp`) VALUES (275, '701006', 'JOB_CFG_CRON_ERROR', 'Job configuration cron is not a valid cron expression', 'en', '', 'admin', 1724030366000);
INSERT INTO `sys_i18n`(`id`, `name`, `code`, `value`, `lang`, `remark`, `update_user_id`, `update_timestamp`) VALUES (277, '701007', 'JOB_CFG_NAME_ALREADY_EXISTS', 'Job configuration name already exists', 'en', '', 'admin', 1724030366000);
INSERT INTO `sys_i18n`(`id`, `name`, `code`, `value`, `lang`, `remark`, `update_user_id`, `update_timestamp`) VALUES (279, '701001', 'JOB_CFG_TYPE_CANNOT_EMPTY', '作业配置类型不能为空', 'zh', '', 'admin', 1724030366000);
INSERT INTO `sys_i18n`(`id`, `name`, `code`, `value`, `lang`, `remark`, `update_user_id`, `update_timestamp`) VALUES (281, '701002', 'JOB_CFG_STATUS_CANNOT_EMPTY', '作业配置状态不能为空', 'zh', '', 'admin', 1724030366000);
INSERT INTO `sys_i18n`(`id`, `name`, `code`, `value`, `lang`, `remark`, `update_user_id`, `update_timestamp`) VALUES (283, '701003', 'JOB_CFG_STATUS_ERROR', '作业配置状态错误', 'zh', '', 'admin', 1724030366000);
INSERT INTO `sys_i18n`(`id`, `name`, `code`, `value`, `lang`, `remark`, `update_user_id`, `update_timestamp`) VALUES (285, '701004', 'JOB_CFG_TYPE_ERROR', '作业配置类型错误', 'zh', '', 'admin', 1724030366000);
INSERT INTO `sys_i18n`(`id`, `name`, `code`, `value`, `lang`, `remark`, `update_user_id`, `update_timestamp`) VALUES (287, '701005', 'JOB_CFG_CRON_CANNOT_EMPTY', '作业配置cron不能为空', 'zh', '', 'admin', 1724030366000);
INSERT INTO `sys_i18n`(`id`, `name`, `code`, `value`, `lang`, `remark`, `update_user_id`, `update_timestamp`) VALUES (289, '701006', 'JOB_CFG_CRON_ERROR', '作业配置 cron 不是有效的 cron 表达式', 'zh', '', 'admin', 1724030366000);
INSERT INTO `sys_i18n`(`id`, `name`, `code`, `value`, `lang`, `remark`, `update_user_id`, `update_timestamp`) VALUES (291, '701007', 'JOB_CFG_NAME_ALREADY_EXISTS', '作业配置名称已存在', 'zh', '', 'admin', 1724030366000);
SET FOREIGN_KEY_CHECKS = 1;

View File

@@ -50,6 +50,8 @@ INSERT INTO `sys_menu` (`id`, `name`, `i18n`, `pid`, `type`, `perms`, `route`, `
INSERT INTO `sys_menu` (`id`, `name`, `i18n`, `pid`, `type`, `perms`, `route`, `icon`, `order`, `create_timestamp`, `state`) VALUES ('5003', 'job_delete', 'buttons.delete', '5000', 'button', '', '', '', 3, 1722478572000, 1);
INSERT INTO `sys_menu` (`id`, `name`, `i18n`, `pid`, `type`, `perms`, `route`, `icon`, `order`, `create_timestamp`, `state`) VALUES ('5004', 'job_cancel', 'buttons.cancel', '5000', 'button', '', '', '', 4, 1722478572000, 1);
INSERT INTO `sys_menu` (`id`, `name`, `i18n`, `pid`, `type`, `perms`, `route`, `icon`, `order`, `create_timestamp`, `state`) VALUES ('5005', 'job_retry', 'buttons.retry', '5000', 'button', '', '', '', 5, 1722478572000, 1);
INSERT INTO `sys_menu` (`id`, `name`, `i18n`, `pid`, `type`, `perms`, `route`, `icon`, `order`, `create_timestamp`, `state`) VALUES ('5006', 'job_run', 'buttons.run', '5000', 'button', '', '', '', 6, 1722478572000, 1);
INSERT INTO `sys_menu` (`id`, `name`, `i18n`, `pid`, `type`, `perms`, `route`, `icon`, `order`, `create_timestamp`, `state`) VALUES ('5007', 'job_edit', 'buttons.edit', '5000', 'button', '', '', '', 7, 1722478572000, 1);
INSERT INTO `sys_menu` (`id`, `name`, `i18n`, `pid`, `type`, `perms`, `route`, `icon`, `order`, `create_timestamp`, `state`) VALUES ('6000', 'playbooks', 'overall.playbooks', '0', 'menu', '', '/playbooks', 'asw-icon icon-Playbooks', 4, 1722478572000, 1);
INSERT INTO `sys_menu` (`id`, `name`, `i18n`, `pid`, `type`, `perms`, `route`, `icon`, `order`, `create_timestamp`, `state`) VALUES ('6001', 'playbook_view', 'buttons.view', '6000', 'button', '', '', '', 1, 1722478572000, 1);
@@ -76,3 +78,4 @@ INSERT INTO `sys_menu` (`id`, `name`, `i18n`, `pid`, `type`, `perms`, `route`, `
INSERT INTO `sys_menu` (`id`, `name`, `i18n`, `pid`, `type`, `perms`, `route`, `icon`, `order`, `create_timestamp`, `state`) VALUES ('1', 'workspace_add', 'buttons.add', '0', 'button', '', '', '', 1, 1722478572000, 1);
INSERT INTO `sys_menu` (`id`, `name`, `i18n`, `pid`, `type`, `perms`, `route`, `icon`, `order`, `create_timestamp`, `state`) VALUES ('2', 'workspace_delete', 'buttons.delete', '0', 'button', '', '', '', 1, 1722478572000, 1);

View File

@@ -6,6 +6,7 @@ delete from sys_role_menu where role_id in ('owner' ,'maintainer', 'developer',
-- owner
-- 具有所有权限
INSERT INTO `sys_role_menu`(`role_id`, `menu_id`) VALUES ('owner', '1');
INSERT INTO `sys_role_menu`(`role_id`, `menu_id`) VALUES ('owner', '2');
INSERT INTO `sys_role_menu`(`role_id`, `menu_id`) VALUES ('owner', '1000');
INSERT INTO `sys_role_menu`(`role_id`, `menu_id`) VALUES ('owner', '1001');
@@ -53,6 +54,8 @@ INSERT INTO `sys_role_menu`(`role_id`, `menu_id`) VALUES ('owner', '5002');
INSERT INTO `sys_role_menu`(`role_id`, `menu_id`) VALUES ('owner', '5003');
INSERT INTO `sys_role_menu`(`role_id`, `menu_id`) VALUES ('owner', '5004');
INSERT INTO `sys_role_menu`(`role_id`, `menu_id`) VALUES ('owner', '5005');
INSERT INTO `sys_role_menu`(`role_id`, `menu_id`) VALUES ('owner', '5006');
INSERT INTO `sys_role_menu`(`role_id`, `menu_id`) VALUES ('owner', '5007');
INSERT INTO `sys_role_menu`(`role_id`, `menu_id`) VALUES ('owner', '6000');
INSERT INTO `sys_role_menu`(`role_id`, `menu_id`) VALUES ('owner', '6001');
@@ -128,6 +131,8 @@ INSERT INTO `sys_role_menu`(`role_id`, `menu_id`) VALUES ('maintainer', '5002');
INSERT INTO `sys_role_menu`(`role_id`, `menu_id`) VALUES ('maintainer', '5003');
INSERT INTO `sys_role_menu`(`role_id`, `menu_id`) VALUES ('maintainer', '5004');
INSERT INTO `sys_role_menu`(`role_id`, `menu_id`) VALUES ('maintainer', '5005');
INSERT INTO `sys_role_menu`(`role_id`, `menu_id`) VALUES ('maintainer', '5006');
INSERT INTO `sys_role_menu`(`role_id`, `menu_id`) VALUES ('maintainer', '5007');
INSERT INTO `sys_role_menu`(`role_id`, `menu_id`) VALUES ('maintainer', '6000');
INSERT INTO `sys_role_menu`(`role_id`, `menu_id`) VALUES ('maintainer', '6001');
@@ -196,6 +201,8 @@ INSERT INTO `sys_role_menu`(`role_id`, `menu_id`) VALUES ('developer', '5002');
INSERT INTO `sys_role_menu`(`role_id`, `menu_id`) VALUES ('developer', '5003');
INSERT INTO `sys_role_menu`(`role_id`, `menu_id`) VALUES ('developer', '5004');
INSERT INTO `sys_role_menu`(`role_id`, `menu_id`) VALUES ('developer', '5005');
INSERT INTO `sys_role_menu`(`role_id`, `menu_id`) VALUES ('developer', '5006');
INSERT INTO `sys_role_menu`(`role_id`, `menu_id`) VALUES ('developer', '5007');
INSERT INTO `sys_role_menu`(`role_id`, `menu_id`) VALUES ('developer', '6000');
INSERT INTO `sys_role_menu`(`role_id`, `menu_id`) VALUES ('developer', '6001');

View File

@@ -202,6 +202,7 @@ CREATE TABLE `playbook` (
DROP TABLE IF EXISTS `job`;
CREATE TABLE `job` (
`id` varchar(64) NOT NULL COMMENT '主键',
`job_cfg_id` varchar(64) NOT NULL DEFAULT '' COMMENT '任务配置ID',
`playbook_id` varchar(64) NOT NULL DEFAULT '' COMMENT 'Playbook ID',
`package_id` varchar(64) NOT NULL DEFAULT '' COMMENT 'Package ID',
`env_id` varchar(64) NOT NULL DEFAULT '' COMMENT 'env ID',
@@ -223,6 +224,31 @@ CREATE TABLE `job` (
KEY `idx_workspace_id` (`workspace_id`) USING BTREE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/**
* Add job_cfg table
*/
DROP TABLE IF EXISTS `job_cfg`;
CREATE TABLE `job_cfg` (
`id` varchar(64) NOT NULL COMMENT '主键',
`name` varchar(256) NOT NULL DEFAULT '' COMMENT '名称',
`description` TEXT NOT NULL DEFAULT '' COMMENT '描述信息 ',
`package_id` varchar(64) NOT NULL DEFAULT '' COMMENT 'Package ID',
`env_id` varchar(64) NOT NULL DEFAULT '' COMMENT 'env ID',
`playbook_id` varchar(64) NOT NULL DEFAULT '' COMMENT 'playbook ID',
`type` varchar(64) NOT NULL DEFAULT '' COMMENT '任务类型:可选值:asap,cron',
`cron` varchar(64) NOT NULL DEFAULT '' COMMENT 'cron表达式,当 type = cron 时不能为空',
`status` varchar(64) NOT NULL DEFAULT '' COMMENT '状态:当 type = cron 时有效 可选值: enabled,disabled',
`parameters` TEXT NOT NULL DEFAULT '' COMMENT '运行参数',
`create_timestamp` bigint(20) NOT NULL COMMENT '创建时间戳',
`update_timestamp` bigint(20) NOT NULL COMMENT '更新时间戳',
`create_user_id` varchar(64) NOT NULL COMMENT '创建人',
`update_user_id` varchar(64) NOT NULL COMMENT '更新人',
`workspace_id` varchar(64) NOT NULL DEFAULT '' COMMENT '工作空间ID',
PRIMARY KEY (`id`) USING BTREE,
KEY `idx_name` (`name`) USING BTREE,
KEY `idx_type` (`type`) USING BTREE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/**
* Add pcap table
*/