fix: ASW-191 add artifacts parameter to job
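
The artifact request now forwards the job's optional `artifacts` list as repeated query parameters, and any archive entry that is neither a pcap nor `result.log` is saved (zipped together when there is more than one) under the new `artifacts_path` column. Below is a minimal, self-contained sketch of the query-string handling using plain hutool; the class name, base URL, job id and `parameters` payload are made-up illustration values, not part of this commit (in the checker the payload comes from `job.getParameters()`):

    // Sketch only: URL, job id and parameters are hypothetical illustration values.
    import cn.hutool.core.net.url.UrlBuilder;
    import cn.hutool.json.JSONArray;
    import cn.hutool.json.JSONObject;
    import cn.hutool.json.JSONUtil;

    public class ArtifactsUrlSketch {
        public static void main(String[] args) {
            // Hypothetical job parameters; the real value comes from job.getParameters().
            String parameters = "{\"artifacts\": [\"report.html\", \"capture.pcap\"]}";

            // Base artifact URL for a made-up job id, then one "artifacts" query pair per entry.
            UrlBuilder urlBuilder = UrlBuilder.of("http://env.example/api/v1/env/playbook/42/artifact");
            JSONObject obj = JSONUtil.parseObj(parameters);
            JSONArray artifacts = obj.getJSONArray("artifacts");
            if (artifacts != null) {
                for (Object artifact : artifacts) {
                    urlBuilder.addQuery("artifacts", artifact);
                }
            }

            // Prints .../artifact?artifacts=report.html&artifacts=capture.pcap
            System.out.println(urlBuilder.build());
        }
    }

Repeating the same `artifacts` key for each list entry keeps the request readable as a multi-valued parameter on the environment side; entries are passed through unchanged.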
@@ -30,6 +30,8 @@ public class JobEntity {
     private String status;
     @JsonIgnore
     private String logPath;
+    @JsonIgnore
+    private String artifactsPath;
 
     private Long createTimestamp;
     private Long updateTimestamp;
@@ -1,7 +1,9 @@
 package net.geedge.asw.module.runner.job;
 
+import cn.hutool.core.net.url.UrlBuilder;
 import cn.hutool.http.HttpRequest;
 import cn.hutool.http.HttpResponse;
+import cn.hutool.json.JSONArray;
 import cn.hutool.json.JSONObject;
 import cn.hutool.log.Log;
 import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
@@ -22,6 +24,9 @@ import net.geedge.asw.module.runner.service.IPcapService;
 import net.geedge.asw.module.runner.util.RunnerConstant;
 import net.lingala.zip4j.ZipFile;
 import net.lingala.zip4j.model.FileHeader;
+import net.lingala.zip4j.model.ZipParameters;
+import net.lingala.zip4j.model.enums.CompressionLevel;
+import net.lingala.zip4j.model.enums.CompressionMethod;
 import org.apache.commons.lang3.time.StopWatch;
 import org.quartz.DisallowConcurrentExecution;
 import org.quartz.JobExecutionContext;
@@ -198,17 +203,30 @@ public class JobPlaybookExecResultChecker extends QuartzJobBean {
     private Thread startGetJobResultThread(JobEntity job, EnvironmentEntity environment, String status) {
         Thread thread = Thread.ofVirtual().start(() -> {
             File destination = null;
+            File artifactZip = null;
             InputStream inputStream = null;
             ZipFile zipFile = null;
+            ZipFile artifactsZipFile = null;
+            List<File> artifactFiles = T.ListUtil.list(false);
             try {
                 log.info("[playbookExecResultChecker] [startGetJobResultThread] [job status: {}] [job id: {}] [time: {}]", status, job.getId(), System.currentTimeMillis());
                 JSONObject paramJSONObject = environment.getParamJSONObject();
                 String url = paramJSONObject.getStr("url");
                 String token = paramJSONObject.getStr("token");
-                HttpRequest request = T.HttpUtil.createGet(String.format("%s/api/v1/env/playbook/%s/artifact", url, job.getId()));
+                UrlBuilder urlBuilder = UrlBuilder.of(String.format("%s/api/v1/env/playbook/%s/artifact", url, job.getId()));
+                String parameters = job.getParameters();
+                if (T.StrUtil.isNotEmpty(parameters)) {
+                    JSONObject obj = T.JSONUtil.parseObj(parameters);
+                    JSONArray artifacts = obj.getJSONArray("artifacts");
+                    if (artifacts != null) {
+                        for (Object artifact : artifacts) {
+                            urlBuilder.addQuery("artifacts", artifact);
+                        }
+                    }
+                }
+                HttpRequest request = T.HttpUtil.createGet(urlBuilder.build());
                 request.header("Authorization", token);
 
                 HttpResponse response = request.execute();
                 log.info("[playbookExecResultChecker] [startGetJobResultThread] [request env playbook result api] [status: {}] [time: {}]", response.getStatus(), System.currentTimeMillis());
@@ -257,13 +275,32 @@ public class JobPlaybookExecResultChecker extends QuartzJobBean {
                         entity.setMd5(md5Hex);
                         pcapService.save(entity);
                         log.info("[playbookExecResultChecker] [startGetJobResultThread] [upload pcap: {}] [job id: {}] [time: {}]", T.JSONUtil.toJsonStr(entity), job.getId(), System.currentTimeMillis());
-                    } else {
+                    } else if (fileHeader.getFileName().equals("result.log")) {
                         // handle the log file
                         File logFile = T.FileUtil.file(job.getLogPath());
                         inputStream = zipFile.getInputStream(fileHeader);
                         T.FileUtil.writeFromStream(inputStream, logFile);
+                    } else {
+                        // any other entry is stored as an artifact file
+                        File artifactFile = FileResourceUtil.createFile(resources, job.getWorkspaceId(), Constants.FileTypeEnum.JOB.getType(), job.getId(), fileHeader.getFileName());
+                        inputStream = zipFile.getInputStream(fileHeader);
+                        T.FileUtil.writeFromStream(inputStream, artifactFile);
+                        artifactFiles.add(artifactFile);
                     }
                 }
+
+                if (T.CollUtil.isNotEmpty(artifactFiles) && artifactFiles.size() > 1) {
+                    artifactZip = FileResourceUtil.createFile(resources, job.getWorkspaceId(), Constants.FileTypeEnum.JOB.getType(), job.getId(), T.StrUtil.concat(true, job.getId(), ".zip"));
+                    artifactsZipFile = new ZipFile(artifactZip);
+
+                    ZipParameters zipParameters = new ZipParameters();
+                    zipParameters.setCompressionMethod(CompressionMethod.DEFLATE); // compression method
+                    zipParameters.setCompressionLevel(CompressionLevel.FASTEST);   // compression level; options include FASTEST, ULTRA, etc.
+                    artifactsZipFile.addFiles(artifactFiles, zipParameters);
+                    job.setArtifactsPath(artifactZip.getPath());
+                }
+                if (T.CollUtil.isNotEmpty(artifactFiles) && artifactFiles.size() == 1) {
+                    job.setArtifactsPath(artifactFiles.getFirst().getPath());
+                }
                 // update job status
                 job.setStatus(status);
                 job.setEndTimestamp(System.currentTimeMillis());
@@ -286,8 +323,12 @@ public class JobPlaybookExecResultChecker extends QuartzJobBean {
                 log.error("[playbookExecResultChecker] [startGetJobResultThread] [error]", e);
             } finally {
                 T.IoUtil.close(zipFile);
+                T.IoUtil.close(artifactsZipFile);
                 T.FileUtil.del(destination);
                 T.IoUtil.close(inputStream);
+                if (artifactFiles.size() > 1) {
+                    artifactFiles.stream().forEach(file -> T.FileUtil.del(file));
+                }
                 Constants.RESULT_JOB_THREAD.remove(job.getId());
             }
         });
@@ -13,6 +13,7 @@
         <result property="endTimestamp" column="end_timestamp"/>
         <result property="status" column="status"/>
         <result property="logPath" column="log_path"/>
+        <result property="artifactsPath" column="artifacts_path"/>
         <result property="createTimestamp" column="create_timestamp"/>
         <result property="updateTimestamp" column="update_timestamp"/>
         <result property="createUserId" column="create_user_id"/>
@@ -210,6 +210,7 @@ CREATE TABLE `job` (
   `end_timestamp` bigint(20) NOT NULL DEFAULT -1 COMMENT 'end timestamp',
   `status` varchar(64) NOT NULL DEFAULT '' COMMENT 'status; allowed values: created,pending,running,passed,failed,cancel',
   `log_path` varchar(256) NOT NULL DEFAULT '' COMMENT 'log file path',
+  `artifacts_path` varchar(256) NOT NULL DEFAULT '' COMMENT 'artifacts file path',
   `create_timestamp` bigint(20) NOT NULL COMMENT 'create timestamp',
   `update_timestamp` bigint(20) NOT NULL COMMENT 'update timestamp',
   `create_user_id` varchar(64) NOT NULL COMMENT 'creator',