@@ -1,7 +1,9 @@
package net.geedge.asw.module.runner.job;

import cn.hutool.core.net.url.UrlBuilder;
import cn.hutool.http.HttpRequest;
import cn.hutool.http.HttpResponse;
import cn.hutool.json.JSONArray;
import cn.hutool.json.JSONObject;
import cn.hutool.log.Log;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
@@ -22,6 +24,9 @@ import net.geedge.asw.module.runner.service.IPcapService;
import net.geedge.asw.module.runner.util.RunnerConstant;
import net.lingala.zip4j.ZipFile;
import net.lingala.zip4j.model.FileHeader;
import net.lingala.zip4j.model.ZipParameters;
import net.lingala.zip4j.model.enums.CompressionLevel;
import net.lingala.zip4j.model.enums.CompressionMethod;
import org.apache.commons.lang3.time.StopWatch;
import org.quartz.DisallowConcurrentExecution;
import org.quartz.JobExecutionContext;
@@ -198,17 +203,30 @@ public class JobPlaybookExecResultChecker extends QuartzJobBean {
    private Thread startGetJobResultThread(JobEntity job, EnvironmentEntity environment, String status) {
        Thread thread = Thread.ofVirtual().start(() -> {
            File destination = null;
            File artifactZip = null;
            InputStream inputStream = null;
            ZipFile zipFile = null;
            ZipFile artifactsZipFile = null;
            List<File> artifactFiles = T.ListUtil.list(false);
            try {
                log.info("[playbookExecResultChecker] [startGetJobResultThread] [job status: {}] [job id: {}] [time: {}]", status, job.getId(), System.currentTimeMillis());
                JSONObject paramJSONObject = environment.getParamJSONObject();
                String url = paramJSONObject.getStr("url");
                String token = paramJSONObject.getStr("token");
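                // Build the artifact download URL for this job from the environment's base url.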
                UrlBuilder urlBuilder = UrlBuilder.of(String.format("%s/api/v1/env/playbook/%s/artifact", url, job.getId()));
                HttpRequest request = T.HttpUtil.createGet(String.format("%s/api/v1/env/playbook/%s/artifact", url, job.getId()));
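                // Forward any artifact names from the job parameters as repeated "artifacts" query parameters.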
                String parameters = job.getParameters();
                if (T.StrUtil.isNotEmpty(parameters)) {
                    JSONObject obj = T.JSONUtil.parseObj(parameters);
                    JSONArray artifacts = obj.getJSONArray("artifacts");
                    if (artifacts != null) {
                        for (Object artifact : artifacts) {
                            urlBuilder.addQuery("artifacts", artifact);
                        }
                    }
                }
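                // Issue the GET request, authenticating with the environment token.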
                HttpRequest request = T.HttpUtil.createGet(urlBuilder.build());
                request.header("Authorization", token);

                HttpResponse response = request.execute();
                log.info("[playbookExecResultChecker] [startGetJobResultThread] [request env playbook result api] [status: {}] [time: {}]", response.getStatus(), System.currentTimeMillis());
@@ -257,13 +275,32 @@ public class JobPlaybookExecResultChecker extends QuartzJobBean {
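                        // Save the pcap record with its md5 checksum.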
                        entity.setMd5(md5Hex);
                        pcapService.save(entity);
                        log.info("[playbookExecResultChecker] [startGetJobResultThread] [upload pcap: {}] [job id: {}] [time: {}]", T.JSONUtil.toJsonStr(entity), job.getId(), System.currentTimeMillis());
                    } else {
                    } else if (fileHeader.getFileName().equals("result.log")) {
                        // handle the result log file
                        File logFile = T.FileUtil.file(job.getLogPath());
                        inputStream = zipFile.getInputStream(fileHeader);
                        T.FileUtil.writeFromStream(inputStream, logFile);
                    } else {
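                        // Any other entry is written out as a job artifact file.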
                        File artifactFile = FileResourceUtil.createFile(resources, job.getWorkspaceId(), Constants.FileTypeEnum.JOB.getType(), job.getId(), fileHeader.getFileName());
                        inputStream = zipFile.getInputStream(fileHeader);
                        T.FileUtil.writeFromStream(inputStream, artifactFile);
                        artifactFiles.add(artifactFile);
                    }
                }
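                // More than one artifact: bundle them into a single "<job id>.zip" archive.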
                if (T.CollUtil.isNotEmpty(artifactFiles) && artifactFiles.size() > 1) {
                    artifactZip = FileResourceUtil.createFile(resources, job.getWorkspaceId(), Constants.FileTypeEnum.JOB.getType(), job.getId(), T.StrUtil.concat(true, job.getId(), ".zip"));
                    artifactsZipFile = new ZipFile(artifactZip);

                    ZipParameters zipParameters = new ZipParameters();
                    zipParameters.setCompressionMethod(CompressionMethod.DEFLATE); // compression method
                    zipParameters.setCompressionLevel(CompressionLevel.FASTEST); // compression level; options include FASTEST, ULTRA, etc.
                    artifactsZipFile.addFiles(artifactFiles, zipParameters);
                    job.setArtifactsPath(artifactZip.getPath());
                }
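                // Exactly one artifact: reference the file directly without zipping.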
                if (T.CollUtil.isNotEmpty(artifactFiles) && artifactFiles.size() == 1) {
                    job.setArtifactsPath(artifactFiles.getFirst().getPath());
                }

                // update job status
                job.setStatus(status);
                job.setEndTimestamp(System.currentTimeMillis());
@@ -286,8 +323,12 @@ public class JobPlaybookExecResultChecker extends QuartzJobBean {
                log.error("[playbookExecResultChecker] [startGetJobResultThread] [error]", e);
            } finally {
                T.IoUtil.close(zipFile);
                T.IoUtil.close(artifactsZipFile);
                T.FileUtil.del(destination);
                T.IoUtil.close(inputStream);
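                // The loose artifact copies were bundled into the archive, so delete them here.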
                if (artifactFiles.size() > 1) {
                    artifactFiles.stream().forEach(file -> T.FileUtil.del(file));
                }
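                // Remove this job from the RESULT_JOB_THREAD registry.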
                Constants.RESULT_JOB_THREAD.remove(job.getId());
            }
        });