author     zhangshuai <[email protected]>  2024-11-25 15:03:55 +0800
committer  zhangshuai <[email protected]>  2024-11-25 15:03:55 +0800
commit     afcbbe30b667cb9cae1243bdca86b815010675af (patch)
tree       f19d8c5fa6e885adea489f2cb55f1db36b4b3076
parent     5444bf2c0b108812c0e968f44a4246d9542c48d1 (diff)
fix: ASW-191 add artifacts parameter to job
-rw-r--r--  src/main/java/net/geedge/asw/module/runner/entity/JobEntity.java                  2
-rw-r--r--  src/main/java/net/geedge/asw/module/runner/job/JobPlaybookExecResultChecker.java  47
-rw-r--r--  src/main/resources/db/mapper/runner/JobMapper.xml                                  1
-rw-r--r--  src/main/resources/db/migration/V1.0.01__INIT_TABLES.sql                           1
4 files changed, 48 insertions, 3 deletions
diff --git a/src/main/java/net/geedge/asw/module/runner/entity/JobEntity.java b/src/main/java/net/geedge/asw/module/runner/entity/JobEntity.java
index cc95fc2..5ab8d1b 100644
--- a/src/main/java/net/geedge/asw/module/runner/entity/JobEntity.java
+++ b/src/main/java/net/geedge/asw/module/runner/entity/JobEntity.java
@@ -30,6 +30,8 @@ public class JobEntity {
private String status;
@JsonIgnore
private String logPath;
+ @JsonIgnore
+ private String artifactsPath;
private Long createTimestamp;
private Long updateTimestamp;
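
The new artifactsPath field mirrors the existing logPath: it is annotated with @JsonIgnore so the server-side file path is never serialized into API responses. A minimal standalone sketch of that effect (not part of the commit; the class name and sample values are made up):

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.databind.ObjectMapper;

class JobEntitySketch {
    public String id = "job-1";                            // made-up sample values
    public String status = "passed";
    @JsonIgnore
    public String artifactsPath = "/data/jobs/job-1.zip";  // hypothetical path, never serialized

    public static void main(String[] args) throws Exception {
        // Prints {"id":"job-1","status":"passed"} - artifactsPath is omitted.
        System.out.println(new ObjectMapper().writeValueAsString(new JobEntitySketch()));
    }
}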
diff --git a/src/main/java/net/geedge/asw/module/runner/job/JobPlaybookExecResultChecker.java b/src/main/java/net/geedge/asw/module/runner/job/JobPlaybookExecResultChecker.java
index dba1112..2860c34 100644
--- a/src/main/java/net/geedge/asw/module/runner/job/JobPlaybookExecResultChecker.java
+++ b/src/main/java/net/geedge/asw/module/runner/job/JobPlaybookExecResultChecker.java
@@ -1,7 +1,9 @@
package net.geedge.asw.module.runner.job;
+import cn.hutool.core.net.url.UrlBuilder;
import cn.hutool.http.HttpRequest;
import cn.hutool.http.HttpResponse;
+import cn.hutool.json.JSONArray;
import cn.hutool.json.JSONObject;
import cn.hutool.log.Log;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
@@ -22,6 +24,9 @@ import net.geedge.asw.module.runner.service.IPcapService;
import net.geedge.asw.module.runner.util.RunnerConstant;
import net.lingala.zip4j.ZipFile;
import net.lingala.zip4j.model.FileHeader;
+import net.lingala.zip4j.model.ZipParameters;
+import net.lingala.zip4j.model.enums.CompressionLevel;
+import net.lingala.zip4j.model.enums.CompressionMethod;
import org.apache.commons.lang3.time.StopWatch;
import org.quartz.DisallowConcurrentExecution;
import org.quartz.JobExecutionContext;
@@ -198,17 +203,30 @@ public class JobPlaybookExecResultChecker extends QuartzJobBean {
private Thread startGetJobResultThread(JobEntity job, EnvironmentEntity environment, String status) {
Thread thread = Thread.ofVirtual().start(() -> {
File destination = null;
+ File artifactZip = null;
InputStream inputStream = null;
ZipFile zipFile = null;
+ ZipFile artifactsZipFile = null;
+ List<File> artifactFiles = T.ListUtil.list(false);
try {
log.info("[playbookExecResultChecker] [startGetJobResultThread] [job status: {}] [jod id: {}] [time: {}]", status, job.getId(), System.currentTimeMillis());
JSONObject paramJSONObject = environment.getParamJSONObject();
String url = paramJSONObject.getStr("url");
String token = paramJSONObject.getStr("token");
- HttpRequest request = T.HttpUtil.createGet(String.format("%s/api/v1/env/playbook/%s/artifact", url, job.getId()));
+ UrlBuilder urlBuilder = UrlBuilder.of(String.format("%s/api/v1/env/playbook/%s/artifact", url, job.getId()));
+ String parameters = job.getParameters();
+ if (T.StrUtil.isNotEmpty(parameters)){
+ JSONObject obj = T.JSONUtil.parseObj(parameters);
+ JSONArray artifacts = obj.getJSONArray("artifacts");
+ if (artifacts != null){
+ for (Object artifact : artifacts) {
+ urlBuilder.addQuery("artifacts", artifact);
+ }
+ }
+ }
+ HttpRequest request = T.HttpUtil.createGet(urlBuilder.build());
request.header("Authorization", token);
-
HttpResponse response = request.execute();
log.info("[playbookExecResultChecker] [startGetJobResultThread] [request env playbook result api] [status: {}] [time: {}]", response.getStatus(), System.currentTimeMillis());
@@ -257,13 +275,32 @@ public class JobPlaybookExecResultChecker extends QuartzJobBean {
entity.setMd5(md5Hex);
pcapService.save(entity);
log.info("[playbookExecResultChecker] [startGetJobResultThread] [upload pcap: {}] [job id: {}] [time: {}]", T.JSONUtil.toJsonStr(entity), job.getId(), System.currentTimeMillis());
- } else {
+ } else if (fileHeader.getFileName().equals("result.log")) {
// handle the log file
File logFile = T.FileUtil.file(job.getLogPath());
inputStream = zipFile.getInputStream(fileHeader);
T.FileUtil.writeFromStream(inputStream, logFile);
+ } else {
+ File artifactFile = FileResourceUtil.createFile(resources, job.getWorkspaceId(), Constants.FileTypeEnum.JOB.getType(), job.getId(), fileHeader.getFileName());
+ inputStream = zipFile.getInputStream(fileHeader);
+ T.FileUtil.writeFromStream(inputStream, artifactFile);
+ artifactFiles.add(artifactFile);
}
}
+
+ if (T.CollUtil.isNotEmpty(artifactFiles) && artifactFiles.size() > 1) {
+ artifactZip = FileResourceUtil.createFile(resources, job.getWorkspaceId(), Constants.FileTypeEnum.JOB.getType(), job.getId(), T.StrUtil.concat(true, job.getId(), ".zip"));
+ artifactsZipFile = new ZipFile(artifactZip);
+
+ ZipParameters zipParameters = new ZipParameters();
+ zipParameters.setCompressionMethod(CompressionMethod.DEFLATE); // compression method
+ zipParameters.setCompressionLevel(CompressionLevel.FASTEST); // compression level; options include FASTEST, ULTRA, etc.
+ artifactsZipFile.addFiles(artifactFiles, zipParameters);
+ job.setArtifactsPath(artifactZip.getPath());
+ }
+ if (T.CollUtil.isNotEmpty(artifactFiles) && artifactFiles.size() == 1) {
+ job.setArtifactsPath(artifactFiles.getFirst().getPath());
+ }
// update job status
job.setStatus(status);
job.setEndTimestamp(System.currentTimeMillis());
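
When more than one artifact comes back, the loose files are re-bundled into a single archive with zip4j, using DEFLATE at the FASTEST level to favor speed over compression ratio on the result-collection path; a single artifact is referenced in place with no archive. A minimal standalone sketch of those zip4j calls, assuming zip4j 2.x (file names are hypothetical; addFiles writes and finalizes the archive itself):

import java.io.File;
import java.util.List;
import net.lingala.zip4j.ZipFile;
import net.lingala.zip4j.model.ZipParameters;
import net.lingala.zip4j.model.enums.CompressionLevel;
import net.lingala.zip4j.model.enums.CompressionMethod;

public class ArtifactBundleSketch {
    public static void main(String[] args) throws Exception {
        ZipParameters zipParameters = new ZipParameters();
        zipParameters.setCompressionMethod(CompressionMethod.DEFLATE); // standard zip compression
        zipParameters.setCompressionLevel(CompressionLevel.FASTEST);   // trade ratio for speed
        ZipFile zip = new ZipFile(new File("artifacts.zip"));          // hypothetical output name
        zip.addFiles(List.of(new File("report.html"), new File("capture.pcap")), zipParameters);
    }
}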
@@ -286,8 +323,12 @@ public class JobPlaybookExecResultChecker extends QuartzJobBean {
log.error("[playbookExecResultChecker] [startGetJobResultThread] [error]", e);
} finally {
T.IoUtil.close(zipFile);
+ T.IoUtil.close(artifactsZipFile);
T.FileUtil.del(destination);
T.IoUtil.close(inputStream);
+ if (artifactFiles.size() > 1) {
+ artifactFiles.forEach(T.FileUtil::del);
+ }
Constants.RESULT_JOB_THREAD.remove(job.getId());
}
});
diff --git a/src/main/resources/db/mapper/runner/JobMapper.xml b/src/main/resources/db/mapper/runner/JobMapper.xml
index 3c4c355..f77da42 100644
--- a/src/main/resources/db/mapper/runner/JobMapper.xml
+++ b/src/main/resources/db/mapper/runner/JobMapper.xml
@@ -13,6 +13,7 @@
<result property="endTimestamp" column="end_timestamp"/>
<result property="status" column="status"/>
<result property="logPath" column="log_path"/>
+ <result property="artifactsPath" column="artifacts_path"/>
<result property="createTimestamp" column="create_timestamp"/>
<result property="updateTimestamp" column="update_timestamp"/>
<result property="createUserId" column="create_user_id"/>
diff --git a/src/main/resources/db/migration/V1.0.01__INIT_TABLES.sql b/src/main/resources/db/migration/V1.0.01__INIT_TABLES.sql
index 1123f71..39eb000 100644
--- a/src/main/resources/db/migration/V1.0.01__INIT_TABLES.sql
+++ b/src/main/resources/db/migration/V1.0.01__INIT_TABLES.sql
@@ -210,6 +210,7 @@ CREATE TABLE `job` (
`end_timestamp` bigint(20) NOT NULL DEFAULT -1 COMMENT 'end timestamp',
`status` varchar(64) NOT NULL DEFAULT '' COMMENT 'status; allowed values: created,pending,running,passed,failed,cancel',
`log_path` varchar(256) NOT NULL DEFAULT '' COMMENT 'log file path',
+ `artifacts_path` varchar(256) NOT NULL DEFAULT '' COMMENT 'artifacts file path',
`create_timestamp` bigint(20) NOT NULL COMMENT 'create timestamp',
`update_timestamp` bigint(20) NOT NULL COMMENT 'update timestamp',
`create_user_id` varchar(64) NOT NULL COMMENT 'creator',