author    zhangshuai <[email protected]>  2024-11-12 15:20:27 +0800
committer zhangshuai <[email protected]>  2024-11-12 15:20:27 +0800
commit    e2e0934d3421864f3d2cae33eb113e261d38dd5f
tree      688f4ca19cdca239701a34e018e7244ab4701f5e
parent    09946cb5f9f840624e7dcc58883ca0228d57fc72
fix: ASW-146 do not display the "system" system user
 pom.xml                                                                          |  8
 src/main/java/net/geedge/asw/module/runner/entity/JobEntity.java                 |  5
 src/main/java/net/geedge/asw/module/runner/entity/PcapEntity.java                |  1
 src/main/java/net/geedge/asw/module/runner/job/JobPlaybookExecResultChecker.java | 92
 src/main/java/net/geedge/asw/module/runner/service/impl/JobServiceImpl.java      |  6
 src/main/java/net/geedge/asw/module/runner/service/impl/PcapServiceImpl.java     |  8
 src/main/resources/db/mapper/runner/JobMapper.xml                                |  1
 src/main/resources/db/mapper/runner/PcapMapper.xml                               |  5
 src/main/resources/db/migration/V1.0.01__INIT_TABLES.sql                         |  2
 9 files changed, 73 insertions(+), 55 deletions(-)
diff --git a/pom.xml b/pom.xml
index 3467cd7..6507b1a 100644
--- a/pom.xml
+++ b/pom.xml
@@ -191,6 +191,14 @@
<artifactId>spring-boot-starter-freemarker</artifactId>
</dependency>
+ <!-- https://mvnrepository.com/artifact/net.lingala.zip4j/zip4j -->
+ <dependency>
+ <groupId>net.lingala.zip4j</groupId>
+ <artifactId>zip4j</artifactId>
+ <version>2.11.5</version>
+ </dependency>
+
+
</dependencies>
<build>
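Note: the zip4j dependency added above replaces the java.util.zip usage in JobPlaybookExecResultChecker below. A minimal standalone sketch of the zip4j 2.x read pattern the commit adopts (the class name and archive path are illustrative, not part of the commit):

    import net.lingala.zip4j.ZipFile;
    import net.lingala.zip4j.model.FileHeader;
    import java.io.InputStream;

    public class Zip4jReadSketch {
        public static void main(String[] args) throws Exception {
            // ZipFile implements Closeable in zip4j 2.x, so try-with-resources applies
            try (ZipFile zip = new ZipFile("artifact.zip")) {
                // getFileHeaders() lists every entry without extracting the archive
                for (FileHeader header : zip.getFileHeaders()) {
                    // getInputStream(FileHeader) streams a single entry on demand
                    try (InputStream in = zip.getInputStream(header)) {
                        System.out.printf("%s (%d bytes)%n",
                                header.getFileName(), header.getUncompressedSize());
                    }
                }
            }
        }
    }

Closing each entry stream inside the loop, as here, avoids leaking all but the last stream when several entries are read.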
diff --git a/src/main/java/net/geedge/asw/module/runner/entity/JobEntity.java b/src/main/java/net/geedge/asw/module/runner/entity/JobEntity.java
index c7caeb5..12df4cc 100644
--- a/src/main/java/net/geedge/asw/module/runner/entity/JobEntity.java
+++ b/src/main/java/net/geedge/asw/module/runner/entity/JobEntity.java
@@ -12,6 +12,8 @@ import net.geedge.asw.module.environment.entity.EnvironmentSessionEntity;
import net.geedge.asw.module.sys.entity.SysUserEntity;
import net.geedge.asw.module.workspace.entity.WorkspaceEntity;
+import java.util.List;
+
@Data
@TableName("job")
public class JobEntity {
@@ -25,7 +27,6 @@ public class JobEntity {
private Long startTimestamp;
private Long endTimestamp;
private String status;
- private String pcapId;
private String logPath;
private Long createTimestamp;
@@ -49,7 +50,7 @@ public class JobEntity {
private PlaybookEntity playbook;
@TableField(exist = false)
- private PcapEntity pcap;
+ private List<PcapEntity> pcap;
@TableField(exist = false)
private SysUserEntity createUser;
diff --git a/src/main/java/net/geedge/asw/module/runner/entity/PcapEntity.java b/src/main/java/net/geedge/asw/module/runner/entity/PcapEntity.java
index 94c3c42..7f975e9 100644
--- a/src/main/java/net/geedge/asw/module/runner/entity/PcapEntity.java
+++ b/src/main/java/net/geedge/asw/module/runner/entity/PcapEntity.java
@@ -35,7 +35,6 @@ public class PcapEntity {
@TableField(exist = false)
private WorkspaceEntity workspace;
- @TableField(exist = false)
private String jobId;
@TableField(exist = false)
diff --git a/src/main/java/net/geedge/asw/module/runner/job/JobPlaybookExecResultChecker.java b/src/main/java/net/geedge/asw/module/runner/job/JobPlaybookExecResultChecker.java
index 77b414a..748206e 100644
--- a/src/main/java/net/geedge/asw/module/runner/job/JobPlaybookExecResultChecker.java
+++ b/src/main/java/net/geedge/asw/module/runner/job/JobPlaybookExecResultChecker.java
@@ -19,21 +19,18 @@ import net.geedge.asw.module.runner.entity.PcapEntity;
import net.geedge.asw.module.runner.service.IJobService;
import net.geedge.asw.module.runner.service.IPcapService;
import net.geedge.asw.module.runner.util.RunnerConstant;
+import net.lingala.zip4j.ZipFile;
+import net.lingala.zip4j.model.FileHeader;
import org.apache.commons.lang3.time.StopWatch;
import org.quartz.DisallowConcurrentExecution;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.core.io.FileSystemResource;
-import org.springframework.core.io.Resource;
import org.springframework.scheduling.quartz.QuartzJobBean;
import org.springframework.transaction.annotation.Transactional;
-import java.io.File;
-import java.io.InputStream;
+import java.io.*;
import java.util.List;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipFile;
@DisallowConcurrentExecution
@@ -195,63 +192,86 @@ public class JobPlaybookExecResultChecker extends QuartzJobBean {
private Thread startGetJobResultThread(JobEntity job, EnvironmentEntity environment, String status) {
Thread thread = Thread.ofVirtual().start(() -> {
File destination = null;
- File pcapDestination = null;
InputStream inputStream = null;
ZipFile zipFile = null;
try {
- log.info("[playbookExecResultChecker] [startGetJobResultThread] [job status] [jod id: {}]", status, job.getId());
+ log.info("[playbookExecResultChecker] [startGetJobResultThread] [job status: {}] [jod id: {}] [time: {}]", status, job.getId(), System.currentTimeMillis());
JSONObject paramJSONObject = environment.getParamJSONObject();
String url = paramJSONObject.getStr("url");
String token = paramJSONObject.getStr("token");
+
HttpRequest request = T.HttpUtil.createGet(String.format("%s/api/v1/env/playbook/%s/artifact", url, job.getId()));
request.header("Authorization", token);
HttpResponse response = request.execute();
- log.info("[playbookExecResultChecker] [startGetJobResultThread] [request env playbook result api] [status: {}]", response.getStatus());
+ log.info("[playbookExecResultChecker] [startGetJobResultThread] [request env playbook result api] [status: {}] [time: {}]", response.getStatus(), System.currentTimeMillis());
if (response.isOk()) {
destination = T.FileUtil.file(Constants.TEMP_PATH, T.StrUtil.concat(true, job.getId(), ".zip"));
T.FileUtil.writeBytes(response.bodyBytes(), destination);
- zipFile = T.ZipUtil.toZipFile(destination, T.CharsetUtil.CHARSET_UTF_8);
- List<? extends ZipEntry> list = zipFile.stream().toList();
- for (ZipEntry entry : list) {
- if (entry.getName().endsWith("pcap")) {
+ zipFile = new ZipFile(destination);
+ List<FileHeader> fileHeaders = zipFile.getFileHeaders();
+ for (FileHeader fileHeader : fileHeaders) {
+ // handle the pcap file
+ if (fileHeader.getFileName().endsWith("pcap")) {
PackageEntity packageEntity = packageService.getById(job.getPackageId());
+ String fileName = T.StrUtil.concat(true, "job", T.StrUtil.DASHED, T.StrUtil.sub(job.getId(), 0, 8), T.StrUtil.DASHED, packageEntity.getName(), ".pcap");
+ if (fileHeader.getFileName().contains("all")) {
+ fileName = T.StrUtil.concat(true, "job", T.StrUtil.DASHED, T.StrUtil.sub(job.getId(), 0, 8), ".pcap");
+ }
+
+ // write the pcap file stream into the workspace directory
+ File pcapFile = T.FileUtil.file(T.WebPathUtil.getRootPath(), job.getWorkspaceId(), fileName);
+ File parentDir = pcapFile.getParentFile();
+ if (!parentDir.exists()) {
+ parentDir.mkdirs();
+ }
+ inputStream = zipFile.getInputStream(fileHeader);
+ T.FileUtil.writeFromStream(inputStream, pcapFile);
+ PcapEntity entity = new PcapEntity();
+ String pcapId = T.StrUtil.uuid();
+ entity.setId(pcapId);
+ entity.setName(fileName);
- // pcap name {package.name}-job-{jobId[0:8]}.pcap
- String fileName = T.StrUtil.concat(true, packageEntity.getName(), T.StrUtil.DASHED, "job", T.StrUtil.DASHED, T.StrUtil.sub(job.getId(), 0, 8), ".pcap");
- pcapDestination = T.FileUtil.file(Constants.TEMP_PATH, fileName);
- inputStream = T.ZipUtil.get(zipFile, entry.getName());
- T.FileUtil.writeFromStream(inputStream, pcapDestination);
- Resource fileResource = new FileSystemResource(pcapDestination);
- // upload pcap file
- PcapEntity pcapEntity = pcapService.savePcap(fileResource, "", job.getWorkspaceId(), job.getCreateUserId());
- log.info("[playbookExecResultChecker] [startGetJobResultThread] [upload pcap: {}] [job id: {}] ", T.JSONUtil.toJsonStr(pcapEntity), job.getId());
- job.setPcapId(pcapEntity.getId());
+ entity.setSize(pcapFile.length());
+ entity.setStatus(RunnerConstant.PcapStatus.UPLOADED.getValue());
+ entity.setCreateTimestamp(System.currentTimeMillis());
+ entity.setCreateUserId(job.getCreateUserId());
+ entity.setWorkspaceId(job.getWorkspaceId());
+ entity.setPath(pcapFile.getPath());
+
+ // md5
+ String md5Hex = T.DigestUtil.md5Hex(pcapFile);
+ entity.setMd5(md5Hex);
+ pcapService.save(entity);
+ log.info("[playbookExecResultChecker] [startGetJobResultThread] [upload pcap: {}] [job id: {}] [time: {}]", T.JSONUtil.toJsonStr(entity), job.getId(), System.currentTimeMillis());
} else {
- // log
- inputStream = T.ZipUtil.get(zipFile, entry.getName());
+ // handle the log file
File logFile = T.FileUtil.file(job.getLogPath());
+ inputStream = zipFile.getInputStream(fileHeader);
T.FileUtil.writeFromStream(inputStream, logFile);
}
}
- }
- // update job status
- job.setStatus(status);
- job.setEndTimestamp(System.currentTimeMillis());
- jobService.updateById(job);
-
- // remove session
- EnvironmentSessionEntity session = environmentSessionService.getOne(new LambdaQueryWrapper<EnvironmentSessionEntity>().eq(EnvironmentSessionEntity::getJobId, job.getId()));
- environmentService.removeSession(session.getId());
- log.info("[playbookExecResultChecker] [startGetJobResultThread] [finshed] [job id: {}]", job.getId());
+ // update job status
+ job.setStatus(status);
+ job.setEndTimestamp(System.currentTimeMillis());
+ jobService.updateById(job);
+ // remove session
+ EnvironmentSessionEntity session = environmentSessionService.getOne(new LambdaQueryWrapper<EnvironmentSessionEntity>()
+ .eq(EnvironmentSessionEntity::getJobId, job.getId())
+ .eq(EnvironmentSessionEntity::getStatus, 1)
+ );
+ if (session != null) {
+ environmentService.removeSession(session.getId());
+ }
+ log.info("[playbookExecResultChecker] [startGetJobResultThread] [finshed] [job id: {}]", job.getId());
+ }
} catch (Exception e) {
log.error("[playbookExecResultChecker] [startGetJobResultThread] [error]", e);
} finally {
T.IoUtil.close(zipFile);
T.FileUtil.del(destination);
- T.FileUtil.del(pcapDestination);
T.IoUtil.close(inputStream);
Constants.RESULT_JOB_THREAD.remove(job.getId());
}
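Note: extracted captures are now named job-{jobId[0:8]}-{package.name}.pcap, collapsing to job-{jobId[0:8]}.pcap for the combined "all" capture (the removed code used {package.name}-job-{jobId[0:8]}.pcap). A JDK-only sketch of the new rule, with pcapFileName as a hypothetical helper (the real code inlines this via the T.StrUtil facade):

    // Hypothetical helper mirroring the fileName logic added above.
    static String pcapFileName(String jobId, String packageName, String entryName) {
        // first 8 characters of the job id, guarding against short ids
        String shortId = jobId.substring(0, Math.min(8, jobId.length()));
        if (entryName.contains("all")) {
            return "job-" + shortId + ".pcap";                 // combined capture
        }
        return "job-" + shortId + "-" + packageName + ".pcap"; // per-package capture
    }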
diff --git a/src/main/java/net/geedge/asw/module/runner/service/impl/JobServiceImpl.java b/src/main/java/net/geedge/asw/module/runner/service/impl/JobServiceImpl.java
index ff06638..bd7dbdd 100644
--- a/src/main/java/net/geedge/asw/module/runner/service/impl/JobServiceImpl.java
+++ b/src/main/java/net/geedge/asw/module/runner/service/impl/JobServiceImpl.java
@@ -90,10 +90,8 @@ public class JobServiceImpl extends ServiceImpl<JobDao, JobEntity> implements IJ
PackageEntity pkg = packageService.getById(job.getPackageId());
job.setPkg(pkg);
- if (T.StrUtil.isNotEmpty(job.getPcapId())){
- PcapEntity pcap = pcapService.getById(job.getPcapId());
- job.setPcap(pcap);
- }
+ List<PcapEntity> pcapList = pcapService.list(new LambdaQueryWrapper<PcapEntity>().eq(PcapEntity::getJobId, id));
+ job.setPcap(pcapList);
SysUserEntity createUser = userService.getById(job.getCreateUserId());
job.setCreateUser(createUser);
diff --git a/src/main/java/net/geedge/asw/module/runner/service/impl/PcapServiceImpl.java b/src/main/java/net/geedge/asw/module/runner/service/impl/PcapServiceImpl.java
index 37bab86..f48047b 100644
--- a/src/main/java/net/geedge/asw/module/runner/service/impl/PcapServiceImpl.java
+++ b/src/main/java/net/geedge/asw/module/runner/service/impl/PcapServiceImpl.java
@@ -108,7 +108,7 @@ public class PcapServiceImpl extends ServiceImpl<PcapDao, PcapEntity> implements
PcapEntity pcap = this.getById(id);
T.VerifyUtil.is(pcap).notNull(RCode.SYS_RECORD_NOT_FOUND);
- JobEntity job = jobService.getOne(new LambdaQueryWrapper<JobEntity>().eq(JobEntity::getPcapId, pcap.getId()));
+ JobEntity job = jobService.getOne(new LambdaQueryWrapper<JobEntity>().eq(JobEntity::getId, pcap.getJobId()));
if (T.ObjectUtil.isNotNull(job)) {
pcap.setJobId(job.getId());
@@ -192,12 +192,6 @@ public class PcapServiceImpl extends ServiceImpl<PcapDao, PcapEntity> implements
// remove
this.removeById(id);
-
- // update job pcap_id
- jobService.update(new LambdaUpdateWrapper<JobEntity>()
- .set(JobEntity::getPcapId, "")
- .eq(JobEntity::getPcapId, id)
- );
}
}
diff --git a/src/main/resources/db/mapper/runner/JobMapper.xml b/src/main/resources/db/mapper/runner/JobMapper.xml
index b0d469a..c938576 100644
--- a/src/main/resources/db/mapper/runner/JobMapper.xml
+++ b/src/main/resources/db/mapper/runner/JobMapper.xml
@@ -11,7 +11,6 @@
<result property="startTimestamp" column="start_timestamp"/>
<result property="endTimestamp" column="end_timestamp"/>
<result property="status" column="status"/>
- <result property="pcapId" column="pcap_id"/>
<result property="logPath" column="log_path"/>
<result property="createTimestamp" column="create_timestamp"/>
<result property="updateTimestamp" column="update_timestamp"/>
diff --git a/src/main/resources/db/mapper/runner/PcapMapper.xml b/src/main/resources/db/mapper/runner/PcapMapper.xml
index 3639f0a..a9627a7 100644
--- a/src/main/resources/db/mapper/runner/PcapMapper.xml
+++ b/src/main/resources/db/mapper/runner/PcapMapper.xml
@@ -15,8 +15,7 @@
<result property="createTimestamp" column="create_timestamp"/>
<result property="createUserId" column="create_user_id"/>
<result property="workspaceId" column="workspace_id"/>
-
- <result property="jobId" column="jobId"/>
+ <result property="jobId" column="job_id"/>
<association property="pkg" columnPrefix="pkg_" javaType="net.geedge.asw.module.app.entity.PackageEntity">
<id property="id" column="id"/>
@@ -53,7 +52,7 @@
pb.name AS pb_name
FROM
pcap pcap
- left join job job on pcap.id = job.pcap_id
+ left join job job on pcap.job_id = job.id
LEFT JOIN environment env ON job.env_id = env.id
LEFT JOIN package pkg ON job.package_id = pkg.id
LEFT JOIN playbook pb ON job.playbook_id = pb.id
diff --git a/src/main/resources/db/migration/V1.0.01__INIT_TABLES.sql b/src/main/resources/db/migration/V1.0.01__INIT_TABLES.sql
index d235b8d..bfb48c3 100644
--- a/src/main/resources/db/migration/V1.0.01__INIT_TABLES.sql
+++ b/src/main/resources/db/migration/V1.0.01__INIT_TABLES.sql
@@ -209,7 +209,6 @@ CREATE TABLE `job` (
`start_timestamp` bigint(20) NOT NULL DEFAULT -1 COMMENT 'start timestamp',
`end_timestamp` bigint(20) NOT NULL DEFAULT -1 COMMENT 'end timestamp',
`status` varchar(64) NOT NULL DEFAULT '' COMMENT 'status; allowed values: created,pending,running,passed,failed,cancel',
- `pcap_id` varchar(64) NOT NULL DEFAULT '' COMMENT 'PCAP ID',
`log_path` varchar(256) NOT NULL DEFAULT '' COMMENT 'log file path',
`create_timestamp` bigint(20) NOT NULL COMMENT 'create timestamp',
`update_timestamp` bigint(20) NOT NULL COMMENT 'update timestamp',
@@ -239,6 +238,7 @@ CREATE TABLE `pcap` (
`create_timestamp` bigint(20) NOT NULL COMMENT 'create timestamp',
`create_user_id` varchar(64) NOT NULL COMMENT 'creator',
`workspace_id` varchar(64) NOT NULL DEFAULT '' COMMENT 'workspace ID',
+ `job_id` varchar(64) NOT NULL DEFAULT '' COMMENT 'job ID',
PRIMARY KEY (`id`) USING BTREE,
KEY `idx_name` (`name`) USING BTREE,
KEY `idx_md5` (`md5`) USING BTREE,
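Note: V1.0.01__INIT_TABLES.sql is the Flyway init migration edited in place, so only fresh installs pick up the moved column (Flyway normally rejects edited, already-applied migrations by checksum validation). For an existing database, an equivalent follow-up would look something like this (hypothetical; no such migration ships with this commit):

    -- Hypothetical follow-up migration, NOT part of this commit
    ALTER TABLE `job`  DROP COLUMN `pcap_id`;
    ALTER TABLE `pcap` ADD COLUMN `job_id` varchar(64) NOT NULL DEFAULT '' COMMENT 'job ID';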