From 5e5cb498f4f609f0b246718102e9e4e1969ae816 Mon Sep 17 00:00:00 2001
From: wanghao
Date: Thu, 7 Dec 2023 18:28:38 +0800
Subject: [Fix][schema] Rename base fields app_path and protocol_path (TSG-17828)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/main/resources/http-sql-template.sql | 20 ++++++++++----------
 1 file changed, 10 insertions(+), 10 deletions(-)

(limited to 'src/main/resources/http-sql-template.sql')

diff --git a/src/main/resources/http-sql-template.sql b/src/main/resources/http-sql-template.sql
index c81bfb98..20d9b534 100644
--- a/src/main/resources/http-sql-template.sql
+++ b/src/main/resources/http-sql-template.sql
@@ -45,43 +45,43 @@ select client_ip as client_ip, vsys_id as vsys_id from %s where %s %s AND notEmp
 #end

 #sql("ENTITY_UDP_SESSION")
-select server_ip as server_ip,vsys_id as vsys_id ,COUNT(1) as sessions from %s where %s AND (arrayElement(splitByString('.',protocol_path),length(splitByString('.',protocol_path))) = 'UDP') and server_port in (53 , 443) group by server_ip,vsys_id order by sessions desc limit %s
+select server_ip as server_ip,vsys_id as vsys_id ,COUNT(1) as sessions from %s where %s AND (arrayElement(splitByString('.',decoded_path),length(splitByString('.',decoded_path))) = 'UDP') and server_port in (53 , 443) group by server_ip,vsys_id order by sessions desc limit %s
 #end

 #sql("ENTITY_UDP_UNIQ_CLIENT_IPS")
-select server_ip as server_ip, vsys_id as vsys_id from %s where %s AND (arrayElement(splitByString('.',protocol_path),length(splitByString('.',protocol_path))) = 'UDP') group by server_ip,vsys_id order by COUNT(DISTINCT(client_ip)) desc limit %s
+select server_ip as server_ip, vsys_id as vsys_id from %s where %s AND (arrayElement(splitByString('.',decoded_path),length(splitByString('.',decoded_path))) = 'UDP') group by server_ip,vsys_id order by COUNT(DISTINCT(client_ip)) desc limit %s
 #end

 #sql("ENTITY_TCP_SESSION")
-select server_ip as server_ip,vsys_id as vsys_id ,COUNT(1) as sessions from %s where %s AND (arrayElement(splitByString('.',protocol_path),length(splitByString('.',protocol_path))) = 'TCP') group by server_ip,vsys_id order by sessions desc limit %s
+select server_ip as server_ip,vsys_id as vsys_id ,COUNT(1) as sessions from %s where %s AND (arrayElement(splitByString('.',decoded_path),length(splitByString('.',decoded_path))) = 'TCP') group by server_ip,vsys_id order by sessions desc limit %s
 #end

 #sql("ENTITY_TCP_UNIQ_CLIENT_IPS")
-select server_ip as server_ip, vsys_id as vsys_id from %s where %s AND (arrayElement(splitByString('.',protocol_path),length(splitByString('.',protocol_path))) = 'TCP') group by server_ip, vsys_id order by COUNT(DISTINCT(client_ip)) desc limit %s
+select server_ip as server_ip, vsys_id as vsys_id from %s where %s AND (arrayElement(splitByString('.',decoded_path),length(splitByString('.',decoded_path))) = 'TCP') group by server_ip, vsys_id order by COUNT(DISTINCT(client_ip)) desc limit %s
 #end

 #sql("TOP_ENTITY_TCP_UNIQ_CLIENT_IPS")
-select COUNT(DISTINCT(client_ip)) as client_ips from %s where %s AND (arrayElement(splitByString('.',protocol_path),length(splitByString('.',protocol_path))) = 'TCP') AND server_ip in (select server_ip from %s as cc where %s AND (arrayElement(splitByString('.',protocol_path),length(splitByString('.',protocol_path))) = 'TCP') group by server_ip order by COUNT(DISTINCT(client_ip)) desc limit %s)
+select COUNT(DISTINCT(client_ip)) as client_ips from %s where %s AND 
(arrayElement(splitByString('.',decoded_path),length(splitByString('.',decoded_path))) = 'TCP') AND server_ip in (select server_ip from %s as cc where %s AND (arrayElement(splitByString('.',decoded_path),length(splitByString('.',decoded_path))) = 'TCP') group by server_ip order by COUNT(DISTINCT(client_ip)) desc limit %s) #end #sql("TOP_ENTITY_UDP_UNIQ_CLIENT_IPS") -select COUNT(DISTINCT(client_ip)) as client_ips from %s where %s AND (arrayElement(splitByString('.',protocol_path),length(splitByString('.',protocol_path))) = 'UDP') AND server_ip in (select server_ip from %s as cc where %s AND (arrayElement(splitByString('.',protocol_path),length(splitByString('.',protocol_path))) = 'UDP') group by server_ip order by COUNT(DISTINCT(client_ip)) desc limit %s) +select COUNT(DISTINCT(client_ip)) as client_ips from %s where %s AND (arrayElement(splitByString('.',decoded_path),length(splitByString('.',decoded_path))) = 'UDP') AND server_ip in (select server_ip from %s as cc where %s AND (arrayElement(splitByString('.',decoded_path),length(splitByString('.',decoded_path))) = 'UDP') group by server_ip order by COUNT(DISTINCT(client_ip)) desc limit %s) #end #sql("TOTAL_ENTITY_UDP_SESSION") -select COUNT(1) as sessions from %s where %s AND (arrayElement(splitByString('.',protocol_path),length(splitByString('.',protocol_path))) = 'UDP') and server_port in (53 , 443) limit 1 +select COUNT(1) as sessions from %s where %s AND (arrayElement(splitByString('.',decoded_path),length(splitByString('.',decoded_path))) = 'UDP') and server_port in (53 , 443) limit 1 #end #sql("TOTAL_ENTITY_UDP_UNIQ_CLIENT_IPS") -select COUNT(DISTINCT(client_ip)) as uniq_client_ips from %s where %s AND (arrayElement(splitByString('.',protocol_path),length(splitByString('.',protocol_path))) = 'UDP') limit 1 +select COUNT(DISTINCT(client_ip)) as uniq_client_ips from %s where %s AND (arrayElement(splitByString('.',decoded_path),length(splitByString('.',decoded_path))) = 'UDP') limit 1 #end #sql("TOTAL_ENTITY_TCP_SESSION") -select COUNT(1) as sessions from %s where %s AND (arrayElement(splitByString('.',protocol_path),length(splitByString('.',protocol_path))) = 'TCP') limit 1 +select COUNT(1) as sessions from %s where %s AND (arrayElement(splitByString('.',decoded_path),length(splitByString('.',decoded_path))) = 'TCP') limit 1 #end #sql("TOTAL_ENTITY_TCP_UNIQ_CLIENT_IPS") -select COUNT(DISTINCT(client_ip)) as uniq_client_ips from %s where %s AND (arrayElement(splitByString('.',protocol_path),length(splitByString('.',protocol_path))) = 'TCP') limit 1 +select COUNT(DISTINCT(client_ip)) as uniq_client_ips from %s where %s AND (arrayElement(splitByString('.',decoded_path),length(splitByString('.',decoded_path))) = 'TCP') limit 1 #end #sql("ENTITY_TOP_SNI") -- cgit v1.2.3 From 4fa72c3a215ac59b65b7d654a3aeb9ad2bed5aae Mon Sep 17 00:00:00 2001 From: wanghao Date: Sat, 9 Dec 2023 20:09:20 +0800 Subject: [Fix][API] 重构Dataset API(TSG-17843) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../tsg/Galaxy/datasets_variables.json | 138 +++++++++++++++ .../tsg/Galaxy/sql_datasets_variables.json | 138 --------------- .../java/com/mesalab/common/nacos/NacosConst.java | 2 +- .../java/com/mesalab/qgw/benchmark/Writer.java | 7 +- .../com/mesalab/qgw/constant/QGWMessageConst.java | 7 +- .../services/controller/DatasetController.java | 56 ++++++ .../mesalab/services/service/DatasetService.java | 57 +++++++ .../services/service/impl/DatasetServiceImp.java | 188 +++++++++++++++++++++ 
src/main/resources/http-sql-template.sql | 2 +- 9 files changed, 451 insertions(+), 144 deletions(-) create mode 100644 config/nacos/config/fixed-127.0.0.1_8848-tsg_nacos/data/config-data-tenant/tsg/Galaxy/datasets_variables.json delete mode 100644 config/nacos/config/fixed-127.0.0.1_8848-tsg_nacos/data/config-data-tenant/tsg/Galaxy/sql_datasets_variables.json create mode 100644 src/main/java/com/mesalab/services/controller/DatasetController.java create mode 100644 src/main/java/com/mesalab/services/service/DatasetService.java create mode 100644 src/main/java/com/mesalab/services/service/impl/DatasetServiceImp.java (limited to 'src/main/resources/http-sql-template.sql') diff --git a/config/nacos/config/fixed-127.0.0.1_8848-tsg_nacos/data/config-data-tenant/tsg/Galaxy/datasets_variables.json b/config/nacos/config/fixed-127.0.0.1_8848-tsg_nacos/data/config-data-tenant/tsg/Galaxy/datasets_variables.json new file mode 100644 index 00000000..40071b78 --- /dev/null +++ b/config/nacos/config/fixed-127.0.0.1_8848-tsg_nacos/data/config-data-tenant/tsg/Galaxy/datasets_variables.json @@ -0,0 +1,138 @@ +[ + { + "key": "start", + "default": "2023-08-01 00:00:00" + }, + { + "key": "end", + "default": "2023-08-02 00:00:00" + }, + { + "key": "vsys_id", + "default": 1 + }, + { + "key": "rule_id", + "default": 1 + }, + { + "key": "template_id", + "default": 1 + }, + { + "key": "chart_id", + "default": 1 + }, + { + "key": "version", + "default": 1 + }, + { + "key": "object_id", + "default": 1 + }, + { + "key": "item_id", + "default": 40 + }, + { + "key": "limit", + "default": 20 + }, + { + "key": "granularity_period", + "default": "PT5M" + }, + { + "key": "granularity_period_second", + "default": 300 + }, + { + "default": "P1D", + "key": "alignment_period" + }, + { + "key": "alignment_period_second", + "default": 86400 + }, + { + "key": "metric", + "default": 1 + }, + { + "key": "sf_profile_id", + "default": 10011 + }, + { + "key": "sff_profile_id", + "default": 4035 + }, + { + "key": "profile_id", + "default": 275 + }, + { + "key": "subscriber_id", + "default": "test" + }, + { + "key": "destination_ip", + "default": "8.8.8.8" + }, + { + "key": "attack_type", + "default": "DNS Flood" + }, + { + "key": "filter", + "default": "" + }, + { + "key": "timestampdiff_second", + "default": 86400 + }, + { + "key": "client_ip", + "default": "120.242.132.200" + }, + { + "key": "log_id", + "default": 1153021139190754263 + }, + { + "key": "order_by", + "default": 1 + }, + { + "key": "separator", + "default": "," + }, + { + "key": "policy_id", + "default": 1 + }, + { + "key": "task_id", + "default": 1 + }, + { + "key": "columns", + "default": 1 + }, + { + "key": "app_name", + "default": "wechat" + }, + { + "key": "column_name", + "default": 1 + }, + { + "key": "source", + "default": "statistics_rule" + }, + { + "key": "timestamp_column", + "default": "__time" + } +] \ No newline at end of file diff --git a/config/nacos/config/fixed-127.0.0.1_8848-tsg_nacos/data/config-data-tenant/tsg/Galaxy/sql_datasets_variables.json b/config/nacos/config/fixed-127.0.0.1_8848-tsg_nacos/data/config-data-tenant/tsg/Galaxy/sql_datasets_variables.json deleted file mode 100644 index 7d890db1..00000000 --- a/config/nacos/config/fixed-127.0.0.1_8848-tsg_nacos/data/config-data-tenant/tsg/Galaxy/sql_datasets_variables.json +++ /dev/null @@ -1,138 +0,0 @@ -[ - { - "key": "start", - "default": "2023-08-01 00:00:00" - }, - { - "key": "end", - "default": "2023-08-02 00:00:00" - }, - { - "key": "vsys_id", - "default": 1 - }, - { - "key": "rule_id", - 
"default": 1 - }, - { - "key": "template_id", - "default": 1 - }, - { - "key": "chart_id", - "default": 1 - }, - { - "key": "version", - "default": 1 - }, - { - "key": "object_id", - "default": 1 - }, - { - "key": "item_id", - "default": 40 - }, - { - "key": "limit", - "default": 20 - }, - { - "key": "granularity_period", - "default": "PT5M" - }, - { - "key": "granularity_period_second", - "default": 300 - }, - { - "default": "P1D", - "key": "alignment_period" - }, - { - "key": "alignment_period_second", - "default": 86400 - }, - { - "key": "metric", - "default": 1 - }, - { - "key": "sf_profile_id", - "default": 10011 - }, - { - "key": "sff_profile_id", - "default": 4035 - }, - { - "key": "profile_id", - "default": 275 - }, - { - "key": "subscriber_id", - "default": "test" - }, - { - "key": "destination_ip", - "default": "8.8.8.8" - }, - { - "key": "attack_type", - "default": "DNS Flood" - }, - { - "key": "filter", - "default": "" - }, - { - "key": "timestampdiff_second", - "default": 86400 - }, - { - "key": "client_ip", - "default": "120.242.132.200" - }, - { - "key": "log_id", - "default": 1153021139190754263 - }, - { - "key": "order_by", - "default": 1 - }, - { - "key": "separator", - "default": "," - }, - { - "key": "policy_id", - "default": 1 - }, - { - "key": "task_id", - "default": 1 - }, - { - "key": "columns", - "default": 1 - }, - { - "key": "app_name", - "default": "wechat" - }, - { - "key": "column_name", - "default": "client_ip" - }, - { - "key": "source", - "default": "statistics_rule" - }, - { - "key": "timestamp_column", - "default": "__time" - } -] \ No newline at end of file diff --git a/src/main/java/com/mesalab/common/nacos/NacosConst.java b/src/main/java/com/mesalab/common/nacos/NacosConst.java index a4efb2e1..87bc919f 100644 --- a/src/main/java/com/mesalab/common/nacos/NacosConst.java +++ b/src/main/java/com/mesalab/common/nacos/NacosConst.java @@ -36,7 +36,7 @@ public class NacosConst { public static final String META_DATA_ID = "meta_data.json"; - public static final String SQL_DATASETS_VARIABLES = "sql_datasets_variables.json"; + public static final String DATASETS_VARIABLES = "datasets_variables.json"; public static final String CK_FILTER_DATA_ID = "ck-filter.json"; diff --git a/src/main/java/com/mesalab/qgw/benchmark/Writer.java b/src/main/java/com/mesalab/qgw/benchmark/Writer.java index 63a37ba7..064fecdd 100644 --- a/src/main/java/com/mesalab/qgw/benchmark/Writer.java +++ b/src/main/java/com/mesalab/qgw/benchmark/Writer.java @@ -15,6 +15,7 @@ import com.mesalab.common.nacos.NacosConfig; import com.mesalab.common.nacos.NacosConst; import com.geedgenetworks.utils.StringUtil; import com.mesalab.qgw.service.MetadataService; +import com.mesalab.services.service.DatasetService; import com.mesalab.services.service.SQLDatasetService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; @@ -35,7 +36,7 @@ public abstract class Writer { @Autowired MetadataService metadataService; @Autowired - SQLDatasetService sqlDatasetService; + DatasetService datasetService; private static final Log log = LogFactory.get(); public static final boolean IS_TITLE = false; @@ -73,14 +74,14 @@ public abstract class Writer { writer.getFile().createNewFile(); } List variables = Lists.newArrayList(); - Object codeInfo = metadataService.getCfg(NacosConst.SQL_DATASETS_VARIABLES); + Object codeInfo = metadataService.getCfg(NacosConst.DATASETS_VARIABLES); if (StringUtil.isNotEmpty(codeInfo)) { Object json = JSON.toJSON(codeInfo); 
variables = (List) JSON.parseArray(json.toString(), LinkedHashMap.class); } List resultLines = Lists.newArrayList(); for (String line : list){ - String sql = sqlDatasetService.buildExecSQL(variables, line); + String sql = datasetService.buildExecSQL(variables, line); resultLines.add(sql); } writer.appendLines(resultLines); diff --git a/src/main/java/com/mesalab/qgw/constant/QGWMessageConst.java b/src/main/java/com/mesalab/qgw/constant/QGWMessageConst.java index c9aa22f0..abc0e80a 100644 --- a/src/main/java/com/mesalab/qgw/constant/QGWMessageConst.java +++ b/src/main/java/com/mesalab/qgw/constant/QGWMessageConst.java @@ -22,9 +22,13 @@ public class QGWMessageConst { public static final String SQL_PARSE_ERROR_NOT_FOUNT_TABLE = "unable to parse table name."; + public static final String CONSISTENCY_OPTION_ERROR = "not support consistency option value."; + + public static final String BENCHMARK_OPTION_ERROR = "not support benchmark option value."; + public static final String DIAGNOSIS_OPTION_ERROR = "not support option value."; - public static final String DIAGNOSIS_CATEGORY_ERROR = "not support category value."; + public static final String DATASET_CATEGORY_ERROR = "not support category value."; public static final String NOT_SUPPORT_DML_PARSER = "not support DML Parser."; @@ -110,5 +114,6 @@ public class QGWMessageConst { public static final String QUERY_CUSTOM_FIELD_DISCOVERY_ERROR = "The param custom.field_discovery.* illegal."; + public static final String DATASET_BACKEND_ENGINE_ERROR = "The backend_engine is not supported, backend_engine value must be :[ qgw| clickhouse| druid| hbase]"; } diff --git a/src/main/java/com/mesalab/services/controller/DatasetController.java b/src/main/java/com/mesalab/services/controller/DatasetController.java new file mode 100644 index 00000000..6371e4b5 --- /dev/null +++ b/src/main/java/com/mesalab/services/controller/DatasetController.java @@ -0,0 +1,56 @@ +package com.mesalab.services.controller; + +import cn.hutool.core.util.StrUtil; +import cn.hutool.http.HttpStatus; +import cn.hutool.log.Log; +import cn.hutool.log.LogFactory; +import com.geedgenetworks.utils.StringUtil; +import com.mesalab.common.entity.BaseResult; + +import com.mesalab.common.entity.BaseResultGenerator; +import com.mesalab.common.enums.DBTypeEnum; +import com.mesalab.common.enums.ResultCodeEnum; +import com.mesalab.qgw.constant.QGWMessageConst; +import com.mesalab.qgw.exception.QGWBusinessException; +import com.mesalab.services.service.DatasetService; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.web.bind.annotation.*; + +import java.util.*; + +@RestController +@RequestMapping(value = "/v1/dataset") +public class DatasetController { + private static final Log log = LogFactory.get(); + @Autowired + private DatasetService datasetService; + + + @GetMapping(value = "/{identifier_name}") + public BaseResult getDataset(@PathVariable("identifier_name") String identifierName) { + log.info("Get Dataset, ID is: {}", identifierName); + return BaseResultGenerator.success(datasetService.getDataset(identifierName)); + } + + @GetMapping + public BaseResult getDatasets(@RequestParam(value = "identifier_names", required = false) String datasetIds, @RequestParam(required = false) String category, @RequestParam(value = "backend_engine", required = false) String backendEngine) { + if (StrUtil.isNotBlank(backendEngine)) { + if (Arrays.stream(DBTypeEnum.values()).noneMatch(o -> o.getValue().equalsIgnoreCase(backendEngine))) { + throw new 
QGWBusinessException(HttpStatus.HTTP_BAD_REQUEST, ResultCodeEnum.PARAMETER_ERROR.getCode(), + String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.DATASET_BACKEND_ENGINE_ERROR)); + } + } + log.info("Get Datasets. IDs is: {}, category is: {}, backendEngine is: {}", datasetIds, category, backendEngine); + return BaseResultGenerator.success(datasetService.getDatasets(Arrays.asList(StrUtil.split(StringUtil.removeAll(datasetIds,"\'"), ",")), category, backendEngine)); + } + + @GetMapping(value = "/{identifier_name}/result_preview") + public BaseResult getPreview(@PathVariable("identifier_name") String identifierName) { + return datasetService.getPreview(identifierName); + } + + @GetMapping(value = "/dataset/global_variable") + public BaseResult getVariable() { + return BaseResultGenerator.success(datasetService.getVariable()); + } +} \ No newline at end of file diff --git a/src/main/java/com/mesalab/services/service/DatasetService.java b/src/main/java/com/mesalab/services/service/DatasetService.java new file mode 100644 index 00000000..60ea09f9 --- /dev/null +++ b/src/main/java/com/mesalab/services/service/DatasetService.java @@ -0,0 +1,57 @@ +package com.mesalab.services.service; + +import com.google.common.collect.Lists; +import com.mesalab.common.entity.BaseResult; + +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +/** + * 数据集市管理服务 + * + * @Classname DatasetService + */ +public interface DatasetService +{ + /** + * Desc: 获取动态变量 + * + * @param + * @return {@link List} + */ + List getVariable(); + + /** + * Desc: 获取单个SQL模版 + * + * @param datasetId + * @return {@link Map} + */ + Map getDataset(String datasetId); + + /** + * Desc: 批量获取SQL模版 + * + * @param + * @return {@link Map} + */ + Map getDatasets(List ids, String category, String backendEngine); + + /** + * Desc: 结果预览 + * + * @param datasetId + * @return {@link BaseResult} + */ + BaseResult getPreview(String datasetId); + + /** + * Desc: 获取执行SQL + * + * @param + * @return + */ + String buildExecSQL(List variables, String sql); + +} diff --git a/src/main/java/com/mesalab/services/service/impl/DatasetServiceImp.java b/src/main/java/com/mesalab/services/service/impl/DatasetServiceImp.java new file mode 100644 index 00000000..6edce129 --- /dev/null +++ b/src/main/java/com/mesalab/services/service/impl/DatasetServiceImp.java @@ -0,0 +1,188 @@ +package com.mesalab.services.service.impl; + +import cn.hutool.core.util.StrUtil; +import cn.hutool.log.Log; +import cn.hutool.log.LogFactory; +import com.alibaba.fastjson2.JSON; +import com.geedgenetworks.utils.StringUtil; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.jfinal.plugin.activerecord.Db; +import com.jfinal.plugin.activerecord.Record; +import com.mesalab.common.entity.BaseResult; +import com.mesalab.common.entity.BaseResultGenerator; +import com.mesalab.common.enums.*; +import com.mesalab.common.nacos.NacosConst; +import com.mesalab.qgw.controller.QueryJobController; +import com.mesalab.qgw.exception.QGWBusinessException; +import com.mesalab.qgw.exception.QGWErrorCode; +import com.mesalab.qgw.model.basic.*; +import com.mesalab.qgw.service.MetadataService; +import com.mesalab.qgw.service.QueryService; +import com.mesalab.services.common.property.SqlPropertySourceFactory; +import com.mesalab.services.service.DatasetService; +import org.apache.commons.collections.CollectionUtils; +import org.springframework.beans.factory.annotation.Autowired; +import 
org.springframework.context.annotation.PropertySource; +import org.springframework.core.env.Environment; +import org.springframework.stereotype.Service; + +import java.util.*; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + + +@Service("datasetService") +@PropertySource(value = "classpath:http-sql-template.sql", factory = SqlPropertySourceFactory.class) +public class DatasetServiceImp + implements DatasetService { + private static final Log log = LogFactory.get(); + private static Pattern pFieldVariable = Pattern.compile("\\$\\{(metric|dimension)_(.*?)\\}", Pattern.CASE_INSENSITIVE); + private static Pattern pLeftRightFlag = Pattern.compile("\\[\\[(.*?)\\]\\]", Pattern.CASE_INSENSITIVE); + private final static String TEMPLATE = "template"; + private final static String BACKEND_ENGINE = "backend_engine"; + + private final static String DATASET_TYPE = "type"; + @Autowired + Environment env; + @Autowired + MetadataService metadataService; + @Autowired + QueryService queryService; + @Autowired + private QueryJobController queryJobController; + + @Override + public List getVariable() { + Object codeInfo = metadataService.getCfg(NacosConst.DATASETS_VARIABLES); + if (StringUtil.isNotEmpty(codeInfo)) { + Object json = JSON.toJSON(codeInfo); + return JSON.parseArray(json.toString(), LinkedHashMap.class); + } + return Lists.newArrayList(); + } + + @Override + public Map getDataset(String datasetId) { + List> results = new ArrayList<>(); + List list = Db.find(buildGetDatasetSQL(Lists.newArrayList(datasetId), null, null)); + if (CollectionUtils.isEmpty(list)) { + return Maps.newHashMap(); + } + list.forEach(record -> results.add(record.getColumns())); + Map result = results.get(0); + log.info("ID is: {}, Dataset is: {}", datasetId, result); + return result; + } + + @Override + public Map getDatasets(List ids, String category, String backendEngine) { + String sql = buildGetDatasetSQL(ids, category, backendEngine); + List> data = new ArrayList<>(); + List list = Db.find(sql); + if (CollectionUtils.isEmpty(list)) { + return Maps.newHashMap(); + } + list.forEach(record -> data.add(record.getColumns())); + Map result = Maps.newHashMap(); + result.put("list", data); + return result; + } + + @Override + public BaseResult getPreview(String datasetId) { + Map dataset = getDataset(datasetId); + if (dataset.isEmpty() || StrUtil.isBlankIfStr(dataset.get(TEMPLATE))) { + return BaseResultGenerator.success(); + } + String template = buildExecSQL(getVariable(), String.valueOf(dataset.get(TEMPLATE))); + String backendEngine = String.valueOf(dataset.get(BACKEND_ENGINE)); + String datasetType = String.valueOf(dataset.get(DATASET_TYPE)); + if (datasetType.equalsIgnoreCase("sql")) { + SqlQueryRequest sqlQueryRequest = JSON.parseObject(template, SqlQueryRequest.class); + sqlQueryRequest.setExecMode(ExecModeEnum.ONESHOT.getValue()); + log.info("Dataset Preview, ID is: {}, Type is: {}, Exec SQL is: {}", datasetId, "sql", sqlQueryRequest.getStatement()); + return queryJobController.commitSql(sqlQueryRequest); + } else if (datasetType.equalsIgnoreCase("dsl")) { + DslQueryRequest dslQueryRequest = JSON.parseObject(template, DslQueryRequest.class); + dslQueryRequest.setExecMode(ExecModeEnum.ONESHOT.getValue()); + log.info("Dataset Preview, ID is: {}, Type is: {}, DSL is: {}", datasetId, "dsl", template); + return queryJobController.commitDsl(dslQueryRequest); + } else { + throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), QGWErrorCode.PARAM_SYNTAX_BAD_REQUEST_EXCEPTION.getCode(), + 
String.format(QGWErrorCode.PARAM_SYNTAX_BAD_REQUEST_EXCEPTION.getMessage(), "not Supported")); + } + } + + + @Override + public String buildExecSQL(List variables, String sql) { + Matcher matcher = pLeftRightFlag.matcher(sql); + while (matcher.find()) { + sql = processOptionalClause(sql, "[[", "]]"); + } + sql = processFieldVariable(sql); + for (LinkedHashMap linkedHashMap : variables) { + String variable = String.valueOf(linkedHashMap.get("key")); + String def = String.valueOf(linkedHashMap.get("default")); + if ("filter".equals(variable) && StringUtil.isBlank(def)) { + def = " 1 = 1"; + } + String parameter = "${".concat(variable).concat("}"); + sql = sql.replace(parameter, def); + } + return sql; + } + + private String buildGetDatasetSQL(List ids, String category, String backendEngine) { + List filterList = Lists.newArrayList(); + if (!ids.isEmpty()) { + filterList.add(" identifier_name IN ( '".concat(String.join("', '", ids)).concat("')")); + } + if (StrUtil.isNotBlank(category)) { + filterList.add(" category = '".concat(category.replace("'", "\\'")).concat("'")); + } + if (StrUtil.isNotBlank(backendEngine)) { + filterList.add(" backend_engine = '".concat(backendEngine.replace("'", "\\'")).concat("'")); + } + String filter = String.join(" AND ", filterList); + return String.format(Objects.requireNonNull(env.getProperty("SQL_DATASETS")), StrUtil.isNotBlank(filter) ? "WHERE ".concat(filter) : ""); + } + + private String processFieldVariable(String str) { + Matcher matcher = pFieldVariable.matcher(str); + StringBuffer sb = new StringBuffer(); + while (matcher.find()) { + matcher.appendReplacement(sb, matcher.group(2)); + } + matcher.appendTail(sb); + return sb.toString(); + } + + private String processOptionalClause(String str, String leftFlag, String rightFlag) { + String left = ""; + String right = ""; + String center = ""; + boolean leftMark = false; + for (int i = 0; i < str.length(); i++) { + String element = str.substring(i, i + 1); + if (leftFlag.startsWith(element) && leftFlag.equals(str.substring(i, i + leftFlag.length()))) { + left = str.substring(0, i); + leftMark = true; + continue; + } + if (leftMark) { + if (rightFlag.startsWith(element) && rightFlag.equals(str.substring(i, i + rightFlag.length()))) { + right = str.substring(i + rightFlag.length()); + center = str.substring(left.length(), str.length() - (right.length())); + break; + } + } + } + if (StrUtil.isNotEmpty(center)) { + String substring = center.substring(leftFlag.length(), center.length() - leftFlag.length()); + str = left.concat(substring).concat(right); + } + return str; + } +} diff --git a/src/main/resources/http-sql-template.sql b/src/main/resources/http-sql-template.sql index 20d9b534..dfaae764 100644 --- a/src/main/resources/http-sql-template.sql +++ b/src/main/resources/http-sql-template.sql @@ -131,7 +131,7 @@ SELECT common_log_id AS log_id,%s AS file_path FROM %s where %s AND notEmpty(%s) #end #sql("SQL_DATASETS") -SELECT id, identifier_name, category, execute_engine, type, template, description, generated_time, last_update_time FROM dataset %s ORDER BY last_update_time DESC +SELECT id, identifier_name, category, backend_engine, type, template, description, generated_time, last_update_time FROM dataset %s ORDER BY last_update_time DESC #end #sql("SQL_DATASETS_CATEGORY") -- cgit v1.2.3 From 79d5f4448d64851fd5816b79694d76ddc59658af Mon Sep 17 00:00:00 2001 From: wangwei Date: Tue, 19 Dec 2023 10:26:15 +0800 Subject: [Fix][dataset] 移除liveChart summary uniq_client_ip指标 MIME-Version: 1.0 Content-Type: 
text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../tsg/Galaxy/application_protocol_stat.json | 16 +--------------- .../network/service/impl/NetworkMonitorServiceImpl.java | 3 --- .../com/mesalab/qgw/service/impl/DslServiceImpl.java | 3 --- .../mesalab/qgw/service/impl/QueryJobServiceImpl.java | 2 +- src/main/resources/http-sql-template.sql | 2 +- 5 files changed, 3 insertions(+), 23 deletions(-) (limited to 'src/main/resources/http-sql-template.sql') diff --git a/config/nacos/config/fixed-127.0.0.1_8848-tsg_nacos/data/config-data-tenant/tsg/Galaxy/application_protocol_stat.json b/config/nacos/config/fixed-127.0.0.1_8848-tsg_nacos/data/config-data-tenant/tsg/Galaxy/application_protocol_stat.json index f4d8c1e0..62e46f03 100644 --- a/config/nacos/config/fixed-127.0.0.1_8848-tsg_nacos/data/config-data-tenant/tsg/Galaxy/application_protocol_stat.json +++ b/config/nacos/config/fixed-127.0.0.1_8848-tsg_nacos/data/config-data-tenant/tsg/Galaxy/application_protocol_stat.json @@ -38,8 +38,7 @@ "c2s_tcp_retransmitted_pkts", "s2c_tcp_retransmitted_pkts", "c2s_tcp_retransmitted_bytes", - "s2c_tcp_retransmitted_bytes", - "client_ip_sketch" + "s2c_tcp_retransmitted_bytes" ], "filters": [ "device_id", @@ -348,19 +347,6 @@ "visibility": "enabled" }, "type": "long" - }, - { - "name": "client_ip_sketch", - "label": "Client IP Sketch", - "doc": { - "constraints": { - "type": "HLLDSketchMerge", - "metric_type": "gauge", - "aggregation_functions": "APPROX_COUNT_DISTINCT_HLLD" - }, - "visibility": "enabled" - }, - "type": "string" } ] } \ No newline at end of file diff --git a/src/main/java/com/mesalab/network/service/impl/NetworkMonitorServiceImpl.java b/src/main/java/com/mesalab/network/service/impl/NetworkMonitorServiceImpl.java index ed45a5d6..6fff1154 100644 --- a/src/main/java/com/mesalab/network/service/impl/NetworkMonitorServiceImpl.java +++ b/src/main/java/com/mesalab/network/service/impl/NetworkMonitorServiceImpl.java @@ -140,8 +140,6 @@ public class NetworkMonitorServiceImpl implements NetworkMonitorService { long totalSessionUseOnAsymmetricFlows = Long.parseLong(String.valueOf( data.get(Constants.NETWORK_OVERVIEW_METRIC_TOTAL_BYTES_USE_ON_ASYMMETRIC_FLOWS) == null ? 0 : data.get(Constants.NETWORK_OVERVIEW_METRIC_TOTAL_BYTES_USE_ON_ASYMMETRIC_FLOWS))); - long uniqClientIp = Long.parseLong(String.valueOf( - data.get(Constants.NETWORK_OVERVIEW_METRIC_UNIQ_CLIENT_IP) == null ? 0 : data.get(Constants.NETWORK_OVERVIEW_METRIC_UNIQ_CLIENT_IP))); long totalSessions = Long.parseLong(String.valueOf( data.get(Constants.NETWORK_OVERVIEW_METRIC_TOTAL_SESSIONS) == null ? 
0 : data.get(Constants.NETWORK_OVERVIEW_METRIC_TOTAL_SESSIONS))); long totalBytes = Long.parseLong(String.valueOf( @@ -174,7 +172,6 @@ public class NetworkMonitorServiceImpl implements NetworkMonitorService { List result = new ArrayList<>(); Map resultMap = new LinkedHashMap<>(); - resultMap.put(Constants.NETWORK_OVERVIEW_METRIC_UNIQ_CLIENT_IP, uniqClientIp); resultMap.put(Constants.NETWORK_OVERVIEW_METRIC_TOTAL_SESSIONS, totalSessions); resultMap.put(Constants.NETWORK_OVERVIEW_METRIC_DATA_RATE, dataRate); resultMap.put(Constants.NETWORK_OVERVIEW_METRIC_TOTAL_BYTES, totalBytes); diff --git a/src/main/java/com/mesalab/qgw/service/impl/DslServiceImpl.java b/src/main/java/com/mesalab/qgw/service/impl/DslServiceImpl.java index 387162b5..aebfba42 100644 --- a/src/main/java/com/mesalab/qgw/service/impl/DslServiceImpl.java +++ b/src/main/java/com/mesalab/qgw/service/impl/DslServiceImpl.java @@ -194,8 +194,6 @@ public class DslServiceImpl implements DSLService { long totalSessionUseOnAsymmetricFlows = Long.parseLong(String.valueOf( data.get(Constants.NETWORK_OVERVIEW_METRIC_TOTAL_BYTES_USE_ON_ASYMMETRIC_FLOWS) == null ? 0 : data.get(Constants.NETWORK_OVERVIEW_METRIC_TOTAL_BYTES_USE_ON_ASYMMETRIC_FLOWS))); - long uniqueClientIp = Long.parseLong(String.valueOf( - data.get(Constants.NETWORK_OVERVIEW_METRIC_UNIQ_CLIENT_IP) == null ? 0 : data.get(Constants.NETWORK_OVERVIEW_METRIC_UNIQ_CLIENT_IP))); long totalSessions = Long.parseLong(String.valueOf( data.get(Constants.NETWORK_OVERVIEW_METRIC_TOTAL_SESSIONS) == null ? 0 : data.get(Constants.NETWORK_OVERVIEW_METRIC_TOTAL_SESSIONS))); long totalBytes = Long.parseLong(String.valueOf( @@ -227,7 +225,6 @@ public class DslServiceImpl implements DSLService { List result = new ArrayList<>(); Map resultMap = new LinkedHashMap<>(); - resultMap.put(Constants.NETWORK_OVERVIEW_METRIC_UNIQ_CLIENT_IP, uniqueClientIp); resultMap.put(Constants.NETWORK_OVERVIEW_METRIC_TOTAL_SESSIONS, totalSessions); resultMap.put(Constants.NETWORK_OVERVIEW_METRIC_DATA_RATE, dataRate); resultMap.put(Constants.NETWORK_OVERVIEW_METRIC_TOTAL_BYTES, totalBytes); diff --git a/src/main/java/com/mesalab/qgw/service/impl/QueryJobServiceImpl.java b/src/main/java/com/mesalab/qgw/service/impl/QueryJobServiceImpl.java index f2cd0872..7a660b66 100644 --- a/src/main/java/com/mesalab/qgw/service/impl/QueryJobServiceImpl.java +++ b/src/main/java/com/mesalab/qgw/service/impl/QueryJobServiceImpl.java @@ -110,9 +110,9 @@ public class QueryJobServiceImpl implements QueryJobService, EnvironmentAware { } } else if (ExecMode.NORMAL.equals(execMode)) { QueryCache queryCache = new QueryCache(id, request.getOutputMode().getValue()); - queryCache.setType(JobConfig.FIELD_DISCOVERY); HazelcastInstanceMapUtil.put(id, queryCache); if (JobConfig.FIELD_DISCOVERY.equals(request.getName())) { + queryCache.setType(JobConfig.FIELD_DISCOVERY); validFieldDiscovery(request); jobExecuteService.addExecutorFieldDiscovery(id, request); } else { diff --git a/src/main/resources/http-sql-template.sql b/src/main/resources/http-sql-template.sql index dfaae764..29fb365f 100644 --- a/src/main/resources/http-sql-template.sql +++ b/src/main/resources/http-sql-template.sql @@ -3,7 +3,7 @@ SELECT SUM(asymmetric_c2s_flows + asymmetric_s2c_flows) AS asymmetric_flows, SUM #end #sql("NETWORK_OVERVIEW_STAT") -SELECT APPROX_COUNT_DISTINCT_DS_HLL(client_ip_sketch) AS uniq_client_ip, SUM(c2s_fragments + s2c_fragments) AS fragmentation_packets, SUM(c2s_bytes + s2c_bytes) AS total_bytes, SUM(c2s_pkts + s2c_pkts) AS total_packets, SUM(sessions) AS 
total_sessions, (SUM(c2s_bytes + s2c_bytes) * 8)/(%s -%s) AS data_rate FROM %s WHERE %s %s AND protocol_stack_id = '%s' LIMIT 1 +SELECT SUM(c2s_fragments + s2c_fragments) AS fragmentation_packets, SUM(c2s_bytes + s2c_bytes) AS total_bytes, SUM(c2s_pkts + s2c_pkts) AS total_packets, SUM(sessions) AS total_sessions, (SUM(c2s_bytes + s2c_bytes) * 8)/(%s -%s) AS data_rate FROM %s WHERE %s %s AND protocol_stack_id = '%s' LIMIT 1 #end #sql("NETWORK_OVERVIEW_TCP_STAT") -- cgit v1.2.3 From ccf62654fb974216a036438fde7b02bf0986dad7 Mon Sep 17 00:00:00 2001 From: wangwei Date: Sun, 7 Apr 2024 15:26:09 +0800 Subject: [Fix][dsl] 删除推荐当前的非结构化数据文件接口real-time-data-analytics-unstructured-data (TSG-20173) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../qgw/constant/DslIdentifierNameConst.java | 1 - .../java/com/mesalab/qgw/service/DSLService.java | 10 - .../mesalab/qgw/service/impl/DslServiceImpl.java | 127 ------------ .../common/enums/UnstructuredDataType.java | 14 -- .../controller/UnstructuredController.java | 53 ----- .../services/service/UnstructuredService.java | 12 -- .../service/impl/UnstructuredServiceImpl.java | 223 --------------------- src/main/resources/dsl-sql-template.sql | 3 - src/main/resources/http-sql-template.sql | 11 - .../com/mesalab/qgw/service/UnstructuredTest.java | 44 ---- .../resources/parameters/unstructuredTest.json | 54 ----- 11 files changed, 552 deletions(-) delete mode 100644 src/main/java/com/mesalab/services/common/enums/UnstructuredDataType.java delete mode 100644 src/main/java/com/mesalab/services/controller/UnstructuredController.java delete mode 100644 src/main/java/com/mesalab/services/service/UnstructuredService.java delete mode 100644 src/main/java/com/mesalab/services/service/impl/UnstructuredServiceImpl.java delete mode 100644 src/test/java/com/mesalab/qgw/service/UnstructuredTest.java delete mode 100644 src/test/resources/parameters/unstructuredTest.json (limited to 'src/main/resources/http-sql-template.sql') diff --git a/src/main/java/com/mesalab/qgw/constant/DslIdentifierNameConst.java b/src/main/java/com/mesalab/qgw/constant/DslIdentifierNameConst.java index df40767b..84b287f5 100644 --- a/src/main/java/com/mesalab/qgw/constant/DslIdentifierNameConst.java +++ b/src/main/java/com/mesalab/qgw/constant/DslIdentifierNameConst.java @@ -17,7 +17,6 @@ public class DslIdentifierNameConst { public static final String APPLICATION_AND_PROTOCOL_APP_SUMMARY = "application-and-protocol-app-summary"; public static final String REAL_TIME_DATA_ANALYTICS_SUBSCRIBER_ID_RELATE_IP = "real-time-data-analytics-subscriber-id-relate-ip"; public static final String REAL_TIME_DATA_ANALYTICS_MOBILE_IDENTITY_RELATE_TEID = "real-time-data-analytics-mobile-identity-relate-teid"; - public static final String REAL_TIME_DATA_ANALYTICS_UNSTRUCTURED_DATA = "real-time-data-analytics-unstructured-data"; public static final String IP_LEARNING_FQDN_RELATE_IP = "ip-learning-fqdn-relate-ip"; public static final String IP_LEARNING_ACTIVE_IP = "ip-learning-active-ip"; } diff --git a/src/main/java/com/mesalab/qgw/service/DSLService.java b/src/main/java/com/mesalab/qgw/service/DSLService.java index 913e812a..8bbcd73b 100644 --- a/src/main/java/com/mesalab/qgw/service/DSLService.java +++ b/src/main/java/com/mesalab/qgw/service/DSLService.java @@ -117,16 +117,6 @@ public interface DSLService { */ BaseResult realTimeDataAnalyticsMobileIdentityRelateTeid(DSLQueryContext dslProfile); - /** - * Desc: Unstructured Data - * - * @param dslProfile - * @return {@link 
BaseResult} - * @created by wWei - * @date 2023/12/1 10:56 - */ - BaseResult realTimeDataAnalyticsUnstructuredData(DSLQueryContext dslProfile, boolean isDryRun); - /** * Desc: IP Learning (FQDN-IP) * diff --git a/src/main/java/com/mesalab/qgw/service/impl/DslServiceImpl.java b/src/main/java/com/mesalab/qgw/service/impl/DslServiceImpl.java index a155c94e..7b56b59a 100644 --- a/src/main/java/com/mesalab/qgw/service/impl/DslServiceImpl.java +++ b/src/main/java/com/mesalab/qgw/service/impl/DslServiceImpl.java @@ -28,9 +28,7 @@ import com.mesalab.knowledge.service.KnowledgeService; import com.mesalab.qgw.constant.dsl.LiveChartConstants; import com.mesalab.qgw.model.dsl.LiveChartProtocol; import com.mesalab.qgw.constant.DslIdentifierNameConst; -import com.mesalab.qgw.exception.QGWBusinessException; import com.mesalab.common.exception.CommonErrorCode; -import com.mesalab.qgw.model.basic.ClickHouseHttpSource; import com.mesalab.qgw.model.basic.DSLQueryContext; import com.mesalab.qgw.model.basic.SQLQueryContext; import com.mesalab.qgw.service.DSLService; @@ -38,11 +36,8 @@ import com.mesalab.qgw.service.DatabaseService; import com.mesalab.qgw.service.SQLSyncQueryService; import com.mesalab.services.common.dsl.ComDSLObject; import com.mesalab.services.common.enums.EntityQueryType; -import com.mesalab.services.common.enums.UnstructuredDataType; import com.mesalab.services.common.property.SqlPropertySourceFactory; import com.mesalab.services.service.RelationService; -import com.mesalab.services.service.UnstructuredService; -import com.mesalab.services.service.impl.UnstructuredServiceImpl; import net.sf.jsqlparser.JSQLParserException; import net.sf.jsqlparser.expression.Expression; import net.sf.jsqlparser.expression.ExpressionVisitorAdapter; @@ -87,10 +82,6 @@ public class DslServiceImpl implements DSLService { @Autowired SQLSyncQueryService sqlSyncQueryService; @Autowired - UnstructuredService unstructuredService; - @Autowired - private ClickHouseHttpSource clickHouseHttpSource; - @Autowired KnowledgeService knowledgeService; @Autowired RelationService relationService; @@ -120,8 +111,6 @@ public class DslServiceImpl implements DSLService { return applicationAndProtocolAppThroughput(dslProfile, isDryRun); } else if (DslIdentifierNameConst.APPLICATION_AND_PROTOCOL_APP_SUMMARY.equals(dslProfile.getName())) { return applicationAndProtocolAppSummary(dslProfile, isDryRun); - } else if (DslIdentifierNameConst.REAL_TIME_DATA_ANALYTICS_UNSTRUCTURED_DATA.equals(dslProfile.getName())) { - return realTimeDataAnalyticsUnstructuredData(dslProfile, isDryRun); } else if (DslIdentifierNameConst.IP_LEARNING_FQDN_RELATE_IP.equals(dslProfile.getName())) { return ipLearningFqdnRelateIp(dslProfile, isDryRun); } else if (DslIdentifierNameConst.IP_LEARNING_ACTIVE_IP.equals(dslProfile.getName())) { @@ -351,84 +340,6 @@ public class DslServiceImpl implements DSLService { return baseResult; } - @Override - public BaseResult realTimeDataAnalyticsUnstructuredData(DSLQueryContext dslProfile, boolean isDryRun) { - Stopwatch watch = Stopwatch.createStarted(); - List fileTypes = Lists.newArrayList(); - List dataSources = Lists.newArrayList(); - - ExpressionVisitorAdapter expressionVisitorAdapter = new ExpressionVisitorAdapter() { - @Override - public void visit(EqualsTo equalsTo) { - if (equalsTo.getLeftExpression() != null && "file_type".equals(equalsTo.getLeftExpression().toString())) { - if (equalsTo.getRightExpression() instanceof StringValue) { - fileTypes.add(((StringValue) equalsTo.getRightExpression()).getValue()); - 
equalsTo.setLeftExpression(new LongValue(1)); - equalsTo.setRightExpression(new LongValue(1)); - } - } else if (equalsTo.getLeftExpression() != null && "data_source".equals(equalsTo.getLeftExpression().toString())) { - if (equalsTo.getRightExpression() instanceof StringValue) { - dataSources.add(((StringValue) equalsTo.getRightExpression()).getValue()); - equalsTo.setLeftExpression(new LongValue(1)); - equalsTo.setRightExpression(new LongValue(1)); - } - } - } - }; - String filter = dslProfile.getFilter(); - Expression expression = null; - try { - expression = CCJSqlParserUtil.parseExpression(filter, false); - } catch (JSQLParserException e) { - throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), e.getMessage())); - } - expression.accept(expressionVisitorAdapter); - if (fileTypes.size() != 1 && dataSources.size() != 1) { - throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(), String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), dslProfile.getFilter())); - } - dslProfile.setFilter(expression.toString()); - - String type = fileTypes.get(0); - String dataSource = dataSources.get(0); - Map fileTypeInSchema = unstructuredService.getUnstructuredFields(); - List dataList = Lists.newArrayList(); - if (!fileTypeInSchema.containsKey(dataSource) && !clickHouseHttpSource.getDbName().equals(dataSource)) { - return build(dataList, watch); - } - if (UnstructuredDataType.ALL.getType().equalsIgnoreCase(type)) { - if (clickHouseHttpSource.getDbName().equals(dataSource)) { - for (String tableName : fileTypeInSchema.keySet()) { - Map> typeFields = (Map>) fileTypeInSchema.get(tableName); - for (String typeTmp : typeFields.keySet()) { - dataList.addAll(getData(typeFields.get(typeTmp), tableName, dslProfile, isDryRun)); - } - } - } else { - Map> typeFields = (Map>) fileTypeInSchema.get(dataSource); - for (String typeTmp : typeFields.keySet()) { - dataList.addAll(getData(typeFields.get(typeTmp), dataSource, dslProfile, isDryRun)); - } - } - } else { - if (clickHouseHttpSource.getDbName().equals(dataSource)) { - for (String tableName : fileTypeInSchema.keySet()) { - Map> typeFields = (Map>) fileTypeInSchema.get(tableName); - dataList.addAll(getData(typeFields.get(type), tableName, dslProfile, isDryRun)); - } - } else { - Map> typeFields = (Map>) fileTypeInSchema.get(dataSource); - dataList = getData(typeFields.get(type), dataSource, dslProfile, isDryRun); - } - } - List distinctList = dataList.stream() - .filter(distinctByValue(x -> x.get("file_path"))) - .collect(Collectors.toList()); - if (StrUtil.isNotEmpty(dslProfile.getLimit())) { - distinctList = distinctList.size() > Long.parseLong(dslProfile.getLimit()) ? 
distinctList.subList(0, Integer.parseInt(dslProfile.getLimit())) : distinctList; - } - return build(Lists.newArrayList(distinctList), watch); - } - @Override public BaseResult ipLearningFqdnRelateIp(DSLQueryContext dslProfile, boolean isDryRun) { String queryType = "iplearning"; @@ -722,49 +633,11 @@ public class DslServiceImpl implements DSLService { return expressionVisitorAdapter; } - private static Predicate distinctByValue(Function keyExtractor) { - Map seen = new ConcurrentHashMap<>(); - return t -> seen.putIfAbsent(keyExtractor.apply(t), Boolean.TRUE) == null; - } - - private List getData(List fields, String dataSource, DSLQueryContext dslProfile, boolean isDryRun) { - List dataResult = Lists.newArrayList(); - for (String field : fields) { - String sql = dslProfile.toSql(environment.getProperty("REAL_TIME_DATA_ANALYTICS_UNSTRUCTURED_DATA"), dataSource, databaseService.getPartitionKey(dataSource), LOGICAL_TYPE_UNIX_TIMESTAMP); - sql = sql.replace("$field", field); - sql = sql.replace("$primary_key", databaseService.getValueByKeyInSchemaDoc(dataSource, "primary_key")); - sql = sql.replace("$partition_key", databaseService.getValueByKeyInSchemaDoc(dataSource, "partition_key")); - BaseResult result = sqlSyncQueryService.executeQuery(queryBuild(sql, isDryRun)); - if (!result.isSuccess()) { - log.error("Query unstructured data status error: {}", result.getMessage()); - throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), "Query unstructured data status error: " + result.getMessage()); - } - List datalist = (List) result.getData(); - for (Map data : datalist) { - String fileTypeStr = StringUtil.isNotEmpty(UnstructuredServiceImpl.fileType.get(field)) ? UnstructuredServiceImpl.fileType.get(field).toString() : UnstructuredServiceImpl.OTHER_FILE; - data.put("log_type", dataSource); - data.put("file_type", fileTypeStr); - dataResult.add(data); - } - } - return dataResult; - } - private SQLQueryContext queryBuild(String sql, boolean isDryRun) { String queryOption = isDryRun ? 
QueryOption.SYNTAX_VALIDATION.getValue() : QueryOption.REAL_TIME.getValue(); return SQLQueryContext.builder().format(OutputMode.JSON.getValue()).originalSQL(sql).option(queryOption).build(); } - private BaseResult build(List dataList, Stopwatch watch) { - BaseResult baseResult; - Map statistics = Maps.newHashMap(); - statistics.put("elapsed", watch.elapsed(TimeUnit.MILLISECONDS)); - statistics.put("result_rows", dataList.size()); - statistics.put("result_bytes", dataList.toString().getBytes().length); - baseResult = BaseResultGenerator.success("ok", dataList, statistics); - return baseResult; - } - @Override public List buildFlatStructure(List protocols) { List nodes = Lists.newArrayList(); diff --git a/src/main/java/com/mesalab/services/common/enums/UnstructuredDataType.java b/src/main/java/com/mesalab/services/common/enums/UnstructuredDataType.java deleted file mode 100644 index d651e8a4..00000000 --- a/src/main/java/com/mesalab/services/common/enums/UnstructuredDataType.java +++ /dev/null @@ -1,14 +0,0 @@ -package com.mesalab.services.common.enums; - -import lombok.AllArgsConstructor; -import lombok.Getter; - -@Getter -@AllArgsConstructor -public enum UnstructuredDataType { - ALL("all"), - MAIL("mail"), - HTTP("http"), - PCAP("pcap"); - private final String type; -} diff --git a/src/main/java/com/mesalab/services/controller/UnstructuredController.java b/src/main/java/com/mesalab/services/controller/UnstructuredController.java deleted file mode 100644 index 4e71bf1d..00000000 --- a/src/main/java/com/mesalab/services/controller/UnstructuredController.java +++ /dev/null @@ -1,53 +0,0 @@ -package com.mesalab.services.controller; - -import cn.hutool.log.Log; -import cn.hutool.log.LogFactory; -import com.mesalab.common.entity.BaseResult; -import com.mesalab.common.enums.HttpStatusCodeEnum; -import com.mesalab.qgw.exception.QGWBusinessException; -import com.mesalab.common.exception.CommonErrorCode; -import com.mesalab.services.common.dsl.ComDSLObject; -import com.mesalab.services.common.dsl.ComDSLValidate; -import com.mesalab.services.common.enums.UnstructuredDataType; -import com.mesalab.services.service.UnstructuredService; -import com.geedgenetworks.utils.StringUtil; -import org.apache.commons.lang3.EnumUtils; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.validation.annotation.Validated; -import org.springframework.web.bind.annotation.PostMapping; -import org.springframework.web.bind.annotation.RequestBody; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RestController; - -import javax.servlet.http.HttpServletRequest; -import java.util.Enumeration; - -@RestController -@RequestMapping(value = "/unstructured_data") -public class UnstructuredController { - private static final Log log = LogFactory.get(); - - @Autowired - private UnstructuredService unstructuredService; - @Autowired - private ComDSLValidate comDSLValidate; - - @PostMapping(value = "/v1", produces = "application/json") - public BaseResult unstructuredDataQuery(HttpServletRequest request, @Validated @RequestBody ComDSLObject dslObject) { - Enumeration parameterNames = request.getParameterNames(); - while (parameterNames.hasMoreElements()) { - String param = parameterNames.nextElement(); - log.debug("非结构化数据-文件路径列表获取接口, 参数: queryString is {},params is {}", param, dslObject); - if (!EnumUtils.isValidEnum(UnstructuredDataType.class, StringUtil.upperCase(param)) - || StringUtil.isNotEmpty(request.getParameter(param))) { - 
throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(), - String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), "Not Support")); - } - comDSLValidate.validation(dslObject); - return unstructuredService.getUnstructuredData(param, dslObject); - } - throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(), - String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(),"Not Support")); - } - -} diff --git a/src/main/java/com/mesalab/services/service/UnstructuredService.java b/src/main/java/com/mesalab/services/service/UnstructuredService.java deleted file mode 100644 index e46f9897..00000000 --- a/src/main/java/com/mesalab/services/service/UnstructuredService.java +++ /dev/null @@ -1,12 +0,0 @@ -package com.mesalab.services.service; - -import com.mesalab.common.entity.BaseResult; -import com.mesalab.services.common.dsl.ComDSLObject; - -import java.util.Map; - -public interface UnstructuredService { - Map getUnstructuredFields(); - - BaseResult getUnstructuredData(String param, ComDSLObject dslObject); -} diff --git a/src/main/java/com/mesalab/services/service/impl/UnstructuredServiceImpl.java b/src/main/java/com/mesalab/services/service/impl/UnstructuredServiceImpl.java deleted file mode 100644 index 4c4b8436..00000000 --- a/src/main/java/com/mesalab/services/service/impl/UnstructuredServiceImpl.java +++ /dev/null @@ -1,223 +0,0 @@ -package com.mesalab.services.service.impl; - -import cn.hutool.log.Log; -import cn.hutool.log.LogFactory; -import com.alibaba.fastjson2.JSON; -import com.clearspring.analytics.util.Lists; -import com.google.common.base.Stopwatch; -import com.google.common.collect.Maps; -import com.mesalab.common.entity.BaseResult; -import com.mesalab.common.entity.BaseResultGenerator; -import com.mesalab.common.enums.OutputMode; -import com.mesalab.common.enums.HttpStatusCodeEnum; -import com.mesalab.qgw.constant.QGWMessageConst; -import com.mesalab.qgw.exception.QGWBusinessException; -import com.mesalab.common.exception.CommonErrorCode; -import com.mesalab.qgw.model.basic.SQLQueryContext; -import com.mesalab.qgw.model.basic.ClickHouseHttpSource; -import com.mesalab.qgw.model.basic.EngineConfigSource; -import com.mesalab.qgw.service.SQLSyncQueryService; -import com.mesalab.qgw.service.DatabaseService; -import com.mesalab.services.common.dsl.ComDSLObject; -import com.mesalab.services.common.enums.UnstructuredDataType; -import com.mesalab.services.common.property.SqlPropertySourceFactory; -import com.mesalab.services.service.UnstructuredService; -import com.geedgenetworks.utils.StringUtil; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.EnvironmentAware; -import org.springframework.context.annotation.PropertySource; -import org.springframework.core.env.Environment; -import org.springframework.stereotype.Service; - - -import java.util.*; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.TimeUnit; -import java.util.function.Function; -import java.util.function.Predicate; -import java.util.stream.Collectors; - -@Service("UnstructuredService") -@PropertySource(value = "classpath:http-sql-template.sql", factory = SqlPropertySourceFactory.class) -public class UnstructuredServiceImpl implements UnstructuredService, EnvironmentAware { - - private static final Log log = LogFactory.get(); - public final static Map fileType = Maps.newHashMap(); - public static final String OTHER_FILE = "other"; - 
public static final String[] tables = new String[]{"session_record", "security_event", "monitor_event", "transaction_record", "voip_record", "proxy_event", "dos_event", "assessment_event"}; - static { - fileType.put("common_packet_capture_file", UnstructuredDataType.PCAP.getType()); - fileType.put("packet_capture_file", UnstructuredDataType.PCAP.getType()); - fileType.put("rtp_pcap_path", UnstructuredDataType.PCAP.getType()); - fileType.put("packet_url", UnstructuredDataType.PCAP.getType()); - fileType.put("mail_eml_file", UnstructuredDataType.MAIL.getType()); - fileType.put("http_request_body", UnstructuredDataType.HTTP.getType()); - fileType.put("http_response_body", UnstructuredDataType.HTTP.getType()); - } - - @Autowired - private DatabaseService databaseService; - @Autowired - private ClickHouseHttpSource clickHouseHttpSource; - @Autowired - private EngineConfigSource engineConfigSource; - @Autowired - private SQLSyncQueryService sqlSyncQueryService; - private Environment env; - - @Override - public Map getUnstructuredFields() { - Map fileTypeInSchema = Maps.newHashMap(); - List allTable = databaseService.getAllTable(); - for (String tableName : allTable) { - LinkedHashMap schemaMap = databaseService.getSchemaByName(tableName); - List fieldList = new ArrayList<>(); - if (!clickHouseHttpSource.getDbName().equals(schemaMap.get("namespace"))) continue; - List fields = (List) schemaMap.get("fields"); - for (Map field : fields) { - Map schemaDoc = JSON.parseObject(field.get("doc").toString(), Map.class); - if (schemaDoc.containsKey("constraints") && schemaDoc.get("visibility").equals("enabled")) { - Map SchemaConstraints = (Map) schemaDoc.get("constraints"); - String typeStr = SchemaConstraints.containsKey("type") ? SchemaConstraints.get("type") : StringUtil.EMPTY; - if (typeStr.equals("file") || typeStr.equals("files")) { - fieldList.add((String) field.get("name")); - } - } - } - - Map> fileTypeMap = Maps.newHashMap(); - if (fieldList.size() != 0) { - for (String field : fieldList) { - String fileTypeStr = StringUtil.isNotEmpty(fileType.get(field)) ? fileType.get(field).toString() : OTHER_FILE; - if (fileTypeMap.containsKey(fileTypeStr)) { - List preList = fileTypeMap.get(fileTypeStr); - preList.add(field); - fileTypeMap.put(fileTypeStr, preList); - } else { - List newList = new ArrayList<>(); - newList.add(field); - fileTypeMap.put(fileTypeStr, newList); - } - } - } - if (!fileTypeMap.isEmpty()) { - fileTypeInSchema.put(tableName, fileTypeMap); - } - } - return fileTypeInSchema; - } - - @Override - public BaseResult getUnstructuredData(String param, ComDSLObject dslObject) { - Stopwatch watch = Stopwatch.createStarted(); - List dataList = Lists.newArrayList(); - String dataSource = dslObject.getQuery().getDataSource(); - validationDataSource(dataSource); - Map fileTypeInSchema = getUnstructuredFields(); - if (!fileTypeInSchema.containsKey(dataSource) && !dataSource.equals(clickHouseHttpSource.getDbName())) { - return build(dataList, watch); - } - int limit = StringUtil.isBlank(dslObject.getQuery().getLimit()) ? 
engineConfigSource.getDefaultResultNum() : Integer.parseInt(dslObject.getQuery().getLimit()); - if (!param.equalsIgnoreCase(UnstructuredDataType.ALL.getType())) { - if (!dataSource.equals(clickHouseHttpSource.getDbName())) { - dataList = getData(fileTypeInSchema, limit, param, dslObject, dataSource); - } else { - for (String tableName : fileTypeInSchema.keySet()) { - dataList.addAll(getData(fileTypeInSchema, limit, param, dslObject, tableName)); - } - } - } else { - if (!dataSource.equals(clickHouseHttpSource.getDbName())) { - Map> fileType = (Map>) fileTypeInSchema.get(dataSource); - for (String s : fileType.keySet()) { - dataList.addAll(getData(fileTypeInSchema, limit, s, dslObject, dataSource)); - } - } else { - for (String tableName : fileTypeInSchema.keySet()) { - Map fileType = (Map) fileTypeInSchema.get(tableName); - for (String s : fileType.keySet()) { - dataList.addAll(getData(fileTypeInSchema, limit, s, dslObject, tableName)); - } - } - } - } - - List distinctList = dataList.stream() - .filter(distinctByValue(x -> x.get("file_path"))) - .collect(Collectors.toList()); - - distinctList = distinctList.size() > limit ? distinctList.subList(0, Integer.parseInt(dslObject.getQuery().getLimit())) : distinctList; - return build(Lists.newArrayList(distinctList), watch); - } - - private BaseResult build(List dataList, Stopwatch watch) { - BaseResult baseResult; - Map statistics = Maps.newHashMap(); - statistics.put("elapsed", watch.elapsed(TimeUnit.MILLISECONDS)); - statistics.put("result_rows", dataList.size()); - statistics.put("result_bytes", dataList.toString().getBytes().length); - baseResult = BaseResultGenerator.success("ok", dataList, statistics); - return baseResult; - } - - private List getData(Map fileTypeMap, int limit, String param, ComDSLObject dslObject, String dataSource) { - List dataResult = Lists.newArrayList(); - Map> tableFileType = (Map>) fileTypeMap.get(dataSource); - if (tableFileType.containsKey(param)) { - List listFields = tableFileType.get(param); - for (String field : listFields) { - String sql = String.format( - Arrays.asList(tables).contains(dataSource) ? Objects.requireNonNull(env.getProperty("UNSTRUCTURED_DATA_PATH")) : Objects.requireNonNull(env.getProperty("UNSTRUCTURED_DATA_PATH_OLD")) - , field, dataSource - , generateWhereOfTime(Arrays.asList(tables).contains(dataSource) ? "recv_time" : "common_recv_time", dslObject.getQuery()) - , field, limit); - BaseResult result = sqlSyncQueryService.executeQuery(queryBuild(sql)); - if (!result.isSuccess()) { - log.error("Query unstructured data status error: {}", result.getMessage()); - throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), "Query unstructured data status error: " + result.getMessage()); - } - List datalist = (List) result.getData(); - for (Map data : datalist) { - String fileTypeStr = StringUtil.isNotEmpty(fileType.get(field)) ? 
fileType.get(field).toString() : OTHER_FILE; - data.put("file_type", fileTypeStr); - data.put("log_type", dataSource); - dataResult.add(data); - } - } - } - return dataResult; - } - - private String generateWhereOfTime(String timeKey, ComDSLObject.Query query) { - String[] intervals = getIntervals(query.getParameters().getIntervals()); - String whereOfTime = timeKey + " >= UNIX_TIMESTAMP('" + intervals[0] + "') AND " + timeKey + " < UNIX_TIMESTAMP('" + intervals[1] + "')"; - return whereOfTime; - } - - private String[] getIntervals(List intervals) { - return intervals.get(0).split("/"); - } - - private static Predicate distinctByValue(Function keyExtractor) { - Map seen = new ConcurrentHashMap<>(); - return t -> seen.putIfAbsent(keyExtractor.apply(t), Boolean.TRUE) == null; - } - - - private SQLQueryContext queryBuild(String sql) { - return SQLQueryContext.builder().format(OutputMode.JSON.getValue()).originalSQL(sql).build(); - } - - private void validationDataSource(String dataSource) { - List allTable = databaseService.getAllTable(); - if (!allTable.contains(dataSource) && !dataSource.equals(clickHouseHttpSource.getDbName())) { - throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(), - String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.DATASOURCE_NOT_EXIST)); - } - } - - @Override - public void setEnvironment(Environment environment) { - this.env = environment; - } -} diff --git a/src/main/resources/dsl-sql-template.sql b/src/main/resources/dsl-sql-template.sql index 1c5e606b..edd615a2 100644 --- a/src/main/resources/dsl-sql-template.sql +++ b/src/main/resources/dsl-sql-template.sql @@ -30,7 +30,4 @@ SELECT app_name, SUM(sessions) AS sessions, SUM(c2s_bytes + s2c_bytes) AS bytes, #end #sql("REAL_TIME_DATA_ANALYTICS_SUBSCRIBER_ID_RELATE_IP") SELECT account AS SUBSCRIBER_ID, framed_ip AS IP, first_found_time AS FIRST_FOUND_TIME, last_update_time AS LAST_FOUND_TIME, vsys_id as VSYS_ID FROM $table WHERE $intervals_and_filter AND acct_status_type != 2 $limit -#end -#sql("REAL_TIME_DATA_ANALYTICS_UNSTRUCTURED_DATA") -SELECT $primary_key AS log_id, $field AS file_path FROM $table where $intervals_and_filter AND notEmpty($field) ORDER BY $partition_key DESC $limit #end \ No newline at end of file diff --git a/src/main/resources/http-sql-template.sql b/src/main/resources/http-sql-template.sql index 29fb365f..feaae91d 100644 --- a/src/main/resources/http-sql-template.sql +++ b/src/main/resources/http-sql-template.sql @@ -122,17 +122,6 @@ SELECT uplink_teid AS teid, apn, phone_number, imsi, imei, last_update_time,vsys SELECT any(splitByString(', ',sorting_key)) AS index_key FROM tables_cluster where name = '%s_local' AND "database" = '%s' #end -#sql("UNSTRUCTURED_DATA_PATH") -SELECT log_id AS log_id,%s AS file_path FROM %s where %s AND notEmpty(%s) ORDER BY recv_time DESC LIMIT %s -#end - -#sql("UNSTRUCTURED_DATA_PATH_OLD") -SELECT common_log_id AS log_id,%s AS file_path FROM %s where %s AND notEmpty(%s) ORDER BY common_recv_time DESC LIMIT %s -#end - #sql("SQL_DATASETS") SELECT id, identifier_name, category, backend_engine, type, template, description, generated_time, last_update_time FROM dataset %s ORDER BY last_update_time DESC #end - -#sql("SQL_DATASETS_CATEGORY") -SELECT category FROM dataset GROUP BY category diff --git a/src/test/java/com/mesalab/qgw/service/UnstructuredTest.java b/src/test/java/com/mesalab/qgw/service/UnstructuredTest.java deleted file mode 100644 index 4285bc5a..00000000 --- 
a/src/test/java/com/mesalab/qgw/service/UnstructuredTest.java +++ /dev/null @@ -1,44 +0,0 @@ -package com.mesalab.qgw.service; - -import com.mesalab.GalaxyQGWApplicationTests; -import com.mesalab.common.entity.BaseResult; -import com.mesalab.services.common.dsl.ComDSLObject; -import com.mesalab.services.service.UnstructuredService; -import org.junit.Assert; -import org.junit.Test; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.autoconfigure.EnableAutoConfiguration; - -@EnableAutoConfiguration -public class UnstructuredTest extends GalaxyQGWApplicationTests { - @Autowired - UnstructuredService unstructuredService; - - @Test - public void allFilePath(){ - ComDSLObject dslObject = jsonToInParameter("parameters/unstructuredTest.json", "all", ComDSLObject.class); - BaseResult UnstructuredData = unstructuredService.getUnstructuredData("all", dslObject); - Assert.assertTrue(UnstructuredData.getMessage(), UnstructuredData.getStatus() == 200); - } - - @Test - public void mailFilePath(){ - ComDSLObject dslObject = jsonToInParameter("parameters/unstructuredTest.json", "mail", ComDSLObject.class); - BaseResult UnstructuredData = unstructuredService.getUnstructuredData("mail", dslObject); - Assert.assertTrue(UnstructuredData.getMessage(), UnstructuredData.getStatus() == 200); - } - - @Test - public void pcapFilePath(){ - ComDSLObject dslObject = jsonToInParameter("parameters/unstructuredTest.json", "pcap", ComDSLObject.class); - BaseResult UnstructuredData = unstructuredService.getUnstructuredData("pcap", dslObject); - Assert.assertTrue(UnstructuredData.getMessage(), UnstructuredData.getStatus() == 200); - } - - @Test - public void httpFilePath(){ - ComDSLObject dslObject = jsonToInParameter("parameters/unstructuredTest.json", "http", ComDSLObject.class); - BaseResult UnstructuredData = unstructuredService.getUnstructuredData("http", dslObject); - Assert.assertTrue(UnstructuredData.getMessage(), UnstructuredData.getStatus() == 200); - } -} diff --git a/src/test/resources/parameters/unstructuredTest.json b/src/test/resources/parameters/unstructuredTest.json deleted file mode 100644 index 64412dd7..00000000 --- a/src/test/resources/parameters/unstructuredTest.json +++ /dev/null @@ -1,54 +0,0 @@ -{ - "all": { - "clientId": null, - "query": { - "dataEngine": "BusinessEngine", - "dataSource": "tsg_galaxy_v3", - "limit": "1000", - "parameters": { - "intervals": [ - "2023-03-01T00:00:00+08:00/2023-04-06T00:00:00+08:00" - ] - } - } - }, - "mail": { - "clientId": null, - "query": { - "dataEngine": "BusinessEngine", - "dataSource": "session_record", - "limit": "1000", - "parameters": { - "intervals": [ - "2023-03-01T00:00:00+08:00/2023-04-06T00:00:00+08:00" - ] - } - } - }, - "http": { - "clientId": null, - "query": { - "dataEngine": "BusinessEngine", - "dataSource": "security_event", - "limit": "1000", - "parameters": { - "intervals": [ - "2023-03-01T00:00:00+08:00/2023-04-06T00:00:00+08:00" - ] - } - } - }, - "pcap": { - "clientId": null, - "query": { - "dataEngine": "BusinessEngine", - "dataSource": "voip_record", - "limit": "1000", - "parameters": { - "intervals": [ - "2023-03-01T00:00:00+08:00/2023-04-06T00:00:00+08:00" - ] - } - } - } -} \ No newline at end of file -- cgit v1.2.3 From 4be178142c0c2a97dcb958f54e54ed42dac0ea6e Mon Sep 17 00:00:00 2001 From: wangwei Date: Wed, 26 Jun 2024 10:22:22 +0800 Subject: [Fix][storage] 优化系统存储配额相关接口: 新增max_usage、基于mariadb获取数据、删除冗余代码(TSG-21554) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit --- .../com/mesalab/common/enums/JobHandlerOption.java | 1 + .../mesalab/qgw/controller/DatabaseController.java | 8 +- .../mesalab/qgw/controller/SystemController.java | 72 --- .../com/mesalab/qgw/service/DatabaseService.java | 16 +- .../com/mesalab/qgw/service/SystemService.java | 64 --- .../qgw/service/impl/DatabaseServiceImpl.java | 166 ++++-- .../qgw/service/impl/SQLSyncQueryServiceImpl.java | 15 +- .../qgw/service/impl/SystemServiceImpl.java | 640 --------------------- .../service/impl/TroubleshootingServiceImp.java | 2 +- .../services/controller/SQLDatasetController.java | 38 +- .../services/service/SQLDatasetService.java | 15 - .../service/impl/SQLDatasetServiceImpl.java | 14 +- src/main/resources/http-sql-template.sql | 6 +- 13 files changed, 168 insertions(+), 889 deletions(-) delete mode 100644 src/main/java/com/mesalab/qgw/service/SystemService.java delete mode 100644 src/main/java/com/mesalab/qgw/service/impl/SystemServiceImpl.java (limited to 'src/main/resources/http-sql-template.sql') diff --git a/src/main/java/com/mesalab/common/enums/JobHandlerOption.java b/src/main/java/com/mesalab/common/enums/JobHandlerOption.java index 2b5032a7..07f531c9 100644 --- a/src/main/java/com/mesalab/common/enums/JobHandlerOption.java +++ b/src/main/java/com/mesalab/common/enums/JobHandlerOption.java @@ -12,6 +12,7 @@ public enum JobHandlerOption { DELETE_ALL_TRAFFIC_DATA_JOB_HANDLER("deleteAllTrafficDataJobHandler"), DELETE_ALL_REPORT_AND_METRICS_DATA_JOB_HANDLER("deleteAllReportAndMetricsDataJobHandler"), DELETE_ALL_FILES_JOB_HANDLER("deleteAllFilesJobHandler"), + DELETE_OLD_DATA_JOB_HANDLER("deleteOldDataJobHandler"), ; private String value; diff --git a/src/main/java/com/mesalab/qgw/controller/DatabaseController.java b/src/main/java/com/mesalab/qgw/controller/DatabaseController.java index 7f7884c9..66de5887 100644 --- a/src/main/java/com/mesalab/qgw/controller/DatabaseController.java +++ b/src/main/java/com/mesalab/qgw/controller/DatabaseController.java @@ -4,6 +4,7 @@ import cn.hutool.core.collection.CollectionUtil; import cn.hutool.core.util.NumberUtil; import com.alibaba.fastjson2.JSON; import com.clearspring.analytics.util.Lists; +import com.google.common.collect.Sets; import com.jayway.jsonpath.JsonPath; import com.mesalab.common.entity.BaseResult; import com.mesalab.common.entity.BaseResultGenerator; @@ -21,6 +22,7 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.web.bind.annotation.*; import java.util.Arrays; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.stream.Collectors; @@ -33,13 +35,13 @@ public class DatabaseController { @Autowired private DatabaseService databaseService; - private final static List VISIBILITY = Arrays.asList("enabled", "hidden", "disabled"); + private final static HashSet VISIBILITY = Sets.newHashSet("enabled", "hidden", "disabled"); @RequestMapping(value = "/table/{table_name}/schema", consumes = "application/x-www-form-urlencoded") @AuditLog("DatabaseController.getSchemaByTable") public BaseResult getSchemaByTable(@PathVariable("table_name") String tableName) { - log.debug("SCHEMA信息获取,参数为{} {}", tableName); + log.debug("SCHEMA信息获取,参数为{}", tableName); if (StringUtil.isBlank(tableName)) { return BaseResultGenerator.success4Message("ok"); @@ -62,7 +64,7 @@ public class DatabaseController { @RequestMapping(value = "/{dbName}/table", consumes = "application/x-www-form-urlencoded") @AuditLog("DatabaseController.getTableByDB") public BaseResult 
getTableByDB(@PathVariable String dbName) { - log.debug("SCHEMA信息获取,参数为{} {}", dbName); + log.debug("SCHEMA信息获取,参数为{}", dbName); if (StringUtil.isBlank(dbName)) { diff --git a/src/main/java/com/mesalab/qgw/controller/SystemController.java b/src/main/java/com/mesalab/qgw/controller/SystemController.java index 09087f9d..db8d78fa 100644 --- a/src/main/java/com/mesalab/qgw/controller/SystemController.java +++ b/src/main/java/com/mesalab/qgw/controller/SystemController.java @@ -2,91 +2,19 @@ package com.mesalab.qgw.controller; import cn.hutool.log.Log; import cn.hutool.log.LogFactory; -import com.clearspring.analytics.util.Lists; import com.mesalab.common.entity.BaseResult; import com.mesalab.common.entity.BaseResultGenerator; -import com.mesalab.common.enums.HttpStatusCodeEnum; -import com.mesalab.qgw.constant.QGWMessageConst; -import com.mesalab.qgw.exception.QGWBusinessException; -import com.mesalab.common.exception.CommonErrorCode; -import com.mesalab.qgw.model.job.EncryptionInfo; -import com.mesalab.qgw.model.job.StorageDeletionInfo; -import com.mesalab.qgw.service.SystemService; import com.mesalab.services.configuration.ThreadPoolMonitor; -import com.geedgenetworks.utils.StringUtil; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.web.bind.annotation.*; -import java.util.List; -import java.util.Map; - @RestController @RequestMapping(value = "/v1/admin") public class SystemController { private static final Log log = LogFactory.get(); @Autowired - private SystemService systemService; - @Autowired ThreadPoolMonitor threadPoolMonitor; - - @GetMapping ("setting/storage/quota") - public BaseResult storageQuota() { - return systemService.getStorageQuota(); - } - - @PutMapping(value = "setting/storage/quota") - public BaseResult storageSetting(@RequestBody List list) { - log.warn("数据配额设置, 参数: params is {}", list); - if (StringUtil.isEmpty(list)) { - return BaseResultGenerator.failure(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(), - String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(),QGWMessageConst.PARAMETER_NOT_OBTAINED)); - } - for (StorageDeletionInfo info : list) { - if (StringUtil.isBlank(info.getType()) || StringUtil.isBlank(String.valueOf(info.getMaxDays()))) { - throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(), - String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(),QGWMessageConst.LOGTYPE_AND_MAXDAYS_NOT_NULL)); - } - if ("ALL".equalsIgnoreCase(info.getType()) && list.size() != 1) { - throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(), - String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(),QGWMessageConst.CANNOT_COEXIST_OTHER_TYPES)); - } - if (!(info.getMaxDays() >= 0 && info.getMaxDays() <= 2000)) { - throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(), - String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(),QGWMessageConst.MAXDAYS_ERROR)); - } - } - return systemService.deleteStorage(list); - } - - @RequestMapping(value = "setting/storage/quota",method = RequestMethod.DELETE) - public BaseResult storageDeletion(@RequestParam String type){ - List deletionInfoList = Lists.newArrayList(); - StorageDeletionInfo deletionInfo = new StorageDeletionInfo(); - deletionInfo.setType(type); - deletionInfo.setMaxDays(0); - deletionInfoList.add(deletionInfo); - return 
systemService.deleteStorage(deletionInfoList); - } - - @RequestMapping("setting/storage/quota/daily_trend") - public BaseResult dailyTrendOfStorage(@RequestParam Map param) { - String searchStartTime = StringUtil.stripToEmpty((String) param.get("start_time")); - String searchEndTime = StringUtil.stripToEmpty((String) param.get("end_time")); - return systemService.dailyTrendOfStorage(searchStartTime, searchEndTime); - } - - - @PostMapping(value = "setting/tool/password_encryption") - public BaseResult getCiphertext(EncryptionInfo param) { - log.info("Plaintext encrypted,The plaintext argument is : {}", param); - if (StringUtil.isEmpty(param.getPassword()) || StringUtil.isEmpty(param.getSalt())) { - throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(), - String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(),QGWMessageConst.PASSWORD_OR_SALT_CANNOT_BE_EMPTY)); - } - return systemService.getCiphertext(param); - } - @GetMapping(value = "/clear/realJobQueue") public BaseResult clearTaskQueue() { log.info("Clear real job queue."); diff --git a/src/main/java/com/mesalab/qgw/service/DatabaseService.java b/src/main/java/com/mesalab/qgw/service/DatabaseService.java index 4850c149..63de4cb2 100644 --- a/src/main/java/com/mesalab/qgw/service/DatabaseService.java +++ b/src/main/java/com/mesalab/qgw/service/DatabaseService.java @@ -4,6 +4,7 @@ package com.mesalab.qgw.service; import com.mesalab.common.entity.BaseResult; import com.mesalab.qgw.model.job.EncryptionInfo; import com.mesalab.qgw.model.job.StorageDeletionInfo; + import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -91,7 +92,6 @@ public interface DatabaseService { LinkedHashMap getSchemaByName(String name); - /** * Desc: update schema * @@ -103,12 +103,14 @@ public interface DatabaseService { /** * 系统存储配额 + * * @return */ BaseResult getStorageQuota(); /** * 获取每天日志存储变化 + * * @param searchStartTime * @param searchEndTime * @return @@ -117,8 +119,9 @@ public interface DatabaseService { /** * 数据配额设置: - * 1. 调度任务 - * 2. schema TTL设置 + * 1. 调度任务 + * 2. schema TTL设置 + * * @param list * @return */ @@ -126,11 +129,18 @@ public interface DatabaseService { /** * 数据配额设置状态 + * * @param logType * @return */ BaseResult getDeleteStorageStatus(String logType); + /** + * 获取日志储存配额利用率 + * + * @return + */ + BaseResult getStorageUsageStatus(); /** * 自定义查询ID: 由 Catalog(数据库类型): resultID+query 组成。 diff --git a/src/main/java/com/mesalab/qgw/service/SystemService.java b/src/main/java/com/mesalab/qgw/service/SystemService.java deleted file mode 100644 index 8cea8433..00000000 --- a/src/main/java/com/mesalab/qgw/service/SystemService.java +++ /dev/null @@ -1,64 +0,0 @@ -package com.mesalab.qgw.service; - -import com.mesalab.common.entity.BaseResult; -import com.mesalab.qgw.model.job.EncryptionInfo; -import com.mesalab.qgw.model.job.StorageDeletionInfo; - -import java.util.List; - -public interface SystemService { - - /** - * 系统存储配额 - * @return - */ - BaseResult getStorageQuota(); - - /** - * 获取每天日志存储变化 - * @param searchStartTime - * @param searchEndTime - * @return - */ - BaseResult dailyTrendOfStorage(String searchStartTime, String searchEndTime); - - /** - * 数据配额设置: - * 1. 调度任务 - * 2. 
schema TTL设置 - * @param list - * @return - */ - BaseResult deleteStorage(List list); - - /** - * 数据配额设置状态 - * @param logType - * @return - */ - BaseResult getDeleteStorageStatus(String logType); - - - /** - * 自定义查询ID: 由 Catalog(数据库类型): resultID+query 组成。 - * - * @param resultId - * @param query - * @return {@link String} - * @created by wWei - * @date 2021/1/7 6:48 下午 - */ - String getCustomQueryId(String resultId, String query); - - /** - * Desc: 通过queryId查询SQL任务执行状态 - * - * @param param - * @return {@link BaseResult} - * @created by wWei - * @date 2021/1/7 6:49 下午 - */ - BaseResult getCiphertext(EncryptionInfo param); - - -} diff --git a/src/main/java/com/mesalab/qgw/service/impl/DatabaseServiceImpl.java b/src/main/java/com/mesalab/qgw/service/impl/DatabaseServiceImpl.java index e94b6f99..59686a95 100644 --- a/src/main/java/com/mesalab/qgw/service/impl/DatabaseServiceImpl.java +++ b/src/main/java/com/mesalab/qgw/service/impl/DatabaseServiceImpl.java @@ -11,7 +11,6 @@ import cn.hutool.log.LogFactory; import com.alibaba.fastjson2.JSON; import com.alibaba.fastjson2.JSONWriter; import com.alibaba.nacos.api.config.ConfigService; -import com.alibaba.nacos.api.config.annotation.NacosValue; import com.alibaba.nacos.api.config.listener.AbstractListener; import com.alibaba.nacos.api.exception.NacosException; import com.geedgenetworks.utils.DateUtils; @@ -19,7 +18,10 @@ import com.geedgenetworks.utils.Encodes; import com.google.common.base.Splitter; import com.google.common.collect.Lists; import com.google.common.collect.Maps; +import com.google.gson.Gson; import com.jayway.jsonpath.JsonPath; +import com.jfinal.plugin.activerecord.Db; +import com.jfinal.plugin.activerecord.Record; import com.mesalab.common.entity.BaseResult; import com.mesalab.common.entity.BaseResultGenerator; import com.mesalab.common.enums.*; @@ -57,6 +59,8 @@ import java.lang.reflect.Field; import java.time.Duration; import java.util.*; +import static com.mesalab.services.service.impl.SQLDatasetServiceImpl.QUERY_ID_SEPARATOR; + /** * @Date: 2021-03-11 15:48 * @Author : liuyongqiang @@ -97,7 +101,6 @@ public class DatabaseServiceImpl implements DatabaseService, EnvironmentAware { private final static String CODE = "code"; private final static String INDEX_KEY = "index_key"; private Environment env; - public final static String queryIdSeparator = ":"; private Map headers = Maps.newHashMap(); @@ -141,7 +144,7 @@ public class DatabaseServiceImpl implements DatabaseService, EnvironmentAware { public String getPartitionKey(String tableName) { String partitionKey = StringUtil.EMPTY; LinkedHashMap schemaMap = getSchemaByName(tableName); - if (schemaMap.containsKey("doc")){ + if (schemaMap.containsKey("doc")) { LinkedHashMap doc = (LinkedHashMap) schemaMap.get("doc"); Object partition_key = doc.get("partition_key"); partitionKey = StringUtil.isEmpty(partition_key) ? 
StringUtil.EMPTY : partition_key.toString(); @@ -163,7 +166,7 @@ public class DatabaseServiceImpl implements DatabaseService, EnvironmentAware { @Override public String getValueByKeyInSchemaDoc(String tableName, String key) { LinkedHashMap schemaMap = getSchemaByName(tableName); - if (schemaMap.containsKey("doc")){ + if (schemaMap.containsKey("doc")) { LinkedHashMap docMap = (LinkedHashMap) schemaMap.get("doc"); Object value = docMap.get(key); if (value instanceof Map) { @@ -231,7 +234,7 @@ public class DatabaseServiceImpl implements DatabaseService, EnvironmentAware { throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(), String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), - tableName + ":"+QGWMessageConst.TABLE_NOT_EXIST)); + tableName + ":" + QGWMessageConst.TABLE_NOT_EXIST)); } LinkedHashMap map; if (!StrUtil.isBlankIfStr(map = (LinkedHashMap) schemaCache.get(tableName))) { @@ -242,7 +245,7 @@ public class DatabaseServiceImpl implements DatabaseService, EnvironmentAware { if (StrUtil.isBlankIfStr(content)) { throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(), - String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(),tableName + ":"+QGWMessageConst.QUERY_SCHEMA_ERROR)); + String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), tableName + ":" + QGWMessageConst.QUERY_SCHEMA_ERROR)); } schemaCache.put(tableName, map = parseSchema(content)); systemConfigService.addListener(tableName.concat(NacosConst.JSON_SUFFIX), nacosConfig.getGroup(), new AbstractListener() { @@ -281,31 +284,38 @@ public class DatabaseServiceImpl implements DatabaseService, EnvironmentAware { @Override public BaseResult getStorageQuota() { - SQLQueryContext param = new SQLQueryContext(); - param.setOriginalSQL(env.getProperty("SYSTEM_STORAGE_QUOTA")); - BaseResult storageData = sqlSyncQueryService.executeQuery(param); + List storageData = Db.find(env.getProperty("SYSTEM_STORAGE_QUOTA")); List result = Lists.newArrayList(); - for (SystemServiceImpl.LogType value : SystemServiceImpl.LogType.values()) { - if (value.getValue().equals(SystemServiceImpl.LogType.ALL.getValue())) { + BaseResult storageUsageStatus = getStorageUsageStatus(); + List> usageData = Lists.newArrayList(); + if (storageUsageStatus.isSuccess()) { + usageData = (List>) storageUsageStatus.getData(); + } + for (LogType value : LogType.values()) { + if (value.getValue().equals(LogType.ALL.getValue())) { continue; } - Map resultItem = new HashMap(); - if (storageData.isSuccess()) { - List data = (List) storageData.getData(); - for (Map map : data) { - if (value.getValue().equals(map.get("type"))) { - resultItem.putAll(map); - break; - } + Map resultItem = Maps.newHashMap(); + storageData.forEach(record -> { + Map columns = record.getColumns(); + if (value.getValue().equals(columns.get("type"))) { + resultItem.putAll(columns); } - } + }); + BaseResult deleteStorageStatus = getDeleteStorageStatus(value.getValue()); if (deleteStorageStatus.isSuccess()) { - List data1 = (List) deleteStorageStatus.getData(); + List data = (List) deleteStorageStatus.getData(); resultItem.put("type", value.getLabel()); - resultItem.put("max_days", data1.get(0).getMaxDays()); - resultItem.put("default_max_days", data1.get(0).getDefaultMaxDays()); + resultItem.put("max_days", data.get(0).getMaxDays()); + resultItem.put("default_max_days", data.get(0).getDefaultMaxDays()); } + usageData.forEach(o -> { + Map map = 
JSON.parseObject(String.valueOf(o.get("executorParam")), Map.class); + if (value.getValue().equals(map.get("logType"))) { + resultItem.put("max_usage", map.get("maxUsage")); + } + }); result.add(resultItem); } @@ -322,13 +332,15 @@ public class DatabaseServiceImpl implements DatabaseService, EnvironmentAware { } if (StringUtil.isNotBlank(searchStartTime) && StringUtil.isNotBlank(searchEndTime)) { - SQLQueryContext param = new SQLQueryContext(); - String sql = String.format(env.getProperty("SYSTEM_DAILY_TREND_OF_STORAGE"), searchStartTime, searchEndTime); - param.setOriginalSQL(sql); - BaseResult result = sqlSyncQueryService.executeQuery(param); - List data = (List) result.getData(); - data.forEach(o -> o.put("type", LogType.getLabelByValue(String.valueOf(o.get("type"))))); - return result; + String sql = String.format(Objects.requireNonNull(env.getProperty("SYSTEM_DAILY_TREND_OF_STORAGE")), searchStartTime, searchEndTime); + List data = Db.find(sql); + List> result = Lists.newArrayList(); + data.forEach(record -> { + Map columns = record.getColumns(); + columns.put("type", LogType.getLabelByValue(String.valueOf(columns.get("type")))); + result.add(columns); + }); + return BaseResultGenerator.success(result); } else { throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(), String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.START_TIME_AND_END_TIME_NOT_NULL)); @@ -339,14 +351,14 @@ public class DatabaseServiceImpl implements DatabaseService, EnvironmentAware { public BaseResult deleteStorage(List list) { if (!logTypeValid(list)) { return BaseResultGenerator.failure(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(), - String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(),"Match failed, please check log type!")); + String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), "Match failed, please check log type!")); } preProcessOfLogType(list); if (jobIsBusy(list)) { - return BaseResultGenerator.failure(HttpStatusCodeEnum.LOCKED.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(), - String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(),"The task is busy, please try later!")); + return BaseResultGenerator.failure(HttpStatusCodeEnum.LOCKED.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(), + String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), "The task is busy, please try later!")); } BaseResult baseResult = null; @@ -355,6 +367,7 @@ public class DatabaseServiceImpl implements DatabaseService, EnvironmentAware { } return baseResult; } + @Override public BaseResult getDeleteStorageStatus(String logType) { setCookie(); @@ -364,7 +377,7 @@ public class DatabaseServiceImpl implements DatabaseService, EnvironmentAware { StorageDeletionInfo trafficInfo = getStorageDeletionInfoByHandler(LogType.TRAFFIC_LOGS.getValue(), JobHandlerOption.DELETE_TRAFFIC_DATA_JOB_HANDLER.getValue()); list.add(trafficInfo); - StorageDeletionInfo reportInfo = getStorageDeletionInfoByHandler(LogType.REPORT_AND_METRICS.getValue(), JobHandlerOption.DELETE_REPORT_AND_METRICS_DATA_JOB_HANDLER.getValue()); + StorageDeletionInfo reportInfo = getStorageDeletionInfoByHandler(LogType.METRICS.getValue(), JobHandlerOption.DELETE_REPORT_AND_METRICS_DATA_JOB_HANDLER.getValue()); list.add(reportInfo); StorageDeletionInfo fileInfo = getStorageDeletionInfoByHandler(LogType.FILES.getValue(), JobHandlerOption.DELETE_FILES_JOB_HANDLER.getValue()); @@ -372,7 +385,7 @@ public class DatabaseServiceImpl 
implements DatabaseService, EnvironmentAware { } else if (LogType.TRAFFIC_LOGS.getValue().equalsIgnoreCase(logType)) { StorageDeletionInfo trafficInfo = getStorageDeletionInfoByHandler(logType, JobHandlerOption.DELETE_TRAFFIC_DATA_JOB_HANDLER.getValue()); list.add(trafficInfo); - } else if (LogType.REPORT_AND_METRICS.getValue().equalsIgnoreCase(logType)) { + } else if (LogType.METRICS.getValue().equalsIgnoreCase(logType)) { StorageDeletionInfo reportInfo = getStorageDeletionInfoByHandler(logType, JobHandlerOption.DELETE_REPORT_AND_METRICS_DATA_JOB_HANDLER.getValue()); list.add(reportInfo); } else if (LogType.FILES.getValue().equalsIgnoreCase(logType)) { @@ -385,11 +398,18 @@ public class DatabaseServiceImpl implements DatabaseService, EnvironmentAware { return BaseResultGenerator.success("ok", list); } + @Override + public BaseResult getStorageUsageStatus() { + setCookie(); + List trafficDate = getAllDataByHandler(JobHandlerOption.DELETE_OLD_DATA_JOB_HANDLER.getValue()); + return BaseResultGenerator.success("ok", trafficDate); + } + @Override public String getCustomQueryId(String resultId, String query) { String tableName = SQLHelper.getTableName(query).get(0); String dbType = getDBEngineByTableName(tableName); - return DigestUtil.md5Hex(dbType) + queryIdSeparator + DigestUtil.md5Hex(resultId + query.trim()); + return DigestUtil.md5Hex(dbType) + QUERY_ID_SEPARATOR + DigestUtil.md5Hex(resultId + query.trim()); } private boolean updateSchema(String name, Map paramMap, String content) throws NacosException { @@ -413,12 +433,13 @@ public class DatabaseServiceImpl implements DatabaseService, EnvironmentAware { return systemConfigService.publishConfig(name.concat(".json"), nacosConfig.getGroup(), JSONUtil.formatJsonStr(content)); } else { throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), - String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(),QGWMessageConst.QUERY_SCHEMA_ERROR)); + String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), QGWMessageConst.QUERY_SCHEMA_ERROR)); } } /** * Desc: 重写[tables |field].doc + * * @param key * @param original * @param param @@ -550,10 +571,10 @@ public class DatabaseServiceImpl implements DatabaseService, EnvironmentAware { } /** - * @param map 需操作对象 - * @param cfgName 当前文件名称 + * @param map 需操作对象 + * @param cfgName 当前文件名称 * @return void - * @Description $ref实际引用部分赋值 + * @Description $ref实际引用部分赋值 * @author wanghao * @date 2021/9/1 17:02 */ @@ -726,7 +747,7 @@ public class DatabaseServiceImpl implements DatabaseService, EnvironmentAware { } catch (JSONException | ClassCastException e) { log.error("update Schema error: {}", e); } - return schemaMap; + return schemaMap; } private boolean logTypeValid(List list) { @@ -746,19 +767,19 @@ public class DatabaseServiceImpl implements DatabaseService, EnvironmentAware { * @Created by wWei */ private List preProcessOfLogType(List list) { - if (list.size() == 1){ + if (list.size() == 1) { StorageDeletionInfo deletionInfo = list.get(0); Integer maxDays = deletionInfo.getMaxDays(); Integer defaultMaxDays = deletionInfo.getDefaultMaxDays(); - if (LogType.ALL.getValue().equalsIgnoreCase(list.get(0).getType())){ + if (LogType.ALL.getValue().equalsIgnoreCase(list.get(0).getType())) { list.clear(); list.add(new StorageDeletionInfo(LogType.TRAFFIC_LOGS.getValue(), maxDays, defaultMaxDays)); - list.add(new StorageDeletionInfo(LogType.REPORT_AND_METRICS.getValue(), maxDays, defaultMaxDays)); + list.add(new 
StorageDeletionInfo(LogType.METRICS.getValue(), maxDays, defaultMaxDays)); list.add(new StorageDeletionInfo(LogType.FILES.getValue(), maxDays, defaultMaxDays)); return list; - }else if (LogType.TRAFFIC_LOGS.getValue().equalsIgnoreCase(list.get(0).getType()) | + } else if (LogType.TRAFFIC_LOGS.getValue().equalsIgnoreCase(list.get(0).getType()) | LogType.FILES.getValue().equalsIgnoreCase(list.get(0).getType()) | - LogType.REPORT_AND_METRICS.getLabel().equalsIgnoreCase(list.get(0).getType())) { + LogType.METRICS.getLabel().equalsIgnoreCase(list.get(0).getType())) { list.clear(); list.add(new StorageDeletionInfo(LogType.getValueByLabel(deletionInfo.getType()), maxDays, defaultMaxDays)); return list; @@ -788,7 +809,7 @@ public class DatabaseServiceImpl implements DatabaseService, EnvironmentAware { String jobHandler = StringUtil.EMPTY; if (LogType.TRAFFIC_LOGS.getValue().equalsIgnoreCase(logType)) { jobHandler = JobHandlerOption.DELETE_TRAFFIC_DATA_JOB_HANDLER.getValue(); - } else if (LogType.REPORT_AND_METRICS.getValue().equalsIgnoreCase(logType)) { + } else if (LogType.METRICS.getValue().equalsIgnoreCase(logType)) { jobHandler = JobHandlerOption.DELETE_REPORT_AND_METRICS_DATA_JOB_HANDLER.getValue(); } else if (LogType.FILES.getValue().equalsIgnoreCase(logType)) { jobHandler = JobHandlerOption.DELETE_FILES_JOB_HANDLER.getValue(); @@ -804,7 +825,7 @@ public class DatabaseServiceImpl implements DatabaseService, EnvironmentAware { return true; } else if (!result.getStatus().equals(HttpStatusCodeEnum.SUCCESS.getCode())) { throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), - String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(),result.getMessage())); + String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), result.getMessage())); } return false; } @@ -863,7 +884,7 @@ public class DatabaseServiceImpl implements DatabaseService, EnvironmentAware { executeManageJob("update", mapToBean(dataByHandler, XxlJobInfo.class)); throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), - String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(),"up schema error in storage set ", ex.getMessage())); + String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), "up schema error in storage set ", ex.getMessage())); } } @@ -883,18 +904,41 @@ public class DatabaseServiceImpl implements DatabaseService, EnvironmentAware { Map maps = JSON.parseObject(resultPageList.get("result"), Map.class); if (StringUtil.isEmpty(maps)) { throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), - String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(),"Get or package result exception: " + JSON.toJSONString(resultPageList))); + String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), "Get or package result exception: " + JSON.toJSONString(resultPageList))); } List data = (List) maps.get("data"); if (data.size() >= 1) { return data.get(0); } else { throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), - String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(),"The scheduled task has no task information matching executorHandler (" + handlerValue + ")")); + String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), "The scheduled task has no task information matching executorHandler (" + handlerValue + ")")); + } + } + throw new 
QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), + String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), "Get scheduled task exception: " + JSON.toJSONString(resultPageList))); + } + + /** + * 通过Handler获取全量数据 + * + * @param handlerValue + * @return + */ + private List getAllDataByHandler(String handlerValue) { + StringBuilder url = new StringBuilder(jobAdminHttpSource.getUrl()). + append("/jobinfo/pageList?jobGroup=-1&triggerStatus=-1&executorHandler="). + append(handlerValue); + Map resultPageList = httpClientService.httpGet(url.toString(), headers, httpConfig.getServerResponseTimeOut()); + if (StringUtil.isNotEmpty(resultPageList) && resultPageList.get("status").equals(String.valueOf(HttpStatusCodeEnum.SUCCESS.getCode()))) { + Map maps = JSON.parseObject(resultPageList.get("result"), Map.class); + if (StringUtil.isEmpty(maps)) { + throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), + String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), "Get or package result exception: " + JSON.toJSONString(resultPageList))); } + return (List) maps.get("data"); } throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), - String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(),"Get scheduled task exception: " + JSON.toJSONString(resultPageList))); + String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), "Get scheduled task exception: " + JSON.toJSONString(resultPageList))); } /** @@ -921,7 +965,7 @@ public class DatabaseServiceImpl implements DatabaseService, EnvironmentAware { BaseResult baseResult; if (StringUtil.isEmpty(resultMap)) { baseResult = BaseResultGenerator.failure(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), - String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(),"The service is busy, please contact the scheduling platform!")); + String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), "The service is busy, please contact the scheduling platform!")); } else { if (resultMap.get("status").equals(String.valueOf(HttpStatusCodeEnum.SUCCESS.getCode()))) { Map result = JSON.parseObject(resultMap.get("result"), Map.class); @@ -946,7 +990,7 @@ public class DatabaseServiceImpl implements DatabaseService, EnvironmentAware { String jobHandler = StringUtil.EMPTY; if (LogType.TRAFFIC_LOGS.getValue().equalsIgnoreCase(logType)) { jobHandler = JobHandlerOption.DELETE_ALL_TRAFFIC_DATA_JOB_HANDLER.getValue(); - } else if (LogType.REPORT_AND_METRICS.getValue().equalsIgnoreCase(logType)) { + } else if (LogType.METRICS.getValue().equalsIgnoreCase(logType)) { jobHandler = JobHandlerOption.DELETE_ALL_REPORT_AND_METRICS_DATA_JOB_HANDLER.getValue(); } else if (LogType.FILES.getValue().equalsIgnoreCase(logType)) { jobHandler = JobHandlerOption.DELETE_ALL_FILES_JOB_HANDLER.getValue(); @@ -960,7 +1004,7 @@ public class DatabaseServiceImpl implements DatabaseService, EnvironmentAware { * @return */ public void setCookie() { - StringBuilder url = new StringBuilder(jobAdminHttpSource.getUrl()); + String url = jobAdminHttpSource.getUrl(); headers.put("Content-Type", "application/json"); String urlParamsByMap = getUrlParamsByMap(getObjectToMap(jobAdminHttpSource)); int socketTimeOut = httpConfig.getServerResponseTimeOut(); @@ -973,9 +1017,9 @@ public class DatabaseServiceImpl implements DatabaseService, EnvironmentAware { if (map == null) { return ""; } - 
StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); for (Map.Entry entry : map.entrySet()) { - sb.append(entry.getKey() + "=" + Encodes.urlEncode(String.valueOf(entry.getValue()))); + sb.append(entry.getKey()).append("=").append(Encodes.urlEncode(String.valueOf(entry.getValue()))); sb.append("&"); } String s = sb.toString(); @@ -1041,7 +1085,7 @@ public class DatabaseServiceImpl implements DatabaseService, EnvironmentAware { } if (!baseResult.getStatus().equals(HttpStatusCodeEnum.SUCCESS.getCode())) { throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), - String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(),QGWMessageConst.SCHEDULED_TASK_ERROR)); + String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), QGWMessageConst.SCHEDULED_TASK_ERROR)); } return baseResult; } @@ -1061,11 +1105,11 @@ public class DatabaseServiceImpl implements DatabaseService, EnvironmentAware { baseResult = BaseResultGenerator.success("ok", null); } else { baseResult = BaseResultGenerator.failure(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), - String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(),"Update task failed:" + xxlJobInfo.getExecutorHandler())); + String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), "Update task failed:" + xxlJobInfo.getExecutorHandler())); } } else { baseResult = BaseResultGenerator.failure(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), - String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(),"Failed to execute task:" + xxlJobInfo.getExecutorHandler())); + String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), "Failed to execute task:" + xxlJobInfo.getExecutorHandler())); } return baseResult; } @@ -1126,7 +1170,7 @@ public class DatabaseServiceImpl implements DatabaseService, EnvironmentAware { @AllArgsConstructor enum LogType { TRAFFIC_LOGS("Traffic Logs", "Traffic Logs"), - REPORT_AND_METRICS("Report and Metrics", "Metrics"), + METRICS("Metrics", "Metrics"), FILES("Files", "Files"), ALL("All", "All"); private final String value; diff --git a/src/main/java/com/mesalab/qgw/service/impl/SQLSyncQueryServiceImpl.java b/src/main/java/com/mesalab/qgw/service/impl/SQLSyncQueryServiceImpl.java index 22bcbbe6..0ba15064 100644 --- a/src/main/java/com/mesalab/qgw/service/impl/SQLSyncQueryServiceImpl.java +++ b/src/main/java/com/mesalab/qgw/service/impl/SQLSyncQueryServiceImpl.java @@ -27,7 +27,6 @@ import com.mesalab.qgw.model.basic.udf.UDF; import com.mesalab.qgw.model.basic.udf.UDFElements; import com.mesalab.qgw.service.SQLSyncQueryService; import com.mesalab.qgw.service.DatabaseService; -import com.mesalab.qgw.service.SystemService; import com.geedgenetworks.utils.StringUtil; import lombok.extern.slf4j.Slf4j; import net.sf.jsqlparser.JSQLParserException; @@ -54,13 +53,6 @@ import java.util.regex.Pattern; @Slf4j public class SQLSyncQueryServiceImpl implements SQLSyncQueryService { private DatabaseService databaseService; - private SystemService systemService; - - @Autowired - public void SQLSyncQueryServiceImpl(DatabaseService databaseService, SystemService systemService) { - this.databaseService = databaseService; - this.systemService = systemService; - } private final int MAX_PARSER_LEVEL = 5; private final String PARSER_EXCEPTION_CLASS_NAME = "net.sf.jsqlparser.parser.ParseException:"; @@ -173,7 +165,7 @@ public class SQLSyncQueryServiceImpl implements SQLSyncQueryService { } 
else if (QueryOption.SYNTAX_VALIDATION.getValue().equalsIgnoreCase(queryContext.getOption())) { return executeSyntaxValidation(queryContext); } else if (QueryOption.LONG_TERM.getValue().equalsIgnoreCase(queryContext.getOption())) { - queryContext.setQueryId(systemService.getCustomQueryId(queryContext.getResultId(), queryContext.getOriginalSQL())); + queryContext.setQueryId(databaseService.getCustomQueryId(queryContext.getResultId(), queryContext.getOriginalSQL())); return executeLongTermQuery(queryContext); } else { return executeRealTimeQuery(queryContext); @@ -666,4 +658,9 @@ public class SQLSyncQueryServiceImpl implements SQLSyncQueryService { return baseResult; } + @Autowired + public void setDatabaseService(DatabaseService databaseService) { + this.databaseService = databaseService; + } + } diff --git a/src/main/java/com/mesalab/qgw/service/impl/SystemServiceImpl.java b/src/main/java/com/mesalab/qgw/service/impl/SystemServiceImpl.java deleted file mode 100644 index 74c97579..00000000 --- a/src/main/java/com/mesalab/qgw/service/impl/SystemServiceImpl.java +++ /dev/null @@ -1,640 +0,0 @@ -package com.mesalab.qgw.service.impl; - -import cn.hutool.core.util.StrUtil; -import cn.hutool.crypto.digest.DigestUtil; -import cn.hutool.log.Log; -import cn.hutool.log.LogFactory; -import com.alibaba.fastjson2.JSON; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import com.jayway.jsonpath.JsonPath; -import com.mesalab.common.entity.BaseResult; -import com.mesalab.common.entity.BaseResultGenerator; -import com.mesalab.common.enums.*; -import com.mesalab.common.utils.sqlparser.SQLHelper; -import com.mesalab.qgw.constant.QGWMessageConst; -import com.mesalab.qgw.exception.QGWBusinessException; -import com.mesalab.common.exception.CommonErrorCode; -import com.mesalab.qgw.model.basic.ClickHouseHttpSource; -import com.mesalab.qgw.model.basic.HttpConfig; -import com.mesalab.qgw.model.basic.SQLQueryContext; -import com.mesalab.qgw.model.basic.JobAdminHttpSource; -import com.mesalab.qgw.model.job.EncryptionInfo; -import com.mesalab.qgw.model.job.ExecutorParam; -import com.mesalab.qgw.model.job.StorageDeletionInfo; -import com.mesalab.qgw.model.job.XxlJobInfo; -import com.mesalab.qgw.service.SQLSyncQueryService; -import com.mesalab.qgw.service.DatabaseService; -import com.mesalab.qgw.service.SystemService; -import com.mesalab.services.common.property.SqlPropertySourceFactory; -import com.geedgenetworks.utils.DateUtils; -import com.geedgenetworks.utils.Encodes; -import com.geedgenetworks.utils.StringUtil; -import lombok.AllArgsConstructor; -import lombok.Getter; -import org.jasypt.util.text.BasicTextEncryptor; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.cglib.beans.BeanMap; -import org.springframework.context.EnvironmentAware; -import org.springframework.context.annotation.PropertySource; -import org.springframework.core.env.Environment; -import org.springframework.stereotype.Service; - -import javax.annotation.Resource; -import java.lang.reflect.AccessibleObject; -import java.lang.reflect.Field; -import java.time.Duration; -import java.util.*; -import java.util.regex.Pattern; - -@Service("systemService") -@PropertySource(value = "classpath:http-sql-template.sql", factory = SqlPropertySourceFactory.class) -public class SystemServiceImpl implements SystemService, EnvironmentAware { - private static final Log log = LogFactory.get(); - private static Pattern pTTL = Pattern.compile(".*toIntervalSecond\\((\\d+)\\)", 
Pattern.CASE_INSENSITIVE); - private Environment env; - @Autowired - private SQLSyncQueryService sqlSyncQueryService; - @Autowired - private JobAdminHttpSource jobAdminHttpSource; - @Autowired - private HttpClientService httpClientService; - @Resource(name = "databaseService") - private DatabaseService databaseService; - @Autowired - HttpConfig httpConfig; - @Autowired - ClickHouseHttpSource clickHouseHttpSource; - - public final static String queryIdSeparator = ":"; - - private Map headers = Maps.newHashMap(); - - @Override - public BaseResult getStorageQuota() { - - //封装sql进行查询:Analytic Logs、Files、Traffic Logs - SQLQueryContext param = new SQLQueryContext(); - param.setOriginalSQL(env.getProperty("SYSTEM_STORAGE_QUOTA")); - BaseResult result = sqlSyncQueryService.executeQuery(param); - return resetBaseResult(result, "type"); - } - - @Override - public BaseResult dailyTrendOfStorage(String searchStartTime, String searchEndTime) { - - Date currentDate = DateUtils.convertStringToDate(DateUtils.getCurrentDate(), DateUtils.YYYY_MM_DD); - - if (StringUtil.isBlank(searchStartTime) && StringUtil.isBlank(searchEndTime)) { - searchStartTime = DateUtils.getFormatDate(DateUtils.getSomeDate(currentDate, -7), DateUtils.YYYY_MM_DD_HH24_MM_SS); - searchEndTime = DateUtils.getFormatDate(currentDate, DateUtils.YYYY_MM_DD_HH24_MM_SS); - } - - if (StringUtil.isNotBlank(searchStartTime) && StringUtil.isNotBlank(searchEndTime)) { - SQLQueryContext param = new SQLQueryContext(); - String sql = String.format(env.getProperty("SYSTEM_DAILY_TREND_OF_STORAGE"), searchStartTime, searchEndTime); - param.setOriginalSQL(sql); - BaseResult result = sqlSyncQueryService.executeQuery(param); - List data = (List) result.getData(); - data.forEach(o -> o.put("type", LogType.getLabelByValue(String.valueOf(o.get("type"))))); - return result; - } else { - throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(), - String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.START_TIME_AND_END_TIME_NOT_NULL)); - } - } - - private BaseResult resetBaseResult(BaseResult result, String alias) { - if (!result.isSuccess()) { - return result; - } - List data = (List) result.getData(); - for (Map map : data){ - String logType = LogType.getLabelByValue(String.valueOf(map.get(alias))); - BaseResult deleteStorageStatus = getDeleteStorageStatus(String.valueOf(map.get(alias))); - List data1 = (List) deleteStorageStatus.getData(); - map.put(alias, logType); - map.put("max_days",data1.get(0).getMaxDays()); - map.put("default_max_days",data1.get(0).getDefaultMaxDays()); - } - return result; - } - - @Override - public BaseResult deleteStorage(List list) { - - if (!logTypeValid(list)) { - return BaseResultGenerator.failure(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(), - String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(),"Match failed, please check log type!")); - } - - preProcessOfLogType(list); - - if (jobIsBusy(list)) { - return BaseResultGenerator.failure(HttpStatusCodeEnum.LOCKED.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(), - String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(),"The task is busy, please try later!")); - } - - BaseResult baseResult = null; - for (StorageDeletionInfo info : list) { - baseResult = executeDeleteStorageJob(info); - } - return baseResult; - } - - @Override - public BaseResult getDeleteStorageStatus(String logType) { - setCookie(); - List list = new ArrayList<>(); - if 
(StringUtil.isBlank(logType)) { - - StorageDeletionInfo trafficInfo = getStorageDeletionInfoByHandler(LogType.TRAFFIC_LOGS.getValue(), JobHandlerOption.DELETE_TRAFFIC_DATA_JOB_HANDLER.getValue()); - list.add(trafficInfo); - - StorageDeletionInfo reportInfo = getStorageDeletionInfoByHandler(LogType.REPORT_AND_METRICS.getValue(), JobHandlerOption.DELETE_REPORT_AND_METRICS_DATA_JOB_HANDLER.getValue()); - list.add(reportInfo); - - StorageDeletionInfo fileInfo = getStorageDeletionInfoByHandler(LogType.FILES.getValue(), JobHandlerOption.DELETE_FILES_JOB_HANDLER.getValue()); - list.add(fileInfo); - } else if (LogType.TRAFFIC_LOGS.getValue().equalsIgnoreCase(logType)) { - StorageDeletionInfo trafficInfo = getStorageDeletionInfoByHandler(logType, JobHandlerOption.DELETE_TRAFFIC_DATA_JOB_HANDLER.getValue()); - list.add(trafficInfo); - } else if (LogType.REPORT_AND_METRICS.getValue().equalsIgnoreCase(logType)) { - StorageDeletionInfo reportInfo = getStorageDeletionInfoByHandler(logType, JobHandlerOption.DELETE_REPORT_AND_METRICS_DATA_JOB_HANDLER.getValue()); - list.add(reportInfo); - } else if (LogType.FILES.getValue().equalsIgnoreCase(logType)) { - StorageDeletionInfo reportInfo = getStorageDeletionInfoByHandler(logType, JobHandlerOption.DELETE_FILES_JOB_HANDLER.getValue()); - list.add(reportInfo); - } else { - return BaseResultGenerator.failure(HttpStatusCodeEnum.SERVER_ERROR.getCode(), "No matching log type: " + logType); - } - list.forEach(o -> o.setType(LogType.getLabelByValue(o.getType()))); - return BaseResultGenerator.success("ok", list); - } - - - @Override - public String getCustomQueryId(String resultId, String query) { - String tableName = SQLHelper.getTableName(query).get(0); - String dbType = databaseService.getDBEngineByTableName(tableName); - return DigestUtil.md5Hex(dbType) + queryIdSeparator + DigestUtil.md5Hex(resultId + query.trim()); - } - - - @Override - public BaseResult getCiphertext(EncryptionInfo param) { - BaseResult baseResult; - BasicTextEncryptor textEncryptor = new BasicTextEncryptor(); - textEncryptor.setPassword(param.getSalt()); - String encrypt = textEncryptor.encrypt(param.getPassword()); - Map dataMap = new HashMap(); - dataMap.put("password", param.getPassword()); - dataMap.put("encrypted_password", encrypt); - baseResult = BaseResultGenerator.success("ok", Lists.newArrayList(dataMap)); - return baseResult; - } - - - - - private boolean logTypeValid(List list) { - for (StorageDeletionInfo info : list) { - String value = LogType.getValueByLabel(info.getType()); - if (StringUtil.isEmpty(value)) { - return false; - } - } - return true; - } - - /** - * @Description 涉及对ALL、LogType(value,label)处理 - * @Param list: - * @return: java.util.List - * @Date: 2021/1/4 11:36 上午 - * @Created by wWei - */ - private List preProcessOfLogType(List list) { - if (list.size() == 1){ - StorageDeletionInfo deletionInfo = list.get(0); - Integer maxDays = deletionInfo.getMaxDays(); - Integer defaultMaxDays = deletionInfo.getDefaultMaxDays(); - if (LogType.ALL.getValue().equalsIgnoreCase(list.get(0).getType())){ - list.clear(); - list.add(new StorageDeletionInfo(LogType.TRAFFIC_LOGS.getValue(), maxDays, defaultMaxDays)); - list.add(new StorageDeletionInfo(LogType.REPORT_AND_METRICS.getValue(), maxDays, defaultMaxDays)); - list.add(new StorageDeletionInfo(LogType.FILES.getValue(), maxDays, defaultMaxDays)); - return list; - }else if (LogType.TRAFFIC_LOGS.getValue().equalsIgnoreCase(list.get(0).getType()) | - LogType.FILES.getValue().equalsIgnoreCase(list.get(0).getType()) | - 
LogType.REPORT_AND_METRICS.getLabel().equalsIgnoreCase(list.get(0).getType())) { - list.clear(); - list.add(new StorageDeletionInfo(LogType.getValueByLabel(deletionInfo.getType()), maxDays, defaultMaxDays)); - return list; - } - } - list.forEach(o -> o.setType(LogType.getValueByLabel(o.getType()))); - return list; - } - - private boolean jobIsBusy(List list) { - setCookie(); - for (StorageDeletionInfo info : list) { - - String handler = getDeletePartHandlerByLogType(info.getType()); - if (jobIsBusyByHandler(handler)) { - return true; - } - handler = getDeleteAllHandlerByLogType(info.getType()); - if (jobIsBusyByHandler(handler)) { - return true; - } - } - return false; - } - - private boolean jobIsBusyByHandler(String handler) { - Map dataByHandler = getDataByHandler(handler); - String id = String.valueOf(dataByHandler.get("id")); - BaseResult result = queryJobStatusByJobId(Integer.parseInt(id)); - if (result.getStatus().equals(HttpStatusCodeEnum.LOCKED.getCode())) { - return true; - } else if (!result.getStatus().equals(HttpStatusCodeEnum.SUCCESS.getCode())) { - throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), - String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(),result.getMessage())); - } - return false; - } - - - /** - * 执行日志删除任务: 调用调度任务任务 - * - * @param info - * @return - */ - private BaseResult executeDeleteStorageJob(StorageDeletionInfo info) { - - BaseResult baseResult; - String jobHandler; - String logType = info.getType(); - ExecutorParam executorParam = new ExecutorParam(); - if (0 == info.getMaxDays()) { - jobHandler = getDeleteAllHandlerByLogType(logType); - Map dataByHandler = getDataByHandler(jobHandler); - - StorageDeletionInfo storageDeletionInfoByHandler = getStorageDeletionInfoByHandler(logType, getDeletePartHandlerByLogType(logType)); - executorParam.setMaxDays(storageDeletionInfoByHandler.getMaxDays()); - XxlJobInfo xxlJobInfo = setXxlJobInfoParam(dataByHandler, executorParam); - baseResult = executeTriggerAndUpdate(xxlJobInfo); - } else { - jobHandler = getDeletePartHandlerByLogType(info.getType()); - executorParam.setMaxDays(info.getMaxDays()); - Map oldData = getDataByHandler(jobHandler); - XxlJobInfo newData = setXxlJobInfoParam(oldData, executorParam); - baseResult = executeManageJob("update", newData); - if (baseResult.isSuccess() && LogType.TRAFFIC_LOGS.getValue().equalsIgnoreCase(info.getType())) { - updateSchema(info, oldData); - } - if (baseResult.isSuccess()) { - baseResult = executeManageJob("trigger", newData); - } - } - if (!baseResult.getStatus().equals(HttpStatusCodeEnum.SUCCESS.getCode())) { - throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), - String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(),QGWMessageConst.SCHEDULED_TASK_ERROR)); - } - return baseResult; - } - - private void updateSchema(StorageDeletionInfo info, Map dataByHandler) { - Map previous = Maps.newHashMap(); - try { - Map schemaInfo = databaseService.getSchemaInfo(MetadataType.TABLES.getValue(), clickHouseHttpSource.getDbName(), false); - Object tables = schemaInfo.get("symbols"); - if (StringUtil.isEmpty(tables)) { - return; - } - long ttl = Duration.ofDays(info.getMaxDays()).getSeconds(); - List list = (List) tables; - for (String tableName : list) { - Map schemaMap = databaseService.getSchemaInfo(MetadataType.FIELDS.getValue(), tableName, false); - List schemaDocTTL = JsonPath.read(schemaMap, "$.[?(@.doc.ttl != null)].doc.ttl"); - 
List fieldDocTTL = JsonPath.read(schemaMap, "$.fields[?(@.doc.ttl != null)].doc.ttl"); - if (schemaDocTTL.isEmpty() && fieldDocTTL.isEmpty()) { - continue; - } - previous.put(tableName, databaseService.getSchemaInfo(MetadataType.FIELDS.getValue(), tableName, false)); - Object schemaDoc = schemaMap.get("doc"); - Map map = Maps.newHashMap(); - if (StringUtil.isNotEmpty(schemaDoc)) { - map = (Map) schemaDoc; - } - if (StringUtil.isNotEmpty(map.get("ttl")) && ttl < Long.parseLong(map.get("ttl").toString())) { - map.put("ttl", ttl); - schemaMap.put("doc", map); - } - List> fields = JsonPath.read(schemaMap, "$.fields"); - List indexKey = databaseService.getIndexKey(tableName); - for (Map field : fields) { - Object doc = field.get("doc"); - if (StringUtil.isEmpty(doc)) { - doc = Maps.newHashMap(); - } - Map fieldDoc = (Map) doc; - if (indexKey.contains(field.get("name").toString())) { - fieldDoc.put("ttl", null); - field.put("doc", fieldDoc); - continue; - } - if (StringUtil.isNotEmpty(fieldDoc.get("ttl")) && ttl < Long.parseLong(fieldDoc.get("ttl").toString())) { - fieldDoc.put("ttl", ttl); - field.put("doc", fieldDoc); - } - } - databaseService.updateSchema(tableName, schemaMap); - } - } catch (RuntimeException ex) { - for (String tableName : previous.keySet()) { - databaseService.updateSchema(tableName, previous.get(tableName)); - } - executeManageJob("update", mapToBean(dataByHandler, XxlJobInfo.class)); - throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), - CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), - String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(),"up schema error in storage set ", ex.getMessage())); - } - } - - private String getDeleteAllHandlerByLogType(String logType) { - String jobHandler = StringUtil.EMPTY; - if (LogType.TRAFFIC_LOGS.getValue().equalsIgnoreCase(logType)) { - jobHandler = JobHandlerOption.DELETE_ALL_TRAFFIC_DATA_JOB_HANDLER.getValue(); - } else if (LogType.REPORT_AND_METRICS.getValue().equalsIgnoreCase(logType)) { - jobHandler = JobHandlerOption.DELETE_ALL_REPORT_AND_METRICS_DATA_JOB_HANDLER.getValue(); - } else if (LogType.FILES.getValue().equalsIgnoreCase(logType)) { - jobHandler = JobHandlerOption.DELETE_ALL_FILES_JOB_HANDLER.getValue(); - } - return jobHandler; - } - - private String getDeletePartHandlerByLogType(String logType) { - String jobHandler = StringUtil.EMPTY; - if (LogType.TRAFFIC_LOGS.getValue().equalsIgnoreCase(logType)) { - jobHandler = JobHandlerOption.DELETE_TRAFFIC_DATA_JOB_HANDLER.getValue(); - } else if (LogType.REPORT_AND_METRICS.getValue().equalsIgnoreCase(logType)) { - jobHandler = JobHandlerOption.DELETE_REPORT_AND_METRICS_DATA_JOB_HANDLER.getValue(); - } else if (LogType.FILES.getValue().equalsIgnoreCase(logType)) { - jobHandler = JobHandlerOption.DELETE_FILES_JOB_HANDLER.getValue(); - } - return jobHandler; - } - - /** - * 通过handler获取数据配额设置状态 - * - * @param logType - * @param jobHandlerValue - * @return - */ - private StorageDeletionInfo getStorageDeletionInfoByHandler(String logType, String jobHandlerValue) { - Map trafficDate = getDataByHandler(jobHandlerValue); - XxlJobInfo xxlJobInfo = mapToBean(trafficDate, XxlJobInfo.class); - StorageDeletionInfo executorParam = JSON.parseObject(StrUtil.toUnderlineCase(xxlJobInfo.getExecutorParam()), StorageDeletionInfo.class); - executorParam.setType(logType); - return executorParam; - } - - /** - * 触发执行器并更新任务 - * - * @param xxlJobInfo - * @return - */ - private BaseResult executeTriggerAndUpdate(XxlJobInfo xxlJobInfo) { - BaseResult baseResult; - BaseResult 
resultExecute = executeManageJob("trigger", xxlJobInfo); - if (resultExecute.getStatus().equals(HttpStatusCodeEnum.SUCCESS.getCode())) { - BaseResult resultUpdate = executeManageJob("update", xxlJobInfo); - if (resultUpdate.getStatus().equals(HttpStatusCodeEnum.SUCCESS.getCode())) { - baseResult = BaseResultGenerator.success("ok", null); - } else { - baseResult = BaseResultGenerator.failure(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), - String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(),"Update task failed:" + xxlJobInfo.getExecutorHandler())); - } - } else { - baseResult = BaseResultGenerator.failure(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), - String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(),"Failed to execute task:" + xxlJobInfo.getExecutorHandler())); - } - return baseResult; - } - - /** - * 查询任务状态 - * - * @param jobId - * @return - */ - private BaseResult queryJobStatusByJobId(int jobId) { - BaseResult baseResult = null; - StringBuilder url = new StringBuilder(jobAdminHttpSource.getUrl()).append("/jobinfo/jobBeat?jobId=").append(jobId); - Map resultMap = httpClientService.httpGet(url.toString(), headers, httpConfig.getServerResponseTimeOut()); - baseResult = resultEncapsulationOfJob(resultMap); - return baseResult; - } - - /** - * 执行调度任务结果封装 - * - * @param resultMap - * @return - */ - private BaseResult resultEncapsulationOfJob(Map resultMap) { - BaseResult baseResult; - if (StringUtil.isEmpty(resultMap)) { - baseResult = BaseResultGenerator.failure(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), - String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(),"The service is busy, please contact the scheduling platform!")); - } else { - if (resultMap.get("status").equals(String.valueOf(HttpStatusCodeEnum.SUCCESS.getCode()))) { - Map result = JSON.parseObject(resultMap.get("result"), Map.class); - if (result.get("code").equals(HttpStatusCodeEnum.SUCCESS.getCode())) { - baseResult = BaseResultGenerator.success("ok", null); - } else if (result.get("code").equals(HttpStatusCodeEnum.LOCKED.getCode())) { - baseResult = BaseResultGenerator.failure(HttpStatusCodeEnum.LOCKED.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), - String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), "Clear Task is Running.")); - } else { - baseResult = BaseResultGenerator.failure(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), - String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), result.get("msg"))); - } - } else { - baseResult = BaseResultGenerator.failure(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), - String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), "Clear Task Failed.")); - } - } - return baseResult; - } - - - /** - * 执行调度平台任务更新 - * - * @param executeType - * @param xxlJobInfo - * @return - */ - private BaseResult executeManageJob(String executeType, XxlJobInfo xxlJobInfo) { - BaseResult baseResult = null; - String params = getUrlParamsByMap(getObjectToMap(xxlJobInfo)); - String url = String.format("%s/jobinfo/%s/?%s", jobAdminHttpSource.getUrl(), executeType, params); - Map resultMap = httpClientService.httpGet(url, headers, httpConfig.getServerResponseTimeOut()); - log.warn("请求调度任务" + executeType + "接口" + url); - baseResult = resultEncapsulationOfJob(resultMap); - return baseResult; - } - - - /** - * 获取调度平台Cookie - * - * @return - */ - public 
void setCookie() { - StringBuilder url = new StringBuilder(jobAdminHttpSource.getUrl()); - headers.put("Content-Type", "application/json"); - String urlParamsByMap = getUrlParamsByMap(getObjectToMap(jobAdminHttpSource)); - int socketTimeOut = httpConfig.getServerResponseTimeOut(); - Map httpPostResponseHeads = httpClientService.getHttpPostResponseHeads(url + "/login?" + urlParamsByMap, headers, socketTimeOut); - String cookie = String.valueOf(httpPostResponseHeads.get("SET-COOKIE")); - headers.put("Cookie", cookie); - } - - /** - * 通过handler Value获取jobInfo数据 - * - * @param handlerValue - * @return - */ - private Map getDataByHandler(String handlerValue) { - StringBuilder url = new StringBuilder(jobAdminHttpSource.getUrl()). - append("/jobinfo/pageList?jobGroup=-1&triggerStatus=-1&executorHandler="). - append(handlerValue); - Map resultPageList = httpClientService.httpGet(url.toString(), headers, httpConfig.getServerResponseTimeOut()); - if (StringUtil.isNotEmpty(resultPageList) && resultPageList.get("status").equals(String.valueOf(HttpStatusCodeEnum.SUCCESS.getCode()))) { - Map maps = JSON.parseObject(resultPageList.get("result"), Map.class); - if (StringUtil.isEmpty(maps)) { - throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), - String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(),"Get or package result exception: " + JSON.toJSONString(resultPageList))); - } - List data = (List) maps.get("data"); - if (data.size() >= 1) { - return data.get(0); - } else { - throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), - String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(),"The scheduled task has no task information matching executorHandler (" + handlerValue + ")")); - } - } - throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), - String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(),"Get scheduled task exception: " + JSON.toJSONString(resultPageList))); - } - - /** - * 更新jobInfo - * - * @param data - * @param executorParam - * @return - */ - private XxlJobInfo setXxlJobInfoParam(Map data, ExecutorParam executorParam) { - XxlJobInfo xxlJobInfo = mapToBean(data, XxlJobInfo.class); - ExecutorParam executor = JSON.parseObject(xxlJobInfo.getExecutorParam(), ExecutorParam.class); - executorParam.setDefaultMaxDays(executor.getDefaultMaxDays()); - xxlJobInfo.setExecutorParam(JSON.toJSONString(executorParam)); - return xxlJobInfo; - } - - public static String getUrlParamsByMap(Map map) { - if (map == null) { - return ""; - } - StringBuffer sb = new StringBuffer(); - for (Map.Entry entry : map.entrySet()) { - sb.append(entry.getKey() + "=" + Encodes.urlEncode(String.valueOf(entry.getValue()))); - sb.append("&"); - } - String s = sb.toString(); - if (s.endsWith("&")) { - s = StringUtil.substringBeforeLast(s, "&"); - } - return s; - } - - public static T mapToBean(Map map, Class clazz) { - T bean = null; - try { - bean = clazz.newInstance(); - } catch (InstantiationException e) { - log.error("Instantiation Exception: ", e); - } catch (IllegalAccessException e) { - log.error("Illegal Access Exception: ", e); - } - BeanMap beanMap = BeanMap.create(bean); - beanMap.putAll(map); - return bean; - } - - public static Map getObjectToMap(Object obj) { - Map map = new LinkedHashMap(); - Class clazz = obj.getClass(); - Field[] declaredFields = clazz.getDeclaredFields(); - 
AccessibleObject.setAccessible(declaredFields, true); - for (Field field : declaredFields) { - String fieldName = field.getName(); - Object value = null; - try { - value = field.get(obj); - } catch (IllegalAccessException e) { - log.error("Illegal Access Exception: ", e); - } - if (value == null) { - value = ""; - } - map.put(fieldName, value); - } - return map; - } - - @Override - public void setEnvironment(Environment environment) { - this.env = environment; - } - - - @Getter - @AllArgsConstructor - enum LogType { - TRAFFIC_LOGS("Traffic Logs", "Traffic Logs"), - REPORT_AND_METRICS("Report and Metrics", "Metrics"), - FILES("Files", "Files"), - ALL("All", "All"); - private final String value; - private final String label; - - - public static String getValueByLabel(String label) { - for (LogType enums : LogType.values()) { - if (enums.getLabel().equals(label)) { - return enums.getValue(); - } - } - return ""; - } - - public static String getLabelByValue(String value) { - for (LogType enums : LogType.values()) { - if (enums.getValue().equals(value)) { - return enums.getLabel(); - } - } - return ""; - } - } -} diff --git a/src/main/java/com/mesalab/qgw/service/impl/TroubleshootingServiceImp.java b/src/main/java/com/mesalab/qgw/service/impl/TroubleshootingServiceImp.java index 460be06e..3b74a3af 100644 --- a/src/main/java/com/mesalab/qgw/service/impl/TroubleshootingServiceImp.java +++ b/src/main/java/com/mesalab/qgw/service/impl/TroubleshootingServiceImp.java @@ -189,7 +189,7 @@ public class TroubleshootingServiceImp implements TroubleshootingService, Enviro Map logDiff = Maps.newHashMap(); Map tables = databaseService.getSchemaInfo(MetadataType.TABLES.getValue(), clickHouseHttpSource.getDbName(), false); List symbols = (List) tables.get("symbols"); - logDiff.put("logType", SystemServiceImpl.LogType.TRAFFIC_LOGS.getValue()); + logDiff.put("logType", DatabaseServiceImpl.LogType.TRAFFIC_LOGS.getValue()); Map> changeTTL = Maps.newHashMap(); List ipPorts = getClusterAddressOfCK(); try { diff --git a/src/main/java/com/mesalab/services/controller/SQLDatasetController.java b/src/main/java/com/mesalab/services/controller/SQLDatasetController.java index eb3b9502..a30c109d 100644 --- a/src/main/java/com/mesalab/services/controller/SQLDatasetController.java +++ b/src/main/java/com/mesalab/services/controller/SQLDatasetController.java @@ -1,5 +1,6 @@ package com.mesalab.services.controller; +import cn.hutool.core.exceptions.ExceptionUtil; import cn.hutool.log.Log; import cn.hutool.log.LogFactory; import com.geedgenetworks.utils.StringUtil; @@ -7,12 +8,18 @@ import com.mesalab.common.entity.BaseResult; import com.mesalab.common.entity.BaseResultGenerator; import com.mesalab.common.enums.HttpStatusCodeEnum; +import com.mesalab.common.utils.sqlparser.AutoPeriodHelper; import com.mesalab.qgw.constant.QGWMessageConst; import com.mesalab.qgw.exception.QGWBusinessException; import com.mesalab.common.exception.CommonErrorCode; import com.mesalab.qgw.model.basic.AuditLog; -import com.mesalab.services.service.SQLDatasetService; +import com.mesalab.qgw.service.DatabaseService; +import com.mesalab.services.service.SQLDatasetService; +import net.sf.jsqlparser.JSQLParserException; +import net.sf.jsqlparser.parser.CCJSqlParserUtil; +import net.sf.jsqlparser.parser.JJTCCJSqlParserState; +import net.sf.jsqlparser.statement.Statement; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.web.bind.annotation.*; @@ -27,11 +34,10 @@ import java.util.*; */ @RestController 
@RequestMapping(value = "/v1") -public class SQLDatasetController -{ +public class SQLDatasetController { private static final Log log = LogFactory.get(); - @Autowired - SQLDatasetService sqlDatasetService; + private DatabaseService databaseService; + private SQLDatasetService sqlDatasetService; @DeleteMapping(value = "sql/query/{query_id}") @AuditLog("DatabaseController.killQuery") @@ -52,7 +58,15 @@ public class SQLDatasetController throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(), String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.QUERY_CANNOT_BE_EMPTY)); } - String queryId = sqlDatasetService.getCustomQueryId(resultId == null ? null : resultId.toString(), query.toString()); + Statement statement; + try { + statement = CCJSqlParserUtil.parse(query); + } catch (JSQLParserException e) { + log.error("Error Parsing SQL: {}", ExceptionUtil.getRootCauseMessage(e)); + throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getCode(), + String.format(CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getMessage(), e.getMessage())); + } + String queryId = databaseService.getCustomQueryId(resultId == null ? null : resultId.toString(), AutoPeriodHelper.buildSqlGranularity(statement).toString()); List> result = new ArrayList<>(); Map map = new HashMap<>(16); map.put("queryId", queryId); @@ -66,4 +80,16 @@ public class SQLDatasetController log.info("get query status, param: queryId is {}", queryId); return sqlDatasetService.getProcessesByQueryId(queryId); } + + + @Autowired + public void setDatabaseService(DatabaseService databaseService) { + this.databaseService = databaseService; + } + + @Autowired + public void setSQLDatasetService(SQLDatasetService sqlDatasetService) { + this.sqlDatasetService = sqlDatasetService; + } + } \ No newline at end of file diff --git a/src/main/java/com/mesalab/services/service/SQLDatasetService.java b/src/main/java/com/mesalab/services/service/SQLDatasetService.java index ab781a81..7878c99c 100644 --- a/src/main/java/com/mesalab/services/service/SQLDatasetService.java +++ b/src/main/java/com/mesalab/services/service/SQLDatasetService.java @@ -1,12 +1,7 @@ package com.mesalab.services.service; -import com.google.common.collect.Lists; import com.mesalab.common.entity.BaseResult; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; - /** * SQL数据集市管理服务 * @@ -34,14 +29,4 @@ public interface SQLDatasetService { */ BaseResult deleteQueryTask(String queryId); - /** - * 自定义查询ID: 由 Catalog(数据库类型): resultID+query 组成。 - * - * @param resultId - * @param query - * @return {@link String} - * @created by wWei - * @date 2021/1/7 6:48 下午 - */ - String getCustomQueryId(String resultId, String query); } diff --git a/src/main/java/com/mesalab/services/service/impl/SQLDatasetServiceImpl.java b/src/main/java/com/mesalab/services/service/impl/SQLDatasetServiceImpl.java index 51ff5080..82580f21 100644 --- a/src/main/java/com/mesalab/services/service/impl/SQLDatasetServiceImpl.java +++ b/src/main/java/com/mesalab/services/service/impl/SQLDatasetServiceImpl.java @@ -34,7 +34,7 @@ public class SQLDatasetServiceImpl implements SQLDatasetService { private static final Log log = LogFactory.get(); - public final static String queryIdSeparator = ":"; + public final static String QUERY_ID_SEPARATOR = ":"; @Autowired private DatabaseService databaseService; @@ -52,14 +52,6 @@ public class SQLDatasetServiceImpl 
        return dialect.executeKillQuery(queryId);
    }
-    @Override
-    public String getCustomQueryId(String resultId, String query) {
-        String tableName = SQLHelper.getTableName(query).get(0);
-        String dbType = databaseService.getDBEngineByTableName(tableName);
-        return DigestUtil.md5Hex(dbType) + queryIdSeparator + DigestUtil.md5Hex(resultId + query.trim());
-    }
-
-
    public Dialect buildAndGetDialect(String queryId) {
        SQLQueryContext queryContext = new SQLQueryContext();
        queryContext.setDbEngine(parseDBEngineType(queryId));
@@ -68,11 +60,11 @@ private String parseDBEngineType(String queryId) {
        for (DBEngineType dbTypeEnum : DBEngineType.values()) {
-            if (queryId.startsWith(DigestUtil.md5Hex(dbTypeEnum.getValue()).concat(queryIdSeparator))) {
+            if (queryId.startsWith(DigestUtil.md5Hex(dbTypeEnum.getValue()).concat(QUERY_ID_SEPARATOR))) {
                return dbTypeEnum.getValue();
            }
        }
-        if (queryId.contains(queryIdSeparator)) {
+        if (queryId.contains(QUERY_ID_SEPARATOR)) {
            throw new QGWBusinessException(HttpStatusCodeEnum.NOT_FOUND.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
                    String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.QUERY_ID_IS_NOT_SUPPORTED));
        }
diff --git a/src/main/resources/http-sql-template.sql b/src/main/resources/http-sql-template.sql
index feaae91d..079a74a0 100644
--- a/src/main/resources/http-sql-template.sql
+++ b/src/main/resources/http-sql-template.sql
@@ -93,13 +93,11 @@ select SUM(sent_bytes + received_bytes) AS bytes, COUNT(1) AS sessions from %s w
#end
#sql("SYSTEM_STORAGE_QUOTA")
-SELECT type, SUM(used_size) as used_size, SUM(max_size) * 7 / 10 as max_size, TIME_FORMAT(MILLIS_TO_TIMESTAMP(ANY_VALUE(last_storage) * 1000), 'YYYY-MM-dd') as first_storage FROM
-    (SELECT log_type as type, LATEST(used_size) as used_size, LATEST(max_size) as max_size, LATEST(last_storage) as last_storage FROM sys_storage_log WHERE __time >= CURRENT_TIMESTAMP - INTERVAL '1' HOUR AND data_center != '' GROUP BY data_center, log_type)
-GROUP BY type
+SELECT log_type AS type, SUM(used_size) AS used_size, SUM(total_allocated_size) AS max_size, MIN(first_storage) AS first_storage FROM (SELECT log_type, used_size, total_allocated_size, DATE(FROM_UNIXTIME(since_time)) AS first_storage, ROW_NUMBER() OVER (PARTITION BY log_type ORDER BY generated_time DESC) AS row_num FROM sys_storage_event WHERE generated_time >= UNIX_TIMESTAMP(DATE_SUB(NOW(), INTERVAL 1 WEEK)) ) AS subquery WHERE row_num = 1 GROUP BY log_type
#end
#sql("SYSTEM_DAILY_TREND_OF_STORAGE")
-select TIME_FORMAT(__time, 'YYYY-MM-dd') as stat_time, log_type as type, sum(aggregate_size) as used_size from sys_storage_log where __time >= '%s' and __time < '%s' group by TIME_FORMAT(__time, 'YYYY-MM-dd'), log_type
+SELECT DATE(FROM_UNIXTIME(generated_time)) AS stat_time, log_type AS type, sum( bytes ) AS used_size FROM sys_storage_event WHERE generated_time >= UNIX_TIMESTAMP('%s') AND generated_time < UNIX_TIMESTAMP('%s') GROUP BY stat_time, type ORDER BY stat_time ASC
#end
#sql("RELATION_SUBSCRIBER_ID")
-- cgit v1.2.3


From a480ad3d73a345ef4ee452e356a8dfaef78b0ce8 Mon Sep 17 00:00:00 2001
From: wangwei
Date: Wed, 26 Jun 2024 17:04:09 +0800
Subject: [Fix][liveChart] Add a bit_rate metric to the App traffic trend chart (TSG-21420)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/main/resources/dsl-sql-template.sql  |   5 +-
 src/main/resources/http-sql-template.sql | 102 -------------------------------
 2 files changed, 1 insertion(+), 106 deletions(-)

(limited to
'src/main/resources/http-sql-template.sql') diff --git a/src/main/resources/dsl-sql-template.sql b/src/main/resources/dsl-sql-template.sql index 98a110e2..8eee1f48 100644 --- a/src/main/resources/dsl-sql-template.sql +++ b/src/main/resources/dsl-sql-template.sql @@ -23,14 +23,11 @@ SELECT app_name as app_name, SUM(bytes) as bytes, SUM(sessions) as sessions, MAX SELECT if(bitAnd(flags, 8) = 8, client_ip, server_ip) AS ip, SUM(sent_bytes + received_bytes) AS bytes FROM $table WHERE $intervals_and_filter GROUP BY ip ORDER BY bytes DESC $limit #end #sql("APPLICATION_AND_PROTOCOL_APP_THROUGHPUT") -SELECT FROM_UNIXTIME(TIME_FLOOR_WITH_FILL(UNIX_TIMESTAMP(__time),'$granularity','zero')) AS stat_time, app_name, SUM(c2s_bytes + s2c_bytes) AS bytes FROM $table WHERE $intervals_and_filter GROUP BY FROM_UNIXTIME(TIME_FLOOR_WITH_FILL(UNIX_TIMESTAMP(__time),'$granularity','zero')), app_name ORDER BY stat_time ASC +SELECT FROM_UNIXTIME(TIME_FLOOR_WITH_FILL(UNIX_TIMESTAMP(__time),'$granularity','zero')) AS stat_time, app_name, SUM(c2s_bytes + s2c_bytes) AS bytes, SUM(c2s_bytes + s2c_bytes) * 8 / $granularity_seconds AS bit_rate FROM $table WHERE $intervals_and_filter GROUP BY FROM_UNIXTIME(TIME_FLOOR_WITH_FILL(UNIX_TIMESTAMP(__time),'$granularity','zero')), app_name ORDER BY stat_time ASC #end #sql("APPLICATION_AND_PROTOCOL_APP_SUMMARY") SELECT app_name, SUM(sessions) AS sessions, SUM(c2s_bytes + s2c_bytes) AS bytes, SUM(s2c_bytes) AS received_bytes, SUM(c2s_bytes) AS sent_bytes, SUM(c2s_pkts + s2c_pkts) AS packets, SUM(c2s_pkts) AS sent_packets, SUM(s2c_pkts) AS received_packets FROM $table WHERE $intervals_and_filter GROUP BY app_name #end -#sql("REAL_TIME_DATA_ANALYTICS_SUBSCRIBER_ID_RELATE_IP") -SELECT account AS SUBSCRIBER_ID, framed_ip AS IP, first_found_time AS FIRST_FOUND_TIME, last_update_time AS LAST_FOUND_TIME, vsys_id as VSYS_ID FROM $table WHERE $intervals_and_filter AND acct_status_type != 2 $limit -#end #sql("DATAPATH_TELEMETRY_RECORD") SELECT * FROM $table WHERE $intervals_and_filter ORDER BY timestamp_us ASC #end diff --git a/src/main/resources/http-sql-template.sql b/src/main/resources/http-sql-template.sql index 079a74a0..8967a1e4 100644 --- a/src/main/resources/http-sql-template.sql +++ b/src/main/resources/http-sql-template.sql @@ -1,97 +1,3 @@ -#sql("NETWORK_OVERVIEW_ASYMMETRIC_FLOWS_STAT") -SELECT SUM(asymmetric_c2s_flows + asymmetric_s2c_flows) AS asymmetric_flows, SUM(closed_sessions) AS total_session_used_on_asymmetric_flows FROM traffic_general_stat WHERE %s %s LIMIT 1 -#end - -#sql("NETWORK_OVERVIEW_STAT") -SELECT SUM(c2s_fragments + s2c_fragments) AS fragmentation_packets, SUM(c2s_bytes + s2c_bytes) AS total_bytes, SUM(c2s_pkts + s2c_pkts) AS total_packets, SUM(sessions) AS total_sessions, (SUM(c2s_bytes + s2c_bytes) * 8)/(%s -%s) AS data_rate FROM %s WHERE %s %s AND protocol_stack_id = '%s' LIMIT 1 -#end - -#sql("NETWORK_OVERVIEW_TCP_STAT") -SELECT SUM(c2s_tcp_retransmitted_pkts + s2c_tcp_retransmitted_pkts) AS tcp_retransmissions_packets, SUM(c2s_pkts + s2c_pkts) AS tcp_total_packets FROM %s WHERE %s %s AND RIGHT(protocol_stack_id, 4) = '.TCP' LIMIT 1 -#end - -#sql("NETWORK_OVERVIEW_APP_STAT") -SELECT SUM(c2s_bytes + s2c_bytes) as unknown_app_bytes FROM %s WHERE %s %s AND app_name = 'unknown' LIMIT 1 -#end - -#sql("PROTOCOL_TREE_SUMMARY") -SELECT protocol_stack_id, SUM(sessions) as sessions,SUM(c2s_bytes) as c2s_bytes, SUM(c2s_pkts) as c2s_pkts, SUM(s2c_bytes) as s2c_bytes, SUM(s2c_pkts) as s2c_pkts FROM %s WHERE %s %s GROUP BY protocol_stack_id -#end - 
-#sql("PROTOCOL_DATA_RATE_SUMMARY") -(SELECT TIME_FORMAT(MILLIS_TO_TIMESTAMP( 1000 * TIME_FLOOR_WITH_FILL(TIMESTAMP_TO_MILLIS(__time)/1000, '%s', 'zero')), 'yyyy-MM-dd HH:mm:ss') as stat_time, protocol_stack_id as type, sum(c2s_bytes + s2c_bytes) as bytes from %s where %s %s and protocol_stack_id = '%s' group by TIME_FORMAT(MILLIS_TO_TIMESTAMP( 1000 * TIME_FLOOR_WITH_FILL(TIMESTAMP_TO_MILLIS(__time)/1000, '%s', 'zero')), 'yyyy-MM-dd HH:mm:ss'), protocol_stack_id order by stat_time asc) -union all -(SELECT TIME_FORMAT(MILLIS_TO_TIMESTAMP( 1000 * TIME_FLOOR_WITH_FILL(TIMESTAMP_TO_MILLIS(__time)/1000, '%s', 'zero')), 'yyyy-MM-dd HH:mm:ss') as stat_time, protocol_stack_id as type, sum(c2s_bytes + s2c_bytes) as bytes from %s where %s %s and protocol_stack_id like CONCAT('%s','.%s') and LENGTH(protocol_stack_id) = LENGTH(REPLACE(protocol_stack_id,'.','')) + 1 + %s group by TIME_FORMAT(MILLIS_TO_TIMESTAMP( 1000 * TIME_FLOOR_WITH_FILL(TIMESTAMP_TO_MILLIS(__time)/1000, '%s', 'zero')), 'yyyy-MM-dd HH:mm:ss'), protocol_stack_id order by stat_time asc) -#end - -#sql("APP_DATA_SUMMARY") -SELECT app_name as app_name, SUM(bytes) as bytes, SUM(sessions) as sessions, MAX(bytes_rate) as max_rate, AVG(bytes_rate) as avg_rate FROM (SELECT app_name as app_name, SUM(sessions) as sessions, SUM(c2s_bytes + s2c_bytes) as bytes, SUM(c2s_bytes + s2c_bytes) * 8 / %s as bytes_rate FROM %s WHERE %s %s AND notEmpty(app_name) GROUP BY app_name ORDER BY bytes DESC ) GROUP BY app_name ORDER BY bytes DESC %s -#end - -#sql("APP_INTERNAL_IP_SUMMARY") -SELECT if(bitAnd(flags, 8) = 8, client_ip, server_ip) AS ip, SUM(sent_bytes + received_bytes) AS bytes FROM %s WHERE %s %s GROUP BY ip ORDER BY bytes DESC %s -#end - -#sql("APP_DATA_RATE_SUMMARY") -SELECT TIME_FORMAT(MILLIS_TO_TIMESTAMP( 1000 * TIME_FLOOR_WITH_FILL(TIMESTAMP_TO_MILLIS(__time)/ 1000, '%s', 'zero')), 'yyyy-MM-dd HH:mm:ss') as stat_time, app_name , sum(c2s_bytes + s2c_bytes) as bytes from %s where %s %s group by TIME_FORMAT(MILLIS_TO_TIMESTAMP( 1000 * TIME_FLOOR_WITH_FILL(TIMESTAMP_TO_MILLIS(__time)/ 1000, '%s', 'zero')), 'yyyy-MM-dd HH:mm:ss'), app_name order by stat_time asc -#end - -#sql("APP_TRAFFIC_SUMMARY") -SELECT app_name, SUM(sessions) as sessions, sum(c2s_bytes + s2c_bytes) as bytes, sum(s2c_bytes) as received_bytes, sum(c2s_bytes) as sent_bytes, sum(c2s_pkts + s2c_pkts) as packets, sum(c2s_pkts) as sent_packets, sum(s2c_pkts) as received_packets from %s where %s %s group by app_name -#end - -#sql("ENTITY_ACTIVE_CLIENT_IP") -select client_ip as client_ip, vsys_id as vsys_id from %s where %s %s AND notEmpty(client_ip) GROUP BY client_ip, app, vsys_id ORDER BY COUNT(1) DESC LIMIT %s -#end - -#sql("ENTITY_UDP_SESSION") -select server_ip as server_ip,vsys_id as vsys_id ,COUNT(1) as sessions from %s where %s AND (arrayElement(splitByString('.',decoded_path),length(splitByString('.',decoded_path))) = 'UDP') and server_port in (53 , 443) group by server_ip,vsys_id order by sessions desc limit %s -#end - -#sql("ENTITY_UDP_UNIQ_CLIENT_IPS") -select server_ip as server_ip, vsys_id as vsys_id from %s where %s AND (arrayElement(splitByString('.',decoded_path),length(splitByString('.',decoded_path))) = 'UDP') group by server_ip,vsys_id order by COUNT(DISTINCT(client_ip)) desc limit %s -#end - -#sql("ENTITY_TCP_SESSION") -select server_ip as server_ip,vsys_id as vsys_id ,COUNT(1) as sessions from %s where %s AND (arrayElement(splitByString('.',decoded_path),length(splitByString('.',decoded_path))) = 'TCP') group by server_ip,vsys_id order by sessions desc limit %s 
-#end
-
-#sql("ENTITY_TCP_UNIQ_CLIENT_IPS")
-select server_ip as server_ip, vsys_id as vsys_id from %s where %s AND (arrayElement(splitByString('.',decoded_path),length(splitByString('.',decoded_path))) = 'TCP') group by server_ip, vsys_id order by COUNT(DISTINCT(client_ip)) desc limit %s
-#end
-
-#sql("TOP_ENTITY_TCP_UNIQ_CLIENT_IPS")
-select COUNT(DISTINCT(client_ip)) as client_ips from %s where %s AND (arrayElement(splitByString('.',decoded_path),length(splitByString('.',decoded_path))) = 'TCP') AND server_ip in (select server_ip from %s as cc where %s AND (arrayElement(splitByString('.',decoded_path),length(splitByString('.',decoded_path))) = 'TCP') group by server_ip order by COUNT(DISTINCT(client_ip)) desc limit %s)
-#end
-
-#sql("TOP_ENTITY_UDP_UNIQ_CLIENT_IPS")
-select COUNT(DISTINCT(client_ip)) as client_ips from %s where %s AND (arrayElement(splitByString('.',decoded_path),length(splitByString('.',decoded_path))) = 'UDP') AND server_ip in (select server_ip from %s as cc where %s AND (arrayElement(splitByString('.',decoded_path),length(splitByString('.',decoded_path))) = 'UDP') group by server_ip order by COUNT(DISTINCT(client_ip)) desc limit %s)
-#end
-
-#sql("TOTAL_ENTITY_UDP_SESSION")
-select COUNT(1) as sessions from %s where %s AND (arrayElement(splitByString('.',decoded_path),length(splitByString('.',decoded_path))) = 'UDP') and server_port in (53 , 443) limit 1
-#end
-
-#sql("TOTAL_ENTITY_UDP_UNIQ_CLIENT_IPS")
-select COUNT(DISTINCT(client_ip)) as uniq_client_ips from %s where %s AND (arrayElement(splitByString('.',decoded_path),length(splitByString('.',decoded_path))) = 'UDP') limit 1
-#end
-
-#sql("TOTAL_ENTITY_TCP_SESSION")
-select COUNT(1) as sessions from %s where %s AND (arrayElement(splitByString('.',decoded_path),length(splitByString('.',decoded_path))) = 'TCP') limit 1
-#end
-
-#sql("TOTAL_ENTITY_TCP_UNIQ_CLIENT_IPS")
-select COUNT(DISTINCT(client_ip)) as uniq_client_ips from %s where %s AND (arrayElement(splitByString('.',decoded_path),length(splitByString('.',decoded_path))) = 'TCP') limit 1
-#end
-
-#sql("ENTITY_TOP_SNI")
-select ssl_sni, SUM(sent_bytes + received_bytes) AS bytes, COUNT(1) AS sessions,vsys_id as vsys_id from %s where %s and notEmpty(ssl_sni) and decoded_as = 'SSL' group by ssl_sni,vsys_id order by COUNT(1) desc limit %s
-#end
-
-#sql("ENTITY_SNI_TOTAL")
-select SUM(sent_bytes + received_bytes) AS bytes, COUNT(1) AS sessions from %s where %s and decoded_as = 'SSL' limit 1
-#end
-
#sql("SYSTEM_STORAGE_QUOTA")
SELECT log_type AS type, SUM(used_size) AS used_size, SUM(total_allocated_size) AS max_size, MIN(first_storage) AS first_storage FROM (SELECT log_type, used_size, total_allocated_size, DATE(FROM_UNIXTIME(since_time)) AS first_storage, ROW_NUMBER() OVER (PARTITION BY log_type ORDER BY generated_time DESC) AS row_num FROM sys_storage_event WHERE generated_time >= UNIX_TIMESTAMP(DATE_SUB(NOW(), INTERVAL 1 WEEK)) ) AS subquery WHERE row_num = 1 GROUP BY log_type
#end
@@ -100,10 +6,6 @@ SELECT log_type AS type, SUM(used_size) AS used_size, SUM(total_allocated_size)
SELECT DATE(FROM_UNIXTIME(generated_time)) AS stat_time, log_type AS type, sum( bytes ) AS used_size FROM sys_storage_event WHERE generated_time >= UNIX_TIMESTAMP('%s') AND generated_time < UNIX_TIMESTAMP('%s') GROUP BY stat_time, type ORDER BY stat_time ASC
#end
-#sql("RELATION_SUBSCRIBER_ID")
-SELECT account AS SUBSCRIBER_ID,framed_ip AS IP,first_found_time AS FIRST_FOUND_TIME,last_update_time AS LAST_FOUND_TIME,vsys_id as VSYS_ID FROM tsg_galaxy.relation_account_framedip WHERE 1=1 %s AND acct_status_type != 2 LIMIT %s
-#end
-
-#sql("SCHEMA_STORAGE_SIZE")
-SELECT name AS field, IFNULL(SUM(data_compressed_bytes), 0) AS bytes FROM columns_cluster WHERE "table" = '%s_local' AND "database" = '%s' GROUP BY name ORDER BY bytes DESC
-#end
-
@@ -112,10 +14,6 @@ SELECT name AS field, IFNULL(SUM(data_compressed_bytes), 0) AS bytes FROM column
SELECT name, engine_full AS table_ttl FROM tables WHERE name ='%s_local' AND "database" = '%s' AND "engine" ='MergeTree'
#end
-#sql("GTPC_KNOWLEDGE_BASE")
-SELECT uplink_teid AS teid, apn, phone_number, imsi, imei, last_update_time,vsys_id FROM tsg_galaxy.gtpc_knowledge_base WHERE msg_type = 1 and uplink_teid !=0 %s order by last_update_time desc limit %s
-#end
-
#sql("TABLE_INDEX_KEY")
SELECT any(splitByString(', ',sorting_key)) AS index_key FROM tables_cluster where name = '%s_local' AND "database" = '%s'
-- cgit v1.2.3


From 34b828b9f98026f93c2499700f110ebdba20ca59 Mon Sep 17 00:00:00 2001
From: wangwei
Date: Fri, 28 Jun 2024 18:02:56 +0800
Subject: [Fix][storage] Rename the storage quota API result fields: total_allocated_size, since_time (TSG-21554)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/main/resources/http-sql-template.sql | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

(limited to 'src/main/resources/http-sql-template.sql')

diff --git a/src/main/resources/http-sql-template.sql b/src/main/resources/http-sql-template.sql
index 8967a1e4..4be71868 100644
--- a/src/main/resources/http-sql-template.sql
+++ b/src/main/resources/http-sql-template.sql
@@ -1,5 +1,5 @@
#sql("SYSTEM_STORAGE_QUOTA")
-SELECT log_type AS type, SUM(used_size) AS used_size, SUM(total_allocated_size) AS max_size, MIN(first_storage) AS first_storage FROM (SELECT log_type, used_size, total_allocated_size, DATE(FROM_UNIXTIME(since_time)) AS first_storage, ROW_NUMBER() OVER (PARTITION BY log_type ORDER BY generated_time DESC) AS row_num FROM sys_storage_event WHERE generated_time >= UNIX_TIMESTAMP(DATE_SUB(NOW(), INTERVAL 1 WEEK)) ) AS subquery WHERE row_num = 1 GROUP BY log_type
+SELECT log_type AS type, SUM(used_size) AS used_size, SUM(total_allocated_size) AS total_allocated_size, MIN(since_time) AS since_time FROM (SELECT log_type, used_size, total_allocated_size, DATE(FROM_UNIXTIME(since_time)) AS since_time, ROW_NUMBER() OVER (PARTITION BY log_type ORDER BY generated_time DESC) AS row_num FROM sys_storage_event WHERE generated_time >= UNIX_TIMESTAMP(DATE_SUB(NOW(), INTERVAL 1 WEEK)) ) AS subquery WHERE row_num = 1 GROUP BY log_type
#end
#sql("SYSTEM_DAILY_TREND_OF_STORAGE")
-- cgit v1.2.3
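
For readers tracing the SYSTEM_STORAGE_QUOTA change above: the rewritten template keeps only the newest sys_storage_event row per log_type (via ROW_NUMBER ordered by generated_time) and then aggregates the renamed fields. The sketch below is illustrative only and is not part of any patch; the table and column names are taken straight from the template, while the subquery alias latest_per_type is a made-up name standing in for the template's "subquery".

-- Illustrative restatement of the SYSTEM_STORAGE_QUOTA pattern (not part of the patch set).
-- Keep the newest event per log_type, then report the renamed fields.
SELECT log_type AS type,
       SUM(used_size)            AS used_size,
       SUM(total_allocated_size) AS total_allocated_size,
       MIN(since_time)           AS since_time
FROM (
    SELECT log_type,
           used_size,
           total_allocated_size,
           DATE(FROM_UNIXTIME(since_time)) AS since_time,
           ROW_NUMBER() OVER (PARTITION BY log_type ORDER BY generated_time DESC) AS row_num
    FROM sys_storage_event
    WHERE generated_time >= UNIX_TIMESTAMP(DATE_SUB(NOW(), INTERVAL 1 WEEK))
) AS latest_per_type   -- the template itself calls this alias "subquery"
WHERE row_num = 1      -- newest event per log_type only
GROUP BY log_type;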