| author | wangwei <[email protected]> | 2024-10-31 20:04:59 +0800 |
|---|---|---|
| committer | wangwei <[email protected]> | 2024-10-31 20:04:59 +0800 |
| commit | 9a185437e92ae8b5c7bc04bc88fa847f60b37831 | |
| tree | 86a361fa1c9f55fee3cc22e63332687dd56367dd | |
| parent | 520393408703878bbb7f3d88ec1d7f164758037f | |
[Fix][DSL] Optimize the fields request parameter for log-query and field-discovery (TSG-22484)
5 files changed, 20 insertions, 20 deletions
diff --git a/config/flyway/tsg/R__init_datasets.sql b/config/flyway/tsg/R__init_datasets.sql
index 88ca4c21..66b1c699 100644
--- a/config/flyway/tsg/R__init_datasets.sql
+++ b/config/flyway/tsg/R__init_datasets.sql
@@ -153,9 +153,9 @@ INSERT INTO `dataset` (`identifier_name`, `category`, `backend_engine`, `type`,
 INSERT INTO `dataset` (`identifier_name`, `category`, `backend_engine`, `type`, `template`, `description`) VALUES ('datapath-telemetry-record-count', 'datapath_telemetry_record', 'qgw', 'sql', '{ "statement": "SELECT count(1) as count FROM datapath_telemetry_record WHERE recv_time >= UNIX_TIMESTAMP(''${start_time}'') AND recv_time < UNIX_TIMESTAMP(''${end_time}'') AND vsys_id in(${vsys_id}) AND ( ${filter})" }',null);
 INSERT INTO `dataset` (`identifier_name`, `category`, `backend_engine`, `type`, `template`, `description`) VALUES ('datapath-telemetry-record-list', 'datapath_telemetry_record', 'qgw', 'sql', '{ "statement": "SELECT ${columns} FROM datapath_telemetry_record WHERE recv_time >= UNIX_TIMESTAMP(''${start_time}'') AND recv_time < UNIX_TIMESTAMP(''${end_time}'') AND vsys_id IN(${vsys_id}) AND (${filter}) ORDER BY timestamp_us ASC LIMIT ${limit}" }',null);
 INSERT INTO `dataset` (`identifier_name`, `category`, `backend_engine`, `type`, `template`, `description`) VALUES ('datapath-telemetry-packet-combine', 'datapath_telemetry_record', 'qgw', 'dsl', '{"id":"${job_id}","name":"datapath_telemetry_packet_combine","data_source":"datapath_telemetry_record","filter":"job_id=''${job_id}'' AND vsys_id in (${vsys_id}) AND (${filter})"}',null);
-INSERT INTO `dataset` (`identifier_name`, `category`, `backend_engine`, `type`, `template`, `description`) VALUES ('log-query', 'statistics', 'qgw', 'dsl', '{"name": "log-query", "data_source": "${source}", "filter": "vsys_id in (${vsys_id}) AND (${filter})", "intervals": ["${start_time}/${end_time}" ] }',null);
+INSERT INTO `dataset` (`identifier_name`, `category`, `backend_engine`, `type`, `template`, `description`) VALUES ('log-query', 'statistics', 'qgw', 'dsl', ' {"name": "log-query", "data_source": "${source}", "fields": ["field_list" ], "filter": "vsys_id in (${vsys_id}) AND (${filter})", "intervals": ["${start_time}/${end_time}" ] }',null);
 INSERT INTO `dataset` (`identifier_name`, `category`, `backend_engine`, `type`, `template`, `description`) VALUES ('field-discovery-DEPRECATED', 'statistics', 'qgw', 'dsl', '{ "name": "field_discovery", "data_source": "${source}", "filter": "vsys_id in (${vsys_id}) AND (${filter})", "custom.field_discovery.metric": "${metric}", "custom.field_discovery.metric.fn": "${fn}", "custom.field_discovery.fields": ["${field_list}"] }',null);
-INSERT INTO `dataset` (`identifier_name`, `category`, `backend_engine`, `type`, `template`, `description`) VALUES ('field-discovery', 'statistics', 'qgw', 'dsl', '{"name": "field-discovery", "data_source": "${source}", "filter": "vsys_id in (${vsys_id}) AND (${filter})", "custom.statistics.metrics": [{"function": {"name": "${fn}" }, "metric_name": "${metric}" } ], "custom.statistics.dimensions": ["${dimension_list}" ], "intervals": ["${start_time}/${end_time}" ] }',null);
+INSERT INTO `dataset` (`identifier_name`, `category`, `backend_engine`, `type`, `template`, `description`) VALUES ('field-discovery', 'statistics', 'qgw', 'dsl', '{"name": "field-discovery", "data_source": "${source}", "fields": ["field_list" ], "filter": "vsys_id in (${vsys_id}) AND (${filter})", "custom.statistics.metrics": [{"function": {"name": "${fn}" }, "metric_name": "${metric}" } ], "intervals": ["${start_time}/${end_time}" ] }',null);
 INSERT INTO `dataset` (`identifier_name`, `category`, `backend_engine`, `type`, `template`, `description`) VALUES ('field-statistics-top-values', 'statistics', 'qgw', 'sql', '{ "statement":"SELECT ${column_name}, count(*) as cnt FROM ${source} where recv_time >= UNIX_TIMESTAMP(''${start_time}'') AND recv_time < UNIX_TIMESTAMP(''${end_time}'') AND ${filter} GROUP BY ${column_name} ORDER BY cnt DESC LIMIT ${limit}" }',null);
 INSERT INTO `dataset` (`identifier_name`, `category`, `backend_engine`, `type`, `template`, `description`) VALUES ('field-statistics-rare-values', 'statistics', 'qgw', 'sql', '{ "statement":"SELECT ${column_name}, count(*) as cnt FROM ${source} where recv_time >= UNIX_TIMESTAMP(''${start_time}'') AND recv_time < UNIX_TIMESTAMP(''${end_time}'') AND ${filter} GROUP BY ${column_name} ORDER BY cnt ASC LIMIT ${limit}" }',null);
 INSERT INTO `dataset` (`identifier_name`, `category`, `backend_engine`, `type`, `template`, `description`) VALUES ('field-statistics-avg-value-over-time', 'statistics', 'qgw', 'sql', '{ "statement":"SELECT TIME_FLOOR_WITH_FILL(${unix_timestamp_column}, CHART_GRANULARITY(''${start_time}'', ''${end_time}''), ''zero'') AS stat_time, avg(${column_name_long}) FROM ${source} WHERE recv_time >= UNIX_TIMESTAMP(''${start_time}'') AND recv_time < UNIX_TIMESTAMP(''${end_time}'') AND ${filter} GROUP BY TIME_FLOOR_WITH_FILL(${unix_timestamp_column}, CHART_GRANULARITY(''${start_time}'', ''${end_time}''), ''zero'') ORDER BY stat_time ASC LIMIT ${limit}" }',null);
diff --git a/src/main/java/com/mesalab/qgw/controller/QueryController.java b/src/main/java/com/mesalab/qgw/controller/QueryController.java
index 70177b81..78a67d8b 100644
--- a/src/main/java/com/mesalab/qgw/controller/QueryController.java
+++ b/src/main/java/com/mesalab/qgw/controller/QueryController.java
@@ -143,7 +143,7 @@ public class QueryController {
     @AuditLog("QueryController.getJobTimelineById")
     public BaseResult getJobListById(@PathVariable String id
             , @RequestParam(value = "is_saved_query", required = false, defaultValue = "0") Integer isSavedQuery
-            , @RequestParam(value = "fields", required = false, defaultValue = "*") String fields
+            , @RequestParam(value = "fields", required = false) String fields
             , @RequestParam(value = "limit", required = false, defaultValue = "10") Integer limit
             , @RequestParam(value = "offset", required = false, defaultValue = "0") Integer offset) {
         if (BooleanUtil.toBoolean(String.valueOf(isSavedQuery))) {
diff --git a/src/main/java/com/mesalab/qgw/model/basic/DSLQueryRequestParam.java b/src/main/java/com/mesalab/qgw/model/basic/DSLQueryRequestParam.java
index 4d2bfd5a..94f7f12a 100644
--- a/src/main/java/com/mesalab/qgw/model/basic/DSLQueryRequestParam.java
+++ b/src/main/java/com/mesalab/qgw/model/basic/DSLQueryRequestParam.java
@@ -34,6 +34,8 @@ public class DSLQueryRequestParam extends CommonRequestParam implements Serializ
     private String dataSource;
     @JsonProperty("granularity")
     private String granularity;
+    @JsonProperty("fields")
+    private List<String> fields;
     @JsonProperty("filter")
     private String filter;
     @JsonProperty("order_by")
diff --git a/src/main/java/com/mesalab/qgw/service/impl/FieldDiscoveryServiceImpl.java b/src/main/java/com/mesalab/qgw/service/impl/FieldDiscoveryServiceImpl.java
index fc4317f2..4e31dd30 100644
--- a/src/main/java/com/mesalab/qgw/service/impl/FieldDiscoveryServiceImpl.java
+++ b/src/main/java/com/mesalab/qgw/service/impl/FieldDiscoveryServiceImpl.java
@@ -110,6 +110,15 @@ public class FieldDiscoveryServiceImpl implements FieldDiscoveryService {
             throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
                     String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), "Data Source cannot be null or empty."));
         }
+        List<String> fields = request.getFields();
+        if (fields == null || fields.isEmpty()) {
+            throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+                    String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), "Fields cannot be null or empty."));
+        }
+
+        List<String> fieldsDistinct = fields.stream().distinct().collect(Collectors.toList());
+        request.setFields(fieldsDistinct);
+
         List<String> intervals = request.getIntervals();
         if (intervals == null || intervals.size() == 0) {
             throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
@@ -119,19 +128,9 @@ public class FieldDiscoveryServiceImpl implements FieldDiscoveryService {
 
         Map<String, Object> customRequestParam = request.getCustomRequestParam();
         if (customRequestParam == null || customRequestParam.size() == 0) {
-            throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
-                    String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), "custom.statistics.dimensions cannot be null or empty."));
-
-        }
+            return;
 
-        Object dimensions = customRequestParam.get("custom.statistics.dimensions");
-        if (!(dimensions instanceof List)) {
-            throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
-                    String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), "custom.statistics.dimensions cannot be null or empty."));
         }
-        List<Map<String, Object>> collect = ((List<Map<String, Object>>) dimensions).stream().distinct().collect(Collectors.toList());
-        request.getCustomRequestParam().put("custom.statistics.dimensions", collect);
-
         Object metrics = customRequestParam.get("custom.statistics.metrics");
         if ((metrics instanceof List) && ((List<Map<String, Object>>) metrics).size() == 1) {
             Map<String, Object> o = ((List<Map<String, Object>>) metrics).get(0);
@@ -155,10 +154,10 @@ public class FieldDiscoveryServiceImpl implements FieldDiscoveryService {
         String dataSource = request.getDataSource();
         String totalMetric = "count(*)";
         Map<String, Object> customRequestParam = request.getCustomRequestParam();
-        Object metrics = customRequestParam.get("custom.statistics.metrics");
         Object metric = null;
         Object fn = null;
-        if (StringUtil.isNotEmpty(metrics)) {
+        if (StringUtil.isNotEmpty(request.getCustomRequestParam()) && StringUtil.isNotEmpty(customRequestParam.get("custom.statistics.metrics"))) {
+            Object metrics = customRequestParam.get("custom.statistics.metrics");
             List<Map<String, Object>> metricsList = (List<Map<String, Object>>) metrics;
             Map<String, Object> map = metricsList.get(0);
             metric = map.get("metric_name");
@@ -178,9 +177,7 @@ public class FieldDiscoveryServiceImpl implements FieldDiscoveryService {
             return;
         }
         Map<String, Future<List<Map<String, Object>>>> taskCallbackList = Maps.newHashMap();
-        List<String> fields = Lists.newArrayList();
-        List<Map<String, Object>> o = (List<Map<String, Object>>) customRequestParam.get("custom.statistics.dimensions");
-        o.forEach(x -> fields.add(String.valueOf(x.get("dimension_name"))));
+        List<String> fields = request.getFields();
 
         for (String field : fields) {
             Object finalMetric = metric;
diff --git a/src/main/java/com/mesalab/qgw/service/impl/LogQueryServiceImpl.java b/src/main/java/com/mesalab/qgw/service/impl/LogQueryServiceImpl.java
index d5b3c534..c75988ef 100644
--- a/src/main/java/com/mesalab/qgw/service/impl/LogQueryServiceImpl.java
+++ b/src/main/java/com/mesalab/qgw/service/impl/LogQueryServiceImpl.java
@@ -4,6 +4,7 @@ import cn.hutool.core.bean.BeanUtil;
 import cn.hutool.core.date.DatePattern;
 import cn.hutool.core.date.DateTime;
 import cn.hutool.core.date.DateUtil;
+import cn.hutool.core.util.StrUtil;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.jayway.jsonpath.JsonPath;
@@ -129,7 +130,7 @@ public class LogQueryServiceImpl implements LogQueryService {
         String partitionKeyLogicalType = partitionKeyLogicalTypes.isEmpty() ? null : partitionKeyLogicalTypes.get(0);
         String sqlTemplate2 = dslQueryContextTemp.toSql(sqlTemplate, dataSource, partitionKey, partitionKeyLogicalType);
 
-        String sql = String.format(sqlTemplate2, fields, partitionKey + " DESC ");
+        String sql = String.format(sqlTemplate2, StrUtil.isEmpty(fields) ? String.join(", ", requestParam.getFields()) : fields, partitionKey + " DESC ");
         return sqlSyncQueryService.executeQuery(SQLQueryContext.builder().originalSQL(sql).build());
     }
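
Editorial note: after this change, both the log-query and field-discovery templates carry the field list as a top-level `fields` array instead of `custom.statistics.dimensions`, matching the new `fields` property on `DSLQueryRequestParam`. A minimal sketch of the payload shape implied by the updated field-discovery template is shown below; only the keys (`name`, `data_source`, `fields`, `filter`, `custom.statistics.metrics`, `intervals`) come from the templates above, while the data source, field names, vsys id, metric and interval values are purely illustrative assumptions:

```json
{
  "name": "field-discovery",
  "data_source": "session_record",
  "fields": ["src_ip", "dst_ip", "app_name"],
  "filter": "vsys_id in (1) AND (1=1)",
  "custom.statistics.metrics": [
    { "function": { "name": "count" }, "metric_name": "record_count" }
  ],
  "intervals": ["2024-10-01 00:00:00/2024-10-31 00:00:00"]
}
```

A log-query payload would carry the same top-level `fields` array without the metrics block; per the `LogQueryServiceImpl` hunk above, those fields are joined with ", " into the SELECT column list whenever the controller's `fields` query-string parameter (which no longer defaults to `*`) is empty.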
