summaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
author窦凤虎 <[email protected]>2024-07-15 14:14:04 +0000
committer窦凤虎 <[email protected]>2024-07-15 14:14:04 +0000
commit3e68ae6f67eb16174807d83f1eae2a203ad235f0 (patch)
tree21eeb6684b826885824993a0cb4a2bf7cf32405b /src
parent1f8a10c9d5f1f839d22710aba953aadaba66494b (diff)
parentea9ae304972a683883515679c243dfc9c1b44a8d (diff)
Merge branch 'develop' into 'master'master
Revert "[Fix][schema] 修正dos event schema field_discovery_metric session计算方式(TSG-21222)" See merge request galaxy/platform/galaxy-qgw-service!40
Diffstat (limited to 'src')
-rw-r--r--src/main/java/com/mesalab/calcite/CalciteConnectionProcessor.java (renamed from src/main/java/com/mesalab/calcite/CalciteMemoryUtils.java)43
-rw-r--r--src/main/java/com/mesalab/cn/component/DslValidate.java123
-rw-r--r--src/main/java/com/mesalab/cn/entity/pojo/CNParser.java8
-rw-r--r--src/main/java/com/mesalab/cn/service/impl/CNServiceImpl.java16
-rw-r--r--src/main/java/com/mesalab/cn/service/impl/EntityDetailServiceImpl.java29
-rw-r--r--src/main/java/com/mesalab/common/configuration/HazelcastConfiguration.java35
-rw-r--r--src/main/java/com/mesalab/common/configuration/HeavyResourceThreadPoolConfiguration.java44
-rw-r--r--src/main/java/com/mesalab/common/configuration/LightWeightThreadPoolConfiguration.java46
-rw-r--r--src/main/java/com/mesalab/common/configuration/TaskThreadPoolConfiguration.java (renamed from src/main/java/com/mesalab/services/configuration/TaskThreadPoolCfg.java)27
-rw-r--r--src/main/java/com/mesalab/common/entity/BaseResult.java37
-rw-r--r--src/main/java/com/mesalab/common/entity/BaseResultGenerator.java59
-rw-r--r--src/main/java/com/mesalab/common/entity/DataTypeMapping.java1
-rw-r--r--src/main/java/com/mesalab/common/enums/BooleanOption.java (renamed from src/main/java/com/mesalab/common/enums/BooleanEnum.java)4
-rw-r--r--src/main/java/com/mesalab/common/enums/ConsistencyOption.java13
-rw-r--r--src/main/java/com/mesalab/common/enums/CookieOption.java (renamed from src/main/java/com/mesalab/common/enums/CookieEnum.java)4
-rw-r--r--src/main/java/com/mesalab/common/enums/DBEngineType.java (renamed from src/main/java/com/mesalab/common/enums/DBTypeEnum.java)6
-rw-r--r--src/main/java/com/mesalab/common/enums/DiagnosisOption.java (renamed from src/main/java/com/mesalab/common/enums/DiagnosisOptionEnum.java)4
-rw-r--r--src/main/java/com/mesalab/common/enums/Environment.java (renamed from src/main/java/com/mesalab/common/enums/EnvironmentEnum.java)4
-rw-r--r--src/main/java/com/mesalab/common/enums/EnvironmentGroupEnum.java10
-rw-r--r--src/main/java/com/mesalab/common/enums/ExecutionMode.java32
-rw-r--r--src/main/java/com/mesalab/common/enums/FileCategory.java15
-rw-r--r--src/main/java/com/mesalab/common/enums/FileFormat.java16
-rw-r--r--src/main/java/com/mesalab/common/enums/HttpStatusCodeEnum.java (renamed from src/main/java/com/mesalab/common/enums/ResultStatusEnum.java)11
-rw-r--r--src/main/java/com/mesalab/common/enums/JobHandlerOption.java (renamed from src/main/java/com/mesalab/common/enums/JobHandlerEnum.java)5
-rw-r--r--src/main/java/com/mesalab/common/enums/MetadataType.java (renamed from src/main/java/com/mesalab/common/enums/MetadataTypeEnum.java)4
-rw-r--r--src/main/java/com/mesalab/common/enums/OutputMode.java36
-rw-r--r--src/main/java/com/mesalab/common/enums/QueryFormatEnum.java11
-rw-r--r--src/main/java/com/mesalab/common/enums/QueryOption.java (renamed from src/main/java/com/mesalab/common/enums/QueryOptionEnum.java)4
-rw-r--r--src/main/java/com/mesalab/common/enums/QueryParam.java (renamed from src/main/java/com/mesalab/common/enums/QueryParamEnum.java)4
-rw-r--r--src/main/java/com/mesalab/common/enums/ResultCodeEnum.java8
-rw-r--r--src/main/java/com/mesalab/common/enums/SessionOption.java (renamed from src/main/java/com/mesalab/common/enums/SessionEnum.java)4
-rw-r--r--src/main/java/com/mesalab/common/exception/BusinessException.java8
-rw-r--r--src/main/java/com/mesalab/common/exception/CommonErrorCode.java37
-rw-r--r--src/main/java/com/mesalab/common/nacos/NacosConst.java2
-rw-r--r--src/main/java/com/mesalab/common/utils/HazelcastInstanceMapUtil.java49
-rw-r--r--src/main/java/com/mesalab/common/utils/JsonMapper.java429
-rw-r--r--src/main/java/com/mesalab/common/utils/JsonSchemaValidator.java148
-rw-r--r--src/main/java/com/mesalab/common/utils/MDCUtil.java23
-rw-r--r--src/main/java/com/mesalab/common/utils/QueryCacheUtils.java1
-rw-r--r--src/main/java/com/mesalab/common/utils/RandomNumberGenerator.java80
-rw-r--r--src/main/java/com/mesalab/common/utils/SavedQueryResultUtils.java31
-rw-r--r--src/main/java/com/mesalab/common/utils/sqlparser/AutoPeriodHelper.java469
-rw-r--r--src/main/java/com/mesalab/common/utils/sqlparser/ColumnCategoryHelper.java96
-rw-r--r--src/main/java/com/mesalab/common/utils/sqlparser/CondExpressionHelper.java21
-rw-r--r--src/main/java/com/mesalab/common/utils/sqlparser/ExampleDataHelper.java412
-rw-r--r--src/main/java/com/mesalab/common/utils/sqlparser/ExpressionColumnCollectAdapter.java29
-rw-r--r--src/main/java/com/mesalab/common/utils/sqlparser/FunctionsMergeHelper.java382
-rw-r--r--src/main/java/com/mesalab/common/utils/sqlparser/QueryTypeHelper.java92
-rw-r--r--src/main/java/com/mesalab/common/utils/sqlparser/SQLFunctionUtil.java467
-rw-r--r--src/main/java/com/mesalab/common/utils/sqlparser/SQLHelper.java110
-rw-r--r--src/main/java/com/mesalab/common/utils/sqlparser/SQLQueryTypeHelper.java0
-rw-r--r--src/main/java/com/mesalab/common/utils/sqlparser/SQLSyntaxParserUtil.java13
-rw-r--r--src/main/java/com/mesalab/common/utils/sqlparser/SQLVisitorUtil.java6
-rw-r--r--src/main/java/com/mesalab/common/utils/sqlparser/SampleSQLHelper.java (renamed from src/main/java/com/mesalab/common/utils/sqlparser/DruidSampleSQLHelper.java)64
-rw-r--r--src/main/java/com/mesalab/common/utils/sqlparser/SampleSQLHelperOfDataset.java (renamed from src/main/java/com/mesalab/common/utils/sqlparser/ClickhouseSampleSQLHelper.java)10
-rw-r--r--src/main/java/com/mesalab/common/utils/sqlparser/SelectItemHelper.java143
-rw-r--r--src/main/java/com/mesalab/common/utils/sqlparser/TopSQLVisitor.java6
-rw-r--r--src/main/java/com/mesalab/knowledge/controller/KnowledgeController.java17
-rw-r--r--src/main/java/com/mesalab/knowledge/service/KnowledgeService.java97
-rw-r--r--src/main/java/com/mesalab/knowledge/strategy/BaseQueryProvider.java47
-rw-r--r--src/main/java/com/mesalab/knowledge/strategy/FqdnProviderImpl.java19
-rw-r--r--src/main/java/com/mesalab/knowledge/strategy/SubscriberIdProviderImpl.java105
-rw-r--r--src/main/java/com/mesalab/network/controller/NetworkMonitorController.java115
-rw-r--r--src/main/java/com/mesalab/network/dsl/DSLObject.java43
-rw-r--r--src/main/java/com/mesalab/network/dsl/DSLValidate.java162
-rw-r--r--src/main/java/com/mesalab/network/exception/NWErrorCode.java31
-rw-r--r--src/main/java/com/mesalab/network/exception/NWErrorMessage.java29
-rw-r--r--src/main/java/com/mesalab/network/service/NetworkMonitorService.java30
-rw-r--r--src/main/java/com/mesalab/network/service/impl/NetworkMonitorServiceImpl.java607
-rw-r--r--src/main/java/com/mesalab/qgw/aspect/AuditLogAspect.java100
-rw-r--r--src/main/java/com/mesalab/qgw/benchmark/DialectWriter.java24
-rw-r--r--src/main/java/com/mesalab/qgw/benchmark/Writer.java90
-rw-r--r--src/main/java/com/mesalab/qgw/constant/DataTypeConst.java23
-rw-r--r--src/main/java/com/mesalab/qgw/constant/DslIdentifierNameConst.java34
-rw-r--r--src/main/java/com/mesalab/qgw/constant/ExampleDataModeConst.java14
-rw-r--r--src/main/java/com/mesalab/qgw/constant/MetaConst.java18
-rw-r--r--src/main/java/com/mesalab/qgw/constant/QGWMessageConst.java27
-rw-r--r--src/main/java/com/mesalab/qgw/constant/dsl/LiveChartConstants.java (renamed from src/main/java/com/mesalab/network/common/Constants.java)24
-rw-r--r--src/main/java/com/mesalab/qgw/constant/dsl/TrafficSpectrumConstants.java46
-rw-r--r--src/main/java/com/mesalab/qgw/controller/DatabaseController.java153
-rw-r--r--src/main/java/com/mesalab/qgw/controller/DatasetController.java62
-rw-r--r--src/main/java/com/mesalab/qgw/controller/DiagnosisController.java50
-rw-r--r--src/main/java/com/mesalab/qgw/controller/DslController.java56
-rw-r--r--src/main/java/com/mesalab/qgw/controller/HosController.java84
-rw-r--r--src/main/java/com/mesalab/qgw/controller/MetadataController.java95
-rw-r--r--src/main/java/com/mesalab/qgw/controller/QueryController.java260
-rw-r--r--src/main/java/com/mesalab/qgw/controller/SystemController.java74
-rw-r--r--src/main/java/com/mesalab/qgw/controller/TestController.java38
-rw-r--r--src/main/java/com/mesalab/qgw/controller/TroubleshootingController.java67
-rw-r--r--src/main/java/com/mesalab/qgw/controller/UtilController.java41
-rw-r--r--src/main/java/com/mesalab/qgw/dialect/AbstractDataSourceDialect.java87
-rw-r--r--src/main/java/com/mesalab/qgw/dialect/AbstractEngineDialect.java14
-rw-r--r--src/main/java/com/mesalab/qgw/dialect/ClickHouseDialect.java371
-rw-r--r--src/main/java/com/mesalab/qgw/dialect/Dialect.java2
-rw-r--r--src/main/java/com/mesalab/qgw/dialect/DruidDialect.java273
-rw-r--r--src/main/java/com/mesalab/qgw/dialect/FederationDialect.java202
-rw-r--r--src/main/java/com/mesalab/qgw/dialect/HbaseDialect.java91
-rw-r--r--src/main/java/com/mesalab/qgw/exception/QGWBusinessException.java12
-rw-r--r--src/main/java/com/mesalab/qgw/exception/QGWErrorCode.java37
-rw-r--r--src/main/java/com/mesalab/qgw/filter/ControllerFilter.java12
-rw-r--r--src/main/java/com/mesalab/qgw/filter/RequestParamWrapper.java8
-rw-r--r--src/main/java/com/mesalab/qgw/interceptor/GlobalExceptionHandler.java30
-rw-r--r--src/main/java/com/mesalab/qgw/interceptor/QuerySubmitInterceptor.java11
-rw-r--r--src/main/java/com/mesalab/qgw/listener/KnowledgeListener.java185
-rw-r--r--src/main/java/com/mesalab/qgw/listener/KnowledgeScheduler.java117
-rw-r--r--src/main/java/com/mesalab/qgw/model/basic/AuditServiceLog.java8
-rw-r--r--src/main/java/com/mesalab/qgw/model/basic/ClickHouseHttpSource.java5
-rw-r--r--src/main/java/com/mesalab/qgw/model/basic/ClickHouseQueryParam.java (renamed from src/main/java/com/mesalab/qgw/model/basic/ClickHouseHttpQuery.java)5
-rw-r--r--src/main/java/com/mesalab/qgw/model/basic/CommonRequestParam.java57
-rw-r--r--src/main/java/com/mesalab/qgw/model/basic/DSLQueryContext.java (renamed from src/main/java/com/mesalab/qgw/model/basic/DSLProfile.java)16
-rw-r--r--src/main/java/com/mesalab/qgw/model/basic/DSLQueryRequestParam.java47
-rw-r--r--src/main/java/com/mesalab/qgw/model/basic/DruidIoHttpSource.java1
-rw-r--r--src/main/java/com/mesalab/qgw/model/basic/EngineConfigSource.java15
-rw-r--r--src/main/java/com/mesalab/qgw/model/basic/EntityConfigSource.java66
-rw-r--r--src/main/java/com/mesalab/qgw/model/basic/HttpResponseResult.java22
-rw-r--r--src/main/java/com/mesalab/qgw/model/basic/QueryCache.java40
-rw-r--r--src/main/java/com/mesalab/qgw/model/basic/QueryProfile.java42
-rw-r--r--src/main/java/com/mesalab/qgw/model/basic/SQLQueryContext.java35
-rw-r--r--src/main/java/com/mesalab/qgw/model/basic/SQLQuerySource.java82
-rw-r--r--src/main/java/com/mesalab/qgw/model/basic/SelectStatement.java103
-rw-r--r--src/main/java/com/mesalab/qgw/model/basic/SqlQueryRequestParam.java31
-rw-r--r--src/main/java/com/mesalab/qgw/model/basic/udf/IP_TO_ASN.java10
-rw-r--r--src/main/java/com/mesalab/qgw/model/basic/udf/IP_TO_ASN_DETAIL.java10
-rw-r--r--src/main/java/com/mesalab/qgw/model/basic/udf/IP_TO_ASN_ORG.java10
-rw-r--r--src/main/java/com/mesalab/qgw/model/basic/udf/IP_TO_CITY.java10
-rw-r--r--src/main/java/com/mesalab/qgw/model/basic/udf/IP_TO_COUNTRY.java10
-rw-r--r--src/main/java/com/mesalab/qgw/model/basic/udf/IP_TO_GEO.java10
-rw-r--r--src/main/java/com/mesalab/qgw/model/basic/udf/IP_TO_ISP.java10
-rw-r--r--src/main/java/com/mesalab/qgw/model/basic/udf/ROLLUP.java114
-rw-r--r--src/main/java/com/mesalab/qgw/model/basic/udf/TIME_FLOOR_WITH_FILL.java37
-rw-r--r--src/main/java/com/mesalab/qgw/model/basic/udf/UDF.java4
-rw-r--r--src/main/java/com/mesalab/qgw/model/dsl/LiveChartProtocol.java (renamed from src/main/java/com/mesalab/network/model/protocol/ProtocolTree.java)30
-rw-r--r--src/main/java/com/mesalab/qgw/monitor/ArangoHealthIndicator.java5
-rw-r--r--src/main/java/com/mesalab/qgw/monitor/ClickHouseHealthIndicator.java12
-rw-r--r--src/main/java/com/mesalab/qgw/monitor/DruidHealthIndicator.java6
-rw-r--r--src/main/java/com/mesalab/qgw/monitor/HbaseHealthIndicator.java19
-rw-r--r--src/main/java/com/mesalab/qgw/monitor/HosHealthIndicator.java4
-rw-r--r--src/main/java/com/mesalab/qgw/monitor/JVMHealthIndicator.java2
-rw-r--r--src/main/java/com/mesalab/qgw/monitor/JobAdminHealthIndicator.java6
-rw-r--r--src/main/java/com/mesalab/qgw/monitor/MariaDBHealthIndicator.java7
-rw-r--r--src/main/java/com/mesalab/qgw/monitor/NacosHealthIndicator.java8
-rw-r--r--src/main/java/com/mesalab/qgw/service/DSLService.java (renamed from src/main/java/com/mesalab/qgw/service/DslService.java)46
-rw-r--r--src/main/java/com/mesalab/qgw/service/DatabaseService.java (renamed from src/main/java/com/mesalab/qgw/service/MetadataService.java)68
-rw-r--r--src/main/java/com/mesalab/qgw/service/DatasetService.java57
-rw-r--r--src/main/java/com/mesalab/qgw/service/DiagnosisService.java16
-rw-r--r--src/main/java/com/mesalab/qgw/service/HosService.java20
-rw-r--r--src/main/java/com/mesalab/qgw/service/PacketCombineDslService.java26
-rw-r--r--src/main/java/com/mesalab/qgw/service/QueryJobService.java66
-rw-r--r--src/main/java/com/mesalab/qgw/service/QueryService.java10
-rw-r--r--src/main/java/com/mesalab/qgw/service/RewriteTable.java5
-rw-r--r--src/main/java/com/mesalab/qgw/service/SQLSyncQueryService.java10
-rw-r--r--src/main/java/com/mesalab/qgw/service/SystemService.java64
-rw-r--r--src/main/java/com/mesalab/qgw/service/TrafficSpectrumDslService.java39
-rw-r--r--src/main/java/com/mesalab/qgw/service/TroubleshootingService.java16
-rw-r--r--src/main/java/com/mesalab/qgw/service/UtilService.java10
-rw-r--r--src/main/java/com/mesalab/qgw/service/impl/DatabaseServiceImpl.java1197
-rw-r--r--src/main/java/com/mesalab/qgw/service/impl/DatasetServiceImp.java206
-rw-r--r--src/main/java/com/mesalab/qgw/service/impl/DiagnosisServiceImpl.java650
-rw-r--r--src/main/java/com/mesalab/qgw/service/impl/DslServiceImpl.java648
-rw-r--r--src/main/java/com/mesalab/qgw/service/impl/HosServiceImp.java250
-rw-r--r--src/main/java/com/mesalab/qgw/service/impl/HttpClientService.java43
-rw-r--r--src/main/java/com/mesalab/qgw/service/impl/HttpClientServiceV2.java366
-rw-r--r--src/main/java/com/mesalab/qgw/service/impl/MetadataServiceImpl.java575
-rw-r--r--src/main/java/com/mesalab/qgw/service/impl/PacketCombineDslServiceImpl.java268
-rw-r--r--src/main/java/com/mesalab/qgw/service/impl/QueryJobServiceImpl.java494
-rw-r--r--src/main/java/com/mesalab/qgw/service/impl/QueryServiceImpl.java618
-rw-r--r--src/main/java/com/mesalab/qgw/service/impl/SQLSyncQueryServiceImpl.java666
-rw-r--r--src/main/java/com/mesalab/qgw/service/impl/SystemServiceImpl.java650
-rw-r--r--src/main/java/com/mesalab/qgw/service/impl/TrafficSpectrumDslServiceImpl.java588
-rw-r--r--src/main/java/com/mesalab/qgw/service/impl/TroubleshootingServiceImp.java789
-rw-r--r--src/main/java/com/mesalab/qgw/service/impl/UtilServiceImp.java36
-rw-r--r--src/main/java/com/mesalab/services/common/dsl/ComDSLParse.java16
-rw-r--r--src/main/java/com/mesalab/services/common/dsl/ComDSLValidate.java96
-rw-r--r--src/main/java/com/mesalab/services/common/entity/KnowledgeBaseRequest.java37
-rw-r--r--src/main/java/com/mesalab/services/common/enums/EntityQueryType.java21
-rw-r--r--src/main/java/com/mesalab/services/common/enums/UnstructuredDataType.java14
-rw-r--r--src/main/java/com/mesalab/services/configuration/JobConfig.java43
-rw-r--r--src/main/java/com/mesalab/services/configuration/JobThreadPoolCfg.java41
-rw-r--r--src/main/java/com/mesalab/services/configuration/TaskConfig.java4
-rw-r--r--src/main/java/com/mesalab/services/configuration/ThreadPoolMonitor.java60
-rw-r--r--src/main/java/com/mesalab/services/controller/EntityController.java57
-rw-r--r--src/main/java/com/mesalab/services/controller/JobController.java192
-rw-r--r--src/main/java/com/mesalab/services/controller/KBController.java157
-rw-r--r--src/main/java/com/mesalab/services/controller/KnowledgeBaseController.java99
-rw-r--r--src/main/java/com/mesalab/services/controller/RelationController.java53
-rw-r--r--src/main/java/com/mesalab/services/controller/SQLDatasetController.java79
-rw-r--r--src/main/java/com/mesalab/services/controller/UnstructuredController.java54
-rw-r--r--src/main/java/com/mesalab/services/service/EntityService.java50
-rw-r--r--src/main/java/com/mesalab/services/service/KBService.java17
-rw-r--r--src/main/java/com/mesalab/services/service/KnowledgeBaseService.java21
-rw-r--r--src/main/java/com/mesalab/services/service/RelationService.java10
-rw-r--r--src/main/java/com/mesalab/services/service/SQLDatasetService.java69
-rw-r--r--src/main/java/com/mesalab/services/service/UnstructuredService.java12
-rw-r--r--src/main/java/com/mesalab/services/service/impl/EntityServiceImp.java581
-rw-r--r--src/main/java/com/mesalab/services/service/impl/JobExecuteService.java683
-rw-r--r--src/main/java/com/mesalab/services/service/impl/JobServiceImpl.java (renamed from src/main/java/com/mesalab/services/service/impl/JobServiceImp.java)203
-rw-r--r--src/main/java/com/mesalab/services/service/impl/KBServiceImp.java362
-rw-r--r--src/main/java/com/mesalab/services/service/impl/KnowledgeBaseServiceImpl.java516
-rw-r--r--src/main/java/com/mesalab/services/service/impl/RelationServiceImpl.java326
-rw-r--r--src/main/java/com/mesalab/services/service/impl/SQLDatasetServiceImp.java421
-rw-r--r--src/main/java/com/mesalab/services/service/impl/SQLDatasetServiceImpl.java88
-rw-r--r--src/main/java/com/mesalab/services/service/impl/TaskExecuteService.java152
-rw-r--r--src/main/java/com/mesalab/services/service/impl/UnstructuredServiceImpl.java222
-rw-r--r--src/main/resources/dsl-sql-template.sql32
-rw-r--r--src/main/resources/dsl-validation.json30
-rw-r--r--src/main/resources/http-sql-template.sql121
-rw-r--r--src/main/resources/job-sql-template.sql8
-rw-r--r--src/main/resources/schema-syntax-validation.json120
-rw-r--r--src/test/java/com/mesalab/GalaxyQGWApplicationTests.java6
-rw-r--r--src/test/java/com/mesalab/knowledge/JsonSchemaTest.java107
-rw-r--r--src/test/java/com/mesalab/qgw/service/ApplicationAndProtocolTest.java113
-rw-r--r--src/test/java/com/mesalab/qgw/service/AutoPeriodDSLTest.java204
-rw-r--r--src/test/java/com/mesalab/qgw/service/AutoPeriodSQLTest.java227
-rw-r--r--src/test/java/com/mesalab/qgw/service/AvroSchemaFormatTest.java40
-rw-r--r--src/test/java/com/mesalab/qgw/service/ClickHouseTest.java5
-rw-r--r--src/test/java/com/mesalab/qgw/service/DruidTest.java5
-rw-r--r--src/test/java/com/mesalab/qgw/service/EngineTest.java12
-rw-r--r--src/test/java/com/mesalab/qgw/service/EntityTest.java66
-rw-r--r--src/test/java/com/mesalab/qgw/service/ExampleDataTest.java62
-rw-r--r--src/test/java/com/mesalab/qgw/service/FieldDiscoveryTest.java114
-rw-r--r--src/test/java/com/mesalab/qgw/service/HttpClientServiceTest.java24
-rw-r--r--src/test/java/com/mesalab/qgw/service/JobTest.java98
-rw-r--r--src/test/java/com/mesalab/qgw/service/JsonSchemaValidatorTest.java86
-rw-r--r--src/test/java/com/mesalab/qgw/service/KnowledgeBaseTest.java55
-rw-r--r--src/test/java/com/mesalab/qgw/service/MergeFunctionsHelperTest.java36
-rw-r--r--src/test/java/com/mesalab/qgw/service/NetworkMonitorTest.java52
-rw-r--r--src/test/java/com/mesalab/qgw/service/ProtocolTreeTest.java11
-rw-r--r--src/test/java/com/mesalab/qgw/service/RecommendTest.java48
-rw-r--r--src/test/java/com/mesalab/qgw/service/SQLAdHocTest.java123
-rw-r--r--src/test/java/com/mesalab/qgw/service/SQLSavedTest.java41
-rw-r--r--src/test/java/com/mesalab/qgw/service/SchemaDynamicTest.java (renamed from src/test/java/com/mesalab/qgw/service/AvroSchemaDynamicTest.java)6
-rw-r--r--src/test/java/com/mesalab/qgw/service/TopEntityTest.java87
-rw-r--r--src/test/java/com/mesalab/qgw/service/UnstructuredTest.java44
-rw-r--r--src/test/resources/examples/invalidDSLRequestTest.json50
-rw-r--r--src/test/resources/examples/invalidSessionRecordTest.json6764
-rw-r--r--src/test/resources/examples/validDSLRequestTest.json55
-rw-r--r--src/test/resources/parameters/applicationAndProtocolTest.json60
-rw-r--r--src/test/resources/parameters/dslAutoGranularityTest.json27
-rw-r--r--src/test/resources/parameters/entityTest.json132
-rw-r--r--src/test/resources/parameters/fieldDiscoveryTest.json35
-rw-r--r--src/test/resources/parameters/jobTest.json37
-rw-r--r--src/test/resources/parameters/knowledgeBase.json20
-rw-r--r--src/test/resources/parameters/recommendTest.json20
-rw-r--r--src/test/resources/parameters/sqlAdHocTest.json29
-rw-r--r--src/test/resources/parameters/sqlSavedTest.json6
-rw-r--r--src/test/resources/parameters/unstructuredTest.json54
246 files changed, 21062 insertions, 10287 deletions
diff --git a/src/main/java/com/mesalab/calcite/CalciteMemoryUtils.java b/src/main/java/com/mesalab/calcite/CalciteConnectionProcessor.java
index 6c7fadc4..48c95f7b 100644
--- a/src/main/java/com/mesalab/calcite/CalciteMemoryUtils.java
+++ b/src/main/java/com/mesalab/calcite/CalciteConnectionProcessor.java
@@ -1,7 +1,5 @@
package com.mesalab.calcite;
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
import com.google.common.base.Splitter;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
@@ -9,6 +7,7 @@ import com.mesalab.calcite.storage.DataTypeMapping;
import com.mesalab.calcite.storage.Storage;
import com.mesalab.common.exception.BusinessException;
import com.zdjizhi.calcite.jdbc.CalciteConnection;
+import lombok.extern.slf4j.Slf4j;
import java.io.File;
import java.io.IOException;
@@ -18,12 +17,13 @@ import java.util.*;
import static com.geedgenetworks.utils.StringUtil.setDefaultIfEmpty;
-public class CalciteMemoryUtils {
- private static final Log log = LogFactory.get();
+@Slf4j
+public class CalciteConnectionProcessor {
private static final String URL_PREFIX = "jdbc:calcite:model=";
private static final String URL_SUFFIX = "config" + File.separator + "memorySchema.json";
-
+ private CalciteConnectionProcessor() {
+ }
static {
try {
Class.forName("com.zdjizhi.calcite.jdbc.Driver");
@@ -32,8 +32,22 @@ public class CalciteMemoryUtils {
}
}
- public static Map<String, Object> executeMemoryQuery(String tableName, List<Map<String, String>> meta,
+ private static class CalciteQueryHolder {
+ private static final CalciteConnectionProcessor INSTANCE = new CalciteConnectionProcessor();
+ }
+
+ public static CalciteConnectionProcessor getInstance() {
+ return CalciteQueryHolder.INSTANCE;
+ }
+
+
+
+ public Map<String, Object> executeMemoryQuery(String tableName, List<Map<String, String>> meta,
List<Map<String, Object>> data, String sql) {
+ CalciteConnection connection = null;
+ ResultSet resultSet = null;
+ PreparedStatement statement = null;
+
Map<String, Object> result = Maps.newHashMap();
if (meta.isEmpty() || data.isEmpty()) {
data.clear();
@@ -42,9 +56,8 @@ public class CalciteMemoryUtils {
result.put("data", data);
return result;
}
- ResultSet resultSet = null;
- PreparedStatement statement = null;
- CalciteConnection connection = null;
+
+
try {
connection = getConnection().unwrap(CalciteConnection.class);
Storage.DummyTable table = buildDummyTable(meta, tableName);
@@ -89,7 +102,7 @@ public class CalciteMemoryUtils {
}
- private static Connection getConnection() throws IOException, SQLException {
+ private Connection getConnection() throws IOException, SQLException {
String url = URL_PREFIX + new File("").getCanonicalPath() + File.separator + URL_SUFFIX;
Properties info = new Properties();
info.setProperty("lex", "MYSQL_ANSI");
@@ -101,7 +114,7 @@ public class CalciteMemoryUtils {
- private static void close(ResultSet rs, PreparedStatement pst, Connection conn) {
+ private void close(ResultSet rs, PreparedStatement pst, Connection conn) {
if (rs != null) {
try {
rs.close();
@@ -136,7 +149,7 @@ public class CalciteMemoryUtils {
* @param data 原始结果集合
* @return
*/
- private static Storage.DummyTable loadData(Storage.DummyTable table, List<Map<String, Object>> data) {
+ private Storage.DummyTable loadData(Storage.DummyTable table, List<Map<String, Object>> data) {
List<Storage.DummyColumn> columns = table.getColumns();
data.stream().forEach(item -> {
List list = Lists.newArrayList();
@@ -164,7 +177,7 @@ public class CalciteMemoryUtils {
* @param tableName 表名
* @return
*/
- private static Storage.DummyTable buildDummyTable(List<Map<String, String>> meta, String tableName) {
+ private Storage.DummyTable buildDummyTable(List<Map<String, String>> meta, String tableName) {
Storage.DummyTable dummyTable = new Storage.DummyTable(tableName);
for (Map<String, String> column : meta) {
String fieldName = column.get("name");
@@ -183,7 +196,7 @@ public class CalciteMemoryUtils {
return dummyTable;
}
- private static final String intUnits = "Int8,Int16,Int32,Int64,UInt8,UInt16,UInt32,UInt64,bigint,int,long";
- private static final String doubleUnits = "Float32,Float64,float,double";
+ private final String intUnits = "Int8,Int16,Int32,Int64,UInt8,UInt16,UInt32,UInt64,bigint,int,long";
+ private final String doubleUnits = "Float32,Float64,float,double";
}
diff --git a/src/main/java/com/mesalab/cn/component/DslValidate.java b/src/main/java/com/mesalab/cn/component/DslValidate.java
index 4ba5d96b..c357bc42 100644
--- a/src/main/java/com/mesalab/cn/component/DslValidate.java
+++ b/src/main/java/com/mesalab/cn/component/DslValidate.java
@@ -3,12 +3,11 @@ package com.mesalab.cn.component;
import com.geedgenetworks.utils.StringUtil;
import com.mesalab.cn.entity.pojo.DSLObject;
import com.mesalab.cn.enums.*;
-import com.mesalab.cn.exception.CNErrorCode;
import com.mesalab.cn.exception.CNErrorMessage;
-import com.mesalab.common.enums.ResultCodeEnum;
-import com.mesalab.common.enums.ResultStatusEnum;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
import com.mesalab.common.exception.BusinessException;
-import com.mesalab.qgw.service.MetadataService;
+import com.mesalab.common.exception.CommonErrorCode;
+import com.mesalab.qgw.service.DatabaseService;
import org.apache.commons.lang3.EnumUtils;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
@@ -34,80 +33,67 @@ public class DslValidate {
public static final Pattern strFormatDateTime = Pattern.compile("\\d{4}-\\d{2}-\\d{2}\\s+\\d{2}:\\d{2}:\\d{2}", Pattern.CASE_INSENSITIVE);
@Autowired
- private MetadataService metadataService;
+ private DatabaseService databaseService;
public void executeValidate(DSLObject dslObject) throws BusinessException {
if (StringUtil.isEmpty(dslObject)) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),CNErrorMessage.DSL_OBJECT_IS_INVALID));
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), CNErrorMessage.DSL_OBJECT_IS_INVALID));
}
DSLObject.Query query = dslObject.getQuery();
if (StringUtil.isEmpty(query)) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),CNErrorMessage.QUERY_IS_INVALID));
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), CNErrorMessage.QUERY_IS_INVALID));
}
if (StringUtil.isEmpty(EngineTypeEnum.getByEngine(query.getDataEngine()))) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),CNErrorMessage.DATA_ENGINE_IS_INVALID));
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), CNErrorMessage.DATA_ENGINE_IS_INVALID));
}
if (ANALYSIS_ENGINE.getEngine().equals(query.getDataEngine())) {
if (StringUtil.isBlank(query.getDataSource())) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),CNErrorMessage.DATASOURCE_IS_INVALID));
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), CNErrorMessage.DATASOURCE_IS_INVALID));
}
} else {
- if (StringUtil.isBlank(metadataService.getDBTypeByTableName(query.getDataSource()))) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),CNErrorMessage.DATASOURCE_IS_INVALID));
+ if (StringUtil.isBlank(databaseService.getDBEngineByTableName(query.getDataSource()))) {
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), CNErrorMessage.DATASOURCE_IS_INVALID));
}
}
DSLObject.Query.Parameters parameters = query.getParameters();
if (StringUtil.isEmpty(parameters)) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),CNErrorMessage.PARAMETERS_IS_INVALID));
- }
- if (!isValidMatch(parameters.getMatch())) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),CNErrorMessage.MATCH_IS_INVALID));
- }
- if (!isValidRange(parameters.getRange())) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),CNErrorMessage.RANGE_TYPE_IS_INVALID));
- }
- if (!isValidDistinct(parameters.getDistinct())) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),CNErrorMessage.DISTINCT_TYPE_IS_INVALID));
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), CNErrorMessage.PARAMETERS_IS_INVALID));
}
+ validMatch(parameters.getMatch());
+ validRange(parameters.getRange());
+ validDistinct(parameters.getDistinct());
if (!isValidIntervals(parameters.getIntervals())) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),CNErrorMessage.INTERVALS_TYPE_IS_INVALID));
- }
- if (!isValidSort(parameters.getSort())) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),CNErrorMessage.SORT_TYPE_IS_INVALID));
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), CNErrorMessage.INTERVALS_TYPE_IS_INVALID));
}
+ validSort(parameters.getSort());
}
/**
* 校验sort:
- * 1.是否属于{@link RangeTypeEnum}限定类型
+ * 通过: 属于{@link RangeTypeEnum}限定类型
*
* @param sort
* @return
*/
- private boolean isValidSort(List<DSLObject.Query.Parameters.Sort> sort) {
+ private void validSort(List<DSLObject.Query.Parameters.Sort> sort) {
if (CollectionUtils.isEmpty(sort)) {
- return true;
+ return;
}
sort.forEach(o -> {
if (!StringUtil.isBlank(o.getType())) {
if (StringUtil.isEmpty(SortTypeEnum.getByType(StringUtil.lowerCase(o.getType())))) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),CNErrorMessage.SORT_TYPE_IS_INVALID));
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), CNErrorMessage.SORT_TYPE_IS_INVALID));
}
}
});
- return true;
}
/**
@@ -129,19 +115,19 @@ public class DslValidate {
}
for (String dateTimeStr : split) {
if (!strFormatDateTime.matcher(dateTimeStr).find()) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),CNErrorMessage.TIME_FORMAT_ERROR));
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), CNErrorMessage.TIME_FORMAT_ERROR));
}
}
try {
DateTimeFormatter dateTimeFormatter = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss");
if (dateTimeFormatter.parseMillis(split[1]) < dateTimeFormatter.parseMillis(split[0])) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),CNErrorMessage.INTERVALS_VALUE_ERROR));
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), CNErrorMessage.INTERVALS_VALUE_ERROR));
}
} catch (RuntimeException e) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),CNErrorMessage.TIME_FORMAT_ERROR));
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), CNErrorMessage.TIME_FORMAT_ERROR));
}
return true;
}
@@ -149,79 +135,74 @@ public class DslValidate {
/**
* 校验Distinct:
- * 1.是否属于{@link RangeTypeEnum}限定类型
+ * 通过: 属于{@link RangeTypeEnum}限定类型
*
* @param distinct
* @return
*/
- private boolean isValidDistinct(DSLObject.Query.Parameters.Distinct distinct) {
+ private void validDistinct(DSLObject.Query.Parameters.Distinct distinct) {
 if (StringUtil.isEmpty(distinct)) {
- return true;
+ return;
 }
 validLogicType(distinct.getLogicType());
 distinct.getCondition().forEach(o -> {
 if (StringUtil.isNotBlank(o.getType())) {
- if (!EnumUtils.isValidEnum(RangeTypeEnum.class, StringUtil.upperCase(o.getType()))){
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),CNErrorMessage.DISTINCT_TYPE_ERROR));
+ if (!EnumUtils.isValidEnum(RangeTypeEnum.class, StringUtil.upperCase(o.getType()))) {
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), CNErrorMessage.DISTINCT_TYPE_ERROR));
 }
 }
 });
- return true;
 }
/**
* 校验range:
- * 1.是否属于{@link RangeTypeEnum}限定类型
+ * 通过: 属于{@link RangeTypeEnum}限定类型
*
* @param range
* @return
*/
- private boolean isValidRange(DSLObject.Query.Parameters.Range range) {
+ private void validRange(DSLObject.Query.Parameters.Range range) {
if (StringUtil.isEmpty(range)) {
- return true;
+ return;
}
validLogicType(range.getLogicType());
range.getCondition().forEach(o -> {
if (StringUtil.isNotBlank(o.getType())) {
- if (!EnumUtils.isValidEnum(RangeTypeEnum.class, StringUtil.upperCase(o.getType()))){
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),CNErrorMessage.RANGE_TYPE_ERROR));
+ if (!EnumUtils.isValidEnum(RangeTypeEnum.class, StringUtil.upperCase(o.getType()))) {
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), CNErrorMessage.RANGE_TYPE_ERROR));
}
}
});
- return true;
}
/**
* 校验match:
- * 1.是否属于{@link MatchTypeEnum}限定类型
+ * 通过: 属于{@link MatchTypeEnum}限定类型
*
* @param match
* @return
*/
- private boolean isValidMatch(DSLObject.Query.Parameters.Match match) {
+ private void validMatch(DSLObject.Query.Parameters.Match match) {
if (StringUtil.isEmpty(match)) {
- return true;
+ return;
}
String logicType = match.getLogicType();
validLogicType(logicType);
match.getCondition().forEach(o -> {
if (StringUtil.isNotBlank(o.getType())) {
- if (!EnumUtils.isValidEnum(MatchTypeEnum.class, StringUtil.upperCase(o.getType()))){
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),CNErrorMessage.MATCH_TYPE_ERROR));
+ if (!EnumUtils.isValidEnum(MatchTypeEnum.class, StringUtil.upperCase(o.getType()))) {
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), CNErrorMessage.MATCH_TYPE_ERROR));
}
}
});
- return true;
}
private void validLogicType(String logicType) {
if (StringUtil.isNotBlank(logicType)) {
if (StringUtil.isEmpty(LogicTypeEnum.getByType(StringUtil.lowerCase(logicType)))) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),CNErrorMessage.LOGIC_TYPE_IS_INVALID));
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), CNErrorMessage.LOGIC_TYPE_IS_INVALID));
}
}
}
diff --git a/src/main/java/com/mesalab/cn/entity/pojo/CNParser.java b/src/main/java/com/mesalab/cn/entity/pojo/CNParser.java
index 128aef7c..008d53b6 100644
--- a/src/main/java/com/mesalab/cn/entity/pojo/CNParser.java
+++ b/src/main/java/com/mesalab/cn/entity/pojo/CNParser.java
@@ -4,12 +4,10 @@ import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.geedgenetworks.utils.StringUtil;
import com.google.common.base.Joiner;
-import com.mesalab.cn.constant.ErrorMessage;
import com.mesalab.cn.constant.SqlKeywords;
import com.mesalab.cn.enums.RangeTypeEnum;
import com.mesalab.cn.exception.CNErrorCode;
-import com.mesalab.common.enums.ResultCodeEnum;
-import com.mesalab.common.enums.ResultStatusEnum;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
import com.mesalab.common.exception.BusinessException;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
@@ -43,7 +41,7 @@ public class CNParser extends DSLParser {
selectSql = parseQueryOfShareNet(dslObject, selectSql);
} catch (RuntimeException e) {
log.error("Build SQL Exception:{}",e);
- throw new BusinessException(ResultStatusEnum.SERVER_ERROR.getCode(), CNErrorCode.SQL_BUILDER_EXCEPTION.getCode(),
+ throw new BusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CNErrorCode.SQL_BUILDER_EXCEPTION.getCode(),
String.format(CNErrorCode.SQL_BUILDER_EXCEPTION.getMessage(),e.getMessage()));
}
return selectSql;
@@ -58,7 +56,7 @@ public class CNParser extends DSLParser {
selectSql = parseQueryOfBannedApp(dslObject, selectSql);
} catch (RuntimeException e) {
log.error("Build SQL Exception:{}",e);
- throw new BusinessException(ResultStatusEnum.SERVER_ERROR.getCode(), CNErrorCode.SQL_BUILDER_EXCEPTION.getCode(),
+ throw new BusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CNErrorCode.SQL_BUILDER_EXCEPTION.getCode(),
String.format(CNErrorCode.SQL_BUILDER_EXCEPTION.getMessage(),e.getMessage()));
}
return selectSql;
diff --git a/src/main/java/com/mesalab/cn/service/impl/CNServiceImpl.java b/src/main/java/com/mesalab/cn/service/impl/CNServiceImpl.java
index 3e7ea389..22958c1b 100644
--- a/src/main/java/com/mesalab/cn/service/impl/CNServiceImpl.java
+++ b/src/main/java/com/mesalab/cn/service/impl/CNServiceImpl.java
@@ -5,8 +5,8 @@ import cn.hutool.log.LogFactory;
import com.geedgenetworks.utils.StringUtil;
import com.mesalab.cn.entity.pojo.*;
import com.mesalab.cn.service.CNService;
-import com.mesalab.qgw.model.basic.QueryProfile;
-import com.mesalab.qgw.service.QueryService;
+import com.mesalab.qgw.model.basic.SQLQueryContext;
+import com.mesalab.qgw.service.SQLSyncQueryService;
import com.mesalab.ua.analyser.UaAnalyser;
import com.mesalab.ua.analyser.enums.UserInfo;
import com.mesalab.ua.analyser.impl.UaAnalyserImpl;
@@ -36,15 +36,15 @@ public class CNServiceImpl implements CNService {
private CNParser cnParser;
@Autowired
- private QueryService queryService;
+ private SQLSyncQueryService sqlSyncQueryService;
@Override
public BaseResult shareNetQuery(DSLObject dslObject) throws BusinessException {
dslObject.getQuery().getParameters().setGranularity("user_id,ip");
String executeSql = cnParser.buildShareNetSQL(dslObject);
- QueryProfile queryProfile = new QueryProfile();
- queryProfile.setQuery(executeSql);
- BaseResult baseResult = queryService.executeQuery(queryProfile);
+ SQLQueryContext queryProfile = new SQLQueryContext();
+ queryProfile.setOriginalSQL(executeSql);
+ BaseResult baseResult = sqlSyncQueryService.executeQuery(queryProfile);
if (!baseResult.isSuccess()) {
return baseResult;
}
@@ -68,9 +68,7 @@ public class CNServiceImpl implements CNService {
public BaseResult bannedAppQuery(DSLObject dslObject) throws BusinessException {
dslObject.getQuery().getParameters().setGranularity("user_id,ip");
String executeSql = cnParser.bannedAppSQL(dslObject);
- QueryProfile queryProfile = new QueryProfile();
- queryProfile.setQuery(executeSql);
- BaseResult baseResult = queryService.executeQuery(queryProfile);
+ BaseResult baseResult = sqlSyncQueryService.executeQuery(SQLQueryContext.builder().originalSQL(executeSql).build());
if (!baseResult.isSuccess()) {
return baseResult;
}
diff --git a/src/main/java/com/mesalab/cn/service/impl/EntityDetailServiceImpl.java b/src/main/java/com/mesalab/cn/service/impl/EntityDetailServiceImpl.java
index f3d7e17a..f7f4c3aa 100644
--- a/src/main/java/com/mesalab/cn/service/impl/EntityDetailServiceImpl.java
+++ b/src/main/java/com/mesalab/cn/service/impl/EntityDetailServiceImpl.java
@@ -13,15 +13,14 @@ import com.mesalab.cn.entity.pojo.Condition;
import com.mesalab.cn.entity.pojo.DSLObject;
import com.mesalab.cn.entity.pojo.DSLParser;
import com.mesalab.cn.entity.EntityDetail;
-import com.mesalab.cn.exception.CNErrorCode;
import com.mesalab.cn.exception.CNErrorMessage;
import com.mesalab.cn.service.EntityDetailService;
import com.mesalab.common.entity.BaseResult;
import com.mesalab.common.entity.BaseResultGenerator;
-import com.mesalab.common.enums.ResultCodeEnum;
-import com.mesalab.common.enums.ResultStatusEnum;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
import com.mesalab.common.exception.BusinessException;
import com.mesalab.knowledge.strategy.BaseQueryProvider;
+import com.mesalab.common.exception.CommonErrorCode;
import org.apache.commons.compress.utils.Lists;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
@@ -511,29 +510,29 @@ public class EntityDetailServiceImpl implements EntityDetailService {
private void valid(DSLObject.Query.Parameters parameters) {
DSLObject.Query.Parameters.Match match = parameters.getMatch();
if (ObjectUtil.isEmpty(match)) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), CNErrorMessage.PARAMETERS_IS_INVALID));
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), CNErrorMessage.PARAMETERS_IS_INVALID));
}
List<Condition> condition = match.getCondition();
- if (ObjectUtil.isEmpty(condition)) { throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), CNErrorMessage.PARAMETERS_MATCH_CONDITION_IS_INVALID));
+ if (ObjectUtil.isEmpty(condition)) { throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), CNErrorMessage.PARAMETERS_MATCH_CONDITION_IS_INVALID));
}
if (ObjectUtil.isEmpty(condition.get(0).getFieldValues())) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), CNErrorMessage.PARAMETERS_MATCH_CONDITION_FIELD_VALUE_IS_INVALID));
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), CNErrorMessage.PARAMETERS_MATCH_CONDITION_FIELD_VALUE_IS_INVALID));
}
if (ObjectUtil.isEmpty(condition.get(0).getFieldKey())) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), CNErrorMessage.PARAMETERS_MATCH_CONDITION_FIELD_KEY_IS_INVALID));
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), CNErrorMessage.PARAMETERS_MATCH_CONDITION_FIELD_KEY_IS_INVALID));
}
if (!(Arrays.asList(APP_NAME, DOMAIN, IP, DNS).contains(condition.get(0).getFieldKey()))) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), CNErrorMessage.PARAMETERS_MATCH_CONDITION_FIELD_KEY_IS_INVALID));
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), CNErrorMessage.PARAMETERS_MATCH_CONDITION_FIELD_KEY_IS_INVALID));
}
Integer limit = parameters.getLimit();
if(ObjectUtil.isNotEmpty(limit)&&!(limit>=0&&limit<=DEF_LIMIT)){
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), String.format(CNErrorMessage.PARAMETERS_LIMIT_IS_INVALID),DEF_LIMIT));
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), String.format(CNErrorMessage.PARAMETERS_LIMIT_IS_INVALID),DEF_LIMIT));
}
}
private boolean existServerIp(String ip){
diff --git a/src/main/java/com/mesalab/common/configuration/HazelcastConfiguration.java b/src/main/java/com/mesalab/common/configuration/HazelcastConfiguration.java
new file mode 100644
index 00000000..29e38c85
--- /dev/null
+++ b/src/main/java/com/mesalab/common/configuration/HazelcastConfiguration.java
@@ -0,0 +1,35 @@
+package com.mesalab.common.configuration;
+
+import com.hazelcast.config.Config;
+import com.hazelcast.config.YamlConfigBuilder;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.core.io.ClassPathResource;
+import org.springframework.core.io.FileSystemResource;
+import org.springframework.core.io.Resource;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.URISyntaxException;
+
+/**
+ * TODO
+ *
+ * @Classname HazelcastConfig
+ * @Date 2023/12/9 10:01
+ * @Author wWei
+ */
+@Configuration
+public class HazelcastConfiguration {
+ @Bean
+ public Config hazelcastConfig() throws IOException, URISyntaxException {
+ Resource resource = new FileSystemResource(new File("").getCanonicalPath() + File.separator + "config" + File.separator + "hazelcast.yaml");
+ if (!resource.exists()) {
+ resource = new ClassPathResource("hazelcast.yaml");
+ }
+ InputStream inputStream = resource.getInputStream();
+ YamlConfigBuilder configBuilder = new YamlConfigBuilder(inputStream);
+ return configBuilder.build();
+ }
+}
diff --git a/src/main/java/com/mesalab/common/configuration/HeavyResourceThreadPoolConfiguration.java b/src/main/java/com/mesalab/common/configuration/HeavyResourceThreadPoolConfiguration.java
new file mode 100644
index 00000000..d9a004cf
--- /dev/null
+++ b/src/main/java/com/mesalab/common/configuration/HeavyResourceThreadPoolConfiguration.java
@@ -0,0 +1,44 @@
+package com.mesalab.common.configuration;
+
+
+import com.mesalab.services.configuration.JobConfig;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.scheduling.annotation.EnableAsync;
+import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
+
+import java.util.concurrent.ThreadPoolExecutor;
+
+/**
+ * TODO
+ *
+ * @Classname HeavyResourceThreadPoolConfiguration
+ * @Date 2021/12/1 9:20 上午
+ * @Author wWei
+ */
+@Configuration
+@EnableAsync
+public class HeavyResourceThreadPoolConfiguration {
+ private static final String THREAD_NAME_PREFIX = "Heavy-Resource-ThreadPool-";
+
+ private JobConfig jobCfg;
+
+ @Bean("heavyResourceThreadPool")
+ public ThreadPoolTaskExecutor getHeavyResourceThreadPool() {
+ ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
+ executor.setCorePoolSize(jobCfg.getHeavyResourceCorePoolSize());
+ executor.setMaxPoolSize(jobCfg.getHeavyResourceMaxPoolSize());
+ executor.setQueueCapacity(jobCfg.getHeavyResourceQueueCapacity());
+ executor.setKeepAliveSeconds(10);
+ executor.setThreadNamePrefix(THREAD_NAME_PREFIX);
+ executor.setRejectedExecutionHandler(new ThreadPoolExecutor.AbortPolicy());
+ executor.initialize();
+ return executor;
+ }
+
+ @Autowired
+ public void setJobConfig(JobConfig jobCfg) {
+ this.jobCfg = jobCfg;
+ }
+} \ No newline at end of file
diff --git a/src/main/java/com/mesalab/common/configuration/LightWeightThreadPoolConfiguration.java b/src/main/java/com/mesalab/common/configuration/LightWeightThreadPoolConfiguration.java
new file mode 100644
index 00000000..95cba49b
--- /dev/null
+++ b/src/main/java/com/mesalab/common/configuration/LightWeightThreadPoolConfiguration.java
@@ -0,0 +1,46 @@
+package com.mesalab.common.configuration;
+
+import com.mesalab.services.configuration.JobConfig;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.scheduling.annotation.EnableAsync;
+import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
+
+import java.util.concurrent.ThreadPoolExecutor;
+
+/**
+ * TODO
+ *
+ * @Classname lightWeightThreadPoolConfiguration
+ * @Date 2021/12/1 9:20 上午
+ * @Author wWei
+ */
+@Configuration
+@EnableAsync
+public class LightWeightThreadPoolConfiguration {
+
+ private JobConfig jobConfig;
+ private static final String THREAD_NAME_PREFIX = "Light-Weight-ThreadPool-";
+
+ @Bean("lightWeightThreadPool")
+ public ThreadPoolTaskExecutor getLightWeightThreadPool() {
+ ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
+ executor.setCorePoolSize(jobConfig.getLightWeightCorePoolSize());
+ executor.setMaxPoolSize(jobConfig.getLightWeightMaxPoolSize());
+ executor.setQueueCapacity(jobConfig.getLightWeightQueueCapacity());
+ executor.setKeepAliveSeconds(10);
+ executor.setThreadNamePrefix(THREAD_NAME_PREFIX);
+
+ // 线程池对拒绝任务的处理策略
+ executor.setRejectedExecutionHandler(new ThreadPoolExecutor.AbortPolicy());
+ // 初始化
+ executor.initialize();
+ return executor;
+ }
+
+ @Autowired
+ private void setJobConfig(JobConfig jobConfig) {
+ this.jobConfig = jobConfig;
+ }
+} \ No newline at end of file
diff --git a/src/main/java/com/mesalab/services/configuration/TaskThreadPoolCfg.java b/src/main/java/com/mesalab/common/configuration/TaskThreadPoolConfiguration.java
index 8affab52..aa6d3524 100644
--- a/src/main/java/com/mesalab/services/configuration/TaskThreadPoolCfg.java
+++ b/src/main/java/com/mesalab/common/configuration/TaskThreadPoolConfiguration.java
@@ -1,5 +1,7 @@
-package com.mesalab.services.configuration;
+package com.mesalab.common.configuration;
+import com.mesalab.services.configuration.JobConfig;
+import com.mesalab.services.configuration.TaskConfig;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@@ -11,28 +13,31 @@ import java.util.concurrent.ThreadPoolExecutor;
/**
* TODO
*
- * @Classname ThreadPoolCfgSubJob
- * @Date 2021/12/3 9:13 上午
+ * @Classname TaskThreadPoolConfig
+ * @Date 2024/6/26 10:56
* @Author wWei
*/
@Configuration
@EnableAsync
-public class TaskThreadPoolCfg {
- @Autowired
- TaskConfig taskConfig;
- private static final int keepAliveTime = 10;
- private static final String threadNamePrefix = "Async(sub)-Service-";
+public class TaskThreadPoolConfiguration {
+ private TaskConfig taskConfig;
+ private static final String THREAD_NAME_PREFIX = "Task-ThreadPool-";
- @Bean("taskExecutor")
+ @Bean("taskThreadPool")
public ThreadPoolTaskExecutor getAsyncExecutor() {
ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
executor.setCorePoolSize(taskConfig.getPoolCorePoolSize());
executor.setMaxPoolSize(taskConfig.getPoolMaxPoolSize());
executor.setQueueCapacity(taskConfig.getPoolQueueCapacity());
- executor.setKeepAliveSeconds(keepAliveTime);
- executor.setThreadNamePrefix(threadNamePrefix);
+ executor.setKeepAliveSeconds(10);
+ executor.setThreadNamePrefix(THREAD_NAME_PREFIX);
executor.setRejectedExecutionHandler(new ThreadPoolExecutor.AbortPolicy());
executor.initialize();
return executor;
}
+
+ @Autowired
+ private void setTaskConfig(TaskConfig taskConfig) {
+ this.taskConfig = taskConfig;
+ }
}
diff --git a/src/main/java/com/mesalab/common/entity/BaseResult.java b/src/main/java/com/mesalab/common/entity/BaseResult.java
index 6f4cfd20..1cff317b 100644
--- a/src/main/java/com/mesalab/common/entity/BaseResult.java
+++ b/src/main/java/com/mesalab/common/entity/BaseResult.java
@@ -1,6 +1,8 @@
package com.mesalab.common.entity;
-import com.mesalab.common.enums.ResultStatusEnum;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonPropertyOrder;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
@@ -20,15 +22,17 @@ import java.util.Map;
@Builder
@NoArgsConstructor
@AllArgsConstructor
+@JsonPropertyOrder({"status", "message", "requestId", "success", "code", "job", "output_mode", "statistics", "meta", "data"})
public class BaseResult<T> implements Serializable {
private Integer status;
private String code;
- private String queryKey;
+ @JsonProperty("request_id")
+ private String requestId;
- private boolean success;
+ private Boolean success;
private String message;
@@ -36,23 +40,25 @@ public class BaseResult<T> implements Serializable {
private Map<String, Object> job;
- private String formatType;
+ @JsonProperty("output_mode")
+ private String outputMode;
private T meta;
private T data;
- public BaseResult(Integer status, String code, String queryKey, boolean success, String message, Map<String, Object> statistics, String formatType, T meta, T data) {
+ public BaseResult(Integer status, String code, String requestId, boolean success, String message, Map<String, Object> statistics, String outputMode, T meta, T data) {
this.status = status;
this.code = code;
- this.queryKey = queryKey;
+ this.requestId = requestId;
this.success = success;
this.message = message;
this.statistics = statistics;
- this.formatType = formatType;
+ this.outputMode = outputMode;
this.meta = meta;
this.data = data;
}
+
/**
* 判断是否是成功结果
* JsonIgnore使之不在json序列化结果当中
@@ -60,7 +66,22 @@ public class BaseResult<T> implements Serializable {
* @return 是否为成功结果
*/
public boolean isSuccess() {
- return this.status >= ResultStatusEnum.SUCCESS.getCode() && this.status < 300;
+ return this.status != null && this.status >= HttpStatusCodeEnum.SUCCESS.getCode() && this.status < 300;
}
+ @Override
+ public String toString() {
+ return "BaseResult{" +
+ "status=" + status +
+ ", code='" + code + '\'' +
+ ", requestId='" + requestId + '\'' +
+ ", success=" + success +
+ ", message='" + message + '\'' +
+ ", statistics=" + statistics +
+ ", job=" + job +
+ ", output_mode='" + outputMode + '\'' +
+ ", meta=" + meta +
+ ", data=" + data +
+ '}';
+ }
} \ No newline at end of file
diff --git a/src/main/java/com/mesalab/common/entity/BaseResultGenerator.java b/src/main/java/com/mesalab/common/entity/BaseResultGenerator.java
index 12caae20..ede4c628 100644
--- a/src/main/java/com/mesalab/common/entity/BaseResultGenerator.java
+++ b/src/main/java/com/mesalab/common/entity/BaseResultGenerator.java
@@ -2,9 +2,9 @@ package com.mesalab.common.entity;
import com.mesalab.common.enums.ResultCodeEnum;
-import com.mesalab.common.enums.ResultStatusEnum;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
import com.mesalab.common.exception.BusinessException;
-import com.mesalab.qgw.exception.QGWErrorCode;
+import com.mesalab.common.exception.CommonErrorCode;
import java.util.Map;
@@ -21,14 +21,15 @@ public class BaseResultGenerator {
*
* @param status 返回HTTP响应码
* @param code 返回业务编码
+ * @param success 是否成功
* @param message 返回消息
* @param data 返回数据
- * @param formatType 返回数据格式 JSON/CSV
+ * @param outputMode 返回数据格式 json/jsonWithMetadata/csv
* @param <T> 返回数据类型
* @return 返回结果
*/
- public static <T> BaseResult<T> generate(final int status, final String code, final String message, T data, T meta, final Map<String, Object> statistics, final String formatType) {
- return new BaseResult<>(status, code,null, false, message, statistics, formatType, meta, data);
+ public static <T> BaseResult<T> generate(final int status, final String code, final boolean success, final String message, T data, T meta, final Map<String, Object> statistics, final String outputMode) {
+ return new BaseResult<>(status, code,null, success, message, statistics, outputMode, meta, data);
}
/**
@@ -37,7 +38,7 @@ public class BaseResultGenerator {
* @return 操作成功的默认响应结果
*/
public static <T> BaseResult<T> success() {
- return new BaseResult<>(ResultStatusEnum.SUCCESS.getCode(), ResultCodeEnum.SUCCESS.getCode(),
+ return new BaseResult<>(HttpStatusCodeEnum.SUCCESS.getCode(), ResultCodeEnum.SUCCESS.getCode(),
null, true, ResultCodeEnum.SUCCESS.getMessage(), null, null, null, null);
}
@@ -50,7 +51,7 @@ public class BaseResultGenerator {
* @return 响应结果
*/
public static <T> BaseResult<T> success(final String message, final T data) {
- return new BaseResult<>(ResultStatusEnum.SUCCESS.getCode(), ResultCodeEnum.SUCCESS.getCode(),
+ return new BaseResult<>(HttpStatusCodeEnum.SUCCESS.getCode(), ResultCodeEnum.SUCCESS.getCode(),
null, true, message, null, null, null, data);
}
@@ -78,7 +79,7 @@ public class BaseResultGenerator {
* @return 响应结果
*/
public static <T> BaseResult<T> success(final String message, final T data, final Map<String, Object> statistics) {
- return new BaseResult<>(ResultStatusEnum.SUCCESS.getCode(), ResultCodeEnum.SUCCESS.getCode(),
+ return new BaseResult<>(HttpStatusCodeEnum.SUCCESS.getCode(), ResultCodeEnum.SUCCESS.getCode(),
null, true, message, statistics, null, null, data);
}
@@ -90,7 +91,7 @@ public class BaseResultGenerator {
* @return 响应结果
*/
public static <T> BaseResult<T> success(final T data) {
- return new BaseResult<>(ResultStatusEnum.SUCCESS.getCode(), ResultCodeEnum.SUCCESS.getCode(), null, true,
+ return new BaseResult<>(HttpStatusCodeEnum.SUCCESS.getCode(), ResultCodeEnum.SUCCESS.getCode(), null, true,
ResultCodeEnum.SUCCESS.getMessage(), null, null, null, data);
}
@@ -99,14 +100,16 @@ public class BaseResultGenerator {
*
* @param statistics
* @param job
+ * @param outputMode
+ * @param meta
* @param data
* @return {@link BaseResult<T>}
* @created by wWei
* @date 2021/11/26 11:46 上午
*/
- public static <T> BaseResult<T> success(final Map<String, Object> statistics, final Map<String, Object> job, final T meta, final T data) {
- return new BaseResult<>(ResultStatusEnum.SUCCESS.getCode(), ResultCodeEnum.SUCCESS.getCode(), null, true,
- ResultCodeEnum.SUCCESS.getMessage(), statistics, job, null, meta, data);
+ public static <T> BaseResult<T> success(final Map<String, Object> statistics, final Map<String, Object> job,final String outputMode, final T meta, final T data) {
+ return new BaseResult<>(HttpStatusCodeEnum.SUCCESS.getCode(), ResultCodeEnum.SUCCESS.getCode(), null, true,
+ ResultCodeEnum.SUCCESS.getMessage(), statistics, job, outputMode, meta, data);
}
/**
@@ -118,8 +121,8 @@ public class BaseResultGenerator {
* @date 2023/11/29 11:46 上午
*/
public static <T> BaseResult<T> successCreate(final Map<String, Object> job) {
- return new BaseResult<>(ResultStatusEnum.CREATED.getCode(), ResultCodeEnum.SUCCESS.getCode(), null, true,
- ResultCodeEnum.SUCCESS.getMessage(), null, job, null, null, null);
+ return new BaseResult<>(HttpStatusCodeEnum.CREATED.getCode(), ResultCodeEnum.SUCCESS.getCode(), null, true,
+ HttpStatusCodeEnum.CREATED.getMessage(), null, job, null, null, null);
}
@@ -130,7 +133,7 @@ public class BaseResultGenerator {
* @return 响应结果
*/
public static <T> BaseResult<T> success4Message(final String message) {
- return new BaseResult<>(ResultStatusEnum.SUCCESS.getCode(), ResultCodeEnum.SUCCESS.getCode(),
+ return new BaseResult<>(HttpStatusCodeEnum.SUCCESS.getCode(), ResultCodeEnum.SUCCESS.getCode(),
null, true, message, null, null, null, null);
}
@@ -140,8 +143,8 @@ public class BaseResultGenerator {
* @return 操作成功的默认响应结果
*/
public static <T> BaseResult<T> failure() {
- return new BaseResult<>(ResultStatusEnum.BAD_REQUEST.getCode(),
- ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(), null, false, ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(), null, null, null, null);
+ return new BaseResult<>(HttpStatusCodeEnum.BAD_REQUEST.getCode(),
+ CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), null, false, CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), null, null, null, null);
}
/**
@@ -152,7 +155,7 @@ public class BaseResultGenerator {
* @return 响应结果
*/
public static <T> BaseResult<T> failure(final int status, final String message) {
- return new BaseResult<>(status, ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(), null, false, message, null, null, null, null);
+ return new BaseResult<>(status, CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), null, false, message, null, null, null, null);
}
/**
@@ -187,7 +190,7 @@ public class BaseResultGenerator {
* @return 响应结果
*/
public static <T> BaseResult<T> failure(final int status, final String message, T data) {
- return new BaseResult<>(status, ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(), null, false, message, null, null, null, data);
+ return new BaseResult<>(status, CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), null, false, message, null, null, null, data);
}
/**
@@ -196,8 +199,8 @@ public class BaseResultGenerator {
* @param resultStatusEnum 自定义错误编码枚举
* @return 响应结果
*/
- public static <T> BaseResult<T> failure(final ResultStatusEnum resultStatusEnum) {
- return new BaseResult<>(resultStatusEnum.getCode(), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(), null, false, resultStatusEnum.getMessage(), null, null, null, null);
+ public static <T> BaseResult<T> failure(final HttpStatusCodeEnum resultStatusEnum) {
+ return new BaseResult<>(resultStatusEnum.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), null, false, resultStatusEnum.getMessage(), null, null, null, null);
}
@@ -210,7 +213,7 @@ public class BaseResultGenerator {
*/
public static <T> BaseResult<T> failure(final String message, final String queryKey) {
- return new BaseResult<>(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(), queryKey, false, message, null, null, null, null);
+ return new BaseResult<>(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), queryKey, false, message, null, null, null, null);
}
@@ -220,7 +223,7 @@ public class BaseResultGenerator {
* @return 操作成功的默认响应结果
*/
public static <T> BaseResult<T> error() {
- return new BaseResult<>(ResultStatusEnum.SERVER_ERROR.getCode(), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(), null, false, ResultStatusEnum.SERVER_ERROR.getMessage(), null, null, null, null);
+ return new BaseResult<>(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), null, false, HttpStatusCodeEnum.SERVER_ERROR.getMessage(), null, null, null, null);
}
/**
@@ -231,7 +234,7 @@ public class BaseResultGenerator {
* @return 响应结果
*/
public static <T> BaseResult<T> error(final int code, final String message) {
- return new BaseResult<>(code, ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(), null, false, message, null, null, null, null);
+ return new BaseResult<>(code, CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), null, false, message, null, null, null, null);
}
/**
@@ -251,8 +254,8 @@ public class BaseResultGenerator {
* @param resultStatusEnum 自定义错误编码枚举
* @return 响应结果
*/
- public static <T> BaseResult<T> error(final ResultStatusEnum resultStatusEnum) {
- return new BaseResult<>(resultStatusEnum.getCode(), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(), null, false, resultStatusEnum.getMessage(), null, null, null, null);
+ public static <T> BaseResult<T> error(final HttpStatusCodeEnum resultStatusEnum) {
+ return new BaseResult<>(resultStatusEnum.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), null, false, resultStatusEnum.getMessage(), null, null, null, null);
}
/**
@@ -262,7 +265,7 @@ public class BaseResultGenerator {
* @return 响应结果
*/
public static <T> BaseResult<T> error(final BusinessException be) {
- return new BaseResult<>(ResultStatusEnum.SERVER_ERROR.getCode(), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(), null, false, be.getMessage(), null, null, null, null);
+ return new BaseResult<>(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), null, false, be.getMessage(), null, null, null, null);
}
/**
@@ -272,7 +275,7 @@ public class BaseResultGenerator {
* @return 响应结果
*/
public static <T> BaseResult<T> error(final String message) {
- return new BaseResult<>(ResultStatusEnum.SERVER_ERROR.getCode(), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(), null, false, message, null, null, null, null);
+ return new BaseResult<>(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), null, false, message, null, null, null, null);
}
}
diff --git a/src/main/java/com/mesalab/common/entity/DataTypeMapping.java b/src/main/java/com/mesalab/common/entity/DataTypeMapping.java
index fea187cf..af501983 100644
--- a/src/main/java/com/mesalab/common/entity/DataTypeMapping.java
+++ b/src/main/java/com/mesalab/common/entity/DataTypeMapping.java
@@ -16,4 +16,5 @@ public class DataTypeMapping {
public static final String DATE = "date";
public static final String TIMESTAMP = "timestamp";
public static final String BOOLEAN = "boolean";
+ public static final String ARRAY = "array";
}
diff --git a/src/main/java/com/mesalab/common/enums/BooleanEnum.java b/src/main/java/com/mesalab/common/enums/BooleanOption.java
index bc5ee496..c4ad53e2 100644
--- a/src/main/java/com/mesalab/common/enums/BooleanEnum.java
+++ b/src/main/java/com/mesalab/common/enums/BooleanOption.java
@@ -9,7 +9,7 @@ import lombok.Getter;
* @date 2018/5/7
*/
@Getter
-public enum BooleanEnum {
+public enum BooleanOption {
/**
* YES: 1
@@ -23,7 +23,7 @@ public enum BooleanEnum {
private int value;
- BooleanEnum(int value) {
+ BooleanOption(int value) {
this.value = value;
}
diff --git a/src/main/java/com/mesalab/common/enums/ConsistencyOption.java b/src/main/java/com/mesalab/common/enums/ConsistencyOption.java
new file mode 100644
index 00000000..3d162ddc
--- /dev/null
+++ b/src/main/java/com/mesalab/common/enums/ConsistencyOption.java
@@ -0,0 +1,13 @@
+package com.mesalab.common.enums;
+
+import lombok.Getter;
+
+@Getter
+public enum ConsistencyOption {
+
+ TTL_CONSISTENCY("ttl_consistency"),
+ SCHEMA_CONSISTENCY("schema_consistency"),
+ DATASET_VERIFICATION("dataset_verification");
+ private String value;
+ ConsistencyOption(String value){this.value = value;}
+}
diff --git a/src/main/java/com/mesalab/common/enums/CookieEnum.java b/src/main/java/com/mesalab/common/enums/CookieOption.java
index 898369b1..0d3b9c4f 100644
--- a/src/main/java/com/mesalab/common/enums/CookieEnum.java
+++ b/src/main/java/com/mesalab/common/enums/CookieOption.java
@@ -9,7 +9,7 @@ import lombok.Getter;
* @date 2018/5/26
*/
@Getter
-public enum CookieEnum {
+public enum CookieOption {
/**
* REMEMBER_ME: Cookie中存储的REMEMBER_ME
*/
@@ -17,7 +17,7 @@ public enum CookieEnum {
private String value;
- CookieEnum(String value) {
+ CookieOption(String value) {
this.value = value;
}
}
diff --git a/src/main/java/com/mesalab/common/enums/DBTypeEnum.java b/src/main/java/com/mesalab/common/enums/DBEngineType.java
index e4ac8e3f..528e5144 100644
--- a/src/main/java/com/mesalab/common/enums/DBTypeEnum.java
+++ b/src/main/java/com/mesalab/common/enums/DBEngineType.java
@@ -3,12 +3,12 @@ package com.mesalab.common.enums;
import lombok.Getter;
@Getter
-public enum DBTypeEnum {
- ENGINE("qgw"),
+public enum DBEngineType {
+ QGW("qgw"),
CLICKHOUSE("clickHouse"),
DRUID("druid"),
HBASE("hbase"),
ARANGODB("arangoDB");
private String value;
- DBTypeEnum(String value) {this.value = value;}
+ DBEngineType(String value) {this.value = value;}
}
diff --git a/src/main/java/com/mesalab/common/enums/DiagnosisOptionEnum.java b/src/main/java/com/mesalab/common/enums/DiagnosisOption.java
index 2bf22760..ccd8c191 100644
--- a/src/main/java/com/mesalab/common/enums/DiagnosisOptionEnum.java
+++ b/src/main/java/com/mesalab/common/enums/DiagnosisOption.java
@@ -10,7 +10,7 @@ import lombok.Getter;
* @Author wWei
*/
@Getter
-public enum DiagnosisOptionEnum {
+public enum DiagnosisOption {
/**
* 解析SQL测试集,用于SQL语法解析
*/
@@ -26,7 +26,7 @@ public enum DiagnosisOptionEnum {
private String value;
- DiagnosisOptionEnum(String value) {
+ DiagnosisOption(String value) {
this.value = value;
}
diff --git a/src/main/java/com/mesalab/common/enums/EnvironmentEnum.java b/src/main/java/com/mesalab/common/enums/Environment.java
index 421fb63e..e81657cd 100644
--- a/src/main/java/com/mesalab/common/enums/EnvironmentEnum.java
+++ b/src/main/java/com/mesalab/common/enums/Environment.java
@@ -9,7 +9,7 @@ import lombok.Getter;
* @date 2018/5/26
*/
@Getter
-public enum EnvironmentEnum {
+public enum Environment {
/**
* 开发环境
@@ -22,7 +22,7 @@ public enum EnvironmentEnum {
private String name;
- EnvironmentEnum(String name) {
+ Environment(String name) {
this.name = name;
}
diff --git a/src/main/java/com/mesalab/common/enums/EnvironmentGroupEnum.java b/src/main/java/com/mesalab/common/enums/EnvironmentGroupEnum.java
index 5e433e05..b4d1540d 100644
--- a/src/main/java/com/mesalab/common/enums/EnvironmentGroupEnum.java
+++ b/src/main/java/com/mesalab/common/enums/EnvironmentGroupEnum.java
@@ -16,14 +16,14 @@ public enum EnvironmentGroupEnum {
* 1. DEV(开发环境)
* 2. PROD(生产环境)
*/
- RUNTIME(new EnvironmentEnum[]{EnvironmentEnum.DEV, EnvironmentEnum.PROD}),;
+ RUNTIME(new Environment[]{Environment.DEV, Environment.PROD}),;
/**
* 运行环境
*/
- private EnvironmentEnum[] environments;
+ private Environment[] environments;
- EnvironmentGroupEnum(EnvironmentEnum[] environments) {
+ EnvironmentGroupEnum(Environment[] environments) {
this.environments = environments;
}
@@ -34,8 +34,8 @@ public enum EnvironmentGroupEnum {
* @return boolean
*/
public static boolean isRuntime(String s) {
- EnvironmentEnum[] environmentEnums = RUNTIME.getEnvironments();
- for (EnvironmentEnum environmentEnum : environmentEnums) {
+ Environment[] environmentEnums = RUNTIME.getEnvironments();
+ for (Environment environmentEnum : environmentEnums) {
if (environmentEnum.getName().equals(s)) {
return true;
}
diff --git a/src/main/java/com/mesalab/common/enums/ExecutionMode.java b/src/main/java/com/mesalab/common/enums/ExecutionMode.java
new file mode 100644
index 00000000..5aebdcc7
--- /dev/null
+++ b/src/main/java/com/mesalab/common/enums/ExecutionMode.java
@@ -0,0 +1,32 @@
+package com.mesalab.common.enums;
+
+/**
+ * Create Query Job Execution Mode
+ *
+ * @Classname ExecutionMode
+ * @Date 2023/12/6 17:06
+ * @Author wWei
+ */
+public enum ExecutionMode {
+ /**
+ * Desc: run an asynchronous query
+ */
+ NORMAL("normal"),
+ /**
+ * Desc: return the Job ID when the job is complete (sync query)
+ */
+ BLOCKING("blocking"),
+ /**
+ * Desc: returns results in the same call(sync query). Does not return the Job ID
+ */
+ ONESHOT("oneshot");
+ private final String value;
+
+ ExecutionMode(String value) {
+ this.value = value.toLowerCase();
+ }
+
+ public String getValue() {
+ return value;
+ }
+}
diff --git a/src/main/java/com/mesalab/common/enums/FileCategory.java b/src/main/java/com/mesalab/common/enums/FileCategory.java
new file mode 100644
index 00000000..2042e6ab
--- /dev/null
+++ b/src/main/java/com/mesalab/common/enums/FileCategory.java
@@ -0,0 +1,15 @@
+package com.mesalab.common.enums;
+
+public enum FileCategory {
+ APP("APP"),
+ IP("IP"),
+ DOMAIN("DOMAIN");
+ private String value;
+ FileCategory(String value){
+ this.value = value;
+ }
+
+ public String getValue() {
+ return value;
+ }
+}
diff --git a/src/main/java/com/mesalab/common/enums/FileFormat.java b/src/main/java/com/mesalab/common/enums/FileFormat.java
new file mode 100644
index 00000000..6524415f
--- /dev/null
+++ b/src/main/java/com/mesalab/common/enums/FileFormat.java
@@ -0,0 +1,16 @@
+package com.mesalab.common.enums;
+
+public enum FileFormat {
+ MMDB("MMDB"),
+ CSV("CSV"),
+ TXT("TXT"),
+ AES("AES");
+ private String value;
+ FileFormat(String value){
+ this.value = value;
+ }
+
+ public String getValue() {
+ return value;
+ }
+}
diff --git a/src/main/java/com/mesalab/common/enums/ResultStatusEnum.java b/src/main/java/com/mesalab/common/enums/HttpStatusCodeEnum.java
index 7bf032fc..40b0ce40 100644
--- a/src/main/java/com/mesalab/common/enums/ResultStatusEnum.java
+++ b/src/main/java/com/mesalab/common/enums/HttpStatusCodeEnum.java
@@ -9,7 +9,7 @@ import lombok.Getter;
* @date 2018/3/21
*/
@Getter
-public enum ResultStatusEnum {
+public enum HttpStatusCodeEnum {
/**
* SUCCESS: 200 成功
* BAD_REQUEST: 400 服务端无法理解该请求
@@ -17,12 +17,15 @@ public enum ResultStatusEnum {
* SERVER_ERROR: 500 网络服务异常
*/
SUCCESS(200, "Success"),
- CREATED(201, "Create"),
- ACCEPTED(202, "Accept"),
+ CREATED(201, "Created"),
+ ACCEPTED(202, "Accepted"),
+ NO_CONTENT(204, "No Content"),
+ PARTIAL_CONTENT(206, "Partial Content"),
BAD_REQUEST(400, "Bad request"),
NOT_FOUND(404, "Not found"),
REQUEST_URI_TOO_LONG(414, "Request URI too long"),
REQ_FORBIDDEN(403, "Repeat request"),
+ REQ_CONFLICT(409, "Conflict"),
LOCKED(423,"locked"),
BAD_GATEWAY(502, "Bad gateway"),
SERVICE_UNAVAILABLE (503,"Service unavailable"),
@@ -33,7 +36,7 @@ public enum ResultStatusEnum {
private String message;
- ResultStatusEnum(int code, String message) {
+ HttpStatusCodeEnum(int code, String message) {
this.code = code;
}
diff --git a/src/main/java/com/mesalab/common/enums/JobHandlerEnum.java b/src/main/java/com/mesalab/common/enums/JobHandlerOption.java
index 1ef1d055..ab059723 100644
--- a/src/main/java/com/mesalab/common/enums/JobHandlerEnum.java
+++ b/src/main/java/com/mesalab/common/enums/JobHandlerOption.java
@@ -4,7 +4,7 @@ import lombok.Getter;
@Getter
-public enum JobHandlerEnum {
+public enum JobHandlerOption {
DELETE_TRAFFIC_DATA_JOB_HANDLER("deleteTrafficDataJobHandler"),
DELETE_REPORT_AND_METRICS_DATA_JOB_HANDLER("deleteReportAndMetricsDataJobHandler"),
@@ -12,10 +12,11 @@ public enum JobHandlerEnum {
DELETE_ALL_TRAFFIC_DATA_JOB_HANDLER("deleteAllTrafficDataJobHandler"),
DELETE_ALL_REPORT_AND_METRICS_DATA_JOB_HANDLER("deleteAllReportAndMetricsDataJobHandler"),
DELETE_ALL_FILES_JOB_HANDLER("deleteAllFilesJobHandler"),
+ DELETE_OLD_LOG_JOB_HANDLER("deleteOldLogJobHandler"),
;
private String value;
- JobHandlerEnum(String value) {
+ JobHandlerOption(String value) {
this.value = value;
}
}
diff --git a/src/main/java/com/mesalab/common/enums/MetadataTypeEnum.java b/src/main/java/com/mesalab/common/enums/MetadataType.java
index fe626f1d..ac3a1250 100644
--- a/src/main/java/com/mesalab/common/enums/MetadataTypeEnum.java
+++ b/src/main/java/com/mesalab/common/enums/MetadataType.java
@@ -9,7 +9,7 @@ import lombok.Getter;
* @Description : 元数据类型枚举
*/
@Getter
-public enum MetadataTypeEnum {
+public enum MetadataType {
TABLES("tables"),
@@ -18,7 +18,7 @@ public enum MetadataTypeEnum {
private String value;
- MetadataTypeEnum(String value) {
+ MetadataType(String value) {
this.value = value;
}
}
diff --git a/src/main/java/com/mesalab/common/enums/OutputMode.java b/src/main/java/com/mesalab/common/enums/OutputMode.java
new file mode 100644
index 00000000..d048845c
--- /dev/null
+++ b/src/main/java/com/mesalab/common/enums/OutputMode.java
@@ -0,0 +1,36 @@
+package com.mesalab.common.enums;
+
+import lombok.Getter;
+
+/**
+ * Query job result output format options (format of the returned result set).
+ *
+ * @Classname OutputMode
+ * @Date 2023/12/6 17:06
+ * @Author wWei
+ */
+@Getter
+public enum OutputMode {
+
+ /**
+ * Desc: Output data in JSON format, But no meta.
+ */
+ JSON("json"),
+ /**
+ * Desc: Output data in CSV format.
+ */
+ CSV("csv"),
+ /**
+ * Desc: Output data in JSON format. It also contains some meta and statistics.
+ */
+ JSON_WITH_METADATA("jsonWithMetadata"),
+ /**
+ * Desc: Output data in JSON format, But no key. It also contains some meta and statistics.
+ */
+ JSON_COMPACT("jsonCompact");
+ private final String value;
+
+ OutputMode(String value) {
+ this.value = value;
+ }
+}
diff --git a/src/main/java/com/mesalab/common/enums/QueryFormatEnum.java b/src/main/java/com/mesalab/common/enums/QueryFormatEnum.java
deleted file mode 100644
index 741139ca..00000000
--- a/src/main/java/com/mesalab/common/enums/QueryFormatEnum.java
+++ /dev/null
@@ -1,11 +0,0 @@
-package com.mesalab.common.enums;
-
-import lombok.Getter;
-
-@Getter
-public enum QueryFormatEnum {
- JSON("json"),
- CSV("csv"),;
- private String value;
- QueryFormatEnum(String value) {this.value = value;}
-}
diff --git a/src/main/java/com/mesalab/common/enums/QueryOptionEnum.java b/src/main/java/com/mesalab/common/enums/QueryOption.java
index b10ff5a5..c8a4bd8f 100644
--- a/src/main/java/com/mesalab/common/enums/QueryOptionEnum.java
+++ b/src/main/java/com/mesalab/common/enums/QueryOption.java
@@ -3,12 +3,12 @@ package com.mesalab.common.enums;
import lombok.Getter;
@Getter
-public enum QueryOptionEnum {
+public enum QueryOption {
REAL_TIME("real_time"),
LONG_TERM("long_term"),
SYNTAX_VALIDATION("syntax_validation"),
SYNTAX_PARSE("syntax_parse");
private String value;
- QueryOptionEnum(String value) {this.value = value;}
+ QueryOption(String value) {this.value = value;}
}
diff --git a/src/main/java/com/mesalab/common/enums/QueryParamEnum.java b/src/main/java/com/mesalab/common/enums/QueryParam.java
index b9718eb5..868c7fae 100644
--- a/src/main/java/com/mesalab/common/enums/QueryParamEnum.java
+++ b/src/main/java/com/mesalab/common/enums/QueryParam.java
@@ -3,10 +3,10 @@ package com.mesalab.common.enums;
import lombok.Getter;
@Getter
-public enum QueryParamEnum {
+public enum QueryParam {
QUERY("query"),
FORMAT("format"),
OPTION("option"),;
private String value;
- QueryParamEnum(String value) {this.value = value;}
+ QueryParam(String value) {this.value = value;}
}
diff --git a/src/main/java/com/mesalab/common/enums/ResultCodeEnum.java b/src/main/java/com/mesalab/common/enums/ResultCodeEnum.java
index 8b0ac317..868aaa6f 100644
--- a/src/main/java/com/mesalab/common/enums/ResultCodeEnum.java
+++ b/src/main/java/com/mesalab/common/enums/ResultCodeEnum.java
@@ -10,13 +10,7 @@ import lombok.Getter;
*/
@Getter
public enum ResultCodeEnum {
-
-
- SUCCESS("20000666", "Success"),
- // 请求参数校验错误
- PARAMETER_ERROR("40000100", "Bad request - Invalid query parameters :%s"),
- // 未知异常
- UNKNOWN_EXCEPTION("50000999", "An unknown error occurred: %s");
+ SUCCESS("20000666", "Success");
private String code;
private String message;
diff --git a/src/main/java/com/mesalab/common/enums/SessionEnum.java b/src/main/java/com/mesalab/common/enums/SessionOption.java
index e475ddc8..efd3b9a8 100644
--- a/src/main/java/com/mesalab/common/enums/SessionEnum.java
+++ b/src/main/java/com/mesalab/common/enums/SessionOption.java
@@ -9,7 +9,7 @@ import lombok.Getter;
* @date 2018/5/10
*/
@Getter
-public enum SessionEnum {
+public enum SessionOption {
/**
* CURRENT_USER: session中存储的当前用户
@@ -18,7 +18,7 @@ public enum SessionEnum {
private String value;
- SessionEnum(String value) {
+ SessionOption(String value) {
this.value = value;
}
}
diff --git a/src/main/java/com/mesalab/common/exception/BusinessException.java b/src/main/java/com/mesalab/common/exception/BusinessException.java
index 2c9ce537..b8f40c74 100644
--- a/src/main/java/com/mesalab/common/exception/BusinessException.java
+++ b/src/main/java/com/mesalab/common/exception/BusinessException.java
@@ -1,8 +1,6 @@
package com.mesalab.common.exception;
-import com.mesalab.common.enums.ResultCodeEnum;
-import com.mesalab.common.enums.ResultStatusEnum;
-import com.mesalab.qgw.exception.QGWErrorCode;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
import lombok.*;
/**
@@ -21,12 +19,12 @@ public class BusinessException extends RuntimeException {
/**
* 状态码
*/
- private int status = ResultStatusEnum.SERVER_ERROR.getCode();
+ private int status = HttpStatusCodeEnum.SERVER_ERROR.getCode();
/**
* 业务码
*/
- private String code = ResultCodeEnum.UNKNOWN_EXCEPTION.getCode();
+ private String code = CommonErrorCode.UNKNOWN_EXCEPTION.getCode();
/**
diff --git a/src/main/java/com/mesalab/common/exception/CommonErrorCode.java b/src/main/java/com/mesalab/common/exception/CommonErrorCode.java
new file mode 100644
index 00000000..c8163573
--- /dev/null
+++ b/src/main/java/com/mesalab/common/exception/CommonErrorCode.java
@@ -0,0 +1,37 @@
+package com.mesalab.common.exception;
+
+import lombok.Getter;
+
+/**
+ * @Author wxs
+ * @Date 2022/11/3
+ */
+@Getter
+public enum CommonErrorCode {
+
+ /**
+ * 1-3位:异常类型(HTTP协议状态码)
+ * 4-5位:模块编号(如 01)
+ * 6-8位:自然排序
+ */
+ PARAMETER_ERROR("40000100", "Bad request - Invalid query parameters :%s"),
+ BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION("40001300", "Error in SQL syntax parsing: %s"),
+ BAD_REQUEST_SQL_EXECUTION_EXCEPTION("40001301", "Error in database execution engine: %s"),
+ BAD_REQUEST_PARAM_SYNTAX_EXCEPTION("40001302", " Invalid Request Parameter: %s"),
+
+ SCHEMA_AND_TABLE_NOT_CONSISTENCY("50001010", "Schema and table structure are not consistent: %s"),
+ HTTP_REQUEST_EXCEPTION("50001011", "Error in HTTP request : %s"),
+ SQL_REWRITE_AND_TRANSFORMATION_EXCEPTION("50001100", "Error in SQL rewrite and transformation: %s"),
+ SQL_EXECUTION_EXCEPTION("50001300", "Error in SQL execution: %s"),
+ SQL_FEDERATION_QUERY_EXCEPTION("50001500", "Error in SQL federation query: %s"),
+
+ UNKNOWN_EXCEPTION("50000999", "An unknown error occurred: %s");
+
+ private String code;
+ private String message;
+
+ CommonErrorCode(String code, String message) {
+ this.code = code;
+ this.message = message;
+ }
+}
diff --git a/src/main/java/com/mesalab/common/nacos/NacosConst.java b/src/main/java/com/mesalab/common/nacos/NacosConst.java
index a4efb2e1..87bc919f 100644
--- a/src/main/java/com/mesalab/common/nacos/NacosConst.java
+++ b/src/main/java/com/mesalab/common/nacos/NacosConst.java
@@ -36,7 +36,7 @@ public class NacosConst {
public static final String META_DATA_ID = "meta_data.json";
- public static final String SQL_DATASETS_VARIABLES = "sql_datasets_variables.json";
+ public static final String DATASETS_VARIABLES = "datasets_variables.json";
public static final String CK_FILTER_DATA_ID = "ck-filter.json";
diff --git a/src/main/java/com/mesalab/common/utils/HazelcastInstanceMapUtil.java b/src/main/java/com/mesalab/common/utils/HazelcastInstanceMapUtil.java
new file mode 100644
index 00000000..28c314ae
--- /dev/null
+++ b/src/main/java/com/mesalab/common/utils/HazelcastInstanceMapUtil.java
@@ -0,0 +1,49 @@
+package com.mesalab.common.utils;
+
+import com.alibaba.fastjson2.JSON;
+import com.alibaba.fastjson2.JSONWriter;
+import com.hazelcast.core.HazelcastInstance;
+import com.hazelcast.map.IMap;
+import com.mesalab.qgw.model.basic.QueryCache;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.stereotype.Component;
+
+/**
+ * Static accessor for the Hazelcast "adHocCache" IMap; stores QueryCache entries as JSON strings.
+ *
+ * @Classname HazelcastInstanceMapUtil
+ * @Date 2023/12/5 15:13
+ * @Author wWei
+ */
+@Component
+public class HazelcastInstanceMapUtil {
+
+ private static HazelcastInstance hazelcastInstance;
+ private static final String AD_HOC_CACHE = "adHocCache";
+
+ @Autowired
+ public HazelcastInstanceMapUtil(@Qualifier("hazelcastInstance") HazelcastInstance hazelcastInstance) {
+ HazelcastInstanceMapUtil.hazelcastInstance = hazelcastInstance;
+ }
+
+ public static IMap<String, String> retrieveMap() {
+ return hazelcastInstance.getMap(AD_HOC_CACHE);
+ }
+
+ public static QueryCache put(String key, QueryCache value) {
+ String vStr = JSON.toJSONString(value, JSONWriter.Feature.WriteNulls, JSONWriter.Feature.LargeObject);
+ String old = retrieveMap().put(key, vStr);
+ return old == null ? null : JSON.parseObject(old, QueryCache.class);
+ }
+
+ public static QueryCache get(String key) {
+ String v = retrieveMap().get(key);
+ return v == null ? null : JSON.parseObject(v, QueryCache.class);
+ }
+
+ public static QueryCache remove(String key) {
+ String d = retrieveMap().remove(key);
+ return d == null ? null : JSON.parseObject(d, QueryCache.class);
+ }
+}
diff --git a/src/main/java/com/mesalab/common/utils/JsonMapper.java b/src/main/java/com/mesalab/common/utils/JsonMapper.java
index 66b1c39d..896761bf 100644
--- a/src/main/java/com/mesalab/common/utils/JsonMapper.java
+++ b/src/main/java/com/mesalab/common/utils/JsonMapper.java
@@ -14,8 +14,6 @@ import com.fasterxml.jackson.databind.module.SimpleModule;
import com.fasterxml.jackson.databind.util.JSONPObject;
import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.commons.lang3.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.text.SimpleDateFormat;
@@ -26,223 +24,224 @@ import java.util.TimeZone;
/**
* 简单封装Jackson,实现JSON String<->Java Object的Mapper. 封装不同的输出风格,
* 使用不同的builder函数创建实例.
- *
+ *
* @author
- * @version
*/
public class JsonMapper extends ObjectMapper {
- private static final long serialVersionUID = 1L;
- private static final Log log = LogFactory.get();
-
- private static JsonMapper mapper;
-
- public JsonMapper() {
- this(Include.ALWAYS);
- }
-
- public JsonMapper(Include include) {
- // 设置输出时包含属性的风格
- if (include != null) {
- this.setSerializationInclusion(include);
- }
- // 允许单引号、允许不带引号的字段名称
- this.enableSimple();
- // 设置输入时忽略在JSON字符串中存在但Java对象实际没有的属性
- this.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES);
- // 空值
- /*
- * this.getSerializerProvider().setNullValueSerializer(new
- * JsonSerializer<Object>(){
- *
- * @Override public void serialize(Object value, JsonGenerator jgen,
- * SerializerProvider provider) throws IOException,
- * JsonProcessingException { jgen.writeString(""); } });
- */
- this.setDateFormat(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"));
- // 进行HTML解码。
- this.registerModule(new SimpleModule().addSerializer(String.class, new JsonSerializer<String>() {
- @Override
- public void serialize(String value, JsonGenerator jgen, SerializerProvider provider)
- throws IOException {
- jgen.writeString(StringEscapeUtils.unescapeHtml4(value));
- }
- }));
- // 设置时区
- this.setTimeZone(TimeZone.getDefault());// getTimeZone("GMT+8:00")
- }
-
- /**
- * 创建只输出非Null且非Empty(如List.isEmpty)的属性到Json字符串的Mapper,建议在外部接口中使用.
- */
- public static JsonMapper getInstance() {
- if (mapper == null) {
- mapper = new JsonMapper();// .enableSimple();只输出非Null且非Empty
- }
- return mapper;
- }
-
- /**
- * 创建只输出初始值被改变的属性到Json字符串的Mapper, 最节约的存储方式,建议在内部接口中使用。
- */
- public static JsonMapper nonDefaultMapper() {
- if (mapper == null) {
- mapper = new JsonMapper(Include.NON_DEFAULT);
- }
- return mapper;
- }
-
- /**
- * Object可以是POJO,也可以是Collection或数组。 如果对象为Null, 返回"null". 如果集合为空集合, 返回"[]".
- */
- public String toJson(Object object) {
- try {
- return this.writeValueAsString(object);
- } catch (IOException e) {
- log.error("write to json string error:" , e);
- return null;
- }
- }
-
- /**
- * 反序列化POJO或简单Collection如List<String>.
- *
- * 如果JSON字符串为Null或"null"字符串, 返回Null. 如果JSON字符串为"[]", 返回空集合.
- *
- * 如需反序列化复杂Collection如List<MyBean>, 请使用fromJson(String,JavaType)
- *
- * @see #fromJson(String, JavaType)
- */
- public <T> T fromJson(String jsonString, Class<T> clazz) {
- if (StringUtils.isEmpty(jsonString)) {
- return null;
- }
- try {
- return this.readValue(jsonString, clazz);
- } catch (IOException e) {
- log.error("parse json string error:" , e);
- return null;
- }
- }
-
- /**
- * 反序列化复杂Collection如List<Bean>, 先使用函數createCollectionType构造类型,然后调用本函数.
- *
- * @see #createCollectionType(Class, Class...)
- */
- @SuppressWarnings("unchecked")
- public <T> T fromJson(String jsonString, JavaType javaType) {
- if (StringUtils.isEmpty(jsonString)) {
- return null;
- }
- try {
- return (T) this.readValue(jsonString, javaType);
- } catch (IOException e) {
- log.error("parse json string error:" , e);
- return null;
- }
- }
-
- /**
- * 構造泛型的Collection Type如: ArrayList<MyBean>,
- * 则调用constructCollectionType(ArrayList.class,MyBean.class)
- * HashMap<String,MyBean>, 则调用(HashMap.class,String.class, MyBean.class)
- */
- public JavaType createCollectionType(Class<?> collectionClass, Class<?>... elementClasses) {
- return this.getTypeFactory().constructParametricType(collectionClass, elementClasses);
- }
-
- /**
- * 當JSON裡只含有Bean的部分屬性時,更新一個已存在Bean,只覆蓋該部分的屬性.
- */
- @SuppressWarnings("unchecked")
- public <T> T update(String jsonString, T object) {
- try {
- return (T) this.readerForUpdating(object).readValue(jsonString);
- } catch (JsonProcessingException e) {
- log.error("update json string:" + jsonString + " to object:" + object + " error.", e);
- } catch (IOException e) {
- log.error("update json string:" + jsonString + " to object:" + object + " error.", e);
- }
- return null;
- }
-
- /**
- * 輸出JSONP格式數據.
- */
- public String toJsonP(String functionName, Object object) {
- return toJson(new JSONPObject(functionName, object));
- }
-
- /**
- * 設定是否使用Enum的toString函數來讀寫Enum, 為False時時使用Enum的name()函數來讀寫Enum, 默認為False.
- * 注意本函數一定要在Mapper創建後, 所有的讀寫動作之前調用.
- */
- public JsonMapper enableEnumUseToString() {
- this.enable(SerializationFeature.WRITE_ENUMS_USING_TO_STRING);
- this.enable(DeserializationFeature.READ_ENUMS_USING_TO_STRING);
- return this;
- }
-
-
- /**
- * 允许单引号 允许不带引号的字段名称
- */
- public JsonMapper enableSimple() {
- this.configure(Feature.ALLOW_SINGLE_QUOTES, true);
- this.configure(Feature.ALLOW_UNQUOTED_FIELD_NAMES, true);
- return this;
- }
-
- /**
- * 取出Mapper做进一步的设置或使用其他序列化API.
- */
- public ObjectMapper getMapper() {
- return this;
- }
-
- /**
- * 对象转换为JSON字符串
- *
- * @param object
- * @return
- */
- public static String toJsonString(Object object) {
- return JsonMapper.getInstance().toJson(object);
- }
-
- /**
- * JSON字符串转换为对象
- *
- * @param jsonString
- * @param clazz
- * @return
- */
- public static <T> T fromJsonString(String jsonString, Class<T> clazz) {
- return JsonMapper.getInstance().fromJson(jsonString, clazz);
- }
-
- /**
- * JSON字符串转为集合对象
- * @param jsonString
- * @param collectionClass
- * @param elementClasses
- * @return
- */
- public static <T> T fromJsonString(String jsonString,Class<?> collectionClass, Class<?>... elementClasses) {
- return JsonMapper.getInstance().fromJson(jsonString, JsonMapper.getInstance().createCollectionType(collectionClass,elementClasses));
- }
- /**
- * JSON字符串转换为对象
- *
- * @param jsonString
- * @param clazz
- * @return
- */
- public static Object fromJsonList(String jsonString, Class<?> clazz) {
- JavaType javaType = JsonMapper.getInstance().createCollectionType(ArrayList.class, clazz);
- return JsonMapper.getInstance().fromJson(jsonString,javaType );
- }
+ private static final long serialVersionUID = 1L;
+ private static final Log log = LogFactory.get();
+
+ private static JsonMapper mapper;
+
+ public JsonMapper() {
+ this(Include.ALWAYS);
+ }
+
+ public JsonMapper(Include include) {
+ // 设置输出时包含属性的风格
+ if (include != null) {
+ this.setSerializationInclusion(include);
+ }
+ // 允许单引号、允许不带引号的字段名称
+ this.enableSimple();
+ // 设置输入时忽略在JSON字符串中存在但Java对象实际没有的属性
+ this.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES);
+ // 空值
+ /*
+ * this.getSerializerProvider().setNullValueSerializer(new
+ * JsonSerializer<Object>(){
+ *
+ * @Override public void serialize(Object value, JsonGenerator jgen,
+ * SerializerProvider provider) throws IOException,
+ * JsonProcessingException { jgen.writeString(""); } });
+ */
+ this.setDateFormat(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"));
+ // 进行HTML解码。
+ this.registerModule(new SimpleModule().addSerializer(String.class, new JsonSerializer<String>() {
+ @Override
+ public void serialize(String value, JsonGenerator jgen, SerializerProvider provider)
+ throws IOException {
+ jgen.writeString(StringEscapeUtils.unescapeHtml4(value));
+ }
+ }));
+ // 设置时区
+ this.setTimeZone(TimeZone.getDefault());// getTimeZone("GMT+8:00")
+ }
+
+ /**
+ * 创建只输出非Null且非Empty(如List.isEmpty)的属性到Json字符串的Mapper,建议在外部接口中使用.
+ */
+ public static JsonMapper getInstance() {
+ if (mapper == null) {
+ mapper = new JsonMapper();// .enableSimple();只输出非Null且非Empty
+ }
+ return mapper;
+ }
+
+ /**
+ * 创建只输出初始值被改变的属性到Json字符串的Mapper, 最节约的存储方式,建议在内部接口中使用。
+ */
+ public static JsonMapper nonDefaultMapper() {
+ if (mapper == null) {
+ mapper = new JsonMapper(Include.NON_DEFAULT);
+ }
+ return mapper;
+ }
+
+ /**
+ * Object可以是POJO,也可以是Collection或数组。 如果对象为Null, 返回"null". 如果集合为空集合, 返回"[]".
+ */
+ public String toJson(Object object) {
+ try {
+ return this.writeValueAsString(object);
+ } catch (IOException e) {
+ log.error("write to json string error:", e);
+ return null;
+ }
+ }
+
+ /**
+ * 反序列化POJO或简单Collection如List<String>.
+ * <p>
+ * 如果JSON字符串为Null或"null"字符串, 返回Null. 如果JSON字符串为"[]", 返回空集合.
+ * <p>
+ * 如需反序列化复杂Collection如List<MyBean>, 请使用fromJson(String,JavaType)
+ *
+ * @see #fromJson(String, JavaType)
+ */
+ public <T> T fromJson(String jsonString, Class<T> clazz) {
+ if (StringUtils.isEmpty(jsonString)) {
+ return null;
+ }
+ try {
+ return this.readValue(jsonString, clazz);
+ } catch (IOException e) {
+ log.error("parse json string error:", e);
+ return null;
+ }
+ }
+
+ /**
+ * 反序列化复杂Collection如List<Bean>, 先使用函數createCollectionType构造类型,然后调用本函数.
+ *
+ * @see #createCollectionType(Class, Class...)
+ */
+ @SuppressWarnings("unchecked")
+ public <T> T fromJson(String jsonString, JavaType javaType) {
+ if (StringUtils.isEmpty(jsonString)) {
+ return null;
+ }
+ try {
+ return (T) this.readValue(jsonString, javaType);
+ } catch (IOException e) {
+ log.error("parse json string error:", e);
+ return null;
+ }
+ }
+
+ /**
+ * 構造泛型的Collection Type如: ArrayList<MyBean>,
+ * 则调用constructCollectionType(ArrayList.class,MyBean.class)
+ * HashMap<String,MyBean>, 则调用(HashMap.class,String.class, MyBean.class)
+ */
+ public JavaType createCollectionType(Class<?> collectionClass, Class<?>... elementClasses) {
+ return this.getTypeFactory().constructParametricType(collectionClass, elementClasses);
+ }
+
+ /**
+ * 當JSON裡只含有Bean的部分屬性時,更新一個已存在Bean,只覆蓋該部分的屬性.
+ */
+ @SuppressWarnings("unchecked")
+ public <T> T update(String jsonString, T object) {
+ try {
+ return (T) this.readerForUpdating(object).readValue(jsonString);
+ } catch (JsonProcessingException e) {
+ log.error("update json string:" + jsonString + " to object:" + object + " error.", e);
+ } catch (IOException e) {
+ log.error("update json string:" + jsonString + " to object:" + object + " error.", e);
+ }
+ return null;
+ }
+
+ /**
+ * 輸出JSONP格式數據.
+ */
+ public String toJsonP(String functionName, Object object) {
+ return toJson(new JSONPObject(functionName, object));
+ }
+
+ /**
+ * 設定是否使用Enum的toString函數來讀寫Enum, 為False時時使用Enum的name()函數來讀寫Enum, 默認為False.
+ * 注意本函數一定要在Mapper創建後, 所有的讀寫動作之前調用.
+ */
+ public JsonMapper enableEnumUseToString() {
+ this.enable(SerializationFeature.WRITE_ENUMS_USING_TO_STRING);
+ this.enable(DeserializationFeature.READ_ENUMS_USING_TO_STRING);
+ return this;
+ }
+
+
+ /**
+ * 允许单引号 允许不带引号的字段名称
+ */
+ public JsonMapper enableSimple() {
+ this.configure(Feature.ALLOW_SINGLE_QUOTES, true);
+ this.configure(Feature.ALLOW_UNQUOTED_FIELD_NAMES, true);
+ return this;
+ }
+
+ /**
+ * 取出Mapper做进一步的设置或使用其他序列化API.
+ */
+ public ObjectMapper getMapper() {
+ return this;
+ }
+
+ /**
+ * 对象转换为JSON字符串
+ *
+ * @param object
+ * @return
+ */
+ public static String toJsonString(Object object) {
+ return JsonMapper.getInstance().toJson(object);
+ }
+
+ /**
+ * JSON字符串转换为对象
+ *
+ * @param jsonString
+ * @param clazz
+ * @return
+ */
+ public static <T> T fromJsonString(String jsonString, Class<T> clazz) {
+ return JsonMapper.getInstance().fromJson(jsonString, clazz);
+ }
+
+ /**
+ * JSON字符串转为集合对象
+ *
+ * @param jsonString
+ * @param collectionClass
+ * @param elementClasses
+ * @return
+ */
+ public static <T> T fromJsonString(String jsonString, Class<?> collectionClass, Class<?>... elementClasses) {
+ return JsonMapper.getInstance().fromJson(jsonString, JsonMapper.getInstance().createCollectionType(collectionClass, elementClasses));
+ }
+
+ /**
+ * JSON字符串转换为对象
+ *
+ * @param jsonString
+ * @param clazz
+ * @return
+ */
+ public static Object fromJsonList(String jsonString, Class<?> clazz) {
+ JavaType javaType = JsonMapper.getInstance().createCollectionType(ArrayList.class, clazz);
+ return JsonMapper.getInstance().fromJson(jsonString, javaType);
+ }
}
diff --git a/src/main/java/com/mesalab/common/utils/JsonSchemaValidator.java b/src/main/java/com/mesalab/common/utils/JsonSchemaValidator.java
new file mode 100644
index 00000000..911248b8
--- /dev/null
+++ b/src/main/java/com/mesalab/common/utils/JsonSchemaValidator.java
@@ -0,0 +1,148 @@
+package com.mesalab.common.utils;
+
+import cn.hutool.core.collection.CollectionUtil;
+import cn.hutool.core.io.resource.ClassPathResource;
+import cn.hutool.core.io.resource.Resource;
+import cn.hutool.core.util.StrUtil;
+import cn.hutool.log.Log;
+import cn.hutool.log.LogFactory;
+import com.alibaba.fastjson2.JSON;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
+import com.mesalab.common.exception.BusinessException;
+import com.mesalab.common.exception.CommonErrorCode;
+import com.mesalab.qgw.exception.QGWBusinessException;
+import com.networknt.schema.*;
+import lombok.Data;
+import org.apache.commons.io.IOUtils;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.*;
+
+
+/**
+ * Singleton validator that checks JSON payloads against JSON-schema documents
+ * loaded from the classpath (networknt json-schema-validator, spec draft V7,
+ * English validation messages).
+ *
+ * Rules are registered once via {@link #addRule(String, String)} and are
+ * dispatched by their {@code type}: {@code "schema"} rules are applied by
+ * {@link #validateSchema(String)}, {@code "dsl"} rules by
+ * {@link #validateDSL(String)}.
+ *
+ * NOTE(review): {@code rules} lives on a process-wide singleton with no
+ * synchronization and no removal API — confirm callers only register rules
+ * during single-threaded startup.
+ */
+public class JsonSchemaValidator {
+    private static final Log log = LogFactory.get();
+    private static final JsonSchemaValidator INSTANCE = new JsonSchemaValidator();
+    // Registered (schema file, type) pairs; duplicates are rejected in addRule.
+    private final List<ValidationRule> rules;
+    // Placeholder token inside schema files, replaced by the table's field names.
+    private static final String ENUM_VALUES = "\"$ENUM_VALUES$\"";
+    private static final String EXPRESSION_FIELDS = "expression_fields";
+    private final JsonSchemaFactory schemaFactory;
+    private final SchemaValidatorsConfig schemaValidatorsConfig;
+
+    /** Builds a draft-V7 schema factory and forces English validation messages. */
+    private JsonSchemaValidator() {
+        this.schemaFactory = JsonSchemaFactory
+                .builder(com.networknt.schema.JsonSchemaFactory.getInstance(SpecVersion.VersionFlag.V7))
+                .build();
+        this.schemaValidatorsConfig = new SchemaValidatorsConfig();
+        this.schemaValidatorsConfig.setLocale(Locale.ENGLISH);
+        this.rules = new ArrayList<>();
+    }
+
+    public static JsonSchemaValidator getInstance() {
+        return INSTANCE;
+    }
+
+    /**
+     * Registers a validation rule.
+     *
+     * @param jsonSchemaFileName classpath location of the JSON-schema file
+     * @param type               rule kind, {@code "schema"} or {@code "dsl"}
+     * @return this instance, for chaining
+     * @throws QGWBusinessException if either argument is blank
+     */
+    public JsonSchemaValidator addRule(String jsonSchemaFileName, String type) {
+        if (StrUtil.isNotBlank(jsonSchemaFileName) && StrUtil.isNotBlank(type)) {
+            ValidationRule rule = new ValidationRule(jsonSchemaFileName, type);
+            if (!rules.contains(rule)) {
+                rules.add(rule);
+            }
+            return this;
+        } else {
+            throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(),
+                    CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
+                    "Json-schema ruleName and type cannot be empty.");
+        }
+    }
+
+
+    /**
+     * Validates {@code jsonStr} against every registered {@code "schema"} rule.
+     * The payload's {@code "name"} entry is used as the table name in error
+     * messages, and its fields may be substituted into the schema's
+     * {@code $ENUM_VALUES$} placeholder.
+     *
+     * NOTE(review): the loop throws on the FIRST validation message, so only
+     * one violation is ever reported per call — confirm that is intended.
+     *
+     * @return {@code true} when all rules pass
+     * @throws QGWBusinessException on the first validation failure
+     */
+    public boolean validateSchema(String jsonStr) {
+        for (ValidationRule rule : rules) {
+            if (rule.getType().equals("schema")){
+                Map map = JSON.parseObject(jsonStr, Map.class);
+                String tableName = String.valueOf(map.get("name"));
+                String jsonString = getJsonSchema(rule.getJsonSchemaFileName(), map);
+                JsonSchema schema = schemaFactory.getSchema(jsonString, schemaValidatorsConfig);
+                Set<ValidationMessage> validate = schema.validate(jsonStr, InputFormat.JSON);
+                if (!CollectionUtil.isEmpty(validate)) {
+                    Iterator<ValidationMessage> iterator = validate.iterator();
+                    while (iterator.hasNext()) {
+                        ValidationMessage next = iterator.next();
+                        String message = next.getMessage();
+                        log.error(String.format("%s schema validation fail: %s", tableName, message));
+                        throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(),
+                                CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
+                                String.format("%s schema validation fail: %s", tableName, message));
+                    }
+                }
+            }
+        }
+        return true;
+    }
+
+    /**
+     * Validates {@code jsonStr} against every registered {@code "dsl"} rule.
+     * Unlike {@link #validateSchema(String)}, the schema file is used verbatim
+     * (no placeholder substitution).
+     *
+     * @return {@code true} when all rules pass
+     * @throws QGWBusinessException on the first validation failure
+     * @throws BusinessException    if the schema resource cannot be read
+     */
+    public boolean validateDSL(String jsonStr) {
+        for (ValidationRule rule : rules) {
+            if (rule.getType().equals("dsl")){
+                try {
+                    Resource resource = new ClassPathResource(rule.getJsonSchemaFileName());
+                    String jsonSchemaStr = new String(IOUtils.toByteArray(resource.getStream()), StandardCharsets.UTF_8);
+                    JsonSchema schema = schemaFactory.getSchema(jsonSchemaStr, schemaValidatorsConfig);
+                    Set<ValidationMessage> validate = schema.validate(jsonStr, InputFormat.JSON);
+                    if (!CollectionUtil.isEmpty(validate)) {
+                        Iterator<ValidationMessage> iterator = validate.iterator();
+                        while (iterator.hasNext()) {
+                            ValidationMessage next = iterator.next();
+                            String message = next.getMessage();
+                            log.error(String.format("DSL validation fail: %s", message));
+                            throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(),
+                                    CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
+                                    String.format("DSL validation fail: %s", message));
+                        }
+                    }
+                } catch (IOException e) {
+                    throw new BusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), e.getMessage());
+                }
+            }
+        }
+        return true;
+    }
+
+    /**
+     * Loads the schema file and, when it contains the {@code $ENUM_VALUES$}
+     * placeholder, replaces it with a quoted, comma-separated list of the
+     * payload's {@code fields[*].name} plus any {@code doc.expression_fields}.
+     *
+     * NOTE(review): when the placeholder is present this assumes at least one
+     * field name was appended — with an empty {@code fields} list and no
+     * expression fields, {@code sb.setLength(-1)} would throw
+     * StringIndexOutOfBoundsException. Confirm upstream guarantees fields.
+     */
+    private static String getJsonSchema(String jsonSchemaFileName, Map map) {
+        try {
+            Resource resource = new ClassPathResource(jsonSchemaFileName);
+            String jsonSchemaStr = new String(IOUtils.toByteArray(resource.getStream()), StandardCharsets.UTF_8);
+            StringBuffer sb = new StringBuffer();
+            if (jsonSchemaStr.contains(ENUM_VALUES)) {
+                List<Map<String, Object>> fields = (List<Map<String, Object>>) map.get("fields");
+                for (Map<String, Object> field : fields) {
+                    String name = String.valueOf(field.get("name"));
+                    sb.append("\"").append(name).append("\"").append(",");
+                }
+                if (map.containsKey("doc")) {
+                    Map docMap = (Map) map.get("doc");
+                    if (docMap.containsKey(EXPRESSION_FIELDS)) {
+                        List<Map<String, Object>> expressionFields = (List<Map<String, Object>>) docMap.get(EXPRESSION_FIELDS);
+                        for (Map<String, Object> field : expressionFields) {
+                            String name = String.valueOf(field.get("name"));
+                            sb.append("\"").append(name).append("\"").append(",");
+                        }
+                    }
+                }
+                // Drop the trailing comma appended by the loops above.
+                sb.setLength(sb.length() - 1);
+                return jsonSchemaStr.replace(ENUM_VALUES, sb.toString());
+            } else {
+                return jsonSchemaStr;
+            }
+        } catch (IOException e) {
+            throw new BusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), e.getMessage());
+        }
+    }
+
+    /** Immutable (schema file, type) pair; equality from Lombok's @Data drives de-duplication in addRule. */
+    @Data
+    public static class ValidationRule {
+        private final String jsonSchemaFileName;
+        private final String type;
+    }
+}
diff --git a/src/main/java/com/mesalab/common/utils/MDCUtil.java b/src/main/java/com/mesalab/common/utils/MDCUtil.java
new file mode 100644
index 00000000..88edd2c8
--- /dev/null
+++ b/src/main/java/com/mesalab/common/utils/MDCUtil.java
@@ -0,0 +1,23 @@
+package com.mesalab.common.utils;
+
+import org.slf4j.MDC;
+
+/**
+ * Helpers for the SLF4J {@link MDC}-based trace id used in request logging.
+ *
+ * @Classname MDCUtil
+ * @Date 2024/1/10 18:11
+ * @Author wWei
+ */
+public class MDCUtil {
+
+    /** MDC key under which the per-request trace id is stored. */
+    public static final String TRACE_ID = "traceId";
+
+    /** Returns the current thread's trace id from MDC, or null if none was set. */
+    public static String getTraceId() {
+        return MDC.get(TRACE_ID);
+    }
+
+    /** Generates a fresh trace id. NOTE(review): relies on RandomUtil resolving from the same package — confirm. */
+    public static String generateTraceId() {
+        return RandomUtil.getUUID();
+    }
+}
diff --git a/src/main/java/com/mesalab/common/utils/QueryCacheUtils.java b/src/main/java/com/mesalab/common/utils/QueryCacheUtils.java
index 92a95f1a..8385596a 100644
--- a/src/main/java/com/mesalab/common/utils/QueryCacheUtils.java
+++ b/src/main/java/com/mesalab/common/utils/QueryCacheUtils.java
@@ -10,6 +10,7 @@ import org.slf4j.LoggerFactory;
import java.util.List;
import java.util.concurrent.TimeUnit;
+@Deprecated
public class QueryCacheUtils {
private static Cache<String,Object> cache;
diff --git a/src/main/java/com/mesalab/common/utils/RandomNumberGenerator.java b/src/main/java/com/mesalab/common/utils/RandomNumberGenerator.java
new file mode 100644
index 00000000..784e3dd4
--- /dev/null
+++ b/src/main/java/com/mesalab/common/utils/RandomNumberGenerator.java
@@ -0,0 +1,80 @@
+package com.mesalab.common.utils;
+
+import java.text.DecimalFormat;
+import java.util.Random;
+
+/**
+ * Generates pseudo-random numeric values, either within a relative band
+ * (1%–10% above or below an initial value) or within an absolute [min, max]
+ * range. Used for producing synthetic metric-like values.
+ *
+ * NOTE(review): each call constructs a new {@link Random}; consider
+ * ThreadLocalRandom if these methods are hot — confirm call frequency.
+ *
+ * @Classname RandomNumberGenerator
+ * @Date 2024/3/7 18:16
+ * @Author wWei
+ */
+
+public class RandomNumberGenerator {
+    // Relative change band: 1% .. 10% of the initial value.
+    private static final double MIN_PERCENTAGE_CHANGE = 0.01;
+    private static final double MAX_PERCENTAGE_CHANGE = 0.1;
+    // Range used to synthesize an initial value when none is supplied.
+    private static final int MIN_INIT_VALUE = 1000;
+    private static final int MAX_INIT_VALUE = 1000000;
+
+    /**
+     * Desc: Randomly generate a double within the range determined by the initial value and a percentage change ranging from 1% to 10%.
+     *
+     * @param initialValue initial value; when null a random value in [1000, 1000000) is used
+     * @param isDesc true decrement, false increment, null random direction
+     * @return {@link double} result rounded to 2 decimal places
+     * @created by wWei
+     * @date 2024/3/7 18:22
+     */
+    public static double generateRelativeRandomDouble(Double initialValue, Boolean isDesc) {
+        Random random = new Random();
+        if (initialValue == null) {
+            initialValue = MIN_INIT_VALUE + (MAX_INIT_VALUE - MIN_INIT_VALUE) * random.nextDouble();
+        }
+        // NOTE(review): DecimalFormat is locale-sensitive; in locales with a ','
+        // decimal separator, Double.parseDouble on its output would throw — confirm
+        // the JVM default locale, or format with Locale.ROOT.
+        DecimalFormat df = new DecimalFormat("#.##");
+
+        if (isDesc == null) {
+            isDesc = random.nextBoolean();
+        }
+        double percentageChange = MIN_PERCENTAGE_CHANGE + random.nextDouble() * (MAX_PERCENTAGE_CHANGE - MIN_PERCENTAGE_CHANGE);
+        double v = !isDesc ? initialValue * (1 + percentageChange) : initialValue * (1 - percentageChange);
+        return Double.parseDouble(df.format(v));
+    }
+
+    /**
+     * Desc: Randomly generate an integer within the range determined by the initial value and a percentage change ranging from 1% to 10%.
+     *
+     * @param initialValue initial value; when null a random value in [1000, 1000000] is used
+     * @param isDesc true decrement, false increment, null random direction
+     * @return {@link int} result truncated toward zero
+     * @created by wWei
+     * @date 2024/3/7 18:22
+     */
+    public static int generateRelativeRandomInt(Integer initialValue, Boolean isDesc) {
+        Random random = new Random();
+        if (initialValue == null) {
+            initialValue = random.nextInt(MAX_INIT_VALUE - MIN_INIT_VALUE + 1) + MIN_INIT_VALUE;
+        }
+
+        if (isDesc == null) {
+            isDesc = random.nextBoolean();
+        }
+        double percentageChange = MIN_PERCENTAGE_CHANGE + random.nextDouble() * (MAX_PERCENTAGE_CHANGE - MIN_PERCENTAGE_CHANGE);
+        double v = !isDesc ? initialValue * (1 + percentageChange) : initialValue * (1 - percentageChange);
+        return (int) v;
+    }
+
+    /**
+     * Desc: Randomly generate an integer within the range of [min, max]
+     *
+     * @param min inclusive lower bound
+     * @param max inclusive upper bound; must be >= min
+     * @return {@link int}
+     * @created by wWei
+     * @date 2024/3/11 15:49
+     */
+    public static int generateRangeRandomInt(int min, int max) {
+        return new Random().nextInt(max - min + 1) + min;
+    }
+
+}
diff --git a/src/main/java/com/mesalab/common/utils/SavedQueryResultUtils.java b/src/main/java/com/mesalab/common/utils/SavedQueryResultUtils.java
index 195fc605..b484b4c2 100644
--- a/src/main/java/com/mesalab/common/utils/SavedQueryResultUtils.java
+++ b/src/main/java/com/mesalab/common/utils/SavedQueryResultUtils.java
@@ -5,13 +5,12 @@ import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.mesalab.common.entity.BaseResult;
import com.mesalab.common.entity.BaseResultGenerator;
-import com.mesalab.qgw.model.basic.QueryProfile;
+import com.mesalab.qgw.model.basic.SQLQueryContext;
import com.mesalab.qgw.model.basic.HBaseAPISource;
-import com.mesalab.qgw.service.impl.QueryServiceImpl;
+import com.mesalab.qgw.service.SQLSyncQueryService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
-import javax.annotation.PostConstruct;
import java.util.List;
import java.util.Map;
@@ -23,16 +22,9 @@ import java.util.Map;
@Component
public class SavedQueryResultUtils {
private static final Log log = LogFactory.get();
- @Autowired
- private HBaseAPISource hBaseAPISourceCache;
private static HBaseAPISource hBaseAPISource;
-
- @PostConstruct
- public void init() {
- hBaseAPISource = this.hBaseAPISourceCache;
- }
-
+ private static SQLSyncQueryService sqlSyncQueryService;
public static BaseResult<Object> getByRowKey(List<String> rowKeys) {
List<Map<String, Object>> result = query(rowKeys);
@@ -41,14 +33,23 @@ public class SavedQueryResultUtils {
}
private static List<Map<String, Object>> query(List<String> rowKeys) {
- QueryServiceImpl queryService = new QueryServiceImpl();
- QueryProfile param = new QueryProfile();
+ SQLQueryContext param = new SQLQueryContext();
StringBuffer sb = new StringBuffer();
sb.append("select ROWKEY, ").append(hBaseAPISource.getColumnName());
sb.append(" from ").append(hBaseAPISource.getDbName()).append(".").append(hBaseAPISource.getTableName());
sb.append(" where ").append("ROWKEY IN ('").append(StrUtil.join("', '", rowKeys)).append("')");
- param.setQuery(sb.toString());
- BaseResult baseResult = queryService.executeQuery(param);
+ param.setOriginalSQL(sb.toString());
+ BaseResult baseResult = sqlSyncQueryService.executeQuery(param);
return (List<Map<String, Object>>) baseResult.getData();
}
+
+ @Autowired
+ public void setJobExecuteService(HBaseAPISource hBaseAPISource) {
+ SavedQueryResultUtils.hBaseAPISource = hBaseAPISource;
+ }
+
+ @Autowired
+ public void sqlSyncQueryService(SQLSyncQueryService sqlSyncQueryService) {
+ SavedQueryResultUtils.sqlSyncQueryService = sqlSyncQueryService;
+ }
}
diff --git a/src/main/java/com/mesalab/common/utils/sqlparser/AutoPeriodHelper.java b/src/main/java/com/mesalab/common/utils/sqlparser/AutoPeriodHelper.java
new file mode 100644
index 00000000..0ce62ff6
--- /dev/null
+++ b/src/main/java/com/mesalab/common/utils/sqlparser/AutoPeriodHelper.java
@@ -0,0 +1,469 @@
+package com.mesalab.common.utils.sqlparser;
+
+import cn.hutool.core.date.DateTime;
+import cn.hutool.core.date.DateUnit;
+import cn.hutool.core.date.DateUtil;
+import cn.hutool.core.util.StrUtil;
+import cn.hutool.log.Log;
+import cn.hutool.log.LogFactory;
+import com.alibaba.fastjson2.JSON;
+import com.google.common.collect.Maps;
+import com.mesalab.common.utils.SpringContextUtil;
+import com.mesalab.qgw.constant.DslIdentifierNameConst;
+import com.mesalab.qgw.model.basic.DSLQueryRequestParam;
+import com.mesalab.qgw.service.DatabaseService;
+import net.sf.jsqlparser.JSQLParserException;
+import net.sf.jsqlparser.expression.Expression;
+import net.sf.jsqlparser.expression.ExpressionVisitorAdapter;
+import net.sf.jsqlparser.expression.Function;
+import net.sf.jsqlparser.expression.StringValue;
+import net.sf.jsqlparser.parser.CCJSqlParserUtil;
+import net.sf.jsqlparser.statement.ExplainStatement;
+import net.sf.jsqlparser.statement.Statement;
+import net.sf.jsqlparser.statement.select.*;
+import net.sf.jsqlparser.util.TablesNamesFinder;
+import org.joda.time.Period;
+
+import java.util.*;
+
+/**
+ * Rewrites "auto granularity" placeholders in SQL statements (via JSQLParser
+ * visitors) and DSL query parameters into concrete ISO-8601 periods.
+ *
+ * CHART_GRANULARITY(start, end) / SAMPLE_GRANULARITY(start, end) pseudo-function
+ * calls — appearing as the second argument of TIME_FLOOR_WITH_FILL or RATE —
+ * are replaced by a standard period chosen from the query duration and the
+ * data's ingestion/compaction periods (read from each table's schema doc
+ * "measurements" entry).
+ */
+public class AutoPeriodHelper {
+    private static final Log log = LogFactory.get();
+    // Resolved from the Spring context at class-load time; requires the context to be up.
+    private static final DatabaseService databaseService = (DatabaseService) SpringContextUtil.getBean("databaseService");
+    private static final String TIME_FLOOR_WITH_FILL = "TIME_FLOOR_WITH_FILL";
+    private static final String RATE = "RATE";
+    private static final String CHART_GRANULARITY = "CHART_GRANULARITY";
+    private static final String SAMPLE_GRANULARITY = "SAMPLE_GRANULARITY";
+    private static final String SCHEMA_DOC_MEASUREMENTS_KEY = "measurements";
+    private static final String SKIP_COMPACTION_OFFSET_FROM_LATEST = "skip_compaction_offset_from_latest";
+    private static final String COMPACTION_PERIOD = "compaction_period";
+    private static final String INGESTION_PERIOD = "ingestion_period";
+    private static final String PT0S = "PT0S";
+    private static final String PT1S = "PT1S";
+    private static final String P1D = "P1D";
+    // TreeMap: ascending key order matters — getStandardPeriod picks the FIRST
+    // bucket whose max duration (seconds) covers the query span.
+    private static final Map<Long, String> STANDARD_CHART_PERIOD_MAP = new TreeMap<>();
+    private static final Map<Long, String> STANDARD_SAMPLE_PERIOD_MAP = new TreeMap<>();
+
+    static {
+        // query duration upper bound (s) -> chart granularity period
+        STANDARD_CHART_PERIOD_MAP.put(300L, "PT1S");
+        STANDARD_CHART_PERIOD_MAP.put(3600L, "PT30S");
+        STANDARD_CHART_PERIOD_MAP.put(3600 * 12L, "PT1M");
+        STANDARD_CHART_PERIOD_MAP.put(86400 * 3L, "PT5M");
+        STANDARD_CHART_PERIOD_MAP.put(86400 * 30L, "PT1H");
+        STANDARD_CHART_PERIOD_MAP.put(Long.MAX_VALUE, "P1D");
+        // sample granularity uses one-step finer periods for the same durations
+        STANDARD_SAMPLE_PERIOD_MAP.put(3600L, "PT1S");
+        STANDARD_SAMPLE_PERIOD_MAP.put(3600 * 12L, "PT30S");
+        STANDARD_SAMPLE_PERIOD_MAP.put(86400 * 3L, "PT1M");
+        STANDARD_SAMPLE_PERIOD_MAP.put(86400 * 30L, "PT5M");
+        STANDARD_SAMPLE_PERIOD_MAP.put(Long.MAX_VALUE, "PT1H");
+    }
+
+    /** Static utility; not instantiable. */
+    private AutoPeriodHelper() {
+    }
+
+    /**
+     * Rewrites auto-granularity placeholders in a parsed SQL statement
+     * (plain SELECTs and EXPLAIN-wrapped SELECTs); other statement types are
+     * returned unchanged.
+     */
+    public static Statement buildSqlGranularity(Statement statement) {
+        DateTime now = new DateTime();
+        if (statement instanceof ExplainStatement) {
+            ExplainStatement explainStatement = (ExplainStatement) statement;
+            buildSelectStatement(now, explainStatement.getStatement());
+            return explainStatement;
+        } else if (statement instanceof Select) {
+            buildSelectStatement(now, (Select) statement);
+            return statement;
+        }
+        return statement;
+    }
+
+    /**
+     * Resolves the granularity of a DSL query in place: empty granularity is
+     * derived from the single "start/end" interval; CHART_GRANULARITY(...) and
+     * SAMPLE_GRANULARITY(...) expressions are parsed and replaced.
+     *
+     * NOTE(review): in the SAMPLE_GRANULARITY branch the parsed result is
+     * discarded (parseGranularity's return is not assigned) — confirm whether
+     * that is intentional.
+     */
+    public static void buildDslGranularity(DSLQueryRequestParam dslQueryRequestParam) {
+        Map<String, Map<String, Object>> measurements = getMeasurementsMap(dslQueryRequestParam);
+        if (measurements.isEmpty()) {
+            return;
+        }
+        String granularity = dslQueryRequestParam.getGranularity();
+        if (StrUtil.isEmpty(granularity)) {
+            List<String> intervals = dslQueryRequestParam.getIntervals();
+            if (intervals != null && intervals.size() == 1) {
+                String[] split = intervals.get(0).split("/");
+                DateTime start = DateUtil.parse(split[0]);
+                DateTime end = DateUtil.parse(split[1]);
+                dslQueryRequestParam.setGranularity(getGranularityPeriod(new DateTime(), start, end, measurements, true));
+            }
+        } else {
+            if (granularity.startsWith(CHART_GRANULARITY)) {
+                dslQueryRequestParam.setGranularity(parseGranularity(granularity, measurements, dslQueryRequestParam, true));
+            } else if (granularity.startsWith(SAMPLE_GRANULARITY)) {
+                parseGranularity(granularity, measurements, dslQueryRequestParam, false);
+            }
+        }
+    }
+
+    /** Collects each referenced table's "measurements" schema doc, then walks the select body with the visitor chain. */
+    private static void buildSelectStatement(DateTime now, Select select) {
+        Map<String, Map<String, Object>> measurements = Maps.newHashMap();
+        List<String> tableList = SQLHelper.getTableName(select);
+        for (String table : tableList) {
+            String measurement = databaseService.getValueByKeyInSchemaDoc(table, SCHEMA_DOC_MEASUREMENTS_KEY);
+            if (StrUtil.isNotEmpty(measurement)) {
+                measurements.put(table, JSON.parseObject(measurement, Map.class));
+            }
+        }
+        select.getSelectBody().accept(new SelectAdapter(now, measurements));
+    }
+
+    /**
+     * Parses a "GRANULARITY('start','end')" expression and returns the derived
+     * period; on parse failure the request's existing granularity is returned.
+     */
+    private static String parseGranularity(String granularity, Map<String, Map<String, Object>> measurements, DSLQueryRequestParam dslQueryRequestParam, boolean isChartGranularity) {
+        try {
+            Expression expression = CCJSqlParserUtil.parseExpression(granularity);
+            if (expression instanceof Function) {
+                Function fun = (Function) expression;
+                Expression expression1 = fun.getParameters().getExpressions().get(0);
+                Expression expression2 = fun.getParameters().getExpressions().get(1);
+                DateTime start = DateUtil.parse(((StringValue) expression1).getValue());
+                DateTime end = DateUtil.parse(((StringValue) expression2).getValue());
+                return getGranularityPeriod(new DateTime(), start, end, measurements, isChartGranularity);
+            }
+        } catch (JSQLParserException e) {
+            log.error("Parse DSL auto granularity error", e);
+        }
+        return dslQueryRequestParam.getGranularity();
+    }
+
+    /**
+     * Resolves the measurements map for a DSL query: from the explicit data
+     * source when given, otherwise from the tables mapped to the DSL name.
+     * IP-learning DSLs are exempted (empty map short-circuits the rewrite).
+     */
+    private static Map<String, Map<String, Object>> getMeasurementsMap(DSLQueryRequestParam dslQueryRequestParam) {
+        if (DslIdentifierNameConst.IP_LEARNING_FQDN_RELATE_IP.equalsIgnoreCase(dslQueryRequestParam.getName())
+                || DslIdentifierNameConst.IP_LEARNING_ACTIVE_IP.equalsIgnoreCase(dslQueryRequestParam.getName())) {
+            return Maps.newHashMap();
+        }
+        Map<String, Map<String, Object>> measurements = Maps.newHashMap();
+        if (StrUtil.isEmpty(dslQueryRequestParam.getDataSource())) {
+            Set<String> tables = DslIdentifierNameConst.IDENTIFIER_NAME_SOURCE_MAPPING.get(dslQueryRequestParam.getName());
+            if (tables != null && !tables.isEmpty()) {
+                for (String table : tables) {
+                    String measurement = databaseService.getValueByKeyInSchemaDoc(table, SCHEMA_DOC_MEASUREMENTS_KEY);
+                    if (StrUtil.isNotEmpty(measurement)) {
+                        measurements.put(table, JSON.parseObject(measurement, Map.class));
+                    }
+                }
+            }
+        } else {
+            String measurement = databaseService.getValueByKeyInSchemaDoc(dslQueryRequestParam.getDataSource(), SCHEMA_DOC_MEASUREMENTS_KEY);
+            if (StrUtil.isNotEmpty(measurement)) {
+                measurements.put(dslQueryRequestParam.getDataSource(), JSON.parseObject(measurement, Map.class));
+            }
+        }
+        return measurements;
+    }
+
+    /**
+     * Picks the standard period for a [startTime, endTime) query: merges the
+     * tables' effective granularity settings, decides whether compacted or
+     * ingested data applies (based on the skip-compaction offset from now),
+     * then maps (duration, data period) onto the standard period tables.
+     */
+    private static String getGranularityPeriod(DateTime now, DateTime startTime, DateTime endTime, Map<String, Map<String, Object>> measurements, boolean isChartGranularity) {
+        Map<String, String> measurement = getMeasurementWithEffectiveGranularity(measurements);
+        long duration = DateUtil.between(startTime, endTime, DateUnit.SECOND, false);
+        DateTime agoTime = DateUtil.offsetSecond(now, -Period.parse(measurement.get(SKIP_COMPACTION_OFFSET_FROM_LATEST)).toStandardSeconds().getSeconds());
+        String dataPeriod = getDataPeriod(measurement, agoTime, startTime);
+        int dataPeriodSeconds = Period.parse(dataPeriod).toStandardSeconds().getSeconds();
+        return getStandardPeriod(isChartGranularity, duration, dataPeriodSeconds);
+    }
+
+    /**
+     * Merges the per-table measurement settings into a single map keeping,
+     * across tables, the SMALLEST skip_compaction_offset_from_latest and the
+     * LARGEST compaction/ingestion period; missing entries get defaults.
+     */
+    private static Map<String, String> getMeasurementWithEffectiveGranularity(Map<String, Map<String, Object>> measurements) {
+        Map<String, String> measurement = Maps.newHashMap();
+        measurements.values().forEach(m -> {
+            m.forEach((k, v) -> {
+                // Only the three recognized keys participate in the merge.
+                if (!SKIP_COMPACTION_OFFSET_FROM_LATEST.equalsIgnoreCase(k)
+                        && !COMPACTION_PERIOD.equalsIgnoreCase(k)
+                        && !INGESTION_PERIOD.equalsIgnoreCase(k)) {
+                    return;
+                }
+                if (measurement.containsKey(k)) {
+                    String oldPeriod = measurement.get(k);
+                    if (SKIP_COMPACTION_OFFSET_FROM_LATEST.equalsIgnoreCase(k)) {
+                        if (Period.parse(v.toString()).toStandardSeconds().getSeconds() < Period.parse(oldPeriod).toStandardSeconds().getSeconds()) {
+                            measurement.put(k, v.toString());
+                        }
+                    } else {
+                        if (Period.parse(v.toString()).toStandardSeconds().getSeconds() > Period.parse(oldPeriod).toStandardSeconds().getSeconds()) {
+                            measurement.put(k, v.toString());
+                        }
+                    }
+                } else {
+                    measurement.put(k, v.toString());
+                }
+            });
+        });
+        setDefaultValues(measurement);
+        return measurement;
+    }
+
+    /** Fills in defaults: zero skip offset; compaction/ingestion fall back to each other, or to PT1S when both absent. */
+    private static void setDefaultValues(Map<String, String> measurement) {
+        String compactionPeriod = measurement.getOrDefault(COMPACTION_PERIOD, null);
+        String ingestionPeriod = measurement.getOrDefault(INGESTION_PERIOD, null);
+        measurement.putIfAbsent(SKIP_COMPACTION_OFFSET_FROM_LATEST, PT0S);
+
+        if (compactionPeriod == null && ingestionPeriod == null) {
+            measurement.put(COMPACTION_PERIOD, PT1S);
+            measurement.put(INGESTION_PERIOD, PT1S);
+        } else if (compactionPeriod == null) {
+            measurement.put(COMPACTION_PERIOD, ingestionPeriod);
+        } else if (ingestionPeriod == null) {
+            measurement.put(INGESTION_PERIOD, compactionPeriod);
+        }
+
+    }
+
+    /** Uses the compaction period when the query starts before splitTime (data already compacted), else the ingestion period. */
+    private static String getDataPeriod(Map<String, String> measurement, DateTime splitTime, DateTime startTime) {
+        String dataPeriod = null;
+        if (DateUtil.compare(splitTime, startTime) > 0) {
+            dataPeriod = measurement.get(COMPACTION_PERIOD);
+        }
+        if (StrUtil.isEmpty(dataPeriod)) {
+            dataPeriod = measurement.get(INGESTION_PERIOD);
+        }
+        return dataPeriod;
+    }
+
+    /**
+     * Finds the first standard bucket (ascending by duration) that both covers
+     * the query span and is no finer than the data period; defaults to P1D.
+     */
+    private static String getStandardPeriod(boolean isChartGranularity, long duration, int dataPeriodSeconds) {
+        Map<Long, String> standard = isChartGranularity ? STANDARD_CHART_PERIOD_MAP : STANDARD_SAMPLE_PERIOD_MAP;
+        for (Map.Entry<Long, String> entry : standard.entrySet()) {
+            if (duration <= entry.getKey() && dataPeriodSeconds <= Period.parse(entry.getValue()).toStandardSeconds().getSeconds()) {
+                return entry.getValue();
+            }
+        }
+        return P1D;
+    }
+
+    /** Visits each clause of a SELECT (items, FROM, WHERE, GROUP BY, HAVING, ORDER BY) and recurses into set operations. */
+    private static class SelectAdapter extends SelectVisitorAdapter {
+        private final DateTime now;
+        private final Map<String, Map<String, Object>> measurements;
+
+        public SelectAdapter(DateTime now, Map<String, Map<String, Object>> measurements) {
+            this.now = now;
+            this.measurements = measurements;
+        }
+
+        @Override
+        public void visit(PlainSelect plainSelect) {
+            List<SelectItem> selectItems = plainSelect.getSelectItems();
+            if (selectItems != null) {
+                for (SelectItem selectItem : selectItems) {
+                    SelectItemAdapter selectItemAdapter = new SelectItemAdapter(now, measurements);
+                    selectItem.accept(selectItemAdapter);
+                }
+            }
+
+            FromItem fromItem = plainSelect.getFromItem();
+            if (fromItem != null) {
+                fromItem.accept(new FromItemAdapter(now, measurements));
+            }
+
+            Expression where = plainSelect.getWhere();
+            if (where != null) {
+                where.accept(new ExpressionAdapter(now, measurements));
+            }
+
+            GroupByElement groupBy = plainSelect.getGroupBy();
+            if (groupBy != null && groupBy.getGroupByExpressionList() != null && !groupBy.getGroupByExpressionList().getExpressions().isEmpty()) {
+                groupBy.accept(new GroupByAdapter(now, measurements));
+            }
+
+            Expression having = plainSelect.getHaving();
+            if (having != null) {
+                having.accept(new ExpressionAdapter(now, measurements));
+            }
+
+            List<OrderByElement> orderByElements = plainSelect.getOrderByElements();
+            if (orderByElements != null) {
+                for (OrderByElement orderByElement : orderByElements) {
+                    orderByElement.accept(new OrderByAdapter(now, measurements));
+                }
+            }
+        }
+
+        @Override
+        public void visit(SetOperationList setOpList) {
+            // UNION/INTERSECT/etc.: rewrite each plain-select operand.
+            for (SelectBody select : setOpList.getSelects()) {
+                if (select instanceof PlainSelect) {
+                    select.accept(this);
+                }
+            }
+        }
+    }
+
+    /** Routes each select-list expression into the function-rewriting visitor. */
+    private static class SelectItemAdapter extends SelectItemVisitorAdapter {
+
+        private final DateTime now;
+        private final Map<String, Map<String, Object>> measurements;
+
+        public SelectItemAdapter(DateTime now, Map<String, Map<String, Object>> measurements) {
+            this.now = now;
+            this.measurements = measurements;
+        }
+
+        @Override
+        public void visit(SelectExpressionItem selectExpressionItem) {
+            Expression expression = selectExpressionItem.getExpression();
+            FunctionAdapter functionAdapter = new FunctionAdapter(now, measurements);
+            expression.accept(functionAdapter);
+        }
+
+    }
+
+    /** Recurses into sub-selects appearing in the FROM clause. */
+    private static class FromItemAdapter extends FromItemVisitorAdapter {
+        private final DateTime now;
+        private final Map<String, Map<String, Object>> measurements;
+
+        public FromItemAdapter(DateTime now, Map<String, Map<String, Object>> measurements) {
+            this.now = now;
+            this.measurements = measurements;
+        }
+
+        @Override
+        public void visit(SubSelect subSelect) {
+            SelectBody selectBody = subSelect.getSelectBody();
+            selectBody.accept(new SelectAdapter(now, measurements));
+        }
+    }
+
+    /** Handles WHERE/HAVING expressions: recurses into sub-selects and hands functions to FunctionAdapter. */
+    private static class ExpressionAdapter extends ExpressionVisitorAdapter {
+        private final DateTime now;
+        private final Map<String, Map<String, Object>> measurements;
+
+        public ExpressionAdapter(DateTime now, Map<String, Map<String, Object>> measurements) {
+            this.now = now;
+            this.measurements = measurements;
+        }
+
+        @Override
+        public void visit(SubSelect subSelect) {
+            SelectBody selectBody = subSelect.getSelectBody();
+            selectBody.accept(new SelectAdapter(now, measurements));
+        }
+
+        @Override
+        public void visit(Function function) {
+            FunctionAdapter functionAdapter = new FunctionAdapter(now, measurements);
+            function.accept(functionAdapter);
+        }
+    }
+
+    /** Forwards function expressions found in GROUP BY to the function rewriter. */
+    private static class GroupByAdapter implements GroupByVisitor {
+        private final DateTime now;
+        private final Map<String, Map<String, Object>> measurements;
+
+        public GroupByAdapter(DateTime now, Map<String, Map<String, Object>> measurements) {
+            this.now = now;
+            this.measurements = measurements;
+        }
+
+        @Override
+        public void visit(GroupByElement groupByElement) {
+            if (groupByElement.getGroupByExpressionList() == null
+                    || groupByElement.getGroupByExpressionList().getExpressions() == null) {
+                return;
+            }
+            for (Expression expression : groupByElement.getGroupByExpressionList().getExpressions()) {
+                if (!(expression instanceof Function)) {
+                    continue;
+                }
+                FunctionAdapter functionAdapter = new FunctionAdapter(now, measurements);
+                expression.accept(functionAdapter);
+            }
+        }
+    }
+
+    /** Forwards function expressions found in ORDER BY to the function rewriter. */
+    private static class OrderByAdapter implements OrderByVisitor {
+        private final DateTime now;
+        private final Map<String, Map<String, Object>> measurements;
+
+        public OrderByAdapter(DateTime now, Map<String, Map<String, Object>> measurements) {
+            this.now = now;
+            this.measurements = measurements;
+        }
+
+        @Override
+        public void visit(OrderByElement orderBy) {
+            if (orderBy.getExpression() == null) {
+                return;
+            }
+            if (!(orderBy.getExpression() instanceof Function)) {
+                return;
+            }
+            FunctionAdapter functionAdapter = new FunctionAdapter(now, measurements);
+            orderBy.getExpression().accept(functionAdapter);
+
+        }
+    }
+
+    /**
+     * Core rewriter: when it meets TIME_FLOOR_WITH_FILL(...) or RATE(...),
+     * it replaces a CHART_GRANULARITY/SAMPLE_GRANULARITY second argument with
+     * the computed period; otherwise it recurses into function arguments.
+     */
+    private static class FunctionAdapter extends ExpressionVisitorAdapter {
+
+        private final DateTime now;
+        private final Map<String, Map<String, Object>> measurements;
+
+        public FunctionAdapter(DateTime now, Map<String, Map<String, Object>> measurements) {
+            this.now = now;
+            this.measurements = measurements;
+        }
+
+        @Override
+        public void visit(Function function) {
+            if (isTimeFloorWithFill(function)) {
+                buildNewFunction(function);
+            } else if (isRate(function)) {
+                buildNewFunction(function);
+            } else {
+                Optional.ofNullable(function.getParameters()).ifPresent(o -> {
+                    for (Expression expression : o.getExpressions()) {
+                        expression.accept(this);
+                    }
+                });
+            }
+        }
+
+        /** Replaces the placeholder second argument in place with a quoted period literal. */
+        private void buildNewFunction(Function function) {
+            Expression period = function.getParameters().getExpressions().get(1);
+            if (isChartGranularity(period)) {
+                Function periodFunction = (Function) period;
+                DateTime startTime = DateUtil.parse(((StringValue) periodFunction.getParameters().getExpressions().get(0)).getValue());
+                DateTime endTime = DateUtil.parse(((StringValue) periodFunction.getParameters().getExpressions().get(1)).getValue());
+                String chartGranularityPeriod = getGranularityPeriod(now, startTime, endTime, measurements, true);
+                function.getParameters().getExpressions().set(1, new StringValue("'" + chartGranularityPeriod + "'"));
+            } else if (isSampleGranularity(period)) {
+                Function periodFunction = (Function) period;
+                DateTime startTime = DateUtil.parse(((StringValue) periodFunction.getParameters().getExpressions().get(0)).getValue());
+                DateTime endTime = DateUtil.parse(((StringValue) periodFunction.getParameters().getExpressions().get(1)).getValue());
+                String chartGranularityPeriod = getGranularityPeriod(now, startTime, endTime, measurements, false);
+                function.getParameters().getExpressions().set(1, new StringValue("'" + chartGranularityPeriod + "'"));
+            }
+        }
+    }
+
+    /** True for CHART_GRANULARITY(start, end) with exactly two arguments. */
+    private static boolean isChartGranularity(Expression expression) {
+        if (!(expression instanceof Function)) {
+            return false;
+        }
+        Function function = (Function) expression;
+        return CHART_GRANULARITY.equalsIgnoreCase(function.getName())
+                && function.getParameters().getExpressions().size() == 2;
+    }
+
+    /** True for SAMPLE_GRANULARITY(start, end) with exactly two arguments. */
+    private static boolean isSampleGranularity(Expression expression) {
+        if (!(expression instanceof Function)) {
+            return false;
+        }
+        Function function = (Function) expression;
+        return SAMPLE_GRANULARITY.equalsIgnoreCase(function.getName())
+                && function.getParameters().getExpressions().size() == 2;
+    }
+
+    /** True for TIME_FLOOR_WITH_FILL with at least two arguments. */
+    private static boolean isTimeFloorWithFill(Expression expression) {
+        if (!(expression instanceof Function)) {
+            return false;
+        }
+        Function function = (Function) expression;
+        return TIME_FLOOR_WITH_FILL.equalsIgnoreCase(function.getName())
+                && function.getParameters().getExpressions().size() >= 2;
+    }
+
+    /** True for RATE with at least two arguments. */
+    private static boolean isRate(Expression expression) {
+        if (!(expression instanceof Function)) {
+            return false;
+        }
+        Function function = (Function) expression;
+        return RATE.equalsIgnoreCase(function.getName())
+                && function.getParameters().getExpressions().size() >= 2;
+    }
+
+}
diff --git a/src/main/java/com/mesalab/common/utils/sqlparser/ColumnCategoryHelper.java b/src/main/java/com/mesalab/common/utils/sqlparser/ColumnCategoryHelper.java
new file mode 100644
index 00000000..61705022
--- /dev/null
+++ b/src/main/java/com/mesalab/common/utils/sqlparser/ColumnCategoryHelper.java
@@ -0,0 +1,96 @@
+package com.mesalab.common.utils.sqlparser;
+
+import cn.hutool.core.util.StrUtil;
+import cn.hutool.log.Log;
+import cn.hutool.log.LogFactory;
+import com.geedgenetworks.utils.StringUtil;
+import com.jayway.jsonpath.JsonPath;
+import com.mesalab.common.enums.DBEngineType;
+import com.mesalab.qgw.constant.MetaConst;
+import com.mesalab.qgw.dialect.ClickHouseDialect;
+import com.mesalab.qgw.dialect.DruidDialect;
+import com.mesalab.qgw.model.basic.SelectStatement;
+import net.sf.jsqlparser.JSQLParserException;
+import net.sf.jsqlparser.expression.Expression;
+import net.sf.jsqlparser.parser.CCJSqlParserUtil;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+
+import static com.mesalab.qgw.constant.MetaConst.*;
+
+/**
+ * TODO
+ *
+ * @Classname ColumnCategoryHelper
+ * @Date 2024/3/7 15:52
+ * @Author wWei
+ */
+public class ColumnCategoryHelper {
+ private static final Log log = LogFactory.get();
+
+ private ColumnCategoryHelper() {
+ }
+
+ public static Object expandMetaCategory(Object columnMeta, SelectStatement selectStatement, String dbEngine) {
+ if (StringUtil.isEmpty(selectStatement) || StringUtil.isEmpty(selectStatement.getExecSQL()) || StringUtil.isEmpty(columnMeta)) {
+ return columnMeta;
+ }
+ List<Map<String, String>> result = JsonPath.read(columnMeta, "$");
+ Map<String, String> groupDimension = selectStatement.getGroupDimension();
+ Map<String, String> aliasFields = selectStatement.getAliasFields();
+ meta:
+ for (Map<String, String> map : result) {
+ if (StrUtil.isEmptyIfStr(groupDimension)) {
+ map.put(META_CATEGORY, META_CATEGORY_METRIC);
+ continue;
+ }
+ String field = String.valueOf(map.get(META_NAME));
+ for (String dimensionKey : groupDimension.keySet()) {
+ if (Objects.equals(field, dimensionKey) || Objects.equals(field, groupDimension.get(dimensionKey))) {
+ map.put(META_CATEGORY, META_CATEGORY_DIMENSION);
+ continue meta;
+ }
+ try {
+ String selectItemExpr = aliasFields.get(field) == null ? SQLFunctionUtil.generateDateFunction(field, dbEngine) : aliasFields.get(field);
+ String dimensionExpr = groupDimension.get(dimensionKey) == null ? SQLFunctionUtil.generateDateFunction(dimensionKey, dbEngine) : groupDimension.get(dimensionKey);
+ if (dimensionExpr != null && selectItemExpr != null) {
+ ExpressionColumnCollectAdapter selectItemAdapter = new ExpressionColumnCollectAdapter();
+ Expression expr1 = CCJSqlParserUtil.parseExpression(selectItemExpr);
+ if (SQLFunctionUtil.pAggregateFunStandard.matcher(expr1.toString()).find()) {
+ map.put(META_CATEGORY, META_CATEGORY_METRIC);
+ continue meta;
+ }
+ if (DBEngineType.CLICKHOUSE.getValue().equalsIgnoreCase(dbEngine) && ClickHouseDialect.pAggregateFunSpecificOfCK.matcher(expr1.toString()).find()) {
+ map.put(META_CATEGORY, META_CATEGORY_METRIC);
+ continue meta;
+ }
+ if (DBEngineType.DRUID.getValue().equalsIgnoreCase(dbEngine) && DruidDialect.pAggregateFunSpecificOfDruid.matcher(expr1.toString()).find()) {
+ map.put(META_CATEGORY, META_CATEGORY_METRIC);
+ continue meta;
+ }
+ expr1.accept(selectItemAdapter);
+ Set<String> selectItemColumn = selectItemAdapter.getColumns();
+
+ ExpressionColumnCollectAdapter dimensionAdapter = new ExpressionColumnCollectAdapter();
+ Expression expr2 = CCJSqlParserUtil.parseExpression(dimensionExpr);
+ expr2.accept(dimensionAdapter);
+ Set<String> dimensionColumn = dimensionAdapter.getColumns();
+
+ boolean hasDuplicates = selectItemColumn.stream().anyMatch(dimensionColumn::contains);
+ if (hasDuplicates) {
+ map.put(META_CATEGORY, META_CATEGORY_DIMENSION);
+ continue meta;
+ }
+ }
+ } catch (JSQLParserException e) {
+ log.warn("build meta error, message is: {}", e.getMessage());
+ }
+ }
+ map.put(META_CATEGORY, META_CATEGORY_METRIC);
+ }
+ return result;
+ }
+}
diff --git a/src/main/java/com/mesalab/common/utils/sqlparser/CondExpressionHelper.java b/src/main/java/com/mesalab/common/utils/sqlparser/CondExpressionHelper.java
index bb93ba8b..5f821b59 100644
--- a/src/main/java/com/mesalab/common/utils/sqlparser/CondExpressionHelper.java
+++ b/src/main/java/com/mesalab/common/utils/sqlparser/CondExpressionHelper.java
@@ -60,23 +60,6 @@ public class CondExpressionHelper {
return orderBy.getList();
}
- /**
- * Desc: Get filed name
- *
- * @param column
- * @return {@link String}
- * @created by wWei
- * @date 2023/3/2 09:58
- */
- public static String getFieldName(Column column) {
- String columnName = column.getColumnName();
- if ((columnName.startsWith("\"") && columnName.endsWith("\""))
- || (columnName.startsWith("`") && columnName.endsWith("`"))) {
- columnName = columnName.substring(1, columnName.length() - 1);
- }
- return columnName;
- }
-
private static class ExprFieldParser extends ExpressionVisitorAdapter {
private final List<String> list = Lists.newArrayList();
@@ -86,7 +69,7 @@ public class CondExpressionHelper {
@Override
public void visit(Column column) {
- list.add(getFieldName(column));
+ list.add(SQLHelper.removeQuotesAndBackticks(column.getColumnName()));
}
}
@@ -109,7 +92,7 @@ public class CondExpressionHelper {
}
return;
}
- String columnName = getFieldName(column);
+ String columnName = SQLHelper.removeQuotesAndBackticks(column.getColumnName());
if (map.containsKey(columnName)) {
column.setColumnName(String.valueOf(map.get(columnName)));
}
diff --git a/src/main/java/com/mesalab/common/utils/sqlparser/ExampleDataHelper.java b/src/main/java/com/mesalab/common/utils/sqlparser/ExampleDataHelper.java
new file mode 100644
index 00000000..c3eb9e52
--- /dev/null
+++ b/src/main/java/com/mesalab/common/utils/sqlparser/ExampleDataHelper.java
@@ -0,0 +1,412 @@
+package com.mesalab.common.utils.sqlparser;
+
+import cn.hutool.core.date.DatePattern;
+import cn.hutool.core.date.DateUtil;
+import cn.hutool.core.io.FileUtil;
+import cn.hutool.core.util.CharsetUtil;
+import com.alibaba.fastjson2.JSONPath;
+import com.google.common.collect.Maps;
+import com.mesalab.common.entity.DataTypeMapping;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
+import com.mesalab.common.exception.CommonErrorCode;
+import com.mesalab.common.utils.RandomNumberGenerator;
+import com.mesalab.qgw.constant.DataTypeConst;
+import com.mesalab.qgw.constant.ExampleDataModeConst;
+import com.mesalab.qgw.constant.MetaConst;
+import com.mesalab.qgw.exception.QGWBusinessException;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.lang3.RandomStringUtils;
+
+import javax.validation.constraints.NotNull;
+import java.io.File;
+import java.io.IOException;
+import java.util.*;
+import java.util.stream.Collectors;
+
+/**
+ * TODO
+ *
+ * @Classname ExampleDataHelper
+ * @Date 2024/3/4 18:49
+ * @Author wWei
+ */
+@Slf4j
+public class ExampleDataHelper {
+
+ public static final String QUERY_TYPE_TIMESERIES = "Timeseries";
+ public static final String QUERY_TYPE_GROUP_BY = "GroupBy";
+ public static final String QUERY_TYPE_AGG_STATISTICS = "AggStatistics";
+ public static final String QUERY_TYPE_OTHER = "Other";
+ private static final String[] JSONPATH_SPECIAL_CHARS = {"/", ".", "[", "]", "'", "\""};
+
+ private ExampleDataHelper() {
+ }
+
+ private static String exampleJsonData;
+
+ static {
+ initJsonData();
+ }
+
+
+ public static void initJsonData() {
+ String filePath;
+ try {
+ filePath = new File("").getCanonicalPath() + File.separator + "dat" + File.separator + "example_data.json";
+ exampleJsonData = FileUtil.readString(filePath, CharsetUtil.UTF_8);
+ } catch (IOException e) {
+ log.error("Init Example Data Error: Read File Error, Error is:{}", e.getMessage());
+ throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), e.getMessage()));
+ }
+ }
+
+ /**
+ * Desc: TODO
+ *
+ * @param queryType Timeseries, TopN, GroupBy, AggStatistics, Other: Scan
+ * @param meta Column Meta
+ * @param sqlRowCount SQL结果行数
+ * @param schemaDataDict Json Schema
+ * @return {@link List<Object>}
+ * @created by wWei
+ * @date 2024/3/6 16:59
+ */
+ public static List<Object> buildExampleData(@NotNull String queryType, List<Map<String, String>> meta, Long sqlRowCount, Map<String, List<Object>> schemaDataDict, Object queryTypeParameters) {
+ int rowCount = (sqlRowCount == null) ? RandomNumberGenerator.generateRangeRandomInt(10, 50) : Math.min(sqlRowCount.intValue(), 50);
+ List<LinkedHashMap<String, Object>> result;
+ switch (queryType) {
+ case QUERY_TYPE_TIMESERIES:
+ result = buildExampleDataOfTimeseries(queryTypeParameters.toString(), meta, sqlRowCount, schemaDataDict);
+ break;
+ case QUERY_TYPE_GROUP_BY:
+ result = buildExampleDataOfGroupBy(meta, rowCount, Boolean.parseBoolean(queryTypeParameters.toString()), schemaDataDict);
+ break;
+ case QUERY_TYPE_AGG_STATISTICS:
+ result = buildExampleDataOfMergeAggregation(meta, schemaDataDict);
+ break;
+ default:
+ result = buildExampleDataOfScan(meta, rowCount, schemaDataDict);
+ break;
+ }
+ return result.stream()
+ .map(map -> (Object) map)
+ .collect(Collectors.toList());
+ }
+
+ private static List<LinkedHashMap<String, Object>> buildExampleDataOfTimeseries(String timeGranLabel, List<Map<String, String>> meta, Long sqlRowCount, Map<String, List<Object>> schemaDataDict) {
+ Map<String, String> timeGranMeta = meta.stream()
+ .filter(item -> timeGranLabel.equals(item.get(MetaConst.META_NAME)))
+ .findFirst()
+ .orElse(Maps.newHashMap());
+ List<LinkedHashMap<String, Object>> timeseriesList = new ArrayList<>();
+ if (!timeGranMeta.isEmpty()) {
+ String timeGranDataType = timeGranMeta.get(MetaConst.META_DATA_TYPE);
+ timeGranDataType = timeGranDataType == null ? timeGranMeta.get(MetaConst.META_TYPE) : timeGranDataType;
+ Date currentDate = DateUtil.beginOfHour(new Date());
+ int iteratorRowCount = (sqlRowCount == null) ? RandomNumberGenerator.generateRangeRandomInt(10, 50) : Math.min(sqlRowCount.intValue(), 50);
+ while (iteratorRowCount-- > 0) {
+ Object value = getDateTimeValue(timeGranDataType, iteratorRowCount, currentDate);
+ LinkedHashMap<String, Object> timeseries = Maps.newLinkedHashMap();
+ timeseries.put(timeGranLabel, value);
+ timeseriesList.add(timeseries);
+ }
+ }
+
+ int counter = 0;
+ List<LinkedHashMap<String, Object>> result = new ArrayList<>();
+ for (int i = 0; i < meta.size(); i++) {
+ Map<String, String> item = meta.get(i);
+ if (MetaConst.META_CATEGORY_DIMENSION.equals(item.get(MetaConst.META_CATEGORY)) && !timeGranLabel.equals(item.get(MetaConst.META_NAME))) {
+ if (counter >= 1) {
+ timeseriesList = new ArrayList<>(result);
+ result = new ArrayList<>();
+ }
+ counter++;
+ List exampleValue = getExampleValue(item, null, null, false, RandomNumberGenerator.generateRangeRandomInt(1, 5), schemaDataDict);
+ for (int i1 = 0; i1 < exampleValue.size(); i1++) {
+ List<LinkedHashMap<String, Object>> maps = new ArrayList<>();
+ for (int j = 0; j < timeseriesList.size(); j++) {
+ LinkedHashMap<String, Object> copiedMap = new LinkedHashMap<>(timeseriesList.get(j));
+ copiedMap.put(item.get(MetaConst.META_NAME), exampleValue.get(i1));
+ maps.add(copiedMap);
+ }
+ result.addAll(maps);
+ }
+ }
+ }
+
+ List<LinkedHashMap<String, Object>> finalResult = result.isEmpty() ? timeseriesList : result;
+ meta.stream().filter(item -> MetaConst.META_CATEGORY_METRIC.equals(item.get(MetaConst.META_CATEGORY)))
+ .forEach(item -> {
+ List exampleValue = getExampleValue(item, null, null, true, finalResult.size(), schemaDataDict);
+ for (int i = 0; i < finalResult.size(); i++) {
+ finalResult.get(i).put(item.get(MetaConst.META_NAME), exampleValue.get(i));
+ }
+ });
+ return (sqlRowCount == null) ? finalResult : finalResult.subList(0, Math.min(sqlRowCount.intValue(), finalResult.size()));
+ }
+
+ private static List<LinkedHashMap<String, Object>> buildExampleDataOfGroupBy(List<Map<String, String>> meta, Integer rowCount, boolean isDesc, Map<String, List<Object>> schemaDataDict) {
+ for (Map<String, String> item : meta) {
+ if (inExampleData(item, schemaDataDict)) {
+ Integer exampleDataSize = getExampleDataSize(item, schemaDataDict);
+ rowCount = exampleDataSize == null ? rowCount : Math.min(rowCount, exampleDataSize);
+ }
+ }
+ List<LinkedHashMap<String, Object>> result = new ArrayList<>();
+ for (int i = 0; i < rowCount; i++) {
+ result.add(Maps.newLinkedHashMap());
+ }
+ Integer finalRowCount = rowCount;
+ meta.stream().filter(item -> MetaConst.META_CATEGORY_DIMENSION.equals(item.get(MetaConst.META_CATEGORY)))
+ .forEach(item -> {
+ List exampleValue = getExampleValue(item, null, null, false, finalRowCount, schemaDataDict);
+ for (int i = 0; i < result.size(); i++) {
+ result.get(i).put(item.get(MetaConst.META_NAME), exampleValue.get(i));
+ }
+ });
+ Integer finalRowCount1 = rowCount;
+ meta.stream().filter(item -> MetaConst.META_CATEGORY_METRIC.equals(item.get(MetaConst.META_CATEGORY)))
+ .forEach(item -> {
+ List exampleValue = getExampleValue(item, null, isDesc, true, finalRowCount1, schemaDataDict);
+ for (int i = 0; i < result.size(); i++) {
+ result.get(i).put(item.get(MetaConst.META_NAME), exampleValue.get(i));
+ }
+ });
+ return result;
+ }
+
+ private static List<LinkedHashMap<String, Object>> buildExampleDataOfMergeAggregation(List<Map<String, String>> meta, Map<String, List<Object>> schemaDataDict) {
+ List<LinkedHashMap<String, Object>> result = new ArrayList<>();
+ result.add(Maps.newLinkedHashMap());
+ meta.stream().filter(item -> MetaConst.META_CATEGORY_METRIC.equals(item.get(MetaConst.META_CATEGORY)))
+ .forEach(item -> {
+ List exampleValue = getExampleValue(item, null, null, true, 1, schemaDataDict);
+ for (int i = 0; i < result.size(); i++) {
+ result.get(i).put(item.get(MetaConst.META_NAME), exampleValue.get(0));
+ }
+ });
+ return result;
+ }
+
+ private static List<LinkedHashMap<String, Object>> buildExampleDataOfScan(List<Map<String, String>> meta, int rowCount, Map<String, List<Object>> schemaDataDict) {
+ for (Map<String, String> item : meta) {
+ if (inExampleData(item, schemaDataDict)) {
+ Integer exampleDataSize = getExampleDataSize(item, schemaDataDict);
+ rowCount = exampleDataSize == null ? rowCount : Math.min(rowCount, exampleDataSize);
+ }
+ }
+ List<LinkedHashMap<String, Object>> result = new ArrayList<>();
+ for (int i = 0; i < rowCount; i++) {
+ result.add(Maps.newLinkedHashMap());
+ }
+ int finalRowCount = rowCount;
+ meta.forEach(item -> {
+ List exampleValue = getExampleValue(item, null, null, true, finalRowCount, schemaDataDict);
+ for (int i = 0; i < result.size(); i++) {
+ result.get(i).put(item.get(MetaConst.META_NAME), exampleValue.get(i));
+ }
+ });
+ return result;
+ }
+
+ private static List getExampleValue(Map<String, String> meta, Object initValue, Boolean isDesc, boolean canRepeat, int size, Map<String, List<Object>> schemaDataDict) {
+
+ if (inExampleData(meta, schemaDataDict)) {
+ return getExampleDataValues(meta, canRepeat, size, schemaDataDict);
+ }
+ String dataType = meta.get(MetaConst.META_DATA_TYPE) == null ? meta.get(MetaConst.META_TYPE) : meta.get(MetaConst.META_DATA_TYPE);
+
+ if (DataTypeMapping.LONG.equals(dataType) || DataTypeMapping.INT.equals(dataType)) {
+ return getExampleIntegerValues(null, isDesc, size);
+ }
+ if (DataTypeMapping.DOUBLE.equals(dataType) || DataTypeMapping.FLOAT.equals(dataType)) {
+ return getExampleDoubleValues(null, isDesc, size);
+ }
+ if (DataTypeMapping.BOOLEAN.equals(dataType)) {
+ return getExampleBooleanValues(size);
+ }
+ if (DataTypeMapping.DATE.equals(dataType) || DataTypeMapping.TIMESTAMP.equals(dataType)
+ || DataTypeConst.TIMESTAMP_FORMAT.equals(dataType)
+ || DataTypeConst.TIMESTAMP_MS_FORMAT.equals(dataType) || DataTypeConst.UNIX_TIMESTAMP.equals(dataType)
+ || DataTypeConst.UNIX_TIMESTAMP_MS.equals(dataType)
+ || DataTypeConst.DATE_TIME_64.equals(dataType)
+ ) {
+ return getExampleDateTimeValues(dataType, size);
+ }
+ return getExampleStrValues(size);
+ }
+
+ private static List<Object> getExampleDateTimeValues(String dataType, int rowCount) {
+ List<Object> result = new ArrayList<>();
+ Date currentDate = DateUtil.beginOfHour(new Date());
+ while (rowCount-- > 0) {
+ result.add(getDateTimeValue(dataType, rowCount, currentDate));
+ }
+ return result;
+ }
+
+ private static Object getDateTimeValue(String dataType, int rowCount, Date currentDate) {
+ Object value;
+ switch (dataType) {
+ case DataTypeConst.TIMESTAMP:
+ value = DateUtil.format(DateUtil.offsetHour(DateUtil.beginOfHour(currentDate), -rowCount), DatePattern.UTC_WITH_ZONE_OFFSET_PATTERN);
+ break;
+ case DataTypeConst.TIMESTAMP_MS_FORMAT:
+ value = DateUtil.format(DateUtil.offsetHour(currentDate, -rowCount), DatePattern.NORM_DATETIME_MS_FORMAT);
+ break;
+ case DataTypeConst.UNIX_TIMESTAMP:
+ value = DateUtil.offsetHour(currentDate, -rowCount).toInstant().getEpochSecond();
+ break;
+ case DataTypeConst.UNIX_TIMESTAMP_MS:
+ value = DateUtil.offsetHour(currentDate, -rowCount).toInstant().toEpochMilli();
+ break;
+ case DataTypeConst.DATE:
+ value = DateUtil.format(DateUtil.offsetHour(currentDate, -rowCount), DatePattern.NORM_DATE_PATTERN);
+ break;
+ case DataTypeConst.DATE_TIME_64:
+ value = DateUtil.format(DateUtil.offsetHour(currentDate, -rowCount), DatePattern.NORM_DATETIME_MS_FORMAT);
+ break;
+ default:
+ value = DateUtil.format(DateUtil.offsetHour(currentDate, -rowCount), DatePattern.NORM_DATETIME_FORMAT);
+ break;
+ }
+ return value;
+ }
+
+ private static List<Boolean> getExampleBooleanValues(int size) {
+ List<Boolean> result = new ArrayList<>();
+ for (int i = 0; i < size; i++) {
+ result.add(new Random().nextBoolean());
+ }
+ return result;
+ }
+
+ private static List<Double> getExampleDoubleValues(Double initValue, Boolean increase, int size) {
+ List<Double> result = new ArrayList<>();
+ Double relativeV = initValue;
+ for (int i = 0; i < size; i++) {
+ relativeV = RandomNumberGenerator.generateRelativeRandomDouble(relativeV, increase);
+ result.add(relativeV);
+ }
+ return result;
+ }
+
+ private static List<Integer> getExampleIntegerValues(Integer initValue, Boolean idDesc, int size) {
+ List<Integer> result = new ArrayList<>();
+ Integer relativeV = initValue;
+ for (int i = 0; i < size; i++) {
+ relativeV = RandomNumberGenerator.generateRelativeRandomInt(relativeV, idDesc);
+ result.add(relativeV);
+ }
+ return result;
+ }
+
+ private static List<Object> getExampleDataValues(Map<String, String> meta, boolean canRepeat, int size, Map<String, List<Object>> schemaDataDict) {
+ List<Object> result = new ArrayList<>();
+
+ if (schemaDataDict.containsKey(meta.get(MetaConst.META_FIELD_NAME))) {
+ List<Object> list = schemaDataDict.get(meta.get(MetaConst.META_FIELD_NAME));
+ buildEnumDataList(canRepeat, size, result, list);
+ return result;
+ }
+
+
+ String dataType = meta.get(MetaConst.META_DATA_TYPE) == null ? meta.get(MetaConst.META_TYPE) : meta.get(MetaConst.META_DATA_TYPE);
+ String path = "$." + dataType;
+ Object mode = JSONPath.extract(exampleJsonData, path + ".mode");
+ if (mode == null) {
+ for (int i = 0; i < size; i++) {
+ result.add(null);
+ }
+ } else if (ExampleDataModeConst.RANGE.equalsIgnoreCase(mode.toString())) {
+ Integer start = (Integer) JSONPath.extract(exampleJsonData, path + ".start");
+ Integer end = (Integer) JSONPath.extract(exampleJsonData, path + ".end");
+ for (int i = 0; i < size; i++) {
+ int v = RandomNumberGenerator.generateRangeRandomInt(start, end);
+ result.add(v);
+ }
+ } else if (ExampleDataModeConst.ENUM.equalsIgnoreCase(mode.toString())) {
+ List list = (List) JSONPath.extract(exampleJsonData, path + ".values");
+ buildEnumDataList(canRepeat, size, result, list);
+ } else if (ExampleDataModeConst.SEQUENCE.equalsIgnoreCase(mode.toString())) {
+ List list = (List) JSONPath.extract(exampleJsonData, path + ".values");
+ result.addAll(list.subList(0, size));
+ }
+ return result;
+ }
+
+ private static void buildEnumDataList(boolean canRepeat, int size, List<Object> result, List<Object> list) {
+ if (canRepeat) {
+ for (int i = 0; i < size; i++) {
+ int index = RandomNumberGenerator.generateRangeRandomInt(0, list.size() - 1);
+ result.add(list.get(index));
+ }
+ } else {
+ Collections.shuffle(list);
+ result.addAll(list.subList(0, Math.min(size, list.size() )));
+ }
+ }
+
+ private static Integer getExampleDataSize(Map<String, String> meta, Map<String, List<Object>> schemaDataDict) {
+ if (schemaDataDict.containsKey(meta.get(MetaConst.META_FIELD_NAME))) {
+ return schemaDataDict.get(meta.get(MetaConst.META_FIELD_NAME)).size();
+ }
+
+ String dataType = meta.get(MetaConst.META_DATA_TYPE) == null ? meta.get(MetaConst.META_TYPE) : meta.get(MetaConst.META_DATA_TYPE);
+ String path = "$." + dataType;
+ Object mode = JSONPath.extract(exampleJsonData, path + ".mode");
+ if (ExampleDataModeConst.ENUM.equalsIgnoreCase(mode.toString())) {
+ List list = (List) JSONPath.extract(exampleJsonData, path + ".values");
+ return list.size();
+ } else if (ExampleDataModeConst.SEQUENCE.equalsIgnoreCase(mode.toString())) {
+ List list = (List) JSONPath.extract(exampleJsonData, path + ".values");
+ return list.size();
+ }
+ return null;
+ }
+
+
+ /**
+ * Desc: Generate random data of type List<Object>
+ *
+ * @param size result size
+ * @return {@link List<Object>}
+ * @created by wWei
+ * @date 2024/3/9 15:00
+ */
+ private static List<Object> getExampleStrValues(int size) {
+ List<Object> result = new ArrayList<>();
+ for (int i = 0; i < size; i++) {
+ result.add(RandomStringUtils.randomAlphanumeric(3, 10));
+ }
+ return result;
+ }
+
+ /**
+ * Desc: Determine if the current result column is in the example data file or schema dictionary data
+ *
+ * @param meta meta information
+ * @param schemaDataDict schema data dictionary
+ * @return {@link boolean}
+ * @created by wWei
+ * @date 2024/3/9 15:02
+ */
+ private static boolean inExampleData(Map<String, String> meta, Map<String, List<Object>> schemaDataDict) {
+ if (schemaDataDict == null) {
+ return false;
+ }
+ if (schemaDataDict.containsKey(meta.get(MetaConst.META_FIELD_NAME))) {
+ return true;
+ }
+
+ String dataType = meta.get(MetaConst.META_DATA_TYPE) == null ? meta.get(MetaConst.META_TYPE) : meta.get(MetaConst.META_DATA_TYPE);
+ // 对特殊字符进行转义处理
+ for (String specialChar : JSONPATH_SPECIAL_CHARS) {
+ dataType = dataType.replace(specialChar, "\\" + specialChar);
+ }
+ String path = "$." + dataType;
+ return JSONPath.extract(exampleJsonData, path) != null;
+ }
+}
diff --git a/src/main/java/com/mesalab/common/utils/sqlparser/ExpressionColumnCollectAdapter.java b/src/main/java/com/mesalab/common/utils/sqlparser/ExpressionColumnCollectAdapter.java
new file mode 100644
index 00000000..acea2d6f
--- /dev/null
+++ b/src/main/java/com/mesalab/common/utils/sqlparser/ExpressionColumnCollectAdapter.java
@@ -0,0 +1,29 @@
+package com.mesalab.common.utils.sqlparser;
+
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
+import lombok.Data;
+import net.sf.jsqlparser.expression.*;
+import net.sf.jsqlparser.schema.Column;
+
+import java.util.List;
+import java.util.Set;
+
+
+/**
+ * @Classname ExpressionColumnCollectAdapter
+ * @Date 2021/7/20 3:39 下午
+ * @Author wWei
+ */
+@Data
+public class ExpressionColumnCollectAdapter extends ExpressionVisitorAdapter {
+
+ public Set<String> columns = Sets.newHashSet();
+
+ @Override
+ public void visit(Column column) {
+ if (column != null) {
+ this.columns.add(column.getColumnName());
+ }
+ }
+}
diff --git a/src/main/java/com/mesalab/common/utils/sqlparser/FunctionsMergeHelper.java b/src/main/java/com/mesalab/common/utils/sqlparser/FunctionsMergeHelper.java
new file mode 100644
index 00000000..adc16c42
--- /dev/null
+++ b/src/main/java/com/mesalab/common/utils/sqlparser/FunctionsMergeHelper.java
@@ -0,0 +1,382 @@
+package com.mesalab.common.utils.sqlparser;
+
+import lombok.SneakyThrows;
+import net.sf.jsqlparser.JSQLParserException;
+import net.sf.jsqlparser.expression.Expression;
+import net.sf.jsqlparser.expression.Function;
+import net.sf.jsqlparser.expression.LongValue;
+import net.sf.jsqlparser.expression.StringValue;
+import net.sf.jsqlparser.expression.operators.arithmetic.Division;
+import net.sf.jsqlparser.expression.operators.arithmetic.Multiplication;
+import net.sf.jsqlparser.expression.operators.relational.ExpressionList;
+import net.sf.jsqlparser.parser.CCJSqlParserUtil;
+import net.sf.jsqlparser.schema.Column;
+import net.sf.jsqlparser.statement.select.*;
+
+import java.util.*;
+
+/**
+ * TODO
+ *
+ * @Classname FunctionsMergeHelper
+ * @Date 2024/2/20 10:42
+ * @Author wWei
+ */
+public class FunctionsMergeHelper {
+
+ private static final String FROM_UNIXTIME = "FROM_UNIXTIME";
+ private static final String TIME_FLOOR_WITH_FILL = "TIME_FLOOR_WITH_FILL";
+ private static final String UNIX_TIMESTAMP = "UNIX_TIMESTAMP";
+ private static final String TIME_FORMAT = "TIME_FORMAT";
+ private static final String DATE_TIME_FORMAT_PATTERN = "'yyyy-MM-dd HH:mm:ss'";
+ private static final String TIME_FLOOR = "TIME_FLOOR";
+ private static final String MILLIS_TO_TIMESTAMP = "MILLIS_TO_TIMESTAMP";
+ private static final String TIMESTAMP_TO_MILLIS = "TIMESTAMP_TO_MILLIS";
+
+ private FunctionsMergeHelper() {
+ }
+
+ /**
+ * Desc: Merge the TIME_FLOOR_WITH_FILL functions in the SQL statement
+ * 1. FROM_UNIXTIME(TIME_FLOOR_WITH_FILL(UNIX_TIMESTAMP(column), *[, *])) ==>
+ * 1.1 Select Item: TIME_FORMAT(TIME_FLOOR(column, *), 'yyyy-MM-dd HH:mm:ss')
+ * 1.2 GroupBy or Order By Expr: TIME_FLOOR(column, *)
+ *
+ * 2. FROM_UNIXTIME(TIME_FLOOR_WITH_FILL(column, *[, *])) ==>
+ * 2.1 Select Item: TIME_FORMAT(TIME_FLOOR(MILLIS_TO_TIMESTAMP(column * 1000), *), 'yyyy-MM-dd HH:mm:ss')
+ * 2.2 GroupBy or Order By Expr: TIME_FLOOR(MILLIS_TO_TIMESTAMP(column * 1000), *)]
+ *
+ * 3. TIME_FLOOR_WITH_FILL(UNIX_TIMESTAMP(column), *[, *]) ==>
+ * 3.1 Select Item: TIMESTAMP_TO_MILLIS(TIME_FLOOR(column, *)) / 1000
+ * 3.2 GroupBy or Order By Expr: TIME_FLOOR(column, *)
+ *
+ * 4. TIME_FLOOR_WITH_FILL(column, *[, *]) ==>
+ * 4.1 Select Item: TIMESTAMP_TO_MILLIS(TIME_FLOOR(MILLIS_TO_TIMESTAMP(column * 1000), *)) / 1000
+ * 4.2 GroupBy or Order By Expr: TIME_FLOOR(MILLIS_TO_TIMESTAMP(column * 1000), *)
+ *
+ * @param sql
+ * @return {@link String}
+ * @created by wWei
+ * @date 2024/2/24 14:49
+ */
+ public static String build(String sql) throws JSQLParserException {
+ Select select = (Select) CCJSqlParserUtil.parse(sql);
+ select.getSelectBody().accept(new SelectAdapter());
+ return select.toString();
+ }
+
+ private static class SelectAdapter extends SelectVisitorAdapter {
+
+ @SneakyThrows
+ @Override
+ public void visit(PlainSelect plainSelect) {
+
+ Map<String, String> aliasExpr = new HashMap<>();
+ List<SelectItem> selectItems = plainSelect.getSelectItems();
+ if (selectItems != null) {
+ for (SelectItem selectItem : selectItems) {
+ SelectItemAdapter selectItemAdapter = new SelectItemAdapter();
+ selectItem.accept(selectItemAdapter);
+ Map<String, String> itemAliasExpr = selectItemAdapter.getAliasExpr();
+ if (!itemAliasExpr.isEmpty()) {
+ aliasExpr.putAll(itemAliasExpr);
+ }
+ }
+ }
+
+ GroupByElement groupBy = plainSelect.getGroupBy();
+ if (groupBy != null && groupBy.getGroupByExpressionList() != null && !groupBy.getGroupByExpressionList().getExpressions().isEmpty()) {
+ groupBy.accept(new GroupByAdapter());
+ }
+ List<OrderByElement> orderByElements = plainSelect.getOrderByElements();
+ if (orderByElements != null) {
+ for (OrderByElement orderByElement : orderByElements) {
+ if (aliasExpr.containsKey(orderByElement.getExpression().toString())) {
+ Expression expression = CCJSqlParserUtil.parseExpression(aliasExpr.get(orderByElement.getExpression().toString()));
+ orderByElement.setExpression(expression);
+ }
+ orderByElement.accept(new OrderByAdapter());
+ }
+ }
+
+ FromItem fromItem = plainSelect.getFromItem();
+ if (fromItem != null) {
+ fromItem.accept(new FromItemAdapter());
+ }
+
+ }
+
+ @Override
+ public void visit(SetOperationList setOpList) {
+ for (SelectBody select : setOpList.getSelects()) {
+ if (select instanceof PlainSelect) {
+ select.accept(this);
+ }
+ }
+ }
+ }
+
+ private static Function createFunction(String funName, List<Expression> params) {
+ Function function = new Function();
+ function.setName(funName);
+ function.setParameters(new ExpressionList(params));
+ return function;
+ }
+
+ private static boolean isUnixTimestampFunction(Expression expression) {
+ if (!(expression instanceof Function)) {
+ return false;
+ }
+ Function function = (Function) expression;
+ return function.getName().equalsIgnoreCase(UNIX_TIMESTAMP)
+ && function.getParameters().getExpressions().size() == 1;
+ }
+
+ private static boolean isTimeFloorWithFillFunction(Expression expression) {
+ if (!(expression instanceof Function)) {
+ return false;
+ }
+ Function function = (Function) expression;
+ return TIME_FLOOR_WITH_FILL.equalsIgnoreCase(function.getName())
+ && function.getParameters().getExpressions().size() >= 2
+ && function.getParameters().getExpressions().size() <= 3;
+ }
+
+ private static boolean isFromUnixtimeFunction(Expression expression) {
+ if (!(expression instanceof Function)) {
+ return false;
+ }
+ Function function = (Function) expression;
+ return function.getName().equalsIgnoreCase(FROM_UNIXTIME)
+ && function.getParameters().getExpressions().size() == 1;
+ }
+
+
+ private static class SelectItemAdapter extends SelectItemVisitorAdapter {
+
+ private Map<String, String> aliasExpr = new HashMap<>();
+
+ public Map<String, String> getAliasExpr() {
+ return aliasExpr;
+ }
+
+ @Override
+ public void visit(SelectExpressionItem selectExpressionItem) {
+ Expression expression = selectExpressionItem.getExpression();
+ if (!(expression instanceof Function)) {
+ return;
+ }
+ Function mainFunction = (Function) expression;
+ if (isFromUnixtimeFunction(mainFunction)) {
+ Expression fromUnixtimeArg0 = mainFunction.getParameters().getExpressions().get(0);
+ if (isTimeFloorWithFillFunction(fromUnixtimeArg0)) {
+ Function timeFloorWithFillFun = (Function) fromUnixtimeArg0;
+ Expression timeFloorWithFillFunArg0 = timeFloorWithFillFun.getParameters().getExpressions().get(0);
+ Expression timeFloorWithFillFunArg1 = timeFloorWithFillFun.getParameters().getExpressions().get(1);
+ if (isUnixTimestampFunction(timeFloorWithFillFunArg0) && ((Function) timeFloorWithFillFunArg0).getParameters().getExpressions().get(0) instanceof Column) {
+ if (selectExpressionItem.getAlias() != null) {
+ aliasExpr.put(selectExpressionItem.getAlias().getName(), mainFunction.toString());
+ }
+
+ Function unixTimestampFun = (Function) timeFloorWithFillFunArg0;
+ List<Expression> timeFloorArgs = new ArrayList<>(Arrays.asList(unixTimestampFun.getParameters().getExpressions().get(0), timeFloorWithFillFunArg1));
+ Function timeFloor = createFunction(TIME_FLOOR, timeFloorArgs);
+ mainFunction.setName(TIME_FORMAT);
+ List<Expression> timeFormatArgs = new ArrayList<>(Arrays.asList(timeFloor, new StringValue(DATE_TIME_FORMAT_PATTERN)));
+ mainFunction.setParameters(new ExpressionList(timeFormatArgs));
+ } else if (timeFloorWithFillFunArg0 instanceof Column) {
+ if (selectExpressionItem.getAlias() != null) {
+ aliasExpr.put(selectExpressionItem.getAlias().getName(), mainFunction.toString());
+ }
+ Multiplication multiplication = new Multiplication();
+ multiplication.setLeftExpression(timeFloorWithFillFunArg0);
+ multiplication.setRightExpression(new LongValue(1000));
+
+ List<Expression> millisToTimestampArgs = new ArrayList<>(Collections.singletonList(multiplication));
+ Function millsToTimestamp = createFunction(MILLIS_TO_TIMESTAMP, millisToTimestampArgs);
+
+ ArrayList<Expression> timeFloorArgs = new ArrayList<>(Arrays.asList(millsToTimestamp, timeFloorWithFillFunArg1));
+ Function timeFloorFun = createFunction(TIME_FLOOR, timeFloorArgs);
+
+ mainFunction.setName(TIME_FORMAT);
+ List<Expression> timeFormatArgs = new ArrayList<>(Arrays.asList(timeFloorFun, new StringValue(DATE_TIME_FORMAT_PATTERN)));
+ mainFunction.setParameters(new ExpressionList(timeFormatArgs));
+ }
+ }
+ } else if (isTimeFloorWithFillFunction(mainFunction)) {
+ Expression timeFloorWithFillFunArg0 = mainFunction.getParameters().getExpressions().get(0);
+ Expression timeFloorWithFillFunArg1 = mainFunction.getParameters().getExpressions().get(1);
+ if (isUnixTimestampFunction(timeFloorWithFillFunArg0) && ((Function) timeFloorWithFillFunArg0).getParameters().getExpressions().get(0) instanceof Column) {
+
+ if (selectExpressionItem.getAlias() != null) {
+ aliasExpr.put(selectExpressionItem.getAlias().getName(), mainFunction.toString());
+ }
+ Function unixTimestampFun = (Function) timeFloorWithFillFunArg0;
+
+ List<Expression> expressions = new ArrayList<>(Arrays.asList(unixTimestampFun.getParameters().getExpressions().get(0), timeFloorWithFillFunArg1));
+ Function timeFloor = createFunction(TIME_FLOOR, expressions);
+
+ List<Expression> timestampToMillsArgs = new ArrayList<>(Collections.singletonList(timeFloor));
+ Function timestampToMillis = createFunction(TIMESTAMP_TO_MILLIS, timestampToMillsArgs);
+
+ Division division = new Division();
+ division.setLeftExpression(timestampToMillis);
+ division.setRightExpression(new LongValue(1000));
+
+ selectExpressionItem.setExpression(division);
+ } else if (timeFloorWithFillFunArg0 instanceof Column) {
+ if (selectExpressionItem.getAlias() != null) {
+ aliasExpr.put(selectExpressionItem.getAlias().getName(), mainFunction.toString());
+ }
+
+ Multiplication multiplication = new Multiplication();
+ multiplication.setLeftExpression(timeFloorWithFillFunArg0);
+ multiplication.setRightExpression(new LongValue(1000));
+
+ List<Expression> millisToTimestampArgs = new ArrayList<>(Collections.singletonList(multiplication));
+ Function millsToTimestamp = createFunction(MILLIS_TO_TIMESTAMP, millisToTimestampArgs);
+
+ List<Expression> timeFloorArgs = new ArrayList<>(Arrays.asList(millsToTimestamp, timeFloorWithFillFunArg1));
+ Function timeFloor = createFunction(TIME_FLOOR, timeFloorArgs);
+
+ List<Expression> timestampToMillsArgs = new ArrayList<>(Collections.singletonList(timeFloor));
+ Function timestampToMills = createFunction(TIMESTAMP_TO_MILLIS, timestampToMillsArgs);
+
+ Division division = new Division();
+ division.setLeftExpression(timestampToMills);
+ division.setRightExpression(new LongValue(1000));
+
+ selectExpressionItem.setExpression(division);
+ }
+ }
+ }
+
+ }
+
+ private static class FromItemAdapter extends FromItemVisitorAdapter {
+ @Override
+ public void visit(SubSelect subSelect) {
+ SelectBody selectBody = subSelect.getSelectBody();
+ selectBody.accept(new SelectAdapter());
+ }
+ }
+
+ private static class GroupByAdapter implements GroupByVisitor {
+ @Override
+ public void visit(GroupByElement groupByElement) {
+ if (groupByElement.getGroupByExpressionList() == null
+ || groupByElement.getGroupByExpressionList().getExpressions() == null) {
+ return;
+ }
+ for (Expression expression : groupByElement.getGroupByExpressionList().getExpressions()) {
+ if (!(expression instanceof Function)) {
+ continue;
+ }
+ Function mainFunction = (Function) expression;
+ if (isFromUnixtimeFunction(expression)) {
+ Expression fromUnixtimeArg0 = mainFunction.getParameters().getExpressions().get(0);
+ if (isTimeFloorWithFillFunction(fromUnixtimeArg0)) {
+ Function timeFloorWithFillFun = (Function) fromUnixtimeArg0;
+ Expression timeFloorWithFillFunArg0 = timeFloorWithFillFun.getParameters().getExpressions().get(0);
+ Expression timeFloorWithFillFunArg1 = timeFloorWithFillFun.getParameters().getExpressions().get(1);
+ if (isUnixTimestampFunction(timeFloorWithFillFunArg0) && ((Function) timeFloorWithFillFunArg0).getParameters().getExpressions().get(0) instanceof Column) {
+ Function unixTimestampFun = (Function) timeFloorWithFillFunArg0;
+ mainFunction.setName(TIME_FLOOR);
+ List<Expression> timeFloorArgs = new ArrayList<>(Arrays.asList(unixTimestampFun.getParameters().getExpressions().get(0), timeFloorWithFillFunArg1));
+ mainFunction.setParameters(new ExpressionList(timeFloorArgs));
+ } else if (timeFloorWithFillFunArg0 instanceof Column) {
+ Multiplication multiplication = new Multiplication();
+ multiplication.setLeftExpression(timeFloorWithFillFunArg0);
+ multiplication.setRightExpression(new LongValue(1000));
+
+ List<Expression> millisToTimestampArgs = new ArrayList<>(Collections.singletonList(multiplication));
+ Function millsToTimestamp = createFunction(MILLIS_TO_TIMESTAMP, millisToTimestampArgs);
+
+ mainFunction.setName(TIME_FLOOR);
+ ArrayList<Expression> timeFloorArgs = new ArrayList<>(Arrays.asList(millsToTimestamp, timeFloorWithFillFunArg1));
+ mainFunction.setParameters(new ExpressionList(timeFloorArgs));
+ }
+ }
+ } else if (isTimeFloorWithFillFunction(mainFunction)) {
+ Expression timeFloorWithFillFunArg0 = mainFunction.getParameters().getExpressions().get(0);
+ Expression timeFloorWithFillFunArg1 = mainFunction.getParameters().getExpressions().get(1);
+ if (isUnixTimestampFunction(timeFloorWithFillFunArg0) && ((Function) timeFloorWithFillFunArg0).getParameters().getExpressions().get(0) instanceof Column) {
+ Function unixTimestampFun = (Function) timeFloorWithFillFunArg0;
+ mainFunction.setName(TIME_FLOOR);
+ List<Expression> timeFloorArgs = new ArrayList<>(Arrays.asList(unixTimestampFun.getParameters().getExpressions().get(0), timeFloorWithFillFunArg1));
+ mainFunction.setParameters(new ExpressionList(timeFloorArgs));
+ } else if (timeFloorWithFillFunArg0 instanceof Column) {
+ Multiplication multiplication = new Multiplication();
+ multiplication.setLeftExpression(timeFloorWithFillFunArg0);
+ multiplication.setRightExpression(new LongValue(1000));
+
+ List<Expression> millisToTimestampArgs = new ArrayList<>(Collections.singletonList(multiplication));
+ Function millsToTimestamp = createFunction(MILLIS_TO_TIMESTAMP, millisToTimestampArgs);
+
+ mainFunction.setName(TIME_FLOOR);
+ List<Expression> timeFloorArgs = new ArrayList<>(Arrays.asList(millsToTimestamp, timeFloorWithFillFunArg1));
+ mainFunction.setParameters(new ExpressionList(timeFloorArgs));
+ }
+ }
+ }
+ }
+ }
+
+ private static class OrderByAdapter implements OrderByVisitor {
+ @Override
+ public void visit(OrderByElement orderBy) {
+ if (orderBy.getExpression() == null) {
+ return;
+ }
+ if (!(orderBy.getExpression() instanceof Function)) {
+ return;
+ }
+ Function mainFunction = (Function) orderBy.getExpression();
+ if (isFromUnixtimeFunction(mainFunction)) {
+ Expression fromUnixtimeArg0 = mainFunction.getParameters().getExpressions().get(0);
+ if (isTimeFloorWithFillFunction(fromUnixtimeArg0)) {
+ Function timeFloorWithFillFun = (Function) fromUnixtimeArg0;
+ Expression timeFloorWithFillFunArg0 = timeFloorWithFillFun.getParameters().getExpressions().get(0);
+ Expression timeFloorWithFillFunArg1 = timeFloorWithFillFun.getParameters().getExpressions().get(1);
+ if (isUnixTimestampFunction(timeFloorWithFillFunArg0) && ((Function) timeFloorWithFillFunArg0).getParameters().getExpressions().get(0) instanceof Column) {
+ Function unixTimestampFun = (Function) timeFloorWithFillFunArg0;
+ mainFunction.setName(TIME_FLOOR);
+ List<Expression> timeFloorArgs = new ArrayList<>(Arrays.asList(unixTimestampFun.getParameters().getExpressions().get(0), timeFloorWithFillFunArg1));
+ mainFunction.setParameters(new ExpressionList(timeFloorArgs));
+ } else if (timeFloorWithFillFunArg0 instanceof Column) {
+ Multiplication multiplication = new Multiplication();
+ multiplication.setLeftExpression(timeFloorWithFillFunArg0);
+ multiplication.setRightExpression(new LongValue(1000));
+
+ List<Expression> millisToTimestampArgs = new ArrayList<>(Collections.singletonList(multiplication));
+ Function millsToTimestamp = createFunction(MILLIS_TO_TIMESTAMP, millisToTimestampArgs);
+
+ mainFunction.setName(TIME_FLOOR);
+ ArrayList<Expression> timeFloorArgs = new ArrayList<>(Arrays.asList(millsToTimestamp, timeFloorWithFillFunArg1));
+ mainFunction.setParameters(new ExpressionList(timeFloorArgs));
+ }
+ }
+ } else if (isTimeFloorWithFillFunction(mainFunction)) {
+ Expression timeFloorWithFillFunArg0 = mainFunction.getParameters().getExpressions().get(0);
+ Expression timeFloorWithFillFunArg1 = mainFunction.getParameters().getExpressions().get(1);
+ if (isUnixTimestampFunction(timeFloorWithFillFunArg0) && ((Function) timeFloorWithFillFunArg0).getParameters().getExpressions().get(0) instanceof Column) {
+ Function unixTimestampFun = (Function) timeFloorWithFillFunArg0;
+ mainFunction.setName(TIME_FLOOR);
+ List<Expression> timeFloorArgs = new ArrayList<>(Arrays.asList(unixTimestampFun.getParameters().getExpressions().get(0), timeFloorWithFillFunArg1));
+ mainFunction.setParameters(new ExpressionList(timeFloorArgs));
+ } else if (timeFloorWithFillFunArg0 instanceof Column) {
+ Multiplication multiplication = new Multiplication();
+ multiplication.setLeftExpression(timeFloorWithFillFunArg0);
+ multiplication.setRightExpression(new LongValue(1000));
+
+ List<Expression> millisToTimestampArgs = new ArrayList<>(Collections.singletonList(multiplication));
+ Function millsToTimestamp = createFunction(MILLIS_TO_TIMESTAMP, millisToTimestampArgs);
+
+ mainFunction.setName(TIME_FLOOR);
+ List<Expression> timeFloorArgs = new ArrayList<>(Arrays.asList(millsToTimestamp, timeFloorWithFillFunArg1));
+ mainFunction.setParameters(new ExpressionList(timeFloorArgs));
+ }
+ }
+ }
+ }
+}
diff --git a/src/main/java/com/mesalab/common/utils/sqlparser/QueryTypeHelper.java b/src/main/java/com/mesalab/common/utils/sqlparser/QueryTypeHelper.java
new file mode 100644
index 00000000..ecb0bfad
--- /dev/null
+++ b/src/main/java/com/mesalab/common/utils/sqlparser/QueryTypeHelper.java
@@ -0,0 +1,92 @@
+package com.mesalab.common.utils.sqlparser;
+
+import com.mesalab.qgw.dialect.ClickHouseDialect;
+import com.mesalab.qgw.dialect.DruidDialect;
+import com.mesalab.qgw.model.basic.SelectStatement;
+import com.mesalab.qgw.model.basic.udf.TIME_FLOOR_WITH_FILL;
+import com.mesalab.qgw.model.basic.udf.UDF;
+import net.sf.jsqlparser.expression.Function;
+import net.sf.jsqlparser.statement.select.OrderByElement;
+import net.sf.jsqlparser.statement.select.SelectExpressionItem;
+import net.sf.jsqlparser.statement.select.SelectItem;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * TODO
+ *
+ * @Classname QueryTypeHelper
+ * @Date 2024/3/11 14:43
+ * @Author wWei
+ */
+public class QueryTypeHelper {
+
+ private QueryTypeHelper() {
+ }
+
+ public static String determineQueryType(SelectStatement dbSelectStatement) {
+ Set<UDF> udfSet = dbSelectStatement.getUdfSet();
+ if (udfSet != null && !udfSet.isEmpty()) {
+ for (UDF udf : udfSet) {
+ if (udf instanceof TIME_FLOOR_WITH_FILL) {
+ return ExampleDataHelper.QUERY_TYPE_TIMESERIES;
+ }
+ }
+ }
+
+ Map<String, String> groupDimension = dbSelectStatement.getGroupDimension();
+ if (groupDimension != null && !groupDimension.isEmpty()) {
+ return ExampleDataHelper.QUERY_TYPE_GROUP_BY;
+ }
+
+ List<SelectItem> selectItems = dbSelectStatement.getSelectItems();
+ if (selectItems != null && !selectItems.isEmpty()) {
+ for (SelectItem selectItem : selectItems) {
+ if (selectItem instanceof SelectExpressionItem) {
+ SelectExpressionItem selectExpressionItem = ((SelectExpressionItem) selectItem);
+ if (selectExpressionItem.getExpression() instanceof Function) {
+ Function function = (Function) selectExpressionItem.getExpression();
+ if (SQLFunctionUtil.pAggregateFunStandard.matcher(function.toString()).find()
+ || ClickHouseDialect.pAggregateFunSpecificOfCK.matcher(function.toString()).find()
+ || DruidDialect.pAggregateFunSpecificOfDruid.matcher(function.toString()).find()) {
+ return ExampleDataHelper.QUERY_TYPE_AGG_STATISTICS;
+ }
+ }
+ }
+ }
+ }
+ return ExampleDataHelper.QUERY_TYPE_OTHER;
+ }
+
+ public static Object determineParam(SelectStatement dbSelectStatement, String queryType) {
+ if (ExampleDataHelper.QUERY_TYPE_TIMESERIES.equals(queryType)) {
+ TIME_FLOOR_WITH_FILL udfElements = dbSelectStatement.getUdfSet().stream()
+ .filter(udf -> udf instanceof TIME_FLOOR_WITH_FILL)
+ .map(udf -> (TIME_FLOOR_WITH_FILL) udf)
+ .findFirst()
+ .orElse(null);
+
+ if (udfElements != null) {
+ String name = udfElements.getName();
+ String timestamp = udfElements.getTimestamp();
+ Map<String, String> groupDimension = dbSelectStatement.getGroupDimension();
+ for (String groupKey : groupDimension.keySet()) {
+ if (groupDimension.get(groupKey).contains(name) && groupDimension.get(groupKey).contains(timestamp)) {
+ return groupKey;
+ }
+ }
+ }
+ } else if (ExampleDataHelper.QUERY_TYPE_GROUP_BY.equals(queryType)) {
+ List<OrderByElement> orderByElements = dbSelectStatement.getOrderByElements();
+ if (orderByElements != null && !orderByElements.isEmpty()) {
+ return !orderByElements.get(0).isAsc();
+ } else {
+ return false;
+ }
+ }
+
+ return null;
+ }
+}
diff --git a/src/main/java/com/mesalab/common/utils/sqlparser/SQLFunctionUtil.java b/src/main/java/com/mesalab/common/utils/sqlparser/SQLFunctionUtil.java
index 8c0d7351..c65cc6f8 100644
--- a/src/main/java/com/mesalab/common/utils/sqlparser/SQLFunctionUtil.java
+++ b/src/main/java/com/mesalab/common/utils/sqlparser/SQLFunctionUtil.java
@@ -1,17 +1,19 @@
package com.mesalab.common.utils.sqlparser;
+import cn.hutool.core.util.NumberUtil;
+import cn.hutool.core.util.StrUtil;
import cn.hutool.core.util.URLUtil;
import com.alibaba.fastjson2.JSON;
import com.geedgenetworks.utils.Encodes;
import com.google.common.base.Splitter;
import com.google.common.collect.Maps;
-import com.mesalab.common.enums.DBTypeEnum;
-import com.mesalab.common.enums.ResultStatusEnum;
+import com.mesalab.common.enums.DBEngineType;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
import com.mesalab.common.utils.SpringContextUtil;
import com.mesalab.common.utils.StringUtil;
import com.mesalab.common.utils.TimeZoneUtil;
import com.mesalab.qgw.exception.QGWBusinessException;
-import com.mesalab.qgw.exception.QGWErrorCode;
+import com.mesalab.common.exception.CommonErrorCode;
import com.mesalab.qgw.model.basic.HttpConfig;
import com.mesalab.qgw.model.basic.ClickHouseHttpSource;
import com.mesalab.qgw.model.basic.DruidIoHttpSource;
@@ -24,7 +26,9 @@ import net.sf.jsqlparser.parser.CCJSqlParserUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.NameValuePair;
import org.apache.http.client.utils.URLEncodedUtils;
+import org.joda.time.Period;
+import javax.validation.constraints.NotNull;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.HashMap;
@@ -32,6 +36,7 @@ import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
+import java.util.stream.Collectors;
/**
* @author wangwei
@@ -39,6 +44,7 @@ import java.util.regex.Pattern;
*/
public class SQLFunctionUtil {
+ private static final boolean CONVERT_DEFAULT_TIMEZONE_INTO_UTC;
public static final String TIME_FLOOR_WITH_FILL = "TIME_FLOOR_WITH_FILL";
public static final String IP_TO_GEO = "IP_TO_GEO";
public static final String IP_TO_CITY = "IP_TO_CITY";
@@ -47,6 +53,8 @@ public class SQLFunctionUtil {
public static final String IP_TO_ASN = "IP_TO_ASN";
public static final String IP_TO_ASN_DETAIL = "IP_TO_ASN_DETAIL";
public static final String IP_TO_ASN_ORG = "IP_TO_ASN_ORG";
+ public static final String ROLLUP = "ROLLUP";
+ public static final String RATE = "RATE";
public static final String MAX_DURATION = "MAX_DURATION";
public static final String MEDIAN_HDR = "MEDIAN_HDR";
public static final String QUANTILE_HDR = "QUANTILE_HDR";
@@ -68,6 +76,7 @@ public class SQLFunctionUtil {
*/
public static final Pattern pTimeFloorWithFill = Pattern.compile("\\b" + TIME_FLOOR_WITH_FILL + "\\(", Pattern.CASE_INSENSITIVE);
public static final Pattern pMaxDuration = Pattern.compile("\\b" + MAX_DURATION + "\\(", Pattern.CASE_INSENSITIVE);
+ public static final Pattern pRate = Pattern.compile("\\b" + RATE + "\\(", Pattern.CASE_INSENSITIVE);
public static final Pattern pIpToGeo = Pattern.compile("\\b" + IP_TO_GEO + "\\(", Pattern.CASE_INSENSITIVE);
public static final Pattern pIpToCity = Pattern.compile("\\b" + IP_TO_CITY + "\\(", Pattern.CASE_INSENSITIVE);
public static final Pattern pIpToCountry = Pattern.compile("\\b" + IP_TO_COUNTRY + "\\(", Pattern.CASE_INSENSITIVE);
@@ -88,38 +97,40 @@ public class SQLFunctionUtil {
public static final Pattern pApproxCountDistinctHlld = Pattern.compile("\\b" + APPROX_COUNT_DISTINCT_HLLD + "\\(", Pattern.CASE_INSENSITIVE);
- public static final HashMap<String, String> functions = Maps.newHashMap();
+ public static final HashMap<String, String> federationUDFFunctions = Maps.newHashMap();
private static final Map<String, String> distributedGroupByFunction = Maps.newHashMap();
public static final Map<String, String> timeSlicingFunction = Maps.newHashMap();
static {
- functions.put(TIME_FLOOR_WITH_FILL, null);
- functions.put(IP_TO_GEO, null);
- functions.put(IP_TO_CITY, null);
- functions.put(IP_TO_COUNTRY, null);
- functions.put(IP_TO_ISP, null);
- functions.put(IP_TO_ASN, null);
- functions.put(IP_TO_ASN_DETAIL, null);
- functions.put(IP_TO_ASN_ORG, null);
- distributedGroupByFunction.put("count","sum");
- distributedGroupByFunction.put("sum","sum");
- distributedGroupByFunction.put("avg","avg");
- distributedGroupByFunction.put("min","min");
- distributedGroupByFunction.put("max","max");
- distributedGroupByFunction.put("median","median");
- timeSlicingFunction.put("count","sum");
- timeSlicingFunction.put("sum","sum");
- timeSlicingFunction.put("avg","avg");
- timeSlicingFunction.put("min","min");
- timeSlicingFunction.put("max","max");
+ federationUDFFunctions.put(TIME_FLOOR_WITH_FILL, null);
+ federationUDFFunctions.put(IP_TO_GEO, null);
+ federationUDFFunctions.put(IP_TO_CITY, null);
+ federationUDFFunctions.put(IP_TO_COUNTRY, null);
+ federationUDFFunctions.put(IP_TO_ISP, null);
+ federationUDFFunctions.put(IP_TO_ASN, null);
+ federationUDFFunctions.put(IP_TO_ASN_DETAIL, null);
+ federationUDFFunctions.put(IP_TO_ASN_ORG, null);
+ federationUDFFunctions.put(ROLLUP, null);
+ distributedGroupByFunction.put("count", "sum");
+ distributedGroupByFunction.put("sum", "sum");
+ distributedGroupByFunction.put("avg", "avg");
+ distributedGroupByFunction.put("min", "min");
+ distributedGroupByFunction.put("max", "max");
+ distributedGroupByFunction.put("median", "median");
+ timeSlicingFunction.put("count", "sum");
+ timeSlicingFunction.put("sum", "sum");
+ timeSlicingFunction.put("avg", "avg");
+ timeSlicingFunction.put("min", "min");
+ timeSlicingFunction.put("max", "max");
+ CONVERT_DEFAULT_TIMEZONE_INTO_UTC = clickHouseHttpSource.isConvertDefaultTimezoneIntoUTC();
}
/**
* 标准函数
*/
//time functions
- public static final Pattern pAggregateFunStandard = Pattern.compile("\\b(count|min|max|sum|avg|any|stddevPop|stddevSamp|varPop|varSamp|covarPop|covarSamp)\\s*\\(", Pattern.CASE_INSENSITIVE);
+ public static final Pattern pAggregateFunStandard = Pattern.compile("\\b(count|min|max|sum|avg|any|rate|stddevPop|stddevSamp|varPop|varSamp|covarPop|covarSamp|MEDIAN|QUANTILE|MAX_DURATION|MEDIAN_HDR|QUANTILE_HDR|PERCENTILES_HDR|COUNT_DISTINCT|APPROX_COUNT_DISTINCT_HLLD)\\s*\\(", Pattern.CASE_INSENSITIVE);
public static final Pattern pDateFormat = Pattern.compile("\\W(FROM_UNIXTIME|DATE_FORMAT|STR_TO_DATE)\\s*\\(", Pattern.CASE_INSENSITIVE);
public static final Pattern pGroupByFormat = Pattern.compile("(SELECT\\s+|GROUP\\s*BY\\s+|,\\s*)(FROM_UNIXTIME|DATE_FORMAT|STR_TO_DATE)\\s*\\(", Pattern.CASE_INSENSITIVE);
public static final Pattern pFromUnixTimeMillis = Pattern.compile("\\WFROM_UNIXTIME_MILLIS\\s*\\(", Pattern.CASE_INSENSITIVE);
@@ -142,12 +153,14 @@ public class SQLFunctionUtil {
public static final Pattern strFormatDateTime = Pattern.compile("\\d{4}-\\d{2}-\\d{2}\\s+\\d{2}:\\d{2}:\\d{2}", Pattern.CASE_INSENSITIVE);
//other
+ public static final Pattern pRollUp = Pattern.compile("\\Wrollup\\s*\\(", Pattern.CASE_INSENSITIVE);
public static final Pattern pQuantile = Pattern.compile("\\WQUANTILE\\s*\\(");
public static final Pattern pMedian = Pattern.compile("\\WMEDIAN\\s*\\(");
public static final Pattern pCast = Pattern.compile("\\WCAST\\s*\\(");
public static final Pattern pNotEmpty = Pattern.compile("\\b" + NOT_EMPTY + "\\(");
public static final Pattern pEmpty = Pattern.compile("\\b" + EMPTY + "\\(");
public static final Pattern pConcat = Pattern.compile("\\b" + CONCAT + "\\((?!toString\\(|CAST\\()", Pattern.CASE_INSENSITIVE);
+ public static final Pattern pIsIPAddressInRange = Pattern.compile("\\WisIPAddressInRange\\s*\\(", Pattern.CASE_INSENSITIVE);
/**
* 解析mysql函数替换为对应数据库类型函数
@@ -159,6 +172,7 @@ public class SQLFunctionUtil {
public static String generateDateFunction(String sql, String dbType) {
sql = translateCommonFun(sql, dbType);
sql = parseQuantile(sql, dbType);
+ sql = parseIsIPAddressInRange(sql, dbType);
sql = parseMedian(sql, dbType);
sql = parseCast(sql, dbType);
sql = parseMergepFunction(sql, dbType);
@@ -179,6 +193,44 @@ public class SQLFunctionUtil {
return sql;
}
+ public static String replaceRollup(String sql) {
+ sql = parseRollup(sql);
+ return sql;
+ }
+
+ public static String parseRollup(String sql) {
+ while (true) {
+ Matcher matcher = pRollUp.matcher(sql);
+ if (!matcher.find()) {
+ return sql;
+ }
+ int start = matcher.start() + 1;
+ String sqlParse = sql.substring(start);
+ int[] bracketsMatch = StringUtil.getBracketsMatch(sqlParse, "(", false);
+ if (bracketsMatch[0] >= 1) {
+ --bracketsMatch[0];
+ }
+ String str = translateRollup(sqlParse, bracketsMatch[0], null);
+ sql = sql.substring(0, start) + str;
+ }
+ }
+
+ public static String translateRollup(String sqlParse, int num, String dbType) {
+ Pattern pDateFormatMark = Pattern.compile("(ROLLUP)\\s*\\((.*?(.*?\\).*?){" + num + "})\\)", Pattern.CASE_INSENSITIVE);
+ Matcher mDateFormatMark = pDateFormatMark.matcher(sqlParse);
+ StringBuffer sb = new StringBuffer();
+ if (mDateFormatMark.find()) {
+ String group2 = mDateFormatMark.group(2);
+ String replaceValue = group2;
+ if (StringUtil.getBracketsMatch(group2, "(", false)[0] >= 0) {
+ replaceValue = mDateFormatMark.group(2);
+ }
+ mDateFormatMark.appendReplacement(sb, replaceValue);
+ }
+ mDateFormatMark.appendTail(sb);
+ return sb.toString();
+ }
+
public static String timeZoneFunc(String sql, String dbType) {
while (true) {
Matcher matcher = pTimeZone.matcher(sql);
@@ -226,15 +278,15 @@ public class SQLFunctionUtil {
int startTZInt = (Integer.parseInt(TimeZoneUtil.timeZoneTransition(startTZString))) / 100;
timeDifference = startTZInt - oldTZInt;
}
- if (DBTypeEnum.CLICKHOUSE.getValue().equals(dbType)) {
+ if (DBEngineType.CLICKHOUSE.getValue().equals(dbType)) {
replaceValue = "addHours#[toDateTime#[" + params.get(0) + "," + params.get(1) + "#]," + timeDifference + "," + params.get(1) + "#]";
- } else if (DBTypeEnum.DRUID.getValue().equals(dbType)) {
+ } else if (DBEngineType.DRUID.getValue().equals(dbType)) {
if (!strFormatDateTime.matcher(params.get(0)).find()) {
replaceValue = "TIME_FORMAT#[TIME_SHIFT#[" + params.get(0) + "," + "'PT1H'," + timeDifference + "," + params.get(1) + "#],'yyyy-MM-dd HH:mm:ss'#]";
} else {
replaceValue = "TIME_FORMAT#[TIME_SHIFT#[TIME_PARSE#[" + params.get(0) + "#]," + "'PT1H'," + timeDifference + "," + params.get(1) + "#],'yyyy-MM-dd HH:mm:ss'#]";
}
- } else if (DBTypeEnum.HBASE.getValue().equals(dbType)) {
+ } else if (DBEngineType.HBASE.getValue().equals(dbType)) {
if (!strFormatDateTime.matcher(params.get(0)).find()) {
replaceValue = "TO_CHAR#[" + func + "#[ " + params.get(0) + ",'" + TimeZoneUtil.timeZoneTransition(oldTZString) + "','" + TimeZoneUtil.timeZoneTransition(startTZString) + "'#],'yyyy-MM-dd HH:mm:ss'#]";
} else {
@@ -248,6 +300,7 @@ public class SQLFunctionUtil {
public static String getDistributedGroupByFunction(String sourceAggregateFunctionName) {
return distributedGroupByFunction.get(sourceAggregateFunctionName);
}
+
/**
* 公共函数转义
*
@@ -289,6 +342,7 @@ public class SQLFunctionUtil {
private static String translateUDF(String dbType, String resultSql) {
resultSql = translateByPattern(resultSql, dbType, pTimeFloorWithFill);
resultSql = translateByPattern(resultSql, dbType, pMaxDuration);
+ resultSql = translateByPattern(resultSql, dbType, pRate);
resultSql = translateByPattern(resultSql, dbType, pIpToGeo);
resultSql = translateByPattern(resultSql, dbType, pIpToCity);
resultSql = translateByPattern(resultSql, dbType, pIpToCountry);
@@ -312,7 +366,7 @@ public class SQLFunctionUtil {
* @return
*/
public static String getQueryValue(String query, String dbType) {
- if (DBTypeEnum.CLICKHOUSE.getValue().equals(dbType)) {
+ if (DBEngineType.CLICKHOUSE.getValue().equals(dbType)) {
query = generateDateFunction("select " + query + " as actualValue", dbType);
query = Encodes.urlEncode(query);
Map<String, String> results = executeHttpGetOfCK(query);
@@ -321,7 +375,7 @@ public class SQLFunctionUtil {
Map<String, Object> data = dates.get(0);
Object actualValue = data.get("actualValue");
return actualValue.toString();
- } else if (DBTypeEnum.DRUID.getValue().equals(dbType)) {
+ } else if (DBEngineType.DRUID.getValue().equals(dbType)) {
query = generateDateFunction("select TIME_FORMAT(" + query + ", 'yyyy-MM-dd HH:mm:ss') as actualValue", dbType);
Map<String, String> results = executeHttpPostOfDruid(query);
List<Object> maps = (List<Object>) JSON.parseObject(results.get("result"), Object.class);
@@ -446,6 +500,30 @@ public class SQLFunctionUtil {
}
/**
+ * 匹配范围:isIPAddressInRange(
+ *
+ * @param sql 需要解析的sql
+ * @param dbType 数据库的类型
+ * @return
+ */
+ public static String parseIsIPAddressInRange(String sql, String dbType) {
+ while (true) {
+ Matcher matcher = pIsIPAddressInRange.matcher(sql);
+ if (!matcher.find()) {
+ return sql;
+ }
+ int start = matcher.start() + 1;
+ String sqlParse = sql.substring(start);
+ int[] bracketsMatch = StringUtil.getBracketsMatch(sqlParse, "(", false);
+ if (bracketsMatch[0] >= 1) {
+ --bracketsMatch[0];
+ }
+ String str = parseIsIPAddressInRangeReplace(sqlParse, bracketsMatch[0], dbType);
+ sql = sql.substring(0, start) + str;
+ }
+ }
+
+ /**
* 替换:QUANTILE(expr,level)、QUANTILE(expr)
*
* @param sqlParse
@@ -470,6 +548,30 @@ public class SQLFunctionUtil {
}
/**
+ * 替换:isIPAddressInRange
+ *
+ * @param sqlParse
+ * @param num 包含括号的个数
+ * @param dbType
+ * @return
+ */
+ public static String parseIsIPAddressInRangeReplace(String sqlParse, int num, String dbType) {
+ Pattern pNumQuantile = Pattern.compile("(isIPAddressInRange)\\s*\\((.*?(.*?\\).*?){" + num + "})\\)");
+ Matcher matcher = pNumQuantile.matcher(sqlParse);
+ StringBuffer sb = new StringBuffer();
+ if (matcher.find()) {
+ String group2 = matcher.group(2);
+ String replaceValue = group2;
+ if (StringUtil.getBracketsMatch(group2, "(", false)[0] >= 0) {
+ replaceValue = isIPAddressInRangeReplaceByDB(group2, dbType);
+ }
+ matcher.appendReplacement(sb, replaceValue);
+ }
+ matcher.appendTail(sb);
+ return sb.toString();
+ }
+
+ /**
* 替换:匹配范围:QUANTILE(
*
* @param param 截取到的括号内的参数
@@ -482,18 +584,34 @@ public class SQLFunctionUtil {
if (params.size() == 1) {
params.add("0.5");
}
- if (DBTypeEnum.CLICKHOUSE.getValue().equals(dbType)) {
+ if (DBEngineType.CLICKHOUSE.getValue().equals(dbType)) {
replaceValue = "quantile#[" + params.get(1) + "#]#[" + params.get(0) + "#]";
- } else if (DBTypeEnum.DRUID.getValue().equals(dbType)) {
+ } else if (DBEngineType.DRUID.getValue().equals(dbType)) {
replaceValue = "APPROX_QUANTILE_DS#[" + params.get(0) + "," + params.get(1) + "#]";
}
return replaceValue;
}
- public static String parseMedian(String sql, String dbType){
+ /**
+ * 替换:匹配范围:isIPAddressInRange(
+ *
+ * @param param 截取到的括号内的参数
+ * @param dbType
+ * @return
+ */
+ private static String isIPAddressInRangeReplaceByDB(String param, String dbType) {
+ List<String> params = diviParam(param, ",");
+ String replaceValue = null;
+ if (DBEngineType.CLICKHOUSE.getValue().equals(dbType)) {
+ replaceValue = "IF#[empty#[" + params.get(0) + "#], 0, isIPAddressInRange#[" + params.get(0) + "," + params.get(1) + "#]#]";
+ }
+ return replaceValue;
+ }
+
+ public static String parseMedian(String sql, String dbType) {
while (true) {
Matcher matcher = pMedian.matcher(sql);
- if (!matcher.find()){
+ if (!matcher.find()) {
return sql;
}
int start = matcher.start() + 1;
@@ -507,7 +625,7 @@ public class SQLFunctionUtil {
}
}
- public static String parseMedianReplace(String sqlParse, int num, String dbType){
+ public static String parseMedianReplace(String sqlParse, int num, String dbType) {
Pattern pNumMedian = Pattern.compile("(MEDIAN)\\s*\\((.*?(.*?\\).*?){" + num + "})\\)");
Matcher matcher = pNumMedian.matcher(sqlParse);
StringBuffer sb = new StringBuffer();
@@ -524,11 +642,11 @@ public class SQLFunctionUtil {
}
- private static String medianReplaceByDB(String param, String dbType){
+ private static String medianReplaceByDB(String param, String dbType) {
String replaceValue = null;
- if (DBTypeEnum.CLICKHOUSE.getValue().equals(dbType)) {
+ if (DBEngineType.CLICKHOUSE.getValue().equals(dbType)) {
replaceValue = "median#[" + param + "#]";
- } else if (DBTypeEnum.DRUID.getValue().equals(dbType)) {
+ } else if (DBEngineType.DRUID.getValue().equals(dbType)) {
replaceValue = "APPROX_QUANTILE_DS#[" + param + "," + 0.5 + "#]";
}
return replaceValue;
@@ -574,15 +692,15 @@ public class SQLFunctionUtil {
Expression expression = CCJSqlParserUtil.parseExpression(param);
if (expression instanceof CastExpression) {
CastExpression castExpression = (CastExpression) expression;
- if (DBTypeEnum.CLICKHOUSE.getValue().equals(dbType)) {
+ if (DBEngineType.CLICKHOUSE.getValue().equals(dbType)) {
String dataType = castExpression.getType().getDataType();
castExpression.getType().setDataType("Nullable(" + dataType + ")");
}
replaceValue = String.valueOf(castExpression).replace("(", "#[").replace(")", "#]");
}
} catch (JSQLParserException e) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
- String.format(QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getMessage(), e.getMessage()));
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getMessage(), e.getMessage()));
}
return replaceValue;
}
@@ -679,12 +797,13 @@ public class SQLFunctionUtil {
/**
* Desc 匹配UNIX_TIMESTAMP_MILLIS(
+ *
* @param sql
* @param dbType
* @return
*/
- public static String parseUnixTimeMillis(String sql, String dbType){
+ public static String parseUnixTimeMillis(String sql, String dbType) {
while (true) {
Matcher matcher = pUnixTimeMillis.matcher(sql);
if (!matcher.find()) {
@@ -853,16 +972,16 @@ public class SQLFunctionUtil {
String var = group.substring(bracketsMatch[1] + 1);
var = var.replaceAll(" ", "");
String replaceValue = m.group(0);
- if (DBTypeEnum.CLICKHOUSE.getValue().equals(dbType)) {
+ if (DBEngineType.CLICKHOUSE.getValue().equals(dbType)) {
switch (var) {
case "/60)*60),'%Y-%m-%d%H:%i:%s'":
- replaceValue = "toStartOfMinute#[toDateTime#[" + param + "#]#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toStartOfMinute#[toDateTime#[" + param + ", 'UTC'#]#]" : "toStartOfMinute#[toDateTime#[" + param + "#]#]";
break;
case "/3600)*3600),'%Y-%m-%d%H:%i:%s'":
- replaceValue = "toStartOfHour#[toDateTime#[" + param + "#]#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toStartOfHour#[toDateTime#[" + param + ", 'UTC'#]#]" : "toStartOfHour#[toDateTime#[" + param + "#]#]";
break;
}
- } else if (DBTypeEnum.DRUID.getValue().equals(dbType)) {
+ } else if (DBEngineType.DRUID.getValue().equals(dbType)) {
if (param.matches("'\\d{4}-\\d{2}-\\d{2}\\s+\\d{2}:\\d{2}:\\d{2}'")) {
param = "TIMESTAMP " + param;
}
@@ -932,13 +1051,13 @@ public class SQLFunctionUtil {
if (StringUtil.getBracketsMatch(group0.substring(0, group0.length() - 1), "(", true)[0] == num) {
String[] split = group4.split(",");
String replaceValue = "";
- if (DBTypeEnum.CLICKHOUSE.getValue().equals(dbType)) {
+ if (DBEngineType.CLICKHOUSE.getValue().equals(dbType)) {
if (split.length == 1) {
- replaceValue = "toStartOfDay#[toDateTime#[" + group2 + "#]#]+#[" + group4 + "#]*86400 )";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toStartOfDay#[toDateTime#[" + group2 + ", 'UTC'#]#]+#[" + group4 + "#]*86400 )" : "toStartOfDay#[toDateTime#[" + group2 + "#]#]+#[" + group4 + "#]*86400 )";
} else if (split.length == 2) {
- replaceValue = "toStartOfDay#[toDateTime#[" + group2 + "#]#]+#[" + split[0] + "#]*86400 ," + split[1] + ")";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toStartOfDay#[toDateTime#[" + group2 + ", 'UTC'#]#]+#[" + split[0] + "#]*86400 ," + split[1] + ")" : "toStartOfDay#[toDateTime#[" + group2 + "#]#]+#[" + split[0] + "#]*86400 ," + split[1] + ")";
}
- } else if (DBTypeEnum.DRUID.getValue().equals(dbType)) {
+ } else if (DBEngineType.DRUID.getValue().equals(dbType)) {
if (split.length == 1) {
replaceValue = "TIME_SHIFT#[FLOOR#[ TIMESTAMP " + group2 + " to day#],'P1D'," + group4 + "#])";
} else if (split.length == 2) {
@@ -984,9 +1103,9 @@ public class SQLFunctionUtil {
private static String lastDayReplace(String unit, String param, String dbType) {
String replaceValue = null;
if ("LAST_DAY".equals(StringUtils.upperCase(unit))) {
- if (DBTypeEnum.CLICKHOUSE.getValue().equals(dbType)) {
- replaceValue = "addDays#[addMonths#[toStartOfMonth#[toDateTime#[" + param + "#]#],1#],-1#]";
- } else if (DBTypeEnum.DRUID.getValue().equals(dbType)) {
+ if (DBEngineType.CLICKHOUSE.getValue().equals(dbType)) {
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "addDays#[addMonths#[toStartOfMonth#[toDateTime#[" + param + ", 'UTC'#]#],1#],-1#]" : "addDays#[addMonths#[toStartOfMonth#[toDateTime#[" + param + "#]#],1#],-1#]";
+ } else if (DBEngineType.DRUID.getValue().equals(dbType)) {
replaceValue = "TIME_SHIFT#[FLOOR#[TIME_SHIFT#[ TIMESTAMP " + param + ", 'P1M', 1#] to month#], 'P1D', -1#]";
}
}
@@ -1037,17 +1156,21 @@ public class SQLFunctionUtil {
if (mYear.find()) {
String group1 = mYear.group(1);
String group5 = mYear.group(5);
- if (DBTypeEnum.CLICKHOUSE.getValue().equals(dbType)) {
+ if (DBEngineType.CLICKHOUSE.getValue().equals(dbType)) {
if (group1 != null && !"".equals(group1.trim()) && group5 != null && !"".equals(group5.trim())) {
- replaceValue = "toDateTime#[addDays#[addYears#[toStartOfYear#[toDateTime#[" + mYear.group(3) + "#]#]," + group1 + " " + group5 + "#]," + params.get(1) + " -1 #]#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toDateTime#[addDays#[addYears#[toStartOfYear#[toDateTime#[" + mYear.group(3) + ", 'UTC'#]#]," + group1 + " " + group5 + "#]," + params.get(1) + " -1 #]#]" :
+ "toDateTime#[addDays#[addYears#[toStartOfYear#[toDateTime#[" + mYear.group(3) + "#]#]," + group1 + " " + group5 + "#]," + params.get(1) + " -1 #]#]";
} else if (group5 != null && !"".equals(group5.trim())) {
- replaceValue = "toDateTime#[addDays#[addYears#[toStartOfYear#[toDateTime#[" + mYear.group(3) + "#]#]," + group5 + "#]," + params.get(1) + " -1 #]#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toDateTime#[addDays#[addYears#[toStartOfYear#[toDateTime#[" + mYear.group(3) + ", 'UTC'#]#]," + group5 + "#]," + params.get(1) + " -1 #]#]"
+ : "toDateTime#[addDays#[addYears#[toStartOfYear#[toDateTime#[" + mYear.group(3) + "#]#]," + group5 + "#]," + params.get(1) + " -1 #]#]";
} else if (group1 != null && !"".equals(group1.trim())) {
- replaceValue = "toDateTime#[addDays#[addYears#[toStartOfYear#[toDateTime#[" + mYear.group(3) + "#]#]," + group1 + "0 #]," + params.get(1) + " -1 #]#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toDateTime#[addDays#[addYears#[toStartOfYear#[toDateTime#[" + mYear.group(3) + ", 'UTC'#]#]," + group1 + "0 #]," + params.get(1) + " -1 #]#]"
+ : "toDateTime#[addDays#[addYears#[toStartOfYear#[toDateTime#[" + mYear.group(3) + "#]#]," + group1 + "0 #]," + params.get(1) + " -1 #]#]";
} else {
- replaceValue = "toDateTime#[addDays#[toStartOfYear#[toDateTime#[" + mYear.group(3) + "#]#]," + params.get(1) + " -1 #]#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toDateTime#[addDays#[toStartOfYear#[toDateTime#[" + mYear.group(3) + ", 'UTC'#]#]," + params.get(1) + " -1 #]#]"
+ : "toDateTime#[addDays#[toStartOfYear#[toDateTime#[" + mYear.group(3) + "#]#]," + params.get(1) + " -1 #]#]";
}
- } else if (DBTypeEnum.DRUID.getValue().equals(dbType)) {
+ } else if (DBEngineType.DRUID.getValue().equals(dbType)) {
if (group1 != null && !"".equals(group1.trim()) && group5 != null && !"".equals(group5.trim())) {
replaceValue = "TIME_SHIFT#[TIME_SHIFT#[FLOOR#[ TIMESTAMP " + mYear.group(3) + " to year#],'P1Y'," + group1 + " " + group5 + " #],'P1D'," + params.get(1) + " -1 #]";
} else if (group5 != null && !"".equals(group5.trim())) {
@@ -1095,42 +1218,42 @@ public class SQLFunctionUtil {
private static String unitFuncGetNumReplaceByDB(String unit, String expr, String dbType) {
String replaceValue = null;
unit = StringUtils.upperCase(unit);
- if (DBTypeEnum.CLICKHOUSE.getValue().equals(dbType)) {
+ if (DBEngineType.CLICKHOUSE.getValue().equals(dbType)) {
switch (unit) {
case "YEAR":
- replaceValue = "toYear#[toDateTime#[" + expr + "#]#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toYear#[toDateTime#[" + expr + ", 'UTC'#]#]" : "toYear#[toDateTime#[" + expr + "#]#]";
break;
case "QUARTER":
- replaceValue = "toQuarter#[toDateTime#[" + expr + "#]#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toQuarter#[toDateTime#[" + expr + ", 'UTC'#]#]" : "toQuarter#[toDateTime#[" + expr + "#]#]";
break;
case "MONTH":
- replaceValue = "toMonth#[toDateTime#[" + expr + "#]#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toMonth#[toDateTime#[" + expr + ", 'UTC'#]#]" : "toMonth#[toDateTime#[" + expr + "#]#]";
break;
case "DAY":
- replaceValue = "toDayOfMonth#[toDateTime#[" + expr + "#]#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toDayOfMonth#[toDateTime#[" + expr + ", 'UTC'#]#]" : "toDayOfMonth#[toDateTime#[" + expr + "#]#]";
break;
case "HOUR":
- replaceValue = "toHour#[toDateTime#[" + expr + "#]#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toHour#[toDateTime#[" + expr + ", 'UTC'#]#]" : "toHour#[toDateTime#[" + expr + "#]#]";
break;
case "MINUTE":
- replaceValue = "toMinute#[toDateTime#[" + expr + "#]#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toMinute#[toDateTime#[" + expr + ", 'UTC'#]#]" : "toMinute#[toDateTime#[" + expr + "#]#]";
break;
case "SECOND":
- replaceValue = "toSecond#[toDateTime#[" + expr + "#]#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toSecond#[toDateTime#[" + expr + ", 'UTC'#]#]" : "toSecond#[toDateTime#[" + expr + "#]#]";
break;
case "DAYOFYEAR":
- replaceValue = "toDayOfYear#[toDateTime#[" + expr + "#]#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toDayOfYear#[toDateTime#[" + expr + ", 'UTC'#]#]" : "toDayOfYear#[toDateTime#[" + expr + "#]#]";
break;
case "DAYOFMONTH":
- replaceValue = "toDayOfMonth#[toDateTime#[" + expr + "#]#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toDayOfMonth#[toDateTime#[" + expr + ", 'UTC'#]#]" : "toDayOfMonth#[toDateTime#[" + expr + "#]#]";
break;
case "DAYOFWEEK":
- replaceValue = "toDayOfWeek#[addDays#[toDateTime#[" + expr + "#],1#]#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toDayOfWeek#[addDays#[toDateTime#[" + expr + ", 'UTC'#],1#]#]" : "toDayOfWeek#[addDays#[toDateTime#[" + expr + "#],1#]#]";
break;
default:
replaceValue = unit + "#[" + expr + "#]";
}
- } else if (DBTypeEnum.DRUID.getValue().equals(dbType)) {
+ } else if (DBEngineType.DRUID.getValue().equals(dbType)) {
switch (unit) {
case "YEAR":
replaceValue = "TIME_EXTRACT#[TIME_SHIFT#[ TIMESTAMP " + expr + ",'PT1M',0#], 'YEAR' #]";
@@ -1188,37 +1311,37 @@ public class SQLFunctionUtil {
if (!sign) {
param2 = "- #[" + param2 + "#]";
}
- if (DBTypeEnum.CLICKHOUSE.getValue().equals(dbType)) {
+ if (DBEngineType.CLICKHOUSE.getValue().equals(dbType)) {
switch (StringUtils.upperCase(matcher.group(2))) {
case "SECOND":
- replaceValue = "toDateTime#[" + param1 + "#]" + param2;
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toDateTime#[" + param1 + ", 'UTC'#]" + param2 : "toDateTime#[" + param1 + "#]" + param2;
break;
case "MINUTE":
- replaceValue = "toDateTime#[" + param1 + "#]" + param2 + "*60";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toDateTime#[" + param1 + ", 'UTC'#]" + param2 + "*60" : "toDateTime#[" + param1 + "#]" + param2 + "*60";
break;
case "HOUR":
- replaceValue = "toDateTime#[" + param1 + "#]" + param2 + "*3600";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toDateTime#[" + param1 + ", 'UTC'#]" + param2 + "*3600" : "toDateTime#[" + param1 + "#]" + param2 + "*3600";
break;
case "DAY":
- replaceValue = "toDateTime#[" + param1 + "#]" + param2 + "*86400";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toDateTime#[" + param1 + ", 'UTC'#]" + param2 + "*86400" : "toDateTime#[" + param1 + "#]" + param2 + "*86400";
break;
case "WEEK":
- replaceValue = "toDateTime#[" + param1 + "#]" + param2 + "*604800";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toDateTime#[" + param1 + ", 'UTC'#]" + param2 + "*604800" : "toDateTime#[" + param1 + "#]" + param2 + "*604800";
break;
case "MONTH":
- replaceValue = "addMonths#[toDateTime#[" + param1 + "#] ," + param2 + "#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "addMonths#[toDateTime#[" + param1 + ", 'UTC'#] ," + param2 + "#]" : "addMonths#[toDateTime#[" + param1 + "#] ," + param2 + "#]";
break;
case "QUARTER":
- replaceValue = "addQuarter#[toDateTime#[" + param1 + "#]," + param2 + "#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "addQuarter#[toDateTime#[" + param1 + ", 'UTC'#]," + param2 + "#]" : "addQuarter#[toDateTime#[" + param1 + "#]," + param2 + "#]";
break;
case "YEAR":
- replaceValue = "addYears#[toDateTime#[" + param1 + "#]," + param2 + "#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "addYears#[toDateTime#[" + param1 + ", 'UTC'#]," + param2 + "#]" : "addYears#[toDateTime#[" + param1 + "#]," + param2 + "#]";
break;
default:
replaceValue = param1 + param2;
break;
}
- } else if (DBTypeEnum.DRUID.getValue().equals(dbType)) {
+ } else if (DBEngineType.DRUID.getValue().equals(dbType)) {
switch (StringUtils.upperCase(matcher.group(2))) {
case "SECOND":
replaceValue = "TIME_SHIFT#[ TIMESTAMP " + param1 + ",'PT1S'," + param2 + "#]";
@@ -1257,12 +1380,12 @@ public class SQLFunctionUtil {
if (!sign) {
param2 = "- #[" + param2 + "#]";
}
- if (DBTypeEnum.CLICKHOUSE.getValue().equals(dbType)) {
- replaceValue = "addDays#[toDateTime#[" + param1 + "#]," + param2 + "#]";
- } else if (DBTypeEnum.DRUID.getValue().equals(dbType)) {
+ if (DBEngineType.CLICKHOUSE.getValue().equals(dbType)) {
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "addDays#[toDateTime#[" + param1 + ", 'UTC'#]," + param2 + "#]" : "addDays#[toDateTime#[" + param1 + "#]," + param2 + "#]";
+ } else if (DBEngineType.DRUID.getValue().equals(dbType)) {
replaceValue = "TIME_SHIFT#[ TIMESTAMP " + param1 + ",'P1D'," + param2 + "#]";
}
- } else if (params.size() == 3 && DBTypeEnum.CLICKHOUSE.getValue().equals(dbType)) {
+ } else if (params.size() == 3 && DBEngineType.CLICKHOUSE.getValue().equals(dbType)) {
replaceValue = "DATE_ADD#[" + params.get(0) + "," + params.get(1) + "," + params.get(2) + "#]";
}
@@ -1282,14 +1405,14 @@ public class SQLFunctionUtil {
String param1 = params.get(0);
String replaceValue = null;
if ("FROM_UNIXTIME".equals(StringUtils.upperCase(func)) && params.size() == 1) {
- if (DBTypeEnum.CLICKHOUSE.getValue().equals(dbType)) {
- replaceValue = "toDateTime#[" + param + "#]";
- } else if (DBTypeEnum.DRUID.getValue().equals(dbType) && !bool) {
+ if (DBEngineType.CLICKHOUSE.getValue().equals(dbType)) {
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toDateTime#[" + param + ", 'UTC'#]" : "toDateTime#[" + param + "#]";
+ } else if (DBEngineType.DRUID.getValue().equals(dbType) && !bool) {
replaceValue = "MILLIS_TO_TIMESTAMP#[ 1000 * #[" + param + "#]#]";
- } else if (DBTypeEnum.DRUID.getValue().equals(dbType) && bool) {
+ } else if (DBEngineType.DRUID.getValue().equals(dbType) && bool) {
replaceValue = "TIME_FORMAT#[MILLIS_TO_TIMESTAMP#[ 1000 * #[" + param + "#]#],'YYYY-MM-dd HH:mm:ss'#]";
}
- } else if (func.contains("FROM_UNIXTIME") && DBTypeEnum.DRUID.getValue().equals(dbType) && "%Y-%m-%d %H:%i:%s".equals(params.get(1).replaceAll("\\'|\\\"", "").trim())) {
+ } else if (func.contains("FROM_UNIXTIME") && DBEngineType.DRUID.getValue().equals(dbType) && "%Y-%m-%d %H:%i:%s".equals(params.get(1).replaceAll("\\'|\\\"", "").trim())) {
if (!bool) {
replaceValue = "MILLIS_TO_TIMESTAMP#[ 1000*#[" + param1 + "#]#]";
} else if (bool) {
@@ -1297,71 +1420,71 @@ public class SQLFunctionUtil {
}
} else {
String param2 = params.get(1).replaceAll("\\'|\\\"", "").trim();
- if (DBTypeEnum.CLICKHOUSE.getValue().equals(dbType) && bool) {
+ if (DBEngineType.CLICKHOUSE.getValue().equals(dbType) && bool) {
switch (param2) {
case "%Y-%m-%d %H:%i:%s":
- replaceValue = "toDateTime#[" + param1 + "#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toDateTime#[" + param1 + ", 'UTC'#]" : "toDateTime#[" + param1 + "#]";
break;
case "%Y-%m-%d %H:%i:00":
- replaceValue = "toStartOfMinute#[toDateTime#[" + param1 + "#]#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toStartOfMinute#[toDateTime#[" + param1 + ", 'UTC'#]#]" : "toStartOfMinute#[toDateTime#[" + param1 + "#]#]";
break;
case "%Y-%m-%d %H:00:00":
- replaceValue = "toStartOfHour#[toDateTime#[" + param1 + "#]#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toStartOfHour#[toDateTime#[" + param1 + ", 'UTC'#]#]" : "toStartOfHour#[toDateTime#[" + param1 + "#]#]";
break;
case "%Y-%m-%d 00:00:00":
- replaceValue = "toStartOfDay#[toDateTime#[" + param1 + "#]#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toStartOfDay#[toDateTime#[" + param1 + ", 'UTC'#]#]" : "toStartOfDay#[toDateTime#[" + param1 + "#]#]";
break;
case "%Y-%m-01 00:00:00":
- replaceValue = "toDateTime#[toStartOfMonth#[toDateTime#[" + param1 + "#]#]#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toDateTime#[toStartOfMonth#[toDateTime#[" + param1 + ", 'UTC'#]#]#]" : "toDateTime#[toStartOfMonth#[toDateTime#[" + param1 + "#]#]#]";
break;
case "%Y-01-01 00:00:00":
- replaceValue = "toDateTime#[toStartOfYear#[toDateTime#[" + param1 + "#]#]#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toDateTime#[toStartOfYear#[toDateTime#[" + param1 + ", 'UTC'#]#]#]" : "toDateTime#[toStartOfYear#[toDateTime#[" + param1 + "#]#]#]";
break;
case "%Y-%m-%d":
- replaceValue = "formatDateTime#[toDateTime#[" + param1 + "#], '%Y-%m-%d'#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "formatDateTime#[toDateTime#[" + param1 + ", 'UTC'#], '%Y-%m-%d'#]" : "formatDateTime#[toDateTime#[" + param1 + "#], '%Y-%m-%d'#]";
break;
case "%Y-%m-01":
- replaceValue = "formatDateTime#[toDateTime#[" + param1 + "#], '%Y-%m-01'#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "formatDateTime#[toDateTime#[" + param1 + ", 'UTC'#], '%Y-%m-01'#]" : "formatDateTime#[toDateTime#[" + param1 + "#], '%Y-%m-01'#]";
break;
case "%Y-01-01":
- replaceValue = "formatDateTime#[toDateTime#[" + param1 + "#], '%Y-01-01'#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "formatDateTime#[toDateTime#[" + param1 + ", 'UTC'#], '%Y-01-01'#]" : "formatDateTime#[toDateTime#[" + param1 + "#], '%Y-01-01'#]";
break;
default:
replaceValue = "toDateTime#[" + param1 + "#]";
}
- } else if (DBTypeEnum.CLICKHOUSE.getValue().equals(dbType) && !bool) {
+ } else if (DBEngineType.CLICKHOUSE.getValue().equals(dbType) && !bool) {
switch (param2) {
case "%Y-%m-%d %H:%i:%s":
- replaceValue = "toDateTime#[" + param1 + "#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toDateTime#[" + param1 + ", 'UTC'#]" : "toDateTime#[" + param1 + "#]";
break;
case "%Y-%m-%d %H:%i:00":
- replaceValue = "toStartOfMinute#[toDateTime#[" + param1 + "#]#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toStartOfMinute#[toDateTime#[" + param1 + ", 'UTC'#]#]" : "toStartOfMinute#[toDateTime#[" + param1 + "#]#]";
break;
case "%Y-%m-%d %H:00:00":
- replaceValue = "toStartOfHour#[toDateTime#[" + param1 + "#]#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toStartOfHour#[toDateTime#[" + param1 + ", 'UTC'#]#]" : "toStartOfHour#[toDateTime#[" + param1 + "#]#]";
break;
case "%Y-%m-%d 00:00:00":
- replaceValue = "toStartOfDay#[toDateTime#[" + param1 + "#]#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toStartOfDay#[toDateTime#[" + param1 + ", 'UTC'#]#]" : "toStartOfDay#[toDateTime#[" + param1 + "#]#]";
break;
case "%Y-%m-01 00:00:00":
- replaceValue = "toDateTime#[toStartOfMonth#[toDateTime#[" + param1 + "#]#]#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toDateTime#[toStartOfMonth#[toDateTime#[" + param1 + ", 'UTC'#]#]#]" : "toDateTime#[toStartOfMonth#[toDateTime#[" + param1 + "#]#]#]";
break;
case "%Y-01-01 00:00:00":
- replaceValue = "toDateTime#[toStartOfYear#[toDateTime#[" + param1 + "#]#]#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toDateTime#[toStartOfYear#[toDateTime#[" + param1 + ", 'UTC'#]#]#]" : "toDateTime#[toStartOfYear#[toDateTime#[" + param1 + "#]#]#]";
break;
case "%Y-%m-%d":
- replaceValue = "toStartOfDay#[toDateTime#[" + param1 + "#]#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toStartOfDay#[toDateTime#[" + param1 + ", 'UTC'#]#]" : "toStartOfDay#[toDateTime#[" + param1 + "#]#]";
break;
case "%Y-%m-01":
- replaceValue = "toStartOfMonth#[toDateTime#[" + param1 + "#]#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toStartOfMonth#[toDateTime#[" + param1 + ", 'UTC'#]#]" : "toStartOfMonth#[toDateTime#[" + param1 + "#]#]";
break;
case "%Y-01-01":
- replaceValue = "toStartOfYear#[toDateTime#[" + param1 + "#]#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toStartOfYear#[toDateTime#[" + param1 + ", 'UTC'#]#]" : "toStartOfYear#[toDateTime#[" + param1 + "#]#]";
break;
default:
- replaceValue = "toDateTime#[" + param1 + "#]";
+ replaceValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toDateTime#[" + param1 + ", 'UTC'#]" : "toDateTime#[" + param1 + "#]";
}
- } else if (DBTypeEnum.DRUID.getValue().equals(dbType) && bool) {
+ } else if (DBEngineType.DRUID.getValue().equals(dbType) && bool) {
switch (param2) {
case "%Y-%m-%d %H:%i:%s":
replaceValue = "TIME_FORMAT#[ TIMESTAMP " + param1 + ",'YYYY-MM-dd HH:mm:ss'#]";
@@ -1393,7 +1516,7 @@ public class SQLFunctionUtil {
default:
replaceValue = "TIME_FORMAT#[ TIMESTAMP " + param1 + ",'YYYY-MM-dd HH:mm:ss'#]";
}
- } else if (DBTypeEnum.DRUID.getValue().equals(dbType) && !bool) {
+ } else if (DBEngineType.DRUID.getValue().equals(dbType) && !bool) {
switch (param2) {
case "%Y-%m-%d %H:%i:%s":
replaceValue = " TIMESTAMP " + param1;
@@ -1443,7 +1566,7 @@ public class SQLFunctionUtil {
return sql;
}
StringBuffer sb = new StringBuffer();
- if (DBTypeEnum.DRUID.getValue().equals(dbType)) {
+ if (DBEngineType.DRUID.getValue().equals(dbType)) {
Matcher mNowFun = pNow.matcher(sql);
while (mNowFun.find()) {
@@ -1529,23 +1652,23 @@ public class SQLFunctionUtil {
}
- public static String parseFromUnixTimeMillisByDB(String sqlParse, int num, String dbType){
+ public static String parseFromUnixTimeMillisByDB(String sqlParse, int num, String dbType) {
Pattern pFromUnixTimeMillisParse = Pattern.compile("(FROM_UNIXTIME_MILLIS)\\s*\\((.*?(.*?\\).*?){" + num + "})\\)", Pattern.CASE_INSENSITIVE);
Matcher matcherParse = pFromUnixTimeMillisParse.matcher(sqlParse);
StringBuffer sb = new StringBuffer();
String innerValue = null;
if (matcherParse.find()) {
- if (DBTypeEnum.CLICKHOUSE.getValue().equals(dbType)) {
+ if (DBEngineType.CLICKHOUSE.getValue().equals(dbType)) {
if (matcherParse.group(2) == null || "".equals(matcherParse.group(2).trim())) {
innerValue = "fromUnixTimestamp64Milli#[CAST#[toUnixTimestamp64Milli#[CAST#[now#[#] as DateTime64#]#], 'Int64'#]#]";
} else {
innerValue = String.format("fromUnixTimestamp64Milli#[CAST#[%s, 'Int64'#]#]", matcherParse.group(2));
}
- } else if (DBTypeEnum.DRUID.getValue().equals(dbType)) {
+ } else if (DBEngineType.DRUID.getValue().equals(dbType)) {
if (matcherParse.group(2) == null || "".equals(matcherParse.group(2).trim())) {
innerValue = "TIME_FORMAT#[MILLIS_TO_TIMESTAMP#[TIMESTAMP_TO_MILLIS#[CURRENT_TIMESTAMP#]#],'yyyy-MM-dd HH:mm:ss.SSS'#]";
} else {
- innerValue = String.format("TIME_FORMAT#[MILLIS_TO_TIMESTAMP#[%s#],'yyyy-MM-dd HH:mm:ss.SSS'#]",matcherParse.group(2));
+ innerValue = String.format("TIME_FORMAT#[MILLIS_TO_TIMESTAMP#[%s#],'yyyy-MM-dd HH:mm:ss.SSS'#]", matcherParse.group(2));
}
}
matcherParse.appendReplacement(sb, innerValue);
@@ -1568,20 +1691,20 @@ public class SQLFunctionUtil {
StringBuffer sb = new StringBuffer();
String innerValue = null;
if (matcherParse.find()) {
- if (DBTypeEnum.CLICKHOUSE.getValue().equals(dbType)) {
+ if (DBEngineType.CLICKHOUSE.getValue().equals(dbType)) {
if (matcherParse.group(2) == null || "".equals(matcherParse.group(2).trim())) {
innerValue = "toUnixTimestamp#[now#[#]#]";
} else {
- innerValue = "toUnixTimestamp#[parseDateTimeBestEffort#[toString#[ " + matcherParse.group(2) + "#]#]#]";
+ innerValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toUnixTimestamp#[parseDateTimeBestEffort#[toString#[ " + matcherParse.group(2) + "#], 'UTC'#]#]" : "toUnixTimestamp#[parseDateTimeBestEffort#[toString#[ " + matcherParse.group(2) + "#]#]#]";
}
- } else if (DBTypeEnum.DRUID.getValue().equals(dbType)) {
+ } else if (DBEngineType.DRUID.getValue().equals(dbType)) {
if (matcherParse.group(2) == null || "".equals(matcherParse.group(2).trim())) {
- innerValue = "0.001 * TIMESTAMP_TO_MILLIS#[CURRENT_TIMESTAMP#]";
+ innerValue = "TIMESTAMP_TO_MILLIS#[CURRENT_TIMESTAMP#]/1000";
} else {
if (matcherParse.group(2).startsWith("'") && matcherParse.group(2).endsWith("'")) {
- innerValue = "TIMESTAMP_TO_MILLIS#[ TIME_PARSE#[TIME_FORMAT#[ TIMESTAMP " + matcherParse.group(2) + ", 'yyyy-MM-dd HH:mm:ss'#]#]#]/1000";
+ innerValue = "TIMESTAMP_TO_MILLIS#[ TIME_PARSE#[ " + matcherParse.group(2) + "#]#]/1000";
} else {
- innerValue = "0.001 * TIMESTAMP_TO_MILLIS#[ TIMESTAMP " + matcherParse.group(2) + "#]";
+ innerValue = "TIMESTAMP_TO_MILLIS#[ TIMESTAMP " + matcherParse.group(2) + "#]/1000";
}
}
}
@@ -1591,24 +1714,24 @@ public class SQLFunctionUtil {
return sb.toString();
}
- public static String parseUnixTimeMillisReplaceByDB(String sqlParse, int num, String dbType){
+ public static String parseUnixTimeMillisReplaceByDB(String sqlParse, int num, String dbType) {
Pattern pUnixTimeMillisParse = Pattern.compile("(UNIX_TIMESTAMP_MILLIS)\\s*\\((.*?(.*?\\).*?){" + num + "})\\)", Pattern.CASE_INSENSITIVE);
Matcher matcherParse = pUnixTimeMillisParse.matcher(sqlParse);
StringBuffer sb = new StringBuffer();
String innerValue = null;
if (matcherParse.find()) {
- if (DBTypeEnum.CLICKHOUSE.getValue().equals(dbType)) {
+ if (DBEngineType.CLICKHOUSE.getValue().equals(dbType)) {
if (matcherParse.group(2) == null || "".equals(matcherParse.group(2).trim())) {
innerValue = "toUnixTimestamp64Milli#[CAST#[now#[#] as DateTime64#]#]";
} else {
- innerValue = "toUnixTimestamp64Milli#[CAST#[parseDateTime64BestEffort#[toString#[ " + matcherParse.group(2) + "#]#] as DateTime64#]#]";
+ innerValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toUnixTimestamp64Milli#[CAST#[parseDateTime64BestEffort#[toString#[ " + matcherParse.group(2) + "#], 'UTC'#] as DateTime64#]#]" : "toUnixTimestamp64Milli#[CAST#[parseDateTime64BestEffort#[toString#[ " + matcherParse.group(2) + "#]#] as DateTime64#]#]";
}
- } else if (DBTypeEnum.DRUID.getValue().equals(dbType)) {
+ } else if (DBEngineType.DRUID.getValue().equals(dbType)) {
if (matcherParse.group(2) == null || "".equals(matcherParse.group(2).trim())) {
innerValue = "TIMESTAMP_TO_MILLIS#[CURRENT_TIMESTAMP#]";
} else {
if (matcherParse.group(2).startsWith("'") && matcherParse.group(2).endsWith("'")) {
- innerValue = String.format("TIMESTAMP_TO_MILLIS#[ TIME_PARSE#[%s#]#]",matcherParse.group(2));
+ innerValue = String.format("TIMESTAMP_TO_MILLIS#[ TIME_PARSE#[%s#]#]", matcherParse.group(2));
} else {
innerValue = "TIMESTAMP_TO_MILLIS#[ " + matcherParse.group(2) + "#]";
}
@@ -1634,9 +1757,10 @@ public class SQLFunctionUtil {
StringBuffer sb = new StringBuffer();
String innerValue = null;
if (matcherParse.find()) {
- if (DBTypeEnum.CLICKHOUSE.getValue().equals(dbType)) {
- innerValue = "toStartOfDay#[parseDateTimeBestEffort#[toString#[" + matcherParse.group(2) + "#]#]#]";
- } else if (DBTypeEnum.DRUID.getValue().equals(dbType)) {
+ if (DBEngineType.CLICKHOUSE.getValue().equals(dbType)) {
+ String param = matcherParse.group(2);
+ innerValue = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toStartOfDay#[parseDateTimeBestEffort#[toString#[" + param + "#], 'UTC'#]#]" : "toStartOfDay#[parseDateTimeBestEffort#[toString#[" + param + "#]#]#]";
+ } else if (DBEngineType.DRUID.getValue().equals(dbType)) {
innerValue = "TIME_FLOOR#[ TIMESTAMP " + matcherParse.group(2) + ",'P1D'#]";
}
matcherParse.appendReplacement(sb, innerValue);
@@ -1652,10 +1776,7 @@ public class SQLFunctionUtil {
* @param div 参数分隔符
* @return
*/
- public static List<String> diviParam(String str, String div) {
- if (str == null) {
- return null;
- }
+ public static List<String> diviParam(@NotNull String str, String div) {
List<String> result = new ArrayList<>();
String[] split = str.split(div);
String resultTemp = "";
@@ -1731,13 +1852,16 @@ public class SQLFunctionUtil {
private static String translateCaseInsensitive(Function fun, String dbType) {
String data;
- switch (fun.getName()){
+ switch (fun.getName()) {
case TIME_FLOOR_WITH_FILL:
data = translateTimeFloorWithFill(fun, dbType);
break;
case MAX_DURATION:
data = translateDuration(fun, dbType);
break;
+ case RATE:
+ data = translateRate(fun, dbType);
+ break;
case NOT_EMPTY:
data = translateNotEmpty(fun, dbType);
break;
@@ -1767,6 +1891,7 @@ public class SQLFunctionUtil {
}
return data;
}
+
/**
* Desc: 转义notEmpty
*
@@ -1778,7 +1903,7 @@ public class SQLFunctionUtil {
*/
private static String translateNotEmpty(Function fun, String dbType) {
String data;
- if (DBTypeEnum.DRUID.getValue().equals(dbType)) {
+ if (DBEngineType.DRUID.getValue().equals(dbType)) {
String expr = fun.getParamsList().get(0);
data = expr + " IS NOT NUll ";
} else {
@@ -1804,7 +1929,7 @@ public class SQLFunctionUtil {
*/
private static String translateEmpty(Function fun, String dbType) {
String data;
- if (DBTypeEnum.DRUID.getValue().equals(dbType)) {
+ if (DBEngineType.DRUID.getValue().equals(dbType)) {
String expr = fun.getParamsList().get(0);
data = expr + " IS NUll ";
} else {
@@ -1839,17 +1964,17 @@ public class SQLFunctionUtil {
if (matcherPT.find()) {
String num = matcherPT.group(1);
String unit = matcherPT.group(2);
- if (DBTypeEnum.CLICKHOUSE.getValue().equals(dbType)) {
+ if (DBEngineType.CLICKHOUSE.getValue().equals(dbType)) {
String format = null;
if ("S".equalsIgnoreCase(unit)) {
- format = "toUnixTimestamp(toDateTime(toStartOfInterval(toDateTime(%s),INTERVAL %s SECOND)))";
+ format = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toUnixTimestamp(toDateTime(toStartOfInterval(toDateTime(%s, 'UTC'),INTERVAL %s SECOND)))" : "toUnixTimestamp(toDateTime(toStartOfInterval(toDateTime(%s),INTERVAL %s SECOND)))";
} else if ("M".equalsIgnoreCase(unit)) {
- format = "toUnixTimestamp(toDateTime(toStartOfInterval(toDateTime(%s),INTERVAL %s MINUTE)))";
+ format = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toUnixTimestamp(toDateTime(toStartOfInterval(toDateTime(%s, 'UTC'),INTERVAL %s MINUTE)))" : "toUnixTimestamp(toDateTime(toStartOfInterval(toDateTime(%s),INTERVAL %s MINUTE)))";
} else if ("H".equalsIgnoreCase(unit)) {
- format = "toUnixTimestamp(toDateTime(toStartOfInterval(toDateTime(%s),INTERVAL %s HOUR)))";
+ format = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toUnixTimestamp(toDateTime(toStartOfInterval(toDateTime(%s, 'UTC'),INTERVAL %s HOUR)))" : "toUnixTimestamp(toDateTime(toStartOfInterval(toDateTime(%s),INTERVAL %s HOUR)))";
}
return String.format(format, timestamp, num);
- } else if (DBTypeEnum.DRUID.getValue().equals(dbType)) {
+ } else if (DBEngineType.DRUID.getValue().equals(dbType)) {
String format = "TIMESTAMP_TO_MILLIS(time_floor(MILLIS_TO_TIMESTAMP(%s * 1000),%s))/1000";
return String.format(format, timestamp, period);
}
@@ -1859,19 +1984,19 @@ public class SQLFunctionUtil {
if (matcherP.find()) {
String num = matcherP.group(1);
String unit = matcherP.group(2);
- if (DBTypeEnum.CLICKHOUSE.getValue().equals(dbType)) {
+ if (DBEngineType.CLICKHOUSE.getValue().equals(dbType)) {
String format = null;
if ("D".equalsIgnoreCase(unit)) {
- format = "toUnixTimestamp(toDateTime(toStartOfInterval(toDateTime(%s),INTERVAL %s DAY)))";
+ format = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toUnixTimestamp(toDateTime(toStartOfInterval(toDateTime(%s, 'UTC'),INTERVAL %s DAY)))" : "toUnixTimestamp(toDateTime(toStartOfInterval(toDateTime(%s),INTERVAL %s DAY)))";
} else if ("W".equalsIgnoreCase(unit)) {
- format = "toUnixTimestamp(toDateTime(toStartOfInterval(toDateTime(%s),INTERVAL %s WEEK)))";
+ format = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toUnixTimestamp(toDateTime(toStartOfInterval(toDateTime(%s, 'UTC'),INTERVAL %s WEEK)))" : "toUnixTimestamp(toDateTime(toStartOfInterval(toDateTime(%s),INTERVAL %s WEEK)))";
} else if ("M".equalsIgnoreCase(unit)) {
- format = "toUnixTimestamp(toDateTime(toStartOfInterval(toDateTime(%s),INTERVAL %s MONTH)))";
+ format = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toUnixTimestamp(toDateTime(toStartOfInterval(toDateTime(%s, 'UTC'),INTERVAL %s MONTH)))" : "toUnixTimestamp(toDateTime(toStartOfInterval(toDateTime(%s),INTERVAL %s MONTH)))";
} else if ("Y".equalsIgnoreCase(unit)) {
- format = "toUnixTimestamp(toDateTime(toStartOfInterval(toDateTime(%s),INTERVAL %s YEAR)))";
+ format = CONVERT_DEFAULT_TIMEZONE_INTO_UTC ? "toUnixTimestamp(toDateTime(toStartOfInterval(toDateTime(%s, 'UTC'),INTERVAL %s YEAR)))" : "toUnixTimestamp(toDateTime(toStartOfInterval(toDateTime(%s),INTERVAL %s YEAR)))";
}
return String.format(format, timestamp, num);
- } else if (DBTypeEnum.DRUID.getValue().equals(dbType)) {
+ } else if (DBEngineType.DRUID.getValue().equals(dbType)) {
String format = "TIMESTAMP_TO_MILLIS(time_floor(MILLIS_TO_TIMESTAMP(%s * 1000),%s))/1000";
return String.format(format, timestamp, period);
}
@@ -1892,7 +2017,7 @@ public class SQLFunctionUtil {
int factor = 60;
String timestampExpr = fun.getParamsList().get(0);
int interval = Integer.parseInt(fun.getParamsList().get(1));
- if (DBTypeEnum.CLICKHOUSE.getValue().equals(dbType)) {
+ if (DBEngineType.CLICKHOUSE.getValue().equals(dbType)) {
return String.format("length(arrayReduce('max', alphaTokens(replaceAll(replaceAll(toString(arrayMap(x-> x<(%s+%s), arrayDifference(arraySort(groupUniqArray(%s))))),'1','a'),',','') ) )) * %s "
, interval, factor, timestampExpr, interval);
}
@@ -1900,6 +2025,37 @@ public class SQLFunctionUtil {
}
/**
+ * Desc: 转义RATE
+ *
+ * @param fun
+ * @param dbType
+ * @return {@link String}
+ * @created by wWei
+ * @date 2023/12/22 4:47 下午
+ */
+ private static String translateRate(Function fun, String dbType) {
+ List<String> paramsList = fun.paramsList;
+ if (DBEngineType.DRUID.getValue().equals(dbType) || DBEngineType.CLICKHOUSE.getValue().equals(dbType)) {
+ if (paramsList.size() == 2) {
+ String durationParam = fun.getParamsList().get(1);
+ if (!NumberUtil.isNumber(durationParam) && durationParam.startsWith("'") && durationParam.endsWith("'")) {
+ durationParam = String.valueOf(Period.parse(StrUtil.strip(durationParam, "'")).toStandardSeconds().getSeconds());
+ }
+ String formatString = "ROUND(SUM(%s) * 1.0 / %s, 2)";
+ return String.format(formatString, paramsList.get(0), durationParam);
+ } else if (paramsList.size() == 3) {
+ String durationParam = fun.getParamsList().get(1);
+ if (!NumberUtil.isNumber(durationParam)) {
+ durationParam = String.valueOf(Period.parse(StrUtil.strip(durationParam, "'")).toStandardSeconds().getSeconds());
+ }
+ String formatString = "ROUND(SUM(%s) / (%s * 1.0/%s), 2)";
+ return String.format(formatString, paramsList.get(0), durationParam, paramsList.get(2));
+ }
+ }
+ return fun.toString();
+ }
+
+ /**
* Desc: 转义自定义函数 MEDIAN_HDR/QUANTILE_HDR/PERCENTILES_HDR/APPROX_COUNT_DISTINCT_HLLD
*
* @param fun
@@ -1909,7 +2065,7 @@ public class SQLFunctionUtil {
private static String translateHdrHistogram(Function fun, String dbType) {
String hdrSketchExpr = fun.getParamsList().get(0);
- if (DBTypeEnum.DRUID.getValue().equals(dbType)) {
+ if (DBEngineType.DRUID.getValue().equals(dbType)) {
switch (fun.getName()) {
case MEDIAN_HDR:
return String.format("APPROX_QUANTILE_HDR(%s,0.50)", hdrSketchExpr);
@@ -1917,7 +2073,7 @@ public class SQLFunctionUtil {
double probability = Double.parseDouble(fun.getParamsList().get(1));
return String.format("APPROX_QUANTILE_HDR(%s,%s)", hdrSketchExpr, probability);
case PERCENTILES_HDR:
- return String.format("HDR_GET_PERCENTILES(HDR_HISTOGRAM(%s), 2)", hdrSketchExpr);
+ return String.format("HDR_GET_PERCENTILES(HDR_HISTOGRAM(%s), 5)", hdrSketchExpr);
case APPROX_COUNT_DISTINCT_HLLD:
return String.format("APPROX_COUNT_DISTINCT_HLLD#[%s#]", hdrSketchExpr);
}
@@ -1927,13 +2083,14 @@ public class SQLFunctionUtil {
/**
* Desc 转义 COUNT_DISTINCT
+ *
* @param fun
* @param dbType
* @return
*/
private static String translateCountDistinct(Function fun, String dbType) {
String field = fun.getParamsList().get(0);
- if (DBTypeEnum.DRUID.getValue().equals(dbType) || DBTypeEnum.CLICKHOUSE.getValue().equals(dbType)){
+ if (DBEngineType.DRUID.getValue().equals(dbType) || DBEngineType.CLICKHOUSE.getValue().equals(dbType)) {
return String.format("count(distinct %s)", field);
}
return fun.toString();
@@ -1948,9 +2105,9 @@ public class SQLFunctionUtil {
*/
private static String translateConcat(Function function, String dbType) {
StringBuffer sb = new StringBuffer();
- if (DBTypeEnum.CLICKHOUSE.getValue().equals(dbType)) {
+ if (DBEngineType.CLICKHOUSE.getValue().equals(dbType)) {
for (String param : function.getParamsList()) {
- sb.append("toString("+param+"), ");
+ sb.append("toString(" + param + "), ");
}
return String.format("concat(%s'')", sb);
}
diff --git a/src/main/java/com/mesalab/common/utils/sqlparser/SQLHelper.java b/src/main/java/com/mesalab/common/utils/sqlparser/SQLHelper.java
index dae6f3ed..7e8fe1b8 100644
--- a/src/main/java/com/mesalab/common/utils/sqlparser/SQLHelper.java
+++ b/src/main/java/com/mesalab/common/utils/sqlparser/SQLHelper.java
@@ -3,12 +3,11 @@ package com.mesalab.common.utils.sqlparser;
import com.alibaba.druid.util.JdbcConstants;
import com.geedgenetworks.utils.StringUtil;
import com.google.common.collect.Lists;
-import com.mesalab.common.enums.DBTypeEnum;
-import com.mesalab.common.enums.ResultCodeEnum;
-import com.mesalab.common.enums.ResultStatusEnum;
+import com.mesalab.common.enums.DBEngineType;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
import com.mesalab.qgw.constant.QGWMessageConst;
import com.mesalab.qgw.exception.QGWBusinessException;
-import com.mesalab.qgw.exception.QGWErrorCode;
+import com.mesalab.common.exception.CommonErrorCode;
import lombok.extern.slf4j.Slf4j;
import net.sf.jsqlparser.JSQLParserException;
import net.sf.jsqlparser.expression.Expression;
@@ -31,6 +30,8 @@ public class SQLHelper {
public static final Pattern pOrder = Pattern.compile("order\\s*by[\\w|\\W|\\s|\\S]*", Pattern.CASE_INSENSITIVE);
public static final String INVALID_LIMIT_DESC = "NO LIMIT";
+ private static final Pattern PATTERN_QUOTES_AND_BACKTICKS = Pattern.compile("^([\"`])(.*)([\"`])$");
+
/**
* 去除qlString的select子句。
*
@@ -58,6 +59,22 @@ public class SQLHelper {
return sb.toString();
}
+ /**
+ * Desc: 去除字符串首位引号或反引号
+ *
+ * @param str
+ * @return {@link String}
+ * @created by wWei
+ * @date 2024/3/11 17:31
+ */
+ public static String removeQuotesAndBackticks(String str) {
+ Matcher matcher = PATTERN_QUOTES_AND_BACKTICKS.matcher(str);
+ if (matcher.matches()) {
+ return matcher.group(2);
+ }
+ return str;
+ }
+
/**
* 为当前SQL最外层增加Limit限制
@@ -89,15 +106,15 @@ public class SQLHelper {
/**
* 获取当前数据库预定义的方言
*
- * @param dbType
+ * @param dbEngine
* @return
*/
- public static String getDialectDBType(String dbType) {
- if (DBTypeEnum.CLICKHOUSE.getValue().equals(dbType)) {
+ public static String getDialectType(String dbEngine) {
+ if (DBEngineType.CLICKHOUSE.getValue().equals(dbEngine)) {
return JdbcConstants.MYSQL;
- } else if (DBTypeEnum.DRUID.getValue().equals(dbType)) {
+ } else if (DBEngineType.DRUID.getValue().equals(dbEngine)) {
return JdbcConstants.HIVE;
- } else if (DBTypeEnum.HBASE.getValue().equals(dbType)) {
+ } else if (DBEngineType.HBASE.getValue().equals(dbEngine)) {
return JdbcConstants.MYSQL;
} else {
return null;
@@ -106,9 +123,9 @@ public class SQLHelper {
}
/**
- * Desc: Desc: 获取当前sql的表名
+ * Desc: Get Table Name from SQL
*
- * @param sql
+ * @param sql SQL
* @return {@link List<String>}
* @created by wWei
* @date 2023/8/30 14:38
@@ -125,13 +142,65 @@ public class SQLHelper {
});
} catch (JSQLParserException e) {
log.error("SQL Syntax Error: Get Table Name Error, SQL is:{}, Error is:{}", sql, e);
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
- String.format(QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getMessage(), e.getMessage()));
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getMessage(), e.getMessage()));
}
return result;
}
/**
+ * Desc: Get Table Name from Statement
+ *
+ * @param statement SQL statement
+ * @return {@link List<String>}
+ * @created by wWei
+ * @date 2023/8/30 14:38
+ */
+ public static List<String> getTableName(Statement statement) {
+ List<String> result = Lists.newArrayList();
+ TablesNamesFinder tablesNamesFinder = new TablesNamesFinder();
+ List<String> tableList = tablesNamesFinder.getTableList(statement);
+ tableList.forEach(o -> {
+ int i = o.lastIndexOf(".");
+ result.add(i < 0 ? o : o.substring(i + 1));
+ });
+ return result;
+ }
+
+ /**
+ * Desc: 获取当前sql执行最大结果数
+ *
+ * @param sql
+ * @return {@link Long}
+ * @created by wWei
+ * @date 2024/3/4 18:12
+ */
+ public static Long getRowCount(String sql) {
+ try {
+ Statement statement = CCJSqlParserUtil.parse(sql);
+ if (statement instanceof Select) {
+ Select select = (Select) statement;
+ SelectBody selectBody = select.getSelectBody();
+ if (selectBody instanceof PlainSelect) {
+ PlainSelect plainSelect = (PlainSelect) selectBody;
+ Limit limit = plainSelect.getLimit();
+ if (limit != null) {
+ Expression rowCount = limit.getRowCount();
+ if (rowCount instanceof LongValue) {
+ return ((LongValue) rowCount).getValue();
+ }
+ }
+ }
+ }
+ } catch (JSQLParserException e) {
+ log.error("SQL Syntax Error: Get Row Count Error, SQL is:{}, Error is:{}", sql, e);
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getMessage(), e.getMessage()));
+ }
+ return null;
+ }
+
+ /**
* Desc: 从SQL中获取指定函数参数
*
* @param sql
@@ -164,13 +233,13 @@ public class SQLHelper {
// selectStatement = ((ExplainStatement) statement).getStatement();
} else {
log.error("Not support DML Parser");
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
- String.format(QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getMessage(),QGWMessageConst.NOT_SUPPORT_DML_PARSER));
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getMessage(), QGWMessageConst.NOT_SUPPORT_DML_PARSER));
}
} catch (JSQLParserException e) {
log.error("SQL Syntax Error: Get Table Name Error, SQL is:{}, Error is:{}", sql, e);
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
- String.format(QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getMessage(),e.getMessage()));
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getMessage(), e.getMessage()));
}
return Lists.newArrayList();
}
@@ -197,6 +266,7 @@ public class SQLHelper {
/**
* Desc: 获取子查询SQL
+ *
* @param sql
* @return {@link String}
* @created by wWei
@@ -215,9 +285,9 @@ public class SQLHelper {
plainSelect = inner;
}
return plainSelect;
- }catch (JSQLParserException | RuntimeException e) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),"statistics job init error: ".concat(e.getMessage())));
+ } catch (JSQLParserException | RuntimeException e) {
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), "statistics job init error: ".concat(e.getMessage())));
}
}
diff --git a/src/main/java/com/mesalab/common/utils/sqlparser/SQLQueryTypeHelper.java b/src/main/java/com/mesalab/common/utils/sqlparser/SQLQueryTypeHelper.java
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/src/main/java/com/mesalab/common/utils/sqlparser/SQLQueryTypeHelper.java
diff --git a/src/main/java/com/mesalab/common/utils/sqlparser/SQLSyntaxParserUtil.java b/src/main/java/com/mesalab/common/utils/sqlparser/SQLSyntaxParserUtil.java
index b8bfc705..7c0a9642 100644
--- a/src/main/java/com/mesalab/common/utils/sqlparser/SQLSyntaxParserUtil.java
+++ b/src/main/java/com/mesalab/common/utils/sqlparser/SQLSyntaxParserUtil.java
@@ -5,11 +5,11 @@ import cn.hutool.log.LogFactory;
import com.geedgenetworks.utils.StringUtil;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
-import com.mesalab.common.enums.ResultStatusEnum;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
import com.mesalab.common.utils.SpringContextUtil;
import com.mesalab.qgw.exception.QGWBusinessException;
-import com.mesalab.qgw.exception.QGWErrorCode;
-import com.mesalab.qgw.service.QueryService;
+import com.mesalab.common.exception.CommonErrorCode;
+import com.mesalab.qgw.service.SQLSyncQueryService;
import lombok.Data;
import lombok.NoArgsConstructor;
import net.sf.jsqlparser.JSQLParserException;
@@ -39,17 +39,14 @@ public class SQLSyntaxParserUtil {
private static final Log log = LogFactory.get();
- private static QueryService queryService = (QueryService) SpringContextUtil.getBean("queryService");
-
public static List syntaxParse(String sql) {
List resultList = Lists.newArrayList();
try {
parseSQL(sql, resultList);
- //addTypeForSelectItems(sql, resultList);
} catch (RuntimeException | JSQLParserException e) {
log.error("syntax_parse: sql-" + sql + ", error: ", e);
- throw new QGWBusinessException(ResultStatusEnum.SERVER_ERROR.getCode(), QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
- String.format(QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getMessage(),e.getMessage()));
+ throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getMessage(),e.getMessage()));
}
return resultList;
}
diff --git a/src/main/java/com/mesalab/common/utils/sqlparser/SQLVisitorUtil.java b/src/main/java/com/mesalab/common/utils/sqlparser/SQLVisitorUtil.java
index 26165c6d..acf40a6c 100644
--- a/src/main/java/com/mesalab/common/utils/sqlparser/SQLVisitorUtil.java
+++ b/src/main/java/com/mesalab/common/utils/sqlparser/SQLVisitorUtil.java
@@ -2,7 +2,7 @@ package com.mesalab.common.utils.sqlparser;
import cn.hutool.core.util.StrUtil;
import com.mesalab.common.utils.SpringContextUtil;
-import com.mesalab.qgw.service.MetadataService;
+import com.mesalab.qgw.service.DatabaseService;
import net.sf.jsqlparser.expression.*;
import net.sf.jsqlparser.schema.Column;
import net.sf.jsqlparser.schema.Table;
@@ -17,7 +17,7 @@ import net.sf.jsqlparser.util.deparser.SelectDeParser;
*/
public class SQLVisitorUtil {
- private static MetadataService metadataService = (MetadataService) SpringContextUtil.getBean("metadataService");
+ private static DatabaseService databaseService = (DatabaseService) SpringContextUtil.getBean("databaseService");
/**
* Desc: 将SQL列名、别名、表名添加双引号转义
@@ -47,7 +47,7 @@ public class SQLVisitorUtil {
if (!StrUtil.isBlankIfStr(name)) {
table.setName(addDoubleQuote(name));
}
- table.setSchemaName(addDoubleQuote(StrUtil.isBlank(table.getSchemaName()) ? metadataService.getDBNameByTableName(name) : table.getSchemaName()));
+ table.setSchemaName(addDoubleQuote(StrUtil.isBlank(table.getSchemaName()) ? databaseService.getDBNameByTableName(name) : table.getSchemaName()));
Alias alias = table.getAlias();
if (!StrUtil.isBlankIfStr(alias)) {
alias.setName(addDoubleQuote(alias.getName()));
diff --git a/src/main/java/com/mesalab/common/utils/sqlparser/DruidSampleSQLHelper.java b/src/main/java/com/mesalab/common/utils/sqlparser/SampleSQLHelper.java
index 115932e5..1e5a3ff5 100644
--- a/src/main/java/com/mesalab/common/utils/sqlparser/DruidSampleSQLHelper.java
+++ b/src/main/java/com/mesalab/common/utils/sqlparser/SampleSQLHelper.java
@@ -5,6 +5,8 @@ import net.sf.jsqlparser.JSQLParserException;
import net.sf.jsqlparser.expression.Expression;
import net.sf.jsqlparser.expression.ExpressionVisitorAdapter;
import net.sf.jsqlparser.expression.LongValue;
+import net.sf.jsqlparser.expression.Parenthesis;
+import net.sf.jsqlparser.expression.operators.conditional.AndExpression;
import net.sf.jsqlparser.expression.operators.conditional.OrExpression;
import net.sf.jsqlparser.expression.operators.relational.EqualsTo;
import net.sf.jsqlparser.parser.CCJSqlParserUtil;
@@ -16,30 +18,33 @@ import java.util.List;
/**
* TODO
*
- * @Classname DruidSampleSQLHelper
+ * @Classname SampleSQLHelperOfWhereExpr
* @Date 2023/3/22 10:42
* @Author wWei
*/
-public class DruidSampleSQLHelper {
+public class SampleSQLHelper {
private final String originalSQL;
private final String filterExpr;
- private final boolean isInvalidCondExpr;
+ private final Integer maxSize;
+ private final Boolean isInvalidCondExpr;
/**
* Desc: TODO
*
* @param originalSQL 原始SQL
- * @param filterExpr 置换后的 WHERE 过滤条件
+ * @param filterExpr 新增的 WHERE 过滤条件
* @param isInvalidCondExpr 是否失效 HAVING 过滤条件
+ * @param defaultSize 结果默认条数
* @return {@link null}
* @created by wWei
* @date 2023/4/10 11:03
*/
- public DruidSampleSQLHelper(String originalSQL, String filterExpr, boolean isInvalidCondExpr) {
+ public SampleSQLHelper(String originalSQL, String filterExpr, Boolean isInvalidCondExpr, Integer defaultSize) {
this.originalSQL = originalSQL;
this.filterExpr = filterExpr;
this.isInvalidCondExpr = isInvalidCondExpr;
+ this.maxSize = defaultSize;
}
/**
@@ -81,6 +86,7 @@ public class DruidSampleSQLHelper {
}
}
+
private class SelectAdapter extends SelectVisitorAdapter {
@SneakyThrows
@Override
@@ -97,24 +103,43 @@ public class DruidSampleSQLHelper {
fromItem.accept(new FromItemAdapter());
}
- if ((fromItem instanceof Table) && plainSelect.getWhere() != null) {
- plainSelect.setWhere(CCJSqlParserUtil.parseCondExpression(filterExpr));
+ if ((fromItem instanceof Table) && filterExpr != null) {
+ Expression expression = CCJSqlParserUtil.parseCondExpression(filterExpr, false);
+ if (plainSelect.getWhere() != null) {
+ Expression where = plainSelect.getWhere();
+ if (isInvalidCondExpr) {
+ OrExpression orExpression = buildOrExpression(where);
+ plainSelect.setWhere(orExpression);
+ where = plainSelect.getWhere();
+ }
+ AndExpression andExpression = new AndExpression();
+ Parenthesis parenthesisLeft = new Parenthesis();
+ parenthesisLeft.setExpression(where instanceof Parenthesis ? ((Parenthesis) where).getExpression() : where);
+ Parenthesis parenthesisRight = new Parenthesis();
+ parenthesisRight.setExpression(expression);
+ andExpression.setLeftExpression(parenthesisLeft);
+ andExpression.setRightExpression(parenthesisRight);
+ plainSelect.setWhere(andExpression);
+ } else {
+ plainSelect.setWhere(expression);
+ }
}
Expression having = plainSelect.getHaving();
if (having != null) {
if (isInvalidCondExpr) {
- OrExpression orExpression = new OrExpression();
- orExpression.setRightExpression(having);
- EqualsTo equalsTo = new EqualsTo();
- LongValue longValue = new LongValue(1);
- equalsTo.setLeftExpression(longValue);
- equalsTo.setRightExpression(longValue);
- orExpression.setLeftExpression(equalsTo);
+ OrExpression orExpression = buildOrExpression(having);
plainSelect.setHaving(orExpression);
}
having.accept(new ExpressionAdapter());
}
+
+ Limit limit = plainSelect.getLimit();
+ if (limit == null && maxSize != null) {
+ Limit limitObj = new Limit();
+ limitObj.setRowCount(new LongValue(maxSize));
+ plainSelect.setLimit(limitObj);
+ }
}
@Override
@@ -125,5 +150,16 @@ public class DruidSampleSQLHelper {
}
}
}
+
+ private OrExpression buildOrExpression(Expression where) {
+ OrExpression orExpression = new OrExpression();
+ orExpression.setRightExpression(where);
+ EqualsTo equalsTo = new EqualsTo();
+ LongValue longValue = new LongValue(1);
+ equalsTo.setLeftExpression(longValue);
+ equalsTo.setRightExpression(longValue);
+ orExpression.setLeftExpression(equalsTo);
+ return orExpression;
+ }
}
}
diff --git a/src/main/java/com/mesalab/common/utils/sqlparser/ClickhouseSampleSQLHelper.java b/src/main/java/com/mesalab/common/utils/sqlparser/SampleSQLHelperOfDataset.java
index 34887778..14b6a719 100644
--- a/src/main/java/com/mesalab/common/utils/sqlparser/ClickhouseSampleSQLHelper.java
+++ b/src/main/java/com/mesalab/common/utils/sqlparser/SampleSQLHelperOfDataset.java
@@ -17,11 +17,12 @@ import java.util.List;
/**
* TODO
*
- * @Classname ClickhouseSampleSQLHelper
+ * @Classname SampleSQLHelperOfDataset
* @Date 2023/3/22 10:42
* @Author wWei
*/
-public class ClickhouseSampleSQLHelper {
+@Deprecated
+public class SampleSQLHelperOfDataset {
private final String templateSQL;
private final String originalSQL;
@@ -39,7 +40,7 @@ public class ClickhouseSampleSQLHelper {
* @created by wWei
* @date 2023/3/23 15:17
*/
- public ClickhouseSampleSQLHelper(String originalSQL, String dataSetTemplate, boolean isInvalidCondExpr) {
+ public SampleSQLHelperOfDataset(String originalSQL, String dataSetTemplate, boolean isInvalidCondExpr) {
this.templateSQL = dataSetTemplate;
this.originalSQL = originalSQL;
this.isInvalidCondExpr = isInvalidCondExpr;
@@ -63,6 +64,9 @@ public class ClickhouseSampleSQLHelper {
@Override
public void visit(Table table) {
+ if (StrUtil.isEmptyIfStr(templateSQL)) {
+ return;
+ }
if (table.getAlias() == null) {
table.setAlias(new Alias(table.getName(), true));
}
diff --git a/src/main/java/com/mesalab/common/utils/sqlparser/SelectItemHelper.java b/src/main/java/com/mesalab/common/utils/sqlparser/SelectItemHelper.java
new file mode 100644
index 00000000..20dcfce5
--- /dev/null
+++ b/src/main/java/com/mesalab/common/utils/sqlparser/SelectItemHelper.java
@@ -0,0 +1,143 @@
+package com.mesalab.common.utils.sqlparser;
+
+import com.google.common.collect.Maps;
+import com.mesalab.qgw.constant.DataTypeConst;
+import lombok.Data;
+import lombok.SneakyThrows;
+import net.sf.jsqlparser.JSQLParserException;
+import net.sf.jsqlparser.expression.Expression;
+import net.sf.jsqlparser.expression.Function;
+import net.sf.jsqlparser.parser.CCJSqlParserUtil;
+import net.sf.jsqlparser.schema.Column;
+import net.sf.jsqlparser.statement.select.*;
+
+import java.util.*;
+
+/**
+ * TODO
+ *
+ * @Classname SelectItemHelper
+ * @Date 2024/2/27 09:48
+ * @Author wWei
+ */
+public class SelectItemHelper {
+
+ private SelectItemHelper() {
+ }
+
+
+ public static Map<String, AliasObject> getSelectItem(String sql) throws JSQLParserException {
+ Select select = (Select) CCJSqlParserUtil.parse(sql);
+ SelectAdapter selectAdapter = new SelectAdapter();
+ select.getSelectBody().accept(selectAdapter);
+ return selectAdapter.getAliasObjects();
+ }
+
+ private static class SelectAdapter extends SelectVisitorAdapter {
+
+ private final Map<String, AliasObject> aliasObjects = Maps.newHashMap();
+
+ public Map<String, AliasObject> getAliasObjects() {
+ return aliasObjects;
+ }
+
+ @SneakyThrows
+ @Override
+ public void visit(PlainSelect plainSelect) {
+ List<SelectItem> selectItems = plainSelect.getSelectItems();
+ if (selectItems == null) {
+ return;
+ }
+ for (int i = 0; i < selectItems.size(); i++) {
+ SelectItem selectItem = selectItems.get(i);
+ SelectItemAdapter selectItemAdapter = new SelectItemAdapter();
+ selectItem.accept(selectItemAdapter);
+ AliasObject aliasExpr = selectItemAdapter.getAliasExpr();
+ if (aliasExpr != null) {
+ aliasExpr.setIndex(i);
+ this.aliasObjects.put(aliasExpr.getName(), aliasExpr);
+ }
+ }
+ }
+
+ @Override
+ public void visit(SetOperationList setOpList) {
+ SelectBody selectBody = setOpList.getSelects().get(0);
+ if (selectBody instanceof PlainSelect) {
+ selectBody.accept(this);
+ }
+ }
+ }
+
+ private static class SelectItemAdapter extends SelectItemVisitorAdapter {
+
+ private AliasObject aliasExpr = new AliasObject();
+
+ public AliasObject getAliasExpr() {
+ return aliasExpr;
+ }
+
+ @Override
+ public void visit(SelectExpressionItem selectExpressionItem) {
+ Expression expression = selectExpressionItem.getExpression();
+ String name = selectExpressionItem.getAlias() != null ? selectExpressionItem.getAlias().getName() : selectExpressionItem.toString();
+ name = SQLHelper.removeQuotesAndBackticks(name);
+ if (expression instanceof Column) {
+ Column column = (Column) expression;
+ AliasColumn aliasColumn = new AliasColumn();
+ aliasColumn.setName(name);
+ aliasColumn.setFieldName(column.getColumnName());
+ this.aliasExpr = aliasColumn;
+ return;
+ } else if (expression instanceof Function) {
+ Function function = (Function) expression;
+ String functionName = function.getName().toUpperCase();
+ Map<String, String> functionDateTypeMap = Maps.newHashMap();
+ functionDateTypeMap.put("FROM_UNIXTIME", DataTypeConst.TIMESTAMP_FORMAT);
+ functionDateTypeMap.put("DATE_FORMAT", DataTypeConst.TIMESTAMP_FORMAT);
+ functionDateTypeMap.put("CONVERT_TZ", DataTypeConst.TIMESTAMP_FORMAT);
+ functionDateTypeMap.put("TIME_FORMAT", DataTypeConst.TIMESTAMP_FORMAT);
+ functionDateTypeMap.put("FROM_UNIXTIME_MILLIS", DataTypeConst.TIMESTAMP_MS_FORMAT);
+ functionDateTypeMap.put("UNIX_TIMESTAMP", DataTypeConst.UNIX_TIMESTAMP);
+ functionDateTypeMap.put("TIME_FLOOR_WITH_FILL", DataTypeConst.UNIX_TIMESTAMP);
+ functionDateTypeMap.put("UNIX_TIMESTAMP_MILLIS", DataTypeConst.UNIX_TIMESTAMP_MS);
+ functionDateTypeMap.put("PERCENTILES_HDR", DataTypeConst.HDR_HISTOGRAM);
+ functionDateTypeMap.put("IP_TO_GEO", DataTypeConst.IP);
+ functionDateTypeMap.put("IP_TO_CITY", DataTypeConst.IP);
+ functionDateTypeMap.put("IP_TO_COUNTRY", DataTypeConst.IP);
+ functionDateTypeMap.put("IP_TO_ISP", DataTypeConst.IP);
+ functionDateTypeMap.put("IP_TO_ASN", DataTypeConst.IP);
+ functionDateTypeMap.put("IP_TO_ASN_DETAIL", DataTypeConst.IP);
+ functionDateTypeMap.put("IP_TO_ASN_ORG", DataTypeConst.IP);
+
+ if (functionDateTypeMap.containsKey(functionName)) {
+ AliasFunExpr aliasFunExpr = new AliasFunExpr();
+ aliasFunExpr.setName(name);
+ aliasFunExpr.setDateType(functionDateTypeMap.get(functionName));
+ this.aliasExpr = aliasFunExpr;
+ return;
+ }
+ }
+ AliasObject aliasObject = new AliasObject();
+ aliasObject.setName(name);
+ this.aliasExpr = aliasObject;
+ }
+
+ }
+
+ @Data
+ public static class AliasObject {
+ private String name;
+ private int index;
+ }
+
+ @Data
+ public static class AliasColumn extends AliasObject {
+ private String fieldName;
+ }
+
+ @Data
+ public static class AliasFunExpr extends AliasObject {
+ private String dateType;
+ }
+}
diff --git a/src/main/java/com/mesalab/common/utils/sqlparser/TopSQLVisitor.java b/src/main/java/com/mesalab/common/utils/sqlparser/TopSQLVisitor.java
index faade965..d82ccc18 100644
--- a/src/main/java/com/mesalab/common/utils/sqlparser/TopSQLVisitor.java
+++ b/src/main/java/com/mesalab/common/utils/sqlparser/TopSQLVisitor.java
@@ -31,7 +31,7 @@ import java.util.stream.Collectors;
public class TopSQLVisitor implements OrderByVisitor, GroupByVisitor, SelectItemVisitor {
private static final Log log = LogFactory.get();
private static final long TOP_INNER_LIMIT_RATIO = 10;
- private static final long TOP_INNER_LIMIT_LOWER = 100000;
+ private static final long TOP_INNER_LIMIT_LOWER = 1000;
private static final Pattern pLimitSetting = Pattern.compile("\\blimit(.*?)(\\))", Pattern.CASE_INSENSITIVE);
private List<String> groupByList;
private List<String> orderByList;
@@ -148,7 +148,9 @@ public class TopSQLVisitor implements OrderByVisitor, GroupByVisitor, SelectItem
* @return
*/
private boolean parseAndValidation(PlainSelect plainSelect) {
-
+ if (plainSelect == null) {
+ return false;
+ }
for (SelectItem item : plainSelect.getSelectItems()) {
item.accept(this);
}
diff --git a/src/main/java/com/mesalab/knowledge/controller/KnowledgeController.java b/src/main/java/com/mesalab/knowledge/controller/KnowledgeController.java
index 44d9d2e9..55d4c853 100644
--- a/src/main/java/com/mesalab/knowledge/controller/KnowledgeController.java
+++ b/src/main/java/com/mesalab/knowledge/controller/KnowledgeController.java
@@ -3,14 +3,13 @@ package com.mesalab.knowledge.controller;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.mesalab.common.entity.BaseResult;
-import com.mesalab.common.enums.ResultCodeEnum;
-import com.mesalab.common.enums.ResultStatusEnum;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
import com.mesalab.common.exception.BusinessException;
import com.mesalab.knowledge.enums.QueryTypeEnum;
-import com.mesalab.knowledge.exception.KnowLedgeErrorCode;
import com.mesalab.knowledge.exception.KnowLedgeErrorMessage;
import com.mesalab.knowledge.service.KnowledgeService;
import com.geedgenetworks.utils.StringUtil;
+import com.mesalab.common.exception.CommonErrorCode;
import org.apache.commons.lang3.EnumUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
@@ -44,14 +43,14 @@ public class KnowledgeController {
log.debug("服务推荐接口, 参数: queryString is {},params is {}", param, object);
if (!EnumUtils.isValidEnum(QueryTypeEnum.class, StringUtil.upperCase(param))
|| StringUtil.isNotEmpty(request.getParameter(param))) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(),
- ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), KnowLedgeErrorMessage.NOT_SUPPORT));
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(),
+ CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), KnowLedgeErrorMessage.NOT_SUPPORT));
}
return knowledgeService.query(object);
}
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(),
- ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), KnowLedgeErrorMessage.NOT_SUPPORT));
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(),
+ CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), KnowLedgeErrorMessage.NOT_SUPPORT));
}
}
diff --git a/src/main/java/com/mesalab/knowledge/service/KnowledgeService.java b/src/main/java/com/mesalab/knowledge/service/KnowledgeService.java
index 69442081..84b70850 100644
--- a/src/main/java/com/mesalab/knowledge/service/KnowledgeService.java
+++ b/src/main/java/com/mesalab/knowledge/service/KnowledgeService.java
@@ -5,19 +5,12 @@ import cn.hutool.core.date.DateUtil;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.fastjson2.JSON;
-import com.fasterxml.jackson.databind.JsonNode;
-import com.github.fge.jackson.JsonLoader;
-import com.github.fge.jsonschema.core.exceptions.ProcessingException;
-import com.github.fge.jsonschema.core.report.ProcessingMessage;
-import com.github.fge.jsonschema.core.report.ProcessingReport;
-import com.github.fge.jsonschema.main.JsonSchema;
-import com.github.fge.jsonschema.main.JsonSchemaFactory;
import com.google.common.base.Splitter;
import com.mesalab.common.entity.BaseResult;
import com.mesalab.common.entity.BaseResultGenerator;
-import com.mesalab.common.enums.ResultCodeEnum;
-import com.mesalab.common.enums.ResultStatusEnum;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
import com.mesalab.common.exception.BusinessException;
+import com.mesalab.common.utils.JsonSchemaValidator;
import com.mesalab.knowledge.entity.*;
import com.mesalab.knowledge.enums.DataSourceEnum;
import com.mesalab.knowledge.enums.MatchEnum;
@@ -27,25 +20,18 @@ import com.mesalab.knowledge.strategy.QueryProvider;
import com.mesalab.qgw.constant.QGWMessageConst;
import com.mesalab.qgw.exception.QGWBusinessException;
import com.geedgenetworks.utils.StringUtil;
+import com.mesalab.common.exception.CommonErrorCode;
import org.apache.commons.lang3.EnumUtils;
import org.apache.commons.lang3.Validate;
-import org.apache.commons.lang3.time.DateUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.util.ObjectUtils;
import java.io.IOException;
-import java.text.ParseException;
-import java.time.LocalDateTime;
-import java.time.format.DateTimeFormatter;
-import java.time.format.DateTimeFormatterBuilder;
-import java.time.temporal.ChronoField;
-import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
-import java.util.regex.Pattern;
import static java.util.Objects.*;
@@ -88,17 +74,17 @@ public class KnowledgeService {
DSLObject.DSLQuery dslQuery = dSLObject.getQuery();
if (ObjectUtils.isEmpty(dslQuery)){
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), KnowLedgeErrorMessage.QUERY_IS_INVALID));
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), KnowLedgeErrorMessage.QUERY_IS_INVALID));
}
Parameters parameters = dslQuery.getParameters();
if (ObjectUtils.isEmpty(dslQuery.getDataSource())){
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), KnowLedgeErrorMessage.DATASOURCE_IS_INVALID));
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), KnowLedgeErrorMessage.DATASOURCE_IS_INVALID));
}
if (ObjectUtils.isEmpty(EnumUtils.getEnum(DataSourceEnum.class,StringUtil.upperCase(dslQuery.getDataSource())))){
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), KnowLedgeErrorMessage.DATASOURCE_IS_ERROR));
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), KnowLedgeErrorMessage.DATASOURCE_IS_ERROR));
}
if (isNull(parameters)) {
@@ -113,15 +99,15 @@ public class KnowledgeService {
if (!ObjectUtils.isEmpty(matchs)) {
matchs.forEach(m -> {
if (!EnumUtils.isValidEnum(MatchEnum.class, StringUtil.upperCase(m.getType()))){
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), KnowLedgeErrorMessage.MATCH_TYPE_ERROR));
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), KnowLedgeErrorMessage.MATCH_TYPE_ERROR));
}
//正则匹配方式 禁止 $xxx* 形式的value
if (MatchEnum.REGEX.getType().equals(m.getType())) {
m.getFieldValues().forEach(mv -> {
if((String.valueOf(mv).startsWith("$") && String.valueOf(mv).endsWith("*"))){
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), KnowLedgeErrorMessage.MATCHING_SIGN));
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), KnowLedgeErrorMessage.MATCHING_SIGN));
}
});
}
@@ -130,16 +116,16 @@ public class KnowledgeService {
if (!ObjectUtils.isEmpty(ranges)) {
ranges.forEach(r -> {
if (!EnumUtils.isValidEnum(RangeEnum.class, StringUtil.upperCase(r.getType()))){
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), KnowLedgeErrorMessage.RANGE_TYPE_ERROR));
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), KnowLedgeErrorMessage.RANGE_TYPE_ERROR));
}
});
}
if (!ObjectUtils.isEmpty(sorts)) {
sorts.forEach(s -> {
if (!("asc".equalsIgnoreCase(s.getType()) || "desc".equalsIgnoreCase(s.getType()))){
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), KnowLedgeErrorMessage.SORT_TYPE_ERROR));
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), KnowLedgeErrorMessage.SORT_TYPE_ERROR));
}
});
}
@@ -147,64 +133,37 @@ public class KnowledgeService {
try {
List<String> times = Splitter.on("/").splitToList(intervals.get(0));
if (ObjectUtils.isEmpty(times) || times.size() != 2) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), KnowLedgeErrorMessage.INTERVALS_IS_INVALID));
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), KnowLedgeErrorMessage.INTERVALS_IS_INVALID));
}
for (String dateTimeStr : times) {
DateUtil.parse(dateTimeStr);
}
} catch (DateException e) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.TIME_FORMAT_ERROR));
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.TIME_FORMAT_ERROR));
}
}
if (nonNull(limit)) {
List<String> limits = Splitter.on(",").trimResults().omitEmptyStrings().splitToList(limit);
if (ObjectUtils.isEmpty(limits) || (limit.contains(",") && limits.size() != 2)){
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), KnowLedgeErrorMessage.LIMIT_IS_ILLEGAL));
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), KnowLedgeErrorMessage.LIMIT_IS_ILLEGAL));
}
for (String l : limits) {
if (!(l.length() <= maxrows.length() && Integer.valueOf(maxrows) >= Integer.valueOf(l) && Integer.valueOf(l) > 0)){
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), String.format(KnowLedgeErrorMessage.LIMIT_IS_ILLEGAL,maxrows)));
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), String.format(KnowLedgeErrorMessage.LIMIT_IS_ILLEGAL,maxrows)));
}
}
}
}
private void validateJson(String jsonString) {
- JsonSchemaFactory factory = JsonSchemaFactory.byDefault();
- //通过jsonschemaFactory获取jsonnode对象
- try {
-
- JsonNode schemaNode = JsonLoader.fromResource("/dsl-validation.json");
- //通过jsonstr字符串获取对应的jsonnode对象
- JsonNode dataNode = JsonLoader.fromString(jsonString);
- JsonSchema jsonSchema = factory.getJsonSchema(schemaNode);
- //使用json-schema-validator中的jsonschema对象的validate方法对数据进行校验
- //获取处理的报告信息
- ProcessingReport processingReport = jsonSchema.validate(dataNode);
- //获取完整的报告信息
- Iterator<ProcessingMessage> iterator = processingReport.iterator();
- StringBuffer sb = new StringBuffer();
- while (iterator.hasNext()) {
- ProcessingMessage next = iterator.next();
- JsonNode jsonNode = next.asJson();
- sb.append("pointer on ");
- sb.append(jsonNode.get("instance").get("pointer"));
- sb.append(", ");
- sb.append(next.getMessage());
- sb.append(". ");
- }
- //判断校验是否成功,如果为true成功
- Validate.isTrue(processingReport.isSuccess(), sb.toString());
- } catch (ProcessingException e) {
- log.error(e);
- } catch (IOException e) {
- log.error(e);
- }
+ JsonSchemaValidator jsonSchemaValidator = JsonSchemaValidator.getInstance();
+ jsonSchemaValidator.addRule("dsl-validation.json", "dsl")
+ .validateDSL(jsonString);
}
}
diff --git a/src/main/java/com/mesalab/knowledge/strategy/BaseQueryProvider.java b/src/main/java/com/mesalab/knowledge/strategy/BaseQueryProvider.java
index 4276aa72..a86b1766 100644
--- a/src/main/java/com/mesalab/knowledge/strategy/BaseQueryProvider.java
+++ b/src/main/java/com/mesalab/knowledge/strategy/BaseQueryProvider.java
@@ -4,16 +4,15 @@ import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.fastjson2.JSON;
import com.google.common.collect.ImmutableMap;
-import com.mesalab.common.enums.ResultCodeEnum;
-import com.mesalab.common.enums.ResultStatusEnum;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
import com.mesalab.common.exception.BusinessException;
import com.mesalab.knowledge.common.config.ArangoConfig;
import com.mesalab.knowledge.common.utils.Constant;
import com.mesalab.knowledge.common.utils.JwtCache;
-import com.mesalab.knowledge.exception.KnowLedgeErrorCode;
+import com.mesalab.common.exception.CommonErrorCode;
+import com.mesalab.qgw.model.basic.HttpResponseResult;
import com.mesalab.qgw.service.impl.HttpClientService;
-import org.apache.http.Header;
-import org.apache.http.message.BasicHeader;
+import com.mesalab.qgw.service.impl.HttpClientServiceV2;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpHeaders;
import org.springframework.stereotype.Component;
@@ -30,12 +29,16 @@ import java.util.Objects;
@Component
public class BaseQueryProvider {
private static final Log log = LogFactory.get();
+ private static final int UNAUTHORIZED_CODE = 401;
@Autowired
ArangoConfig arangoConfig;
@Autowired
HttpClientService httpClientService;
+ @Autowired
+ HttpClientServiceV2 httpClientServiceV2;
+
/**
* @return java.lang.String
* @Author zhq
@@ -50,8 +53,8 @@ public class BaseQueryProvider {
Map<String, String> params = new HashMap<>();
params.put("username", arangoConfig.getUsername());
params.put("password", arangoConfig.getPin());
- String res = httpClientService.httpPost(arangoConfig.getJwturl(), JSON.toJSONString(params));
- Map resMap = JSON.parseObject(res, Map.class);
+ HttpResponseResult httpResponseResult = httpClientServiceV2.post(arangoConfig.getJwturl(), JSON.toJSONString(params));
+ Map resMap = JSON.parseObject(httpResponseResult.getResponseBody(), Map.class);
if (Objects.isNull(resMap.get(Constant.JWT))) {
throw new BusinessException("获取Arango jwt 失败");
}
@@ -69,24 +72,18 @@ public class BaseQueryProvider {
**/
public Object queryArango(String aql) {
Map queryMap = ImmutableMap.of("query", aql);
- Header header = new BasicHeader(HttpHeaders.AUTHORIZATION, getJwt());
- String res = "";
- try {
- res = httpClientService.httpPost(arangoConfig.getQueryurl(), JSON.toJSONString(queryMap), header);
- } catch (BusinessException e) {
- if (e.getMessage() != null && e.getMessage().contains("not authorized to execute this request")) {
- //刷新jwt
- JwtCache.remove(Constant.ARANGO_CACHE_JWT);
- log.warn("Arango jwt 失效后重置");
- res = httpClientService.httpPost(arangoConfig.getQueryurl(), JSON.toJSONString(queryMap), new BasicHeader(HttpHeaders.AUTHORIZATION, getJwt()));
- } else {
- log.error(e.getMessage());
- throw new BusinessException(ResultStatusEnum.SERVER_ERROR.getCode(), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(),
- String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(),e.getMessage()));
- }
+ Map<String, String> headers = new HashMap<>();
+ headers.put(HttpHeaders.AUTHORIZATION, getJwt());
+ HttpResponseResult responseResult = null;
+ responseResult = httpClientServiceV2.post(arangoConfig.getQueryurl(), JSON.toJSONString(queryMap), headers);
+ Map jsonMap = JSON.parseObject(responseResult.getResponseBody(), Map.class);
+ if (jsonMap != null && jsonMap.get("code") != null && Integer.parseInt(jsonMap.get("code").toString()) == UNAUTHORIZED_CODE) {
+ JwtCache.remove(Constant.ARANGO_CACHE_JWT);
+ log.warn("Arango jwt 失效后重置");
+ headers.put(HttpHeaders.AUTHORIZATION, getJwt());
+ responseResult = httpClientServiceV2.post(arangoConfig.getQueryurl(), JSON.toJSONString(queryMap), headers);
+ jsonMap = JSON.parseObject(responseResult.getResponseBody(), Map.class);
}
- Map jsonMap = JSON.parseObject(res, Map.class);
- Object result = jsonMap.get("result");
- return result;
+ return jsonMap.get("result");
}
}
diff --git a/src/main/java/com/mesalab/knowledge/strategy/FqdnProviderImpl.java b/src/main/java/com/mesalab/knowledge/strategy/FqdnProviderImpl.java
index 8e25f0d7..5a800116 100644
--- a/src/main/java/com/mesalab/knowledge/strategy/FqdnProviderImpl.java
+++ b/src/main/java/com/mesalab/knowledge/strategy/FqdnProviderImpl.java
@@ -10,8 +10,7 @@ import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
-import com.mesalab.common.enums.ResultCodeEnum;
-import com.mesalab.common.enums.ResultStatusEnum;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
import com.mesalab.common.exception.BusinessException;
import com.mesalab.knowledge.entity.DSLObject;
import com.mesalab.knowledge.entity.Match;
@@ -19,19 +18,17 @@ import com.mesalab.knowledge.entity.Parameters;
import com.mesalab.knowledge.entity.Range;
import com.mesalab.knowledge.entity.arango.IpLearningPath;
import com.mesalab.knowledge.enums.RangeEnum;
-import com.mesalab.knowledge.exception.KnowLedgeErrorCode;
import com.mesalab.knowledge.exception.KnowLedgeErrorMessage;
import com.mesalab.qgw.constant.QGWMessageConst;
import com.geedgenetworks.utils.DateUtils;
import com.geedgenetworks.utils.StringUtil;
+import com.mesalab.common.exception.CommonErrorCode;
import org.apache.commons.lang3.EnumUtils;
-import org.joda.time.format.ISODateTimeFormat;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import org.springframework.util.ObjectUtils;
-import java.text.MessageFormat;
import java.util.*;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
@@ -78,12 +75,12 @@ public class FqdnProviderImpl implements QueryProvider {
//查询深度
if ("DEPTH".equals(r.getFieldKey())) {
if (ObjectUtils.isEmpty(r.getFieldValues())){
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), KnowLedgeErrorMessage.FIELD_VALUES_FORMAT_ERROR));
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), KnowLedgeErrorMessage.FIELD_VALUES_FORMAT_ERROR));
}
if (!Pattern.matches(depthregex, String.valueOf(r.getFieldValues().get(0)))){
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), KnowLedgeErrorMessage.FIELD_VALUES_FORMAT_ERROR));
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), KnowLedgeErrorMessage.FIELD_VALUES_FORMAT_ERROR));
}
} else if ("PROTOCOL".equals(r.getFieldKey())) {
List<String> protocols = Lists.newArrayList();
@@ -122,8 +119,8 @@ public class FqdnProviderImpl implements QueryProvider {
intervalsb.append(" and e.LAST_FOUND_TIME >= ").append(startTime).append(" and ").append("e.LAST_FOUND_TIME < ").append(endTime);
}
} catch (DateException e) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.TIME_FORMAT_ERROR));
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.TIME_FORMAT_ERROR));
}
}
diff --git a/src/main/java/com/mesalab/knowledge/strategy/SubscriberIdProviderImpl.java b/src/main/java/com/mesalab/knowledge/strategy/SubscriberIdProviderImpl.java
deleted file mode 100644
index 0e0fb389..00000000
--- a/src/main/java/com/mesalab/knowledge/strategy/SubscriberIdProviderImpl.java
+++ /dev/null
@@ -1,105 +0,0 @@
-package com.mesalab.knowledge.strategy;
-
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import com.mesalab.cn.enums.EngineTypeEnum;
-import com.mesalab.common.entity.BaseResult;
-import com.mesalab.knowledge.entity.*;
-import com.mesalab.services.common.dsl.ComDSLObject;
-import com.mesalab.services.common.enums.EntityQueryType;
-import com.mesalab.services.service.RelationService;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Component;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.stream.Collectors;
-
-/**
- * @description:
- * @author: zhq
- * @create: 2020-07-17
- **/
-@Component("SUBSCRIBER_ID_VIEW")
-public class SubscriberIdProviderImpl implements QueryProvider {
-
- private static final Log log = LogFactory.get();
- @Autowired
- private RelationService relationService;
-
- @Override
- public Object query(DSLObject.DSLQuery dslQeruy) {
- BaseResult realRelation = null;
- try {
- Parameters parameters = dslQeruy.getParameters();
-
- ComDSLObject.Query.QueryBean queryBean = new ComDSLObject.Query.QueryBean();
- queryBean.setMatch(getComMatch(parameters));
- queryBean.setRange(getComRange(parameters));
- queryBean.setSort(getComSort(parameters));
- queryBean.setIntervals(parameters.getIntervals());
-
- ComDSLObject.Query query = new ComDSLObject.Query();
- query.setDataSource(dslQeruy.getDataSource());
- query.setDataEngine(EngineTypeEnum.ANALYSIS_ENGINE.getEngine());
- query.setLimit(parameters.getLimit());
- query.setParameters(queryBean);
-
- ComDSLObject comDSLObject = new ComDSLObject();
- comDSLObject.setQuery(query);
-
- realRelation = relationService.getRealRelation(EntityQueryType.SUBSCRIBERIDPOOL.getType(), comDSLObject);
- } catch (Exception e) {
- log.error(e);
- }
- return realRelation.getData();
- }
-
- private List<ComDSLObject.Query.FilterBean> getComRange(Parameters parameters) throws Exception {
- List<ComDSLObject.Query.FilterBean> range = new ArrayList<>();
- if (parameters.getRange() != null) {
- for (Range parametersRange : parameters.getRange()) {
- ComDSLObject.Query.FilterBean filterBean = new ComDSLObject.Query.FilterBean();
- filterBean.setType(parametersRange.getType());
- filterBean.setFieldKey(parametersRange.getFieldKey());
- filterBean.setFieldValues(parametersRange.getFieldValues().stream()
- .map(String::valueOf)
- .collect(Collectors.toList()));
- range.add(filterBean);
- }
- }
-
- return range;
- }
-
- private List<ComDSLObject.Query.FilterBean> getComMatch(Parameters parameters) throws Exception {
- List<ComDSLObject.Query.FilterBean> match = new ArrayList<>();
- if (parameters.getMatch() != null) {
- for (Match parametersMatch : parameters.getMatch()) {
- ComDSLObject.Query.FilterBean filterBean = new ComDSLObject.Query.FilterBean();
- filterBean.setType(parametersMatch.getType());
- filterBean.setFieldKey(parametersMatch.getFieldKey());
- filterBean.setFieldValues(parametersMatch.getFieldValues().stream()
- .map(String::valueOf)
- .collect(Collectors.toList()));
- match.add(filterBean);
- }
- }
-
- return match;
- }
-
- private List<ComDSLObject.Query.SortBean> getComSort(Parameters parameters) throws Exception {
- List<ComDSLObject.Query.SortBean> sort = new ArrayList<>();
- if (parameters.getSort() != null) {
- for (Sort parametersSort : parameters.getSort()) {
- ComDSLObject.Query.SortBean sortBean = new ComDSLObject.Query.SortBean();
- sortBean.setType(parametersSort.getType());
- sortBean.setFieldKey(parametersSort.getFieldKey());
- sort.add(sortBean);
- }
- }
- return sort;
- }
-
-} \ No newline at end of file
diff --git a/src/main/java/com/mesalab/network/controller/NetworkMonitorController.java b/src/main/java/com/mesalab/network/controller/NetworkMonitorController.java
deleted file mode 100644
index 3e58d35d..00000000
--- a/src/main/java/com/mesalab/network/controller/NetworkMonitorController.java
+++ /dev/null
@@ -1,115 +0,0 @@
-package com.mesalab.network.controller;
-
-import cn.hutool.core.util.StrUtil;
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import com.google.common.collect.Lists;
-import com.mesalab.common.entity.BaseResult;
-import com.mesalab.common.entity.BaseResultGenerator;
-import com.mesalab.common.enums.ResultCodeEnum;
-import com.mesalab.common.enums.ResultStatusEnum;
-import com.mesalab.common.exception.BusinessException;
-import com.mesalab.network.dsl.DSLObject;
-import com.mesalab.network.dsl.DSLValidate;
-import com.mesalab.network.exception.NWErrorCode;
-import com.mesalab.network.service.NetworkMonitorService;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.validation.annotation.Validated;
-import org.springframework.web.bind.annotation.*;
-
-import javax.servlet.http.HttpServletRequest;
-import java.util.Enumeration;
-import java.util.List;
-import java.util.Locale;
-
-
-/**
- * @author wangwei
- * @version 1.0
- * @date 2020/6/30 10:38 上午
- */
-@RestController
-@RequestMapping(value = "/")
-public class NetworkMonitorController {
- private static final Log log = LogFactory.get();
- @Autowired
- private DSLValidate dslValidate;
- @Autowired
- private NetworkMonitorService networkMonitorService;
- private static final String PROTOCOL = "protocol";
- private static final String APP = "app";
-
- @PostMapping(value = "/traffic/v1/", produces = "application/json")
- public BaseResult trafficStatistics(HttpServletRequest request, @Validated @RequestBody DSLObject dslObject) {
- Enumeration<String> parameterNames = request.getParameterNames();
- List<String> params = Lists.newArrayList();
- while (parameterNames.hasMoreElements()) {
- params.add(StrUtil.trim(parameterNames.nextElement()).toLowerCase(Locale.ROOT));
- }
- if (params.contains(PROTOCOL)) {
- log.debug("network protocol interface, param: queryString is {}, body is {}.", params, dslObject);
- dslValidate.executeValidate(dslObject);
- return protocolData(dslObject);
- } else if (params.contains(APP)) {
- log.debug("network app interface, param: queryString is {}, body is {}.", params, dslObject);
- dslValidate.executeValidate(dslObject);
- return appData(dslObject);
- } else {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(), "not support query parameter");
- }
- }
-
- private BaseResult appData(DSLObject dslObject) {
- String queryType = dslObject.getQuery().getQueryType();
- if (QueryType.APP_DATA_SUMMARY.getValue()
- .equalsIgnoreCase(queryType)) {
- return networkMonitorService.buildAppData(dslObject);
- } else if (QueryType.INTERNAL_IP_DATA_SUMMARY.getValue()
- .equalsIgnoreCase(queryType)) {
- return networkMonitorService.buildInternalIPData(dslObject);
- }
- else if (QueryType.APP_DATA_RATE_SUMMARY.getValue()
- .equalsIgnoreCase(queryType)) {
- return networkMonitorService.buildAppDataRate(dslObject);
- } else if (QueryType.APP_TRAFFIC_SUMMARY.getValue()
- .equalsIgnoreCase(queryType)) {
- return networkMonitorService.buildAppTraffic(dslObject);
- } else {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(), "No match queryType");
- }
- }
-
- private BaseResult protocolData(DSLObject dslObject) {
- if (QueryType.PROTOCOL_TREE_SUMMARY.getValue()
- .equalsIgnoreCase(dslObject.getQuery().getQueryType())) {
- return networkMonitorService.buildProtocolTree(dslObject);
- } else if (QueryType.PROTOCOL_DATA_RATE_SUMMARY.getValue()
- .equalsIgnoreCase(dslObject.getQuery().getQueryType())) {
- return networkMonitorService.buildProtocolDataRateSummary(dslObject);
- } else if (QueryType.NETWORK_OVERVIEW_SUMMARY.getValue().equalsIgnoreCase(dslObject.getQuery().getQueryType())) {
- return networkMonitorService.buildNetworkOverviewResult(dslObject);
- } else {
- return BaseResultGenerator.failure(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(), "No match queryType");
- }
- }
-
- enum QueryType {
- PROTOCOL_TREE_SUMMARY("protocolTreeSummary"),
- PROTOCOL_DATA_RATE_SUMMARY("protocolDataRateSummary"),
- NETWORK_OVERVIEW_SUMMARY("networkOverviewSummary"),
- APP_DATA_SUMMARY("appDataSummary"),
- APP_DATA_RATE_SUMMARY("appDataRateSummary"),
- APP_TRAFFIC_SUMMARY("appTrafficSummary"),
-
- INTERNAL_IP_DATA_SUMMARY("internalIPDataSummary");
- private final String value;
-
- QueryType(String value) {
- this.value = value;
- }
-
- public String getValue() {
- return value;
- }
- }
-}
diff --git a/src/main/java/com/mesalab/network/dsl/DSLObject.java b/src/main/java/com/mesalab/network/dsl/DSLObject.java
deleted file mode 100644
index 59e45a65..00000000
--- a/src/main/java/com/mesalab/network/dsl/DSLObject.java
+++ /dev/null
@@ -1,43 +0,0 @@
-package com.mesalab.network.dsl;
-
-import lombok.Data;
-
-import java.io.Serializable;
-import java.util.List;
-
-/**
- * @Date: 2020-09-18 09:48
- * @Author : wangwei
- * @ClassName : DSLObject
- * @Description :
- */
-@Data
-public class DSLObject implements Serializable {
-
- private String clientId;
- private QueryBean query;
-
- @Data
- public static class QueryBean {
- private String queryType;
- private String dataSource;
- private Integer limit;
- private Integer offset;
- private Parameters parameters;
-
- @Data
- public static class Parameters {
- private String granularity;
- private List<FilterBean> match;
- private List<FilterBean> range;
- private List<String> intervals;
-
- @Data
- public static class FilterBean {
- private String type;
- private String fieldKey;
- private List<String> fieldValues;
- }
- }
- }
-}
diff --git a/src/main/java/com/mesalab/network/dsl/DSLValidate.java b/src/main/java/com/mesalab/network/dsl/DSLValidate.java
deleted file mode 100644
index 34bf3ed5..00000000
--- a/src/main/java/com/mesalab/network/dsl/DSLValidate.java
+++ /dev/null
@@ -1,162 +0,0 @@
-package com.mesalab.network.dsl;
-
-import cn.hutool.core.date.DateException;
-import cn.hutool.core.date.DateUtil;
-import cn.hutool.core.util.NumberUtil;
-import com.mesalab.cn.constant.ErrorMessage;
-import com.mesalab.common.enums.ResultCodeEnum;
-import com.mesalab.common.enums.ResultStatusEnum;
-import com.mesalab.common.exception.BusinessException;
-import com.mesalab.knowledge.enums.MatchEnum;
-import com.mesalab.knowledge.enums.RangeEnum;
-import com.mesalab.network.exception.NWErrorCode;
-import com.mesalab.network.exception.NWErrorMessage;
-import com.mesalab.qgw.exception.QGWBusinessException;
-import com.geedgenetworks.utils.StringUtil;
-import org.apache.commons.lang.Validate;
-import org.apache.commons.lang3.EnumUtils;
-import org.joda.time.format.DateTimeFormat;
-import org.joda.time.format.DateTimeFormatter;
-import org.springframework.stereotype.Component;
-import org.springframework.util.CollectionUtils;
-
-import java.util.List;
-import java.util.regex.Pattern;
-
-/**
- * @Date: 2020-09-17 18:10
- * @Author : wangwei
- * @ClassName : DSLValidate
- * @Description : DSL格式校验
- */
-@Component
-public class DSLValidate {
-
- public static final Pattern periodOfPT = Pattern.compile("PT(\\d+)[SMH]", Pattern.CASE_INSENSITIVE);
- public static final Pattern periodOfP = Pattern.compile("P(\\d+)[DWMY]", Pattern.CASE_INSENSITIVE);
-
- public void executeValidate(DSLObject dslObject) throws BusinessException {
- if (StringUtil.isEmpty(dslObject)) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(),ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),NWErrorMessage.DSL_OBJECT_IS_INVALID));
- }
- if (StringUtil.isEmpty(dslObject.getQuery())) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(),ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),NWErrorMessage.QUERY_IS_INVALID));
- }
- if (StringUtil.isNotEmpty(dslObject.getQuery().getLimit()) && !NumberUtil.isNumber(dslObject.getQuery().getLimit().toString())) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),NWErrorMessage.QUERY_LIMIT_IS_INVALID));
- }
- if (StringUtil.isNotEmpty(dslObject.getQuery().getOffset()) && !NumberUtil.isNumber(dslObject.getQuery().getOffset().toString())) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),NWErrorMessage.QUERY_OFFSET_IS_INVALID));
- }
- DSLObject.QueryBean.Parameters parameters = dslObject.getQuery().getParameters();
- if (StringUtil.isEmpty(parameters)) {
- return;
- }
- if (!isValidGranularity(parameters.getGranularity())) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(),ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),NWErrorMessage.GRANULARITY_VALUE_IS_INVALID));
- }
- if (!isValidMatch(parameters.getMatch())) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(),ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),NWErrorMessage.MATCH_IS_INVALID));
- }
- if (!isValidRange(parameters.getRange())) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(),ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),NWErrorMessage.RANGE_TYPE_IS_INVALID));
- }
- if (!isValidIntervals(parameters.getIntervals())) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(),ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),NWErrorMessage.INTERVALS_TYPE_IS_INVALID));
- }
- }
-
- /**
- * 时间粒度校验, 遵循ISO8601 durations 定义
- *
- * @param granularity
- * @return
- */
- private boolean isValidGranularity(String granularity) {
- if (StringUtil.isBlank(granularity)) {
- return true;
- }
- if (periodOfP.matcher(granularity).find() || periodOfPT.matcher(granularity).find()) {
- return true;
- }
- return false;
- }
-
- /**
- * 校验match:
- * 1.是否属于{@link MatchEnum}限定类型
- * 2.不能以*开始、或$结尾
- *
- * @param filterBeanList
- * @return
- */
- private boolean isValidMatch(List<DSLObject.QueryBean.Parameters.FilterBean> filterBeanList) {
- if (CollectionUtils.isEmpty(filterBeanList)) {
- return true;
- }
- for (DSLObject.QueryBean.Parameters.FilterBean match : filterBeanList) {
- Validate.isTrue(EnumUtils.isValidEnum(MatchEnum.class, StringUtil.upperCase(match.getType())), ErrorMessage.MATCH_FIELD_VALUES_ERROR);
- }
- return true;
- }
-
- /**
- * 校验range:
- * 1.是否属于{@link RangeEnum}限定类型
- *
- * @param filterBeanList
- * @return
- */
- private boolean isValidRange(List<DSLObject.QueryBean.Parameters.FilterBean> filterBeanList) {
- if (CollectionUtils.isEmpty(filterBeanList)) {
- return true;
- }
- for (DSLObject.QueryBean.Parameters.FilterBean range : filterBeanList) {
- if (!EnumUtils.isValidEnum(RangeEnum.class, StringUtil.upperCase(range.getType()))){
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(),ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),NWErrorMessage.RANGE_TYPE_ERROR));
- }
- }
- return true;
- }
-
- /**
- * 校验Interval:
- * 1.目前只支持between类型: ["2020-01-01 00:00:00/2020-01-02 00:00:00"]
- * 2.时间区间必须是 开始时间 < 结束时间
- *
- * @param intervals
- */
- private boolean isValidIntervals(List<String> intervals) {
- try {
- if (CollectionUtils.isEmpty(intervals)) {
- return true;
- }
- if (intervals.size() != 1) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), NWErrorMessage.INTERVALS_PARAM_ERROR));
- }
- String[] split = intervals.get(0).split("/");
- if (split.length != 2) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), NWErrorMessage.INTERVALS_PARAM_ERROR));
- }
- for (String dateTimeStr : split) {
- DateUtil.parse(dateTimeStr);
- }
- return true;
- } catch (DateException e) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), NWErrorMessage.TIME_FORMAT_ERROR));
- }
- }
-
-}
diff --git a/src/main/java/com/mesalab/network/exception/NWErrorCode.java b/src/main/java/com/mesalab/network/exception/NWErrorCode.java
deleted file mode 100644
index 1774b1dd..00000000
--- a/src/main/java/com/mesalab/network/exception/NWErrorCode.java
+++ /dev/null
@@ -1,31 +0,0 @@
-package com.mesalab.network.exception;
-
-import lombok.Getter;
-
-/**
- * @Author wxs
- * @Date 2022/11/3
- */
-@Getter
-public enum NWErrorCode {
- /**
- * 1-3位:异常类型(HTTP协议状态码)
- * 4-5位:模块 04
- * 6-8:自然排序
- */
-
-
-
-
- ;
-
-
- private String code;
- private String message;
-
- NWErrorCode(String code, String message) {
- this.code = code;
- this.message = message;
- }
-
-} \ No newline at end of file
diff --git a/src/main/java/com/mesalab/network/exception/NWErrorMessage.java b/src/main/java/com/mesalab/network/exception/NWErrorMessage.java
deleted file mode 100644
index 6176fb6e..00000000
--- a/src/main/java/com/mesalab/network/exception/NWErrorMessage.java
+++ /dev/null
@@ -1,29 +0,0 @@
-package com.mesalab.network.exception;
-
-
-/**
- * @Author wxs
- * @Date 2022/11/3
- */
-public class NWErrorMessage {
- /**
- * 1-2位:异常类型(参数检查异常:01、SQL语句检查异常:02、SQL执行异常:03、引擎计算异常:04、解析异常:05、执行失败:99)。
- * 3-4位:模块 03
- * 5-7:自然排序
- */
- public static String DSL_OBJECT_IS_INVALID = "DSLObject is invalid";
- public static String QUERY_IS_INVALID = "DSLObject.query is invalid";
- public static String QUERY_LIMIT_IS_INVALID = "DSLObject.query.limit is invalid";
- public static String QUERY_OFFSET_IS_INVALID = "DSLObject.query.offset is invalid";
- public static String GRANULARITY_VALUE_IS_INVALID = "DSLObject.query.parameters.Granularity value is invalid";
- public static String MATCH_IS_INVALID = "DSLObject.query.Match is invalid";
- public static String RANGE_TYPE_IS_INVALID = "DSLObject.query.Range type is invalid";
- public static String INTERVALS_TYPE_IS_INVALID = "DSLObject.query.Intervals type is invalid";
- public static String MATCH_FIELD_VALUES_ERROR = "Match fieldValues cannot startWith '*' or endWith '$'";
- public static String RANGE_TYPE_ERROR = "range type is illegal";
- public static String INTERVALS_PARAM_ERROR = "DSLObject.query.query.Intervals is invalid";
- public static String TIME_FORMAT_ERROR = "Time format should follow ISO 8601 standard.";
- public static String INTERVALS_VALUE_ERROR = "Intervals value should be [start, end]";
-
-
-}
diff --git a/src/main/java/com/mesalab/network/service/NetworkMonitorService.java b/src/main/java/com/mesalab/network/service/NetworkMonitorService.java
deleted file mode 100644
index b054aa3e..00000000
--- a/src/main/java/com/mesalab/network/service/NetworkMonitorService.java
+++ /dev/null
@@ -1,30 +0,0 @@
-package com.mesalab.network.service;
-
-import com.mesalab.common.entity.BaseResult;
-import com.mesalab.network.dsl.DSLObject;
-import com.mesalab.network.model.protocol.ProtocolTree;
-
-import java.util.List;
-import java.util.Map;
-
-/**
- * @author wangwei
- * @version 1.0
- * @date 2020/6/30 4:20 下午
- */
-
-
-public interface NetworkMonitorService {
-
- BaseResult buildProtocolTree(DSLObject dslObject);
- BaseResult buildProtocolDataRateSummary(DSLObject dslObject);
- BaseResult buildNetworkOverviewResult(DSLObject dslObject);
- List<ProtocolTree> buildFlatStructure(List<Map> data);
- List<ProtocolTree> buildHierarchicalStructure(List<ProtocolTree> nodes);
- BaseResult buildAppData(DSLObject dslObject);
- BaseResult buildInternalIPData(DSLObject dslObject);
-
- BaseResult buildAppDataRate(DSLObject dslObject);
-
- BaseResult buildAppTraffic(DSLObject dslObject);
-}
diff --git a/src/main/java/com/mesalab/network/service/impl/NetworkMonitorServiceImpl.java b/src/main/java/com/mesalab/network/service/impl/NetworkMonitorServiceImpl.java
deleted file mode 100644
index 6315f44e..00000000
--- a/src/main/java/com/mesalab/network/service/impl/NetworkMonitorServiceImpl.java
+++ /dev/null
@@ -1,607 +0,0 @@
-package com.mesalab.network.service.impl;
-
-import cn.hutool.core.net.NetUtil;
-import cn.hutool.core.util.NumberUtil;
-import cn.hutool.core.util.ObjectUtil;
-import cn.hutool.core.util.StrUtil;
-import cn.hutool.core.util.URLUtil;
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import com.alibaba.fastjson2.JSON;
-import com.alibaba.fastjson2.JSONWriter;
-import com.google.common.base.Splitter;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import com.mesalab.cn.enums.RangeTypeEnum;
-import com.mesalab.common.entity.BaseResult;
-import com.mesalab.common.entity.BaseResultGenerator;
-import com.mesalab.common.enums.ResultCodeEnum;
-import com.mesalab.common.enums.ResultStatusEnum;
-import com.mesalab.common.exception.BusinessException;
-import com.mesalab.common.utils.TreeUtils;
-import com.mesalab.qgw.model.basic.HttpConfig;
-import com.mesalab.knowledge.enums.MatchEnum;
-import com.mesalab.network.common.Constants;
-import com.mesalab.network.dsl.DSLObject;
-import com.mesalab.network.service.NetworkMonitorService;
-import com.mesalab.network.model.protocol.ProtocolTree;
-import com.mesalab.qgw.service.impl.HttpClientService;
-import com.mesalab.services.common.dsl.ComDSLParse;
-import com.mesalab.services.common.property.SqlPropertySourceFactory;
-import com.geedgenetworks.utils.StringUtil;
-import org.joda.time.Period;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.context.annotation.PropertySource;
-import org.springframework.core.env.Environment;
-import org.springframework.stereotype.Service;
-import org.springframework.util.CollectionUtils;
-
-import java.io.*;
-import java.math.BigDecimal;
-import java.net.URLEncoder;
-import java.util.*;
-import java.util.stream.Collectors;
-
-/**
- * @author wangwei
- * @version 1.0
- * @date 2020/6/30 4:24 下午
- */
-@Service("networkMonitorService")
-@PropertySource(value = "classpath:http-sql-template.sql", factory = SqlPropertySourceFactory.class)
-public class NetworkMonitorServiceImpl implements NetworkMonitorService {
- private static final Log log = LogFactory.get();
- @Autowired
- HttpClientService httpClientService;
- @Autowired
- HttpConfig httpConfig;
- @Autowired
- Environment environment;
- private static final String localHostAddress;
-
- public String getPort() {
- return environment.getProperty("local.server.port");
- }
- private static final Map<String, String> LOGICAL_FIELD = Maps.newHashMap();
-
- static {
- localHostAddress = NetUtil.getLocalhostStr();
- LOGICAL_FIELD.put("protocol_stack_id", "protocol_path");
- LOGICAL_FIELD.put("app_name", "app");
- }
-
- @Override
- public BaseResult buildProtocolTree(DSLObject dslObject) {
- BaseResult baseResult;
- String sql = generateProtocolTreeSql(dslObject.getQuery(), environment.getProperty("PROTOCOL_TREE_SUMMARY"));
- Map<String, String> result = executeQuery(sql);
- if (String.valueOf(ResultStatusEnum.SUCCESS.getCode()).equals(result.get("status"))) {
- Map resultMap = JSON.parseObject(result.get("result"), Map.class);
- List<Map> resultList = (List<Map>) resultMap.get("data");
- if (StringUtil.isEmpty(resultList)) {
- return BaseResultGenerator.success("ok", resultList, (Map) resultMap.get("statistics"));
- }
- List<ProtocolTree> protocolTreeList = buildHierarchicalStructure(buildFlatStructure(resultList));
- baseResult = BaseResultGenerator.success("ok", protocolTreeList, (Map) resultMap.get("statistics"));
- } else {
- baseResult = BaseResultGenerator.error(result.get("result"));
- }
- return baseResult;
- }
-
- @Override
- public BaseResult buildNetworkOverviewResult(DSLObject dslObject) {
- Map<String, String> asymmetricFlowsRsp = executeQuery(generateNetworkStatSummarySql(dslObject.getQuery(), environment.getProperty("NETWORK_OVERVIEW_ASYMMETRIC_FLOWS_STAT")));
- if (!String.valueOf(ResultStatusEnum.SUCCESS.getCode()).equals(asymmetricFlowsRsp.get("status"))) {
- throw new BusinessException(Integer.parseInt(asymmetricFlowsRsp.get("status")), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(), String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(), asymmetricFlowsRsp.get("message")));
- }
- Map<String, String> statRsp = executeQuery(generateDynamicStatSql(dslObject.getQuery(), environment.getProperty("NETWORK_OVERVIEW_STAT")));
- if (!String.valueOf(ResultStatusEnum.SUCCESS.getCode()).equals(statRsp.get("status"))) {
- throw new BusinessException(Integer.parseInt(statRsp.get("status")), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(), String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(), asymmetricFlowsRsp.get("message")));
- }
- Map<String, String> tcpRsp = executeQuery(generateTCPStatSql(dslObject.getQuery(), environment.getProperty("NETWORK_OVERVIEW_TCP_STAT")));
- if (!String.valueOf(ResultStatusEnum.SUCCESS.getCode()).equals(statRsp.get("status"))) {
- throw new BusinessException(Integer.parseInt(statRsp.get("status")), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(), String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(), asymmetricFlowsRsp.get("message")));
- }
- Map<String, String> appRsp = executeQuery(generateAppStatisticsSql(dslObject.getQuery(), environment.getProperty("NETWORK_OVERVIEW_APP_STAT")));
- if (!String.valueOf(ResultStatusEnum.SUCCESS.getCode()).equals(appRsp.get("status"))) {
- throw new BusinessException(Integer.parseInt(appRsp.get("status")), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(), String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(), asymmetricFlowsRsp.get("message")));
- }
-
- Map statResult = JSON.parseObject(statRsp.get("result"), Map.class);
- List<Map> statData = (List<Map>) statResult.get("data");
- Map asymmetricFlowsResult = JSON.parseObject(asymmetricFlowsRsp.get("result"), Map.class);
- List<Map> asymmetricFlowsData = (List<Map>) asymmetricFlowsResult.get("data");
- Map tcpResult = JSON.parseObject(tcpRsp.get("result"), Map.class);
- List<Map> tcpData = (List<Map>) tcpResult.get("data");
- Map appResult = JSON.parseObject(appRsp.get("result"), Map.class);
- List<Map> appData = (List<Map>) appResult.get("data");
-
- Map<String, Object> data = new HashMap<>(16);
- if (!statData.isEmpty()) {
- data.putAll(statData.get(0));
- }
-
- if (!asymmetricFlowsData.isEmpty()) {
- data.putAll(asymmetricFlowsData.get(0));
- }
-
- if (!tcpData.isEmpty()) {
- data.putAll(tcpData.get(0));
- }
-
- if (!appData.isEmpty()) {
- data.putAll(appData.get(0));
- }
-
-
- long asymmetricFlows = Long.parseLong(String.valueOf(
- data.get(Constants.NETWORK_OVERVIEW_METRIC_ASYMMETRIC_FLOWS_BYTES) == null ? 0 : data.get(Constants.NETWORK_OVERVIEW_METRIC_ASYMMETRIC_FLOWS_BYTES)));
- long totalSessionUseOnAsymmetricFlows = Long.parseLong(String.valueOf(
- data.get(Constants.NETWORK_OVERVIEW_METRIC_TOTAL_BYTES_USE_ON_ASYMMETRIC_FLOWS) == null ? 0 : data.get(Constants.NETWORK_OVERVIEW_METRIC_TOTAL_BYTES_USE_ON_ASYMMETRIC_FLOWS)));
-
- long uniqClientIp = Long.parseLong(String.valueOf(
- data.get(Constants.NETWORK_OVERVIEW_METRIC_UNIQ_CLIENT_IP) == null ? 0 : data.get(Constants.NETWORK_OVERVIEW_METRIC_UNIQ_CLIENT_IP)));
- long totalSessions = Long.parseLong(String.valueOf(
- data.get(Constants.NETWORK_OVERVIEW_METRIC_TOTAL_SESSIONS) == null ? 0 : data.get(Constants.NETWORK_OVERVIEW_METRIC_TOTAL_SESSIONS)));
- long totalBytes = Long.parseLong(String.valueOf(
- data.get(Constants.NETWORK_OVERVIEW_METRIC_TOTAL_BYTES) == null ? 0 : data.get(Constants.NETWORK_OVERVIEW_METRIC_TOTAL_BYTES)));
- long totalPackets = Long.parseLong(String.valueOf(
- data.get(Constants.NETWORK_OVERVIEW_METRIC_TOTAL_PACKETS) == null ? 0 : data.get(Constants.NETWORK_OVERVIEW_METRIC_TOTAL_PACKETS)));
-
- long fragmentationPackets = Long.parseLong(String.valueOf(
- data.get(Constants.NETWORK_OVERVIEW_METRIC_FRAGMENTATION_PACKETS) == null ? 0 : data.get(Constants.NETWORK_OVERVIEW_METRIC_FRAGMENTATION_PACKETS)));
- long dataRate = Long.parseLong(String.valueOf(
- data.get(Constants.NETWORK_OVERVIEW_METRIC_DATA_RATE) == null ? 0 : data.get(Constants.NETWORK_OVERVIEW_METRIC_DATA_RATE)));
-
- long tcpRetransmissionsPackets = Long.parseLong(String.valueOf(
- data.get(Constants.NETWORK_OVERVIEW_METRIC_TCP_RETRANSMISSIONS_PACKETS) == null ? 0 : data.get(Constants.NETWORK_OVERVIEW_METRIC_TCP_RETRANSMISSIONS_PACKETS)));
- long tcpTotalPackets = Long.parseLong(String.valueOf(
- data.get(Constants.NETWORK_OVERVIEW_METRIC_TCP_TOTAL_PACKETS) == null ? 0 : data.get(Constants.NETWORK_OVERVIEW_METRIC_TCP_TOTAL_PACKETS)));
- long unknownAppBytes = Long.parseLong(String.valueOf(
- data.get(Constants.NETWORK_OVERVIEW_METRIC_UNKNOWN_APP_BYTES) == null ? 0 : data.get(Constants.NETWORK_OVERVIEW_METRIC_UNKNOWN_APP_BYTES)));
-
-
-
- String unknownAppPercent = totalBytes == 0 ? String.valueOf(0)
- : NumberUtil.div(BigDecimal.valueOf(unknownAppBytes), BigDecimal.valueOf(totalBytes), 4).toPlainString();
- String asymmetricFlowsPercent = totalSessionUseOnAsymmetricFlows == 0 ? String.valueOf(0)
- : NumberUtil.div(BigDecimal.valueOf(asymmetricFlows), BigDecimal.valueOf(totalSessionUseOnAsymmetricFlows), 4).toPlainString();
- String tcpRetransmissionsPercent = tcpTotalPackets == 0 ? String.valueOf(0)
- : NumberUtil.div(BigDecimal.valueOf(tcpRetransmissionsPackets), BigDecimal.valueOf(tcpTotalPackets), 4).toPlainString();
- String fragmentationPacketsPercent = totalPackets == 0 ? String.valueOf(0)
- : NumberUtil.div(BigDecimal.valueOf(fragmentationPackets), BigDecimal.valueOf(totalPackets), 4).toPlainString();
-
- List result = new ArrayList<>();
- Map resultMap = new LinkedHashMap<>();
- resultMap.put(Constants.NETWORK_OVERVIEW_METRIC_UNIQ_CLIENT_IP, uniqClientIp);
- resultMap.put(Constants.NETWORK_OVERVIEW_METRIC_TOTAL_SESSIONS, totalSessions);
- resultMap.put(Constants.NETWORK_OVERVIEW_METRIC_DATA_RATE, dataRate);
- resultMap.put(Constants.NETWORK_OVERVIEW_METRIC_TOTAL_BYTES, totalBytes);
- resultMap.put(Constants.NETWORK_OVERVIEW_METRIC_TOTAL_PACKETS, totalPackets);
- resultMap.put(Constants.NETWORK_OVERVIEW_METRIC_UNKNOWN_APP_BYTES, unknownAppBytes);
- resultMap.put(Constants.NETWORK_OVERVIEW_METRIC_UNKNOWN_APP_PERCENT, unknownAppPercent);
- resultMap.put(Constants.NETWORK_OVERVIEW_METRIC_ASYMMETRIC_FLOWS_BYTES, asymmetricFlows);
- resultMap.put(Constants.NETWORK_OVERVIEW_METRIC_ASYMMETRIC_FLOWS_PERCENT, asymmetricFlowsPercent);
- resultMap.put(Constants.NETWORK_OVERVIEW_METRIC_TCP_RETRANSMISSIONS_PACKETS, tcpRetransmissionsPackets);
- resultMap.put(Constants.NETWORK_OVERVIEW_METRIC_TCP_RETRANSMISSIONS_PERCENT, tcpRetransmissionsPercent);
- resultMap.put(Constants.NETWORK_OVERVIEW_METRIC_FRAGMENTATION_PACKETS, fragmentationPackets);
- resultMap.put(Constants.NETWORK_OVERVIEW_METRIC_FRAGMENTATION_PERCENT, fragmentationPacketsPercent);
- result.add(resultMap);
-
- Map statistics = (Map) asymmetricFlowsResult.get("statistics");
- Map statStatistics = (Map) statResult.get("statistics");
- Map appStatistics = (Map) appResult.get("statistics");
- for (Object key : statistics.keySet()) {
- if ("result_rows".equals(key.toString())) {
- continue;
- }
- statistics.put(key, Long.parseLong(statistics.get(key).toString()) + Long.parseLong(statStatistics.get(key).toString()) + Long.parseLong(appStatistics.get(key).toString()));
- }
-
- return BaseResultGenerator.success("ok", result, statistics);
- }
-
- @Override
- public BaseResult buildProtocolDataRateSummary(DSLObject dslObject) {
- BaseResult baseResult;
- String sql = generateProtocolDataRateSql(dslObject.getQuery(), environment.getProperty("PROTOCOL_DATA_RATE_SUMMARY"));
- Map<String, String> result = executeQuery(sql);
- if (String.valueOf(ResultStatusEnum.SUCCESS.getCode()).equals(result.get("status"))) {
- Map resultMap = JSON.parseObject(result.get("result"), Map.class);
- List<Map> data = (List<Map>) resultMap.get("data");
- data.forEach(o -> {
- String[] protocolIds = String.valueOf(o.get("type")).split("\\.");
- String protocolId = protocolIds[protocolIds.length - 1];
- o.put("type", protocolId);
- });
- baseResult = BaseResultGenerator.success("ok", data, (Map) resultMap.get("statistics"));
- } else {
- baseResult = BaseResultGenerator.error(result.get("result"));
- }
- return baseResult;
- }
-
- private String generateDynamicStatSql(DSLObject.QueryBean queryParam, String sqlTemplate) {
- String whereOfTime = getWhereOfTime(queryParam.getParameters());
- String whereOfExactly = getWhereFilter(queryParam.getParameters(), false, null);
- String[] intervals = getIntervals(queryParam.getParameters().getIntervals());
- return String.format(sqlTemplate,
- "UNIX_TIMESTAMP('".concat(intervals[1]).concat("')"),
- "UNIX_TIMESTAMP('".concat(intervals[0]).concat("')"),
- queryParam.getDataSource(), whereOfTime, whereOfExactly, Constants.PROTOCOL_ETHERNET_NODE);
- }
-
- private String generateTCPStatSql(DSLObject.QueryBean queryParam, String sqlTemplate) {
- String whereOfTime = getWhereOfTime(queryParam.getParameters());
- String whereOfExactly = getWhereFilter(queryParam.getParameters(), false, null);
- return String.format(sqlTemplate, queryParam.getDataSource(), whereOfTime, whereOfExactly);
- }
-
- private String generateAppStatisticsSql(DSLObject.QueryBean queryParam, String sqlTemplate) {
- String whereOfTime = getWhereOfTime(queryParam.getParameters());
- String whereOfExactly = getWhereFilter(queryParam.getParameters(), false, null);
- return String.format(sqlTemplate, queryParam.getDataSource(), whereOfTime, whereOfExactly);
- }
-
- private String generateProtocolDataRateSql(DSLObject.QueryBean queryParam, String sqlTemplate) {
- String whereOfTime = getWhereOfTime(queryParam.getParameters());
- String whereOfExactly = getWhereFilter(queryParam.getParameters(), false, Lists.newArrayList("protocol_stack_id"));
- String protocolStr = null;
- List<DSLObject.QueryBean.Parameters.FilterBean> match = queryParam.getParameters().getMatch();
- for (DSLObject.QueryBean.Parameters.FilterBean item : match) {
- if (MatchEnum.PREFIX.getType().equals(item.getType())) {
- String[] split = item.getFieldValues().get(0).split(",");
- protocolStr = split[0].replaceFirst(Constants.PROTOCOL_TREE_ROOT_NAME + Constants.ENCAPSULATION_PATH_SEPARATOR, "");
- }
- }
- return String.format(sqlTemplate,
- queryParam.getParameters().getGranularity(), queryParam.getDataSource(), whereOfTime, whereOfExactly, protocolStr, queryParam.getParameters().getGranularity(),
- queryParam.getParameters().getGranularity(), queryParam.getDataSource(), whereOfTime, whereOfExactly, protocolStr, "%",
- protocolStr.length() - protocolStr.replaceAll("\\.", "").length(), queryParam.getParameters().getGranularity());
-
- }
-
- private String generateNetworkStatSummarySql(DSLObject.QueryBean queryParam, String sqlTemplate) {
- String whereOfTime = getWhereOfTime(queryParam.getParameters());
- String whereOfExactly = getWhereFilter(queryParam.getParameters(), false, null);
- return String.format(sqlTemplate,
- whereOfTime, whereOfExactly);
- }
-
- private String getWhereOfTime(DSLObject.QueryBean.Parameters parameters) {
- if (CollectionUtils.isEmpty(parameters.getIntervals())) {
- return StringUtil.EMPTY;
- }
- StringBuffer whereOfTime = new StringBuffer();
- String[] intervals = getIntervals(parameters.getIntervals());
-
- whereOfTime.append("__time >= DATE_FORMAT(FROM_UNIXTIME(UNIX_TIMESTAMP('").append(intervals[0]).append("')), '%Y-%m-%d %H:%i:%s')")
- .append("AND __time < DATE_FORMAT(FROM_UNIXTIME(UNIX_TIMESTAMP( '").append(intervals[1]).append("')), '%Y-%m-%d %H:%i:%s')");
- return whereOfTime.toString();
- }
-
- private String[] getIntervals(List<String> intervals) {
- return intervals.get(0).split("/");
- }
-
- private String getWhereFilter(DSLObject.QueryBean.Parameters parameters, boolean isLogicalField, List<String> skipFields) {
- List<String> filter = Lists.newArrayList();
- if (!CollectionUtils.isEmpty(parameters.getMatch())) {
- filter.addAll(parseMatch(parameters.getMatch(), isLogicalField, skipFields));
- }
- if (!CollectionUtils.isEmpty(parameters.getRange())) {
- filter.addAll(parseRange(parameters.getRange(), isLogicalField, skipFields));
- }
- String join = String.join(" ) AND ( ", filter);
- return StringUtil.isEmpty(join) ? StringUtil.EMPTY : " AND " + " ( " + join + " ) ";
- }
-
- public List<String> parseRange(List<DSLObject.QueryBean.Parameters.FilterBean> ranges, boolean isLogicalField, List<String> skipFields) {
- if (ObjectUtil.isEmpty(ranges)) {
- return Lists.newArrayList();
- }
- List<String> list = Lists.newArrayList();
- for (DSLObject.QueryBean.Parameters.FilterBean range : ranges) {
- if (StringUtil.isNotEmpty(skipFields) && skipFields.contains(range.getFieldKey())) {
- continue;
- }
- List<String> values = Lists.newArrayList();
- String fieldKey = range.getFieldKey();
- fieldKey = isLogicalField && LOGICAL_FIELD.containsKey(fieldKey) ? LOGICAL_FIELD.get(fieldKey) : fieldKey;
- String type = range.getType();
- List<String> fieldValues = range.getFieldValues();
- for (String value : fieldValues) {
- if (!NumberUtil.isNumber(value)) {
- value = "'".concat(ComDSLParse.encode(value)).concat("'");
- }
- values.add(ComDSLParse.parseRange(value, type));
- }
- String join = String.join((RangeTypeEnum.NE.getType().equalsIgnoreCase(type) ? " AND " : " OR ") + fieldKey, values);
- if (StringUtil.isNotBlank(join)) {
- list.add(fieldKey + join);
- }
- }
- return list;
- }
-
- public List<String> parseMatch(List<DSLObject.QueryBean.Parameters.FilterBean> matches, boolean isLogicalField, List<String> skipFields) {
- if (ObjectUtil.isEmpty(matches)) {
- return Lists.newArrayList();
- }
- List<String> list = Lists.newArrayList();
- for (DSLObject.QueryBean.Parameters.FilterBean match : matches) {
- if (StringUtil.isNotEmpty(skipFields) && skipFields.contains(match.getFieldKey())) {
- continue;
- }
- List<String> values = Lists.newArrayList();
- String fieldKey = match.getFieldKey();
- fieldKey = isLogicalField && LOGICAL_FIELD.containsKey(fieldKey) ? LOGICAL_FIELD.get(fieldKey) : fieldKey;
- String type = match.getType();
- List<String> fieldValues = match.getFieldValues();
- for (String value : fieldValues) {
- values.add(ComDSLParse.parseMatch(ComDSLParse.encode(value), type));
- }
- String join = String.join(" OR " + fieldKey, values);
- if (StringUtil.isNotBlank(join)) {
- list.add(fieldKey + join);
- }
- }
- return list;
- }
-
- private String generateProtocolTreeSql(DSLObject.QueryBean queryParam, String sqlTemplate) {
- String whereOfTime = getWhereOfTime(queryParam.getParameters());
- String whereOfExactly = getWhereFilter(queryParam.getParameters(), false, null);
- return String.format(sqlTemplate,
- queryParam.getDataSource(), whereOfTime, whereOfExactly);
- }
-
- @Override
- public List<ProtocolTree> buildFlatStructure(List<Map> protocolData) {
- List<ProtocolTree> nodes = Lists.newArrayList();
- for (Map protocolMap : protocolData) {
- ProtocolTree protocolTree = convertStringToObject(JSON.toJSONString(protocolMap, JSONWriter.Feature.LargeObject));
- if (StringUtil.isNotEmpty(protocolTree)) {
- nodes.add(protocolTree);
- }
-
- }
- ProtocolTree root = new ProtocolTree(Constants.PROTOCOL_TREE_ROOT_NAME, Constants.PROTOCOL_TREE_ROOT_NAME, null);
- Map<String, Long> protocolsAndApplications = nodes.stream().collect(Collectors.groupingBy(ProtocolTree::getName, Collectors.counting()));
- root.addMetric(Constants.PROTOCOL_TREE_METRIC_ENCAPSULATION_PATHS, nodes.size());
- root.addMetric(Constants.PROTOCOL_TREE_METRIC_PROTOCOLS_AND_APPLICATIONS, protocolsAndApplications.size());
- List<ProtocolTree> roots = nodes.stream().filter(o -> StringUtil.isBlank(o.getParentId())).collect(Collectors.toList());
- roots.forEach(item -> {
- root.setSentBytes(root.getSentBytes() + item.getSentBytes());
- root.setReceivedBytes(root.getReceivedBytes() + item.getReceivedBytes());
- root.setTotalSentBytes(root.getTotalSentBytes() + item.getSentBytes());
- root.setTotalReceivedBytes(root.getTotalReceivedBytes() + item.getReceivedBytes());
- });
- nodes.forEach(item -> {
- item.setId(Constants.PROTOCOL_TREE_ROOT_NAME + Constants.ENCAPSULATION_PATH_SEPARATOR + item.getId());
- });
- nodes.add(root);
- return nodes;
- }
-
- private Map<String, String> executeQuery(String sql) {
- try {
- sql = URLEncoder.encode(sql, "utf-8").replaceAll("\\+", "%20");
- } catch (UnsupportedEncodingException e) {
- log.error("sql Encode error: ", e);
- }
- String queryURL = URLUtil.normalize(localHostAddress + ":" + getPort() + "/sql?query=");
- int socketTimeOut = httpConfig.getServerResponseTimeOut();
- return httpClientService.httpGet(queryURL + sql, socketTimeOut);
- }
-
- private ProtocolTree convertStringToObject(String protocolString) {
- ProtocolTree protocolTree = null;
- try {
- Map<String, Object> results = JSON.parseObject(protocolString, Map.class);
- String protocolId = String.valueOf(results.get("protocol_stack_id"));
- if (StringUtil.isBlank(protocolId)) {
- log.warn("Protocol Stack ID is NULL {}", protocolString);
- return null;
- }
- long sessions = Long.parseLong(Optional.ofNullable(
- results.get(Constants.PROTOCOL_TREE_METRIC_SESSIONS)).orElse(0).toString());
- long sentBytes = Long.parseLong(Optional.ofNullable(
- results.get(Constants.PROTOCOL_TREE_METRIC_C2S_BYTES)).orElse(0).toString());
- long receivedBytes = Long.parseLong(Optional.ofNullable(
- results.get(Constants.PROTOCOL_TREE_METRIC_S2C_BYTES)).orElse(0).toString());
- long sentPackets = Long.parseLong(Optional.ofNullable(
- results.get(Constants.PROTOCOL_TREE_METRIC_C2S_PKTS)).orElse(0).toString());
- long receivedPackets = Long.parseLong(Optional.ofNullable(
- results.get(Constants.PROTOCOL_TREE_METRIC_S2C_PKTS)).orElse(0).toString());
- List<String> protocols = Splitter.on(Constants.ENCAPSULATION_PATH_SEPARATOR).omitEmptyStrings().splitToList(protocolId);
- String protocolName = StringUtil.isNotEmpty(protocols) ? protocols.get(protocols.size() - 1) : null;
- protocolTree = new ProtocolTree(protocolId, protocolName, null);
- protocolTree.setSentBytes(sentBytes);
- protocolTree.setReceivedBytes(receivedBytes);
- protocolTree.getMetrics().put(Constants.PROTOCOL_TREE_METRIC_SENT_PACKETS, sentPackets);
- protocolTree.getMetrics().put(Constants.PROTOCOL_TREE_METRIC_RECEIVED_PACKETS, receivedPackets);
- protocolTree.getMetrics().put(Constants.PROTOCOL_TREE_METRIC_SESSIONS, sessions);
- } catch (BusinessException e) {
- log.error("Convert Json String to Protocol Tree Object Error ", e);
- }
-
- return protocolTree;
- }
-
- @Override
- public List<ProtocolTree> buildHierarchicalStructure(List<ProtocolTree> nodes) {
-
- List<ProtocolTree> targetNodes = groupByName(nodes);
-
- targetNodes.sort((o1, o2) -> {
- long numThis = o1.getReceivedBytes() + o1.getSentBytes();
- long numParam = o2.getReceivedBytes() + o2.getSentBytes();
- if (numThis > numParam) {
- return -1;
- } else if (numThis < numParam) {
- return 1;
- } else {
- return 0;
- }
- });
- Map<String, List<ProtocolTree>> protocolMap = Maps.newLinkedHashMap();
- List<ProtocolTree> roots = targetNodes.stream().filter(o -> StringUtil.isBlank(o.getParentId())).collect(Collectors.toList());
- protocolMap.put(Constants.PROTOCOLS_PARENT_ID, roots);
- targetNodes.stream().filter(o -> StringUtil.isNotBlank(o.getParentId())).forEach(x -> {
- if (StringUtil.isEmpty(protocolMap.get(x.getParentId()))) {
- protocolMap.put(x.getParentId(), Lists.newArrayList(x));
- } else {
- protocolMap.get(x.getParentId()).add(x);
- }
- });
-
- List<ProtocolTree> protocolTreeList = TreeUtils.mergeTree(protocolMap, Constants.PROTOCOLS_PARENT_ID, ProtocolTree::getId, ProtocolTree::setChildrens);
- for (ProtocolTree protocolTree : protocolTreeList) {
- List<ProtocolTree> ethernetList = protocolTree.getChildrens().stream().filter(
- p -> Constants.PROTOCOL_ETHERNET_NODE.equals(p.getName())).collect(Collectors.toList());
- if (protocolTree.getChildrens().size() != ethernetList.size()) {
- protocolTree.setChildrens(ethernetList);
- protocolTree.setReceivedBytes(StringUtil.isEmpty(ethernetList.get(0)) ? 0 : ethernetList.get(0).getReceivedBytes());
- protocolTree.setSentBytes(StringUtil.isEmpty(ethernetList.get(0)) ? 0 : ethernetList.get(0).getSentBytes());
- log.warn("Protocol Tree Exceptional data occurs and non-{} protocols are filtered out: {}", Constants.PROTOCOL_ETHERNET_NODE, protocolTree.getChildrens());
- }
- }
-
- return protocolTreeList;
-
- }
-
- @Override
- public BaseResult buildAppData(DSLObject dslObject) {
- String whereOfExactly = getWhereFilter(dslObject.getQuery().getParameters(), false, null);
- String[] intervals = getIntervals(dslObject.getQuery().getParameters().getIntervals());
- String whereOfTime = "__time >= DATE_FORMAT(FROM_UNIXTIME(UNIX_TIMESTAMP('".concat(intervals[0]).concat("')), '%Y-%m-%d %H:%i:%s')")
- .concat("AND __time < DATE_FORMAT(FROM_UNIXTIME(UNIX_TIMESTAMP( '").concat(intervals[1]).concat("')), '%Y-%m-%d %H:%i:%s')");
- String granularity = dslObject.getQuery().getParameters().getGranularity();
- granularity = StringUtil.isEmpty(granularity) ? "PT15S" : granularity;
- Period period = Period.parse(granularity);
- Integer limit = dslObject.getQuery().getLimit();
- Integer offset = dslObject.getQuery().getOffset();
- String sql = String.format(Objects.requireNonNull(environment.getProperty("APP_DATA_SUMMARY")),
- period.toStandardSeconds().getSeconds(), dslObject.getQuery().getDataSource(), whereOfTime, whereOfExactly, StringUtil.isEmpty(limit) ? "" : " LIMIT " + ((StringUtil.isEmpty(offset)) ? "" : offset + " ,") + limit);
- Map<String, String> dataResult = executeQuery(sql);
- if (String.valueOf(ResultStatusEnum.SUCCESS.getCode()).equals(dataResult.get("status"))) {
- return JSON.parseObject(dataResult.get("result"), BaseResult.class);
- } else {
- throw new BusinessException(dataResult.get("result"));
- }
- }
-
- @Override
- public BaseResult buildInternalIPData(DSLObject dslObject) {
- String whereOfExactly = getWhereFilter(dslObject.getQuery().getParameters(), true, null);
- String[] intervals = getIntervals(dslObject.getQuery().getParameters().getIntervals());
- String whereOfTime = "recv_time >= UNIX_TIMESTAMP('" + intervals[0] + "') AND recv_time < UNIX_TIMESTAMP('" + intervals[1] + "')";
- Integer limit = dslObject.getQuery().getLimit();
- Integer offset = dslObject.getQuery().getOffset();
- String sql = String.format(Objects.requireNonNull(environment.getProperty("APP_INTERNAL_IP_SUMMARY")),
- dslObject.getQuery().getDataSource(), whereOfTime, whereOfExactly, StringUtil.isEmpty(limit) ? "" : " LIMIT " + ((StringUtil.isEmpty(offset)) ? "" : offset + " ,") + limit);
- Map<String, String> dataResult = executeQuery(sql);
- if (String.valueOf(ResultStatusEnum.SUCCESS.getCode()).equals(dataResult.get("status"))) {
- return JSON.parseObject(dataResult.get("result"), BaseResult.class);
- } else {
- throw new BusinessException(dataResult.get("result"));
- }
- }
-
- @Override
- public BaseResult buildAppDataRate(DSLObject dslObject) {
- BaseResult baseResult;
- DSLObject.QueryBean.Parameters parameters = dslObject.getQuery().getParameters();
- String whereOfExactly = getWhereFilter(parameters, false, null);
- String whereOfTime = getWhereOfTime(parameters);
- String sql = String.format(environment.getProperty("APP_DATA_RATE_SUMMARY"),
- parameters.getGranularity(),
- dslObject.getQuery().getDataSource(),
- whereOfTime,
- whereOfExactly,
- parameters.getGranularity()
- );
- Map<String, String> result = executeQuery(sql);
- if (String.valueOf(ResultStatusEnum.SUCCESS.getCode()).equals(result.get("status"))) {
- Map resultMap = JSON.parseObject(result.get("result"), Map.class);
- List<Map> data = (List<Map>) resultMap.get("data");
- baseResult = BaseResultGenerator.success("ok", data, (Map) resultMap.get("statistics"));
- } else {
- throw new BusinessException(result.get("result"));
- }
- return baseResult;
- }
-
- @Override
- public BaseResult buildAppTraffic(DSLObject dslObject) {
- BaseResult baseResult;
- DSLObject.QueryBean.Parameters parameters = dslObject.getQuery().getParameters();
- String whereOfExactly = getWhereFilter(parameters, false, null);
- String whereOfTime = getWhereOfTime(parameters);
- String sql = String.format(environment.getProperty("APP_TRAFFIC_SUMMARY"),
- dslObject.getQuery().getDataSource(),
- whereOfTime,
- whereOfExactly
- );
- Map<String, String> result = executeQuery(sql);
- if (String.valueOf(ResultStatusEnum.SUCCESS.getCode()).equals(result.get("status"))) {
- Map resultMap = JSON.parseObject(result.get("result"), Map.class);
- List<Map> data = (List<Map>) resultMap.get("data");
- baseResult = BaseResultGenerator.success("ok", data, (Map) resultMap.get("statistics"));
- } else {
- throw new BusinessException(result.get("result"));
- }
- return baseResult;
- }
-
- /**
- * 基于Name聚合,统计Protocol和Application在全栈下的流量
- * @param nodes
- * @return
- */
- private List<ProtocolTree> groupByName(List<ProtocolTree> nodes) {
- Map<String, List<ProtocolTree>> resultMap = nodes.stream()
- .collect(Collectors.groupingBy(ProtocolTree::getName, Collectors.collectingAndThen(Collectors.toList(), protocolTreeList -> {
-
- List<ProtocolTree> parentNodes = protocolTreeList.stream()
- .filter(p -> StringUtil.isBlank(p.getParentId()) || !Splitter.on(".").omitEmptyStrings().splitToList(p.getParentId()).contains(p.getName()))
- .collect(Collectors.toList());
- Map<String, Long> summedMetrics = Maps.newLinkedHashMap();
- Map<String, Long> totalBytesMetrics = Maps.newLinkedHashMap();
- totalBytesMetrics.put(Constants.PROTOCOL_TREE_METRIC_TOTAL_SENT_BYTES, 0L);
- totalBytesMetrics.put(Constants.PROTOCOL_TREE_METRIC_TOTAL_RECEIVED_BYTES, 0L);
-
- parentNodes.stream().filter(p -> !p.getName().equalsIgnoreCase(Constants.PROTOCOL_TREE_ROOT_NAME)).forEach(protocolTree -> {
- totalBytesMetrics.put(Constants.PROTOCOL_TREE_METRIC_TOTAL_SENT_BYTES, totalBytesMetrics.get(Constants.PROTOCOL_TREE_METRIC_TOTAL_SENT_BYTES) + protocolTree.getSentBytes());
- totalBytesMetrics.put(Constants.PROTOCOL_TREE_METRIC_TOTAL_RECEIVED_BYTES, totalBytesMetrics.get(Constants.PROTOCOL_TREE_METRIC_TOTAL_RECEIVED_BYTES) + protocolTree.getReceivedBytes());
- for (String key : protocolTree.getMetrics().keySet()) {
- String totalKey = StrUtil.upperFirstAndAddPre(key, "total");
- if (StringUtil.isEmpty(summedMetrics.get(totalKey))) {
- summedMetrics.put(totalKey, Long.valueOf(protocolTree.getMetrics().get(key).toString()));
- } else {
- summedMetrics.put(totalKey, summedMetrics.get(totalKey)
- + Long.parseLong(protocolTree.getMetrics().get(key).toString()));
- }
- }
- });
-
- protocolTreeList.stream().filter(p -> !p.getName().equalsIgnoreCase(Constants.PROTOCOL_TREE_ROOT_NAME)).forEach(protocolTree -> {
- protocolTree.setTotalSentBytes(totalBytesMetrics.get(Constants.PROTOCOL_TREE_METRIC_TOTAL_SENT_BYTES));
- protocolTree.setTotalReceivedBytes(totalBytesMetrics.get(Constants.PROTOCOL_TREE_METRIC_TOTAL_RECEIVED_BYTES));
- protocolTree.getMetrics().putAll(summedMetrics);
- });
-
- return protocolTreeList;
- })));
-
- List<ProtocolTree> protocolTreeList = Lists.newArrayList();
- resultMap.values().forEach(o -> {
- protocolTreeList.addAll(o);
- });
-
- return protocolTreeList;
- }
-}
diff --git a/src/main/java/com/mesalab/qgw/aspect/AuditLogAspect.java b/src/main/java/com/mesalab/qgw/aspect/AuditLogAspect.java
index a25fa48a..ff562c8b 100644
--- a/src/main/java/com/mesalab/qgw/aspect/AuditLogAspect.java
+++ b/src/main/java/com/mesalab/qgw/aspect/AuditLogAspect.java
@@ -1,21 +1,21 @@
package com.mesalab.qgw.aspect;
import cn.hutool.core.thread.ThreadUtil;
-import cn.hutool.crypto.digest.DigestUtil;
+import cn.hutool.core.util.StrUtil;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.fastjson2.JSON;
import com.mesalab.common.entity.BaseResult;
-import com.mesalab.common.utils.QueryCacheUtils;
-import com.mesalab.common.utils.HttpHelper;
-import com.mesalab.qgw.model.basic.QueryProfile;
+import com.mesalab.common.utils.MDCUtil;
+import com.mesalab.qgw.model.basic.EngineConfigSource;
+import com.mesalab.qgw.model.basic.SQLQueryContext;
import com.mesalab.qgw.model.basic.AuditLog;
import com.mesalab.qgw.model.basic.AuditServiceLog;
import com.geedgenetworks.utils.IPUtil;
-import com.geedgenetworks.utils.StringUtil;
import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.annotation.*;
import org.aspectj.lang.reflect.MethodSignature;
+import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;
@@ -23,7 +23,6 @@ import org.springframework.web.context.request.ServletRequestAttributes;
import javax.servlet.http.HttpServletRequest;
import java.lang.reflect.Method;
import java.util.Arrays;
-import java.util.Optional;
@Aspect
@@ -33,78 +32,79 @@ public class AuditLogAspect {
private ThreadLocal<Long> startTime = ThreadUtil.createThreadLocal(false);
private ThreadLocal<AuditServiceLog> auditLog = ThreadUtil.createThreadLocal(false);
private static final Log log = LogFactory.get();
+ private EngineConfigSource engineConfigSource;
@Pointcut("@annotation(com.mesalab.qgw.model.basic.AuditLog)")
- public void annotationPointCut(){}
+ public void annotationPointCut() {
+ }
@Before("annotationPointCut()")
public void doBefore(JoinPoint joinPoint) {
-
startTime.set(System.currentTimeMillis());
ServletRequestAttributes attributes = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes();
- HttpServletRequest request = attributes.getRequest();
- String url = request.getRequestURL().toString();
- String method = request.getMethod();
- String remoteAddr = request.getRemoteAddr();
- String clientIp = IPUtil.getIpAddr(request);
+ AuditServiceLog auditServiceLog = new AuditServiceLog();
+ String url = null;
+ if (attributes != null) {
+ HttpServletRequest request = attributes.getRequest();
+ url = request.getRequestURL().toString();
+ String method = request.getMethod();
+ String remoteAddr = request.getRemoteAddr();
+ String clientIp = IPUtil.getIpAddr(request);
+ auditServiceLog.setRemoteAddr(remoteAddr);
+ auditServiceLog.setClientIp(clientIp);
+ auditServiceLog.setMethod(method);
+ }
String classMethod = joinPoint.getSignature().getDeclaringTypeName() + "." + joinPoint.getSignature().getName();
- Object paramObject = joinPoint.getArgs()[0];
-
- MethodSignature sign = (MethodSignature)joinPoint.getSignature();
+ MethodSignature sign = (MethodSignature) joinPoint.getSignature();
Method method2 = sign.getMethod();
AuditLog annotation = method2.getAnnotation(AuditLog.class);
- AuditServiceLog auditServiceLog = new AuditServiceLog();
+ auditServiceLog.setRequestParam(Arrays.toString(joinPoint.getArgs()));
+ auditServiceLog.setUrl(url);
+ auditServiceLog.setClassMethod(classMethod);
auditServiceLog.setAnnotation(annotation.value());
- auditServiceLog.setRemoteAddr(remoteAddr);
- auditServiceLog.setClientIp(clientIp);
- auditServiceLog.setMethod(method);
- if (log.isDebugEnabled()) {
- auditServiceLog.setParam(Optional.ofNullable(paramObject).orElse("").toString());
- auditServiceLog.setUrl(url);
- auditServiceLog.setClassMethod(classMethod);
- }
- auditServiceLog.setCacheKey(HttpHelper.generateCacheKey(request));
- auditServiceLog.setQueryKey(DigestUtil.md5Hex(StringUtil.createUUID()));
+ auditServiceLog.setRequestId(MDCUtil.getTraceId());
auditLog.set(auditServiceLog);
}
@AfterThrowing(value = "annotationPointCut()", throwing = "exception")
- public void doAfterThrowingAdvice(JoinPoint joinPoint, Throwable exception){
- AuditServiceLog logAfter = auditLog.get();
- logAfter.setQueryKey("");
- logAfter.setParam(Arrays.toString(joinPoint.getArgs()));
- log.error("Audit Log [error]:{}", JSON.toJSONString(logAfter));
+ public void doAfterThrowingAdvice(JoinPoint joinPoint, Throwable exception) {
+ AuditServiceLog logAfter = auditLog.get();
+ logAfter.setRequestParam(Arrays.toString(joinPoint.getArgs()));
+ log.error("Audit Log [error]: {}, reason {}", JSON.toJSONString(logAfter), exception.getMessage());
}
@After(value = "annotationPointCut()")
public void after(JoinPoint joinPoint) {
- if (joinPoint.getArgs()[0] instanceof QueryProfile) {
- QueryProfile param = (QueryProfile) joinPoint.getArgs()[0];
- auditLog.get().setDbType(param.getDbType());
- }
+ AuditServiceLog logAfter = auditLog.get();
+ Object[] args = joinPoint.getArgs();
+ if (args != null && args.length > 0 && args[0] instanceof SQLQueryContext) {
+ SQLQueryContext queryContext = (SQLQueryContext) joinPoint.getArgs()[0];
+ auditLog.get().setDbEngine(queryContext.getDbEngine());
+ }
+ long elapsed = System.currentTimeMillis() - startTime.get();
+ logAfter.setElapsed(elapsed);
+ String logMessage = StrUtil.format("Audit Log [done]: {}", JSON.toJSONString(logAfter));
+ log.info(logMessage);
+ if (elapsed > engineConfigSource.getHighLatencyThreshold()) {
+ log.warn(logMessage);
+ }
}
-
-
@AfterReturning(returning = "result", pointcut = "annotationPointCut()")
public void doAfterReturning(Object result) {
- AuditServiceLog logAfter = auditLog.get();
- if (StringUtil.isNotEmpty(result)) {
- QueryCacheUtils.put(logAfter.getCacheKey(), result);
- }
- if(result instanceof BaseResult){
+ AuditServiceLog logAfter = auditLog.get();
+ if (result instanceof BaseResult) {
BaseResult baseResult = (BaseResult) result;
- baseResult.setQueryKey(logAfter.getQueryKey());
+ baseResult.setRequestId(logAfter.getRequestId());
}
- Long exeTime = System.currentTimeMillis() - startTime.get();
- logAfter.setExeTime(exeTime);
-
- log.info("Audit Log [completed]:{}", JSON.toJSONString(logAfter));
-
+ log.debug("Audit Log [success]: result {}", result);
}
-
+ @Autowired
+ private void setEngineConfigSource(EngineConfigSource engineConfigSource) {
+ this.engineConfigSource = engineConfigSource;
+ }
}
diff --git a/src/main/java/com/mesalab/qgw/benchmark/DialectWriter.java b/src/main/java/com/mesalab/qgw/benchmark/DialectWriter.java
index 1f1fd839..6c6ec377 100644
--- a/src/main/java/com/mesalab/qgw/benchmark/DialectWriter.java
+++ b/src/main/java/com/mesalab/qgw/benchmark/DialectWriter.java
@@ -5,11 +5,10 @@ import cn.hutool.core.io.FileUtil;
import cn.hutool.core.io.file.FileWriter;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
-import com.alibaba.nacos.api.config.ConfigService;
+import com.alibaba.fastjson2.JSON;
import com.google.common.collect.Lists;
-import com.mesalab.common.nacos.NacosConfig;
import com.geedgenetworks.utils.DateUtils;
-import com.mesalab.services.service.SQLDatasetService;
+import com.mesalab.qgw.service.DatasetService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@@ -17,7 +16,6 @@ import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
-import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@@ -25,16 +23,10 @@ import java.util.Map;
public class DialectWriter extends Writer {
private static final Log log = LogFactory.get();
- private static final String EXECUTE_ENGINE= "execute_engine";
+ private static final String BACKEND_ENGINE= "backend_engine";
private static final String TEMPLATE= "template";
-
- @Autowired
- private NacosConfig nacosConfig;
-
- @Autowired
- private ConfigService systemConfigService;
@Autowired
- private SQLDatasetService sqlDatasetService;
+ DatasetService datasetService;
@Override
public String buildPocSQL(List<String> dialectList, String dialect, String category) {
@@ -44,13 +36,13 @@ public class DialectWriter extends Writer {
+ File.separator + "benchmark" + File.separator + dialect + "_queries" + "_"
+ DateUtils.getCurrentDate(DateUtils.YYYYMMDD) + ".sql";
if (CollectionUtil.isEmpty(dialectList) && !Files.exists(Paths.get(path))) {
- Map<String, Object> datasetResult = sqlDatasetService.getDatasets(Lists.newArrayList(), category, null);
- ArrayList<Map<String, String>> datasetList = (ArrayList<Map<String, String>>) datasetResult.get("list");
+ List<Map<String, Object>> datasetList = datasetService.getDatasets(Lists.newArrayList(), category, null);
dialectList = Lists.newLinkedList();
if (CollectionUtil.isNotEmpty(datasetList)) {
for (int i = 0; i < datasetList.size(); i++) {
- if (datasetList.get(i).get(EXECUTE_ENGINE).equalsIgnoreCase(dialect)) {
- dialectList.add(datasetList.get(i).get(TEMPLATE));
+ if (String.valueOf(datasetList.get(i).get(BACKEND_ENGINE)).equalsIgnoreCase(dialect)) {
+ Map map = JSON.parseObject(String.valueOf(datasetList.get(i).get(TEMPLATE)), Map.class);
+ dialectList.add(String.valueOf(map.get("statement")));
}
}
}
diff --git a/src/main/java/com/mesalab/qgw/benchmark/Writer.java b/src/main/java/com/mesalab/qgw/benchmark/Writer.java
index 63a37ba7..c0298307 100644
--- a/src/main/java/com/mesalab/qgw/benchmark/Writer.java
+++ b/src/main/java/com/mesalab/qgw/benchmark/Writer.java
@@ -1,7 +1,6 @@
package com.mesalab.qgw.benchmark;
import cn.hutool.core.io.IORuntimeException;
-import cn.hutool.core.io.file.FileReader;
import cn.hutool.core.io.file.FileWriter;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
@@ -14,15 +13,14 @@ import com.jayway.jsonpath.JsonPathException;
import com.mesalab.common.nacos.NacosConfig;
import com.mesalab.common.nacos.NacosConst;
import com.geedgenetworks.utils.StringUtil;
-import com.mesalab.qgw.service.MetadataService;
-import com.mesalab.services.service.SQLDatasetService;
+import com.mesalab.qgw.service.DatabaseService;
+import com.mesalab.qgw.service.DatasetService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import java.io.File;
import java.io.IOException;
-import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
@@ -33,38 +31,12 @@ public abstract class Writer {
@Autowired
NacosConfig nacosConfig;
@Autowired
- MetadataService metadataService;
+ DatabaseService databaseService;
@Autowired
- SQLDatasetService sqlDatasetService;
+ DatasetService datasetService;
private static final Log log = LogFactory.get();
- public static final boolean IS_TITLE = false;
- public static final String TITLE_PREFIX = "--Q";
- public static final String LABEL_COMMON_FILTER = "@common_filter";
- public static final String LABEL_INDEX_FILTER = "@index_filter";
- public static final String LABEL_TIME_START = "@start";
- public static final String LABEL_TIME_END = "@end";
- public Map<String, List<String>> getFilterOptions(String jsonString) {
- Map<String, List<String>> environment = Maps.newLinkedHashMap();
- try {
- List<String> startTime = JsonPath.read(jsonString, "$.filters[?(@.name=='@start')].value");
- List<String> endTime = JsonPath.read(jsonString, "$.filters[?(@.name=='@end')].value");
- environment.put(LABEL_TIME_START, startTime);
- environment.put(LABEL_TIME_END, endTime);
- List<List<String>> commonFilter = JsonPath.read(jsonString, "$.filters[?(@.name=='@common_filter')].value");
- List<List<String>> indexFilter = JsonPath.read(jsonString, "$.filters[?(@.name=='@index_filter')].value");
- if (StringUtil.isNotEmpty(commonFilter)) {
- environment.put(LABEL_COMMON_FILTER, commonFilter.stream().findFirst().get());
- }
- if (StringUtil.isNotEmpty(indexFilter)) {
- environment.put(LABEL_INDEX_FILTER, indexFilter.stream().findFirst().get());
- }
- } catch (JsonPathException e) {
- log.error(" Poc SQL is not found {} ", e);
- }
- return environment;
- }
protected String generatePocSQL(List<String> list, FileWriter writer) {
String pocSQL = "";
@@ -73,14 +45,14 @@ public abstract class Writer {
writer.getFile().createNewFile();
}
List<LinkedHashMap> variables = Lists.newArrayList();
- Object codeInfo = metadataService.getCfg(NacosConst.SQL_DATASETS_VARIABLES);
+ Object codeInfo = databaseService.getCfg(NacosConst.DATASETS_VARIABLES);
if (StringUtil.isNotEmpty(codeInfo)) {
Object json = JSON.toJSON(codeInfo);
- variables = (List<LinkedHashMap>) JSON.parseArray(json.toString(), LinkedHashMap.class);
+ variables = JSON.parseArray(json.toString(), LinkedHashMap.class);
}
List<String> resultLines = Lists.newArrayList();
- for (String line : list){
- String sql = sqlDatasetService.buildExecSQL(variables, line);
+ for (String line : list) {
+ String sql = datasetService.buildExecSQL(variables, line);
resultLines.add(sql);
}
writer.appendLines(resultLines);
@@ -92,52 +64,6 @@ public abstract class Writer {
return pocSQL;
}
-
- @Value("${nacos.config.server-addr}")
- private String nacosServerAddr;
-
- protected String buildSnapshotDataFilePath(String dataId) {
- StringBuilder filePath = new StringBuilder(buildPrefixPath());
- filePath.append(NacosConst.SNAPSHOT_TENANT);
- filePath.append(File.separator);
- filePath.append(nacosConfig.getNamespace());
- filePath.append(File.separator);
- filePath.append(nacosConfig.getGroup());
- filePath.append(File.separator);
- filePath.append(dataId);
- log.info("dataId:{} snapshot path:{}", dataId, filePath.toString());
- return filePath.toString();
- }
-
- protected String buildLocalDataFilePath(String dataId) {
- StringBuilder filePath = new StringBuilder(buildPrefixPath());
- filePath.append(NacosConst.NACOS_LOCAL_DATA);
- filePath.append(File.separator);
- filePath.append(NacosConst.CONFIG_DATA_TENANT);
- filePath.append(File.separator);
- filePath.append(nacosConfig.getNamespace());
- filePath.append(File.separator);
- filePath.append(nacosConfig.getGroup());
- filePath.append(File.separator);
- filePath.append(dataId);
- log.info("dataId:{} local data path:{}", dataId, filePath.toString());
- return filePath.toString();
- }
-
- private String buildPrefixPath() {
- StringBuilder filePath = new StringBuilder(LocalConfigInfoProcessor.LOCAL_SNAPSHOT_PATH);
- filePath.append(File.separator);
- filePath.append(NacosConst.FIXED);
- filePath.append(NacosConst.RAMPANT);
- nacosServerAddr = nacosServerAddr.replaceAll(NacosConst.COLON, NacosConst.UNDERLINE);
- filePath.append(nacosServerAddr).append(NacosConst.RAMPANT);
- filePath.append(nacosConfig.getNamespace());
- filePath.append(NacosConst.UNDERLINE);
- filePath.append(NacosConst.NACOS);
- filePath.append(File.separator);
- return filePath.toString();
- }
-
protected abstract String buildPocSQL(List<String> dialectList, String dialect, String category);
diff --git a/src/main/java/com/mesalab/qgw/constant/DataTypeConst.java b/src/main/java/com/mesalab/qgw/constant/DataTypeConst.java
new file mode 100644
index 00000000..bc3e1b62
--- /dev/null
+++ b/src/main/java/com/mesalab/qgw/constant/DataTypeConst.java
@@ -0,0 +1,23 @@
+package com.mesalab.qgw.constant;
+
+/**
+ * TODO
+ *
+ * @Classname DataTypeConst
+ * @Date 2024/3/4 10:22
+ * @Author wWei
+ */
+public class DataTypeConst {
+ public static final String TIMESTAMP = "timestamp";
+ public static final String TIMESTAMP_FORMAT = "timestamp_format";
+ public static final String TIMESTAMP_MS_FORMAT = "timestamp_ms_format";
+ public static final String UNIX_TIMESTAMP = "unix_timestamp";
+ public static final String UNIX_TIMESTAMP_MS = "unix_timestamp_ms";
+ public static final String DATE_TIME_64 = "datetime64";
+ public static final String DATE = "date";
+ public static final String IP = "ip";
+ public static final String DOMAIN = "domain";
+ public static final String FQDN = "fqdn";
+ public static final String APP = "app";
+ public static final String HDR_HISTOGRAM = "hdr_histogram";
+}
diff --git a/src/main/java/com/mesalab/qgw/constant/DslIdentifierNameConst.java b/src/main/java/com/mesalab/qgw/constant/DslIdentifierNameConst.java
index cb2f6dd2..90115236 100644
--- a/src/main/java/com/mesalab/qgw/constant/DslIdentifierNameConst.java
+++ b/src/main/java/com/mesalab/qgw/constant/DslIdentifierNameConst.java
@@ -1,5 +1,7 @@
package com.mesalab.qgw.constant;
+import java.util.*;
+
/**
* TODO
*
@@ -15,6 +17,34 @@ public class DslIdentifierNameConst {
public static final String APPLICATION_AND_PROTOCOL_APP_RELATED_INTERNAL_IPS = "application-and-protocol-app-related-internal-ips";
public static final String APPLICATION_AND_PROTOCOL_APP_THROUGHPUT = "application-and-protocol-app-throughput";
public static final String APPLICATION_AND_PROTOCOL_APP_SUMMARY = "application-and-protocol-app-summary";
- public static final String REAL_TIME_DATA_ANALYTICS_SUBSCRIBER_ID_RELATE_IP = "real-time-data-analytics-subscriber-id-relate-ip";
- public static final String REAL_TIME_DATA_ANALYTICS_MOBILE_IDENTITY_RELATE_TEID = "real-time-data-analytics-mobile-identity-relate-teid";
+ public static final String IP_LEARNING_FQDN_RELATE_IP = "ip-learning-fqdn-relate-ip";
+ public static final String IP_LEARNING_ACTIVE_IP = "ip-learning-active-ip";
+ public static final String FIELD_DISCOVERY = "field_discovery";
+ public static final String DATAPATH_PACKET_COMBINE = "datapath_telemetry_packet_combine";
+ public static final String TRAFFIC_SPECTRUM_SUMMARY = "traffic-spectrum-summary";
+ public static final String TRAFFIC_SPECTRUM_UNIQUE_CLIENT_AND_SERVER_IPS = "traffic-spectrum-unique-client-and-server-ips";
+ public static final String TRAFFIC_SPECTRUM_APP_DISTRIBUTION = "traffic-spectrum-app-distribution";
+ public static final String TRAFFIC_SPECTRUM_CLIENT_IP_CONNECT_APPLICATION_USAGE = "traffic-spectrum-client-ip-connect-application-usage";
+ public static final String TRAFFIC_SPECTRUM_NETWORK_THROUGHPUT_TREND = "traffic-spectrum-network-throughput-trend";
+ public static final Map<String, Set<String>> IDENTIFIER_NAME_SOURCE_MAPPING;
+
+ static {
+ IDENTIFIER_NAME_SOURCE_MAPPING = new HashMap<>();
+ IDENTIFIER_NAME_SOURCE_MAPPING.put(APPLICATION_AND_PROTOCOL_SUMMARY, new HashSet<>(List.of("application_protocol_stat")));
+ IDENTIFIER_NAME_SOURCE_MAPPING.put(APPLICATION_AND_PROTOCOL_TREE_COMPOSITION, new HashSet<>(List.of("application_protocol_stat")));
+ IDENTIFIER_NAME_SOURCE_MAPPING.put(APPLICATION_AND_PROTOCOL_TREE_THROUGHPUT, new HashSet<>(List.of("application_protocol_stat")));
+ IDENTIFIER_NAME_SOURCE_MAPPING.put(APPLICATION_AND_PROTOCOL_TOP_APPS, new HashSet<>(List.of("application_protocol_stat")));
+ IDENTIFIER_NAME_SOURCE_MAPPING.put(APPLICATION_AND_PROTOCOL_APP_RELATED_INTERNAL_IPS, new HashSet<>(List.of("session_record")));
+ IDENTIFIER_NAME_SOURCE_MAPPING.put(APPLICATION_AND_PROTOCOL_APP_THROUGHPUT, new HashSet<>(List.of("application_protocol_stat")));
+ IDENTIFIER_NAME_SOURCE_MAPPING.put(APPLICATION_AND_PROTOCOL_APP_SUMMARY, new HashSet<>(List.of("application_protocol_stat")));
+ IDENTIFIER_NAME_SOURCE_MAPPING.put(IP_LEARNING_FQDN_RELATE_IP, new HashSet<>());
+ IDENTIFIER_NAME_SOURCE_MAPPING.put(IP_LEARNING_ACTIVE_IP, new HashSet<>());
+ IDENTIFIER_NAME_SOURCE_MAPPING.put(FIELD_DISCOVERY, new HashSet<>());
+ IDENTIFIER_NAME_SOURCE_MAPPING.put(DATAPATH_PACKET_COMBINE, new HashSet<>(List.of("datapath_telemetry_record")));
+ IDENTIFIER_NAME_SOURCE_MAPPING.put(TRAFFIC_SPECTRUM_SUMMARY, new HashSet<>(List.of("traffic_sketch_metric")));
+ IDENTIFIER_NAME_SOURCE_MAPPING.put(TRAFFIC_SPECTRUM_UNIQUE_CLIENT_AND_SERVER_IPS, new HashSet<>(List.of("traffic_sketch_metric")));
+ IDENTIFIER_NAME_SOURCE_MAPPING.put(TRAFFIC_SPECTRUM_APP_DISTRIBUTION, new HashSet<>(List.of("traffic_sketch_metric")));
+ IDENTIFIER_NAME_SOURCE_MAPPING.put(TRAFFIC_SPECTRUM_CLIENT_IP_CONNECT_APPLICATION_USAGE, new HashSet<>(List.of("traffic_sketch_metric")));
+ IDENTIFIER_NAME_SOURCE_MAPPING.put(TRAFFIC_SPECTRUM_NETWORK_THROUGHPUT_TREND, new HashSet<>(List.of("traffic_sketch_metric")));
+ }
}
diff --git a/src/main/java/com/mesalab/qgw/constant/ExampleDataModeConst.java b/src/main/java/com/mesalab/qgw/constant/ExampleDataModeConst.java
new file mode 100644
index 00000000..7985e003
--- /dev/null
+++ b/src/main/java/com/mesalab/qgw/constant/ExampleDataModeConst.java
@@ -0,0 +1,14 @@
+package com.mesalab.qgw.constant;
+
+/**
+ * TODO
+ *
+ * @Classname ExampleDataModeConst
+ * @Date 2024/3/8 13:44
+ * @Author wWei
+ */
+public class ExampleDataModeConst {
+ public static final String RANGE = "range";
+ public static final String ENUM = "enum";
+ public static final String SEQUENCE = "sequence";
+}
diff --git a/src/main/java/com/mesalab/qgw/constant/MetaConst.java b/src/main/java/com/mesalab/qgw/constant/MetaConst.java
new file mode 100644
index 00000000..2f3f964a
--- /dev/null
+++ b/src/main/java/com/mesalab/qgw/constant/MetaConst.java
@@ -0,0 +1,18 @@
+package com.mesalab.qgw.constant;
+
+/**
+ * TODO
+ *
+ * @Classname MetaConst
+ * @Date 2024/3/7 14:29
+ * @Author wWei
+ */
+public class MetaConst {
+ public static final String META_CATEGORY = "category";
+ public static final String META_NAME = "name";
+ public static final String META_TYPE = "type";
+ public static final String META_DATA_TYPE = "data_type";
+ public static final String META_FIELD_NAME = "field_name";
+ public static final String META_CATEGORY_METRIC = "Metric";
+ public static final String META_CATEGORY_DIMENSION = "Dimension";
+}
diff --git a/src/main/java/com/mesalab/qgw/constant/QGWMessageConst.java b/src/main/java/com/mesalab/qgw/constant/QGWMessageConst.java
index c9aa22f0..6e89a909 100644
--- a/src/main/java/com/mesalab/qgw/constant/QGWMessageConst.java
+++ b/src/main/java/com/mesalab/qgw/constant/QGWMessageConst.java
@@ -22,9 +22,11 @@ public class QGWMessageConst {
public static final String SQL_PARSE_ERROR_NOT_FOUNT_TABLE = "unable to parse table name.";
- public static final String DIAGNOSIS_OPTION_ERROR = "not support option value.";
+ public static final String CONSISTENCY_OPTION_ERROR = "not support consistency option value.";
+
+ public static final String BENCHMARK_TEST_PARAM_CANNOT_BE_EMPTY = "The benchmark test param value cannot be empty.";
- public static final String DIAGNOSIS_CATEGORY_ERROR = "not support category value.";
+ public static final String DIAGNOSIS_OPTION_ERROR = "not support option value.";
public static final String NOT_SUPPORT_DML_PARSER = "not support DML Parser.";
@@ -110,5 +112,26 @@ public class QGWMessageConst {
public static final String QUERY_CUSTOM_FIELD_DISCOVERY_ERROR = "The param custom.field_discovery.* illegal.";
+ public static final String DATASET_BACKEND_ENGINE_ERROR = "The dataset backend_engine is not supported, backend_engine value must be :[ qgw| clickhouse| druid| hbase]";
+
+ public static final String BUCKET_NAME_ERROR = "The bucket_name cannot be empty.";
+
+ public static final String DATASET_ID_NOT_EXIST = "The dataset identifier_name is not exist.";
+
+ public static final String KB_FILE_ID_CANNOT_BE_EMPTY = "The knowledge base file id cannot be empty.";
+
+ public static final String KB_FILE_NAME_CANNOT_BE_EMPTY = "The knowledge base file name cannot be empty.";
+
+ public static final String KB_FILE_VERSION_CANNOT_BE_EMPTY = "The knowledge base file version cannot be empty.";
+
+ public static final String KB_FILE_IS_VALID_CANNOT_BE_EMPTY = "The knowledge base file is_valid cannot be empty.";
+
+ public static final String KB_FILE_FORMAT_CANNOT_BE_EMPTY = "The knowledge base file format cannot be empty.";
+
+ public static final String KB_FILE_FORMAT_NOT_SUPPORT = "The knowledge base file format is not supported, format value must be :[ MMDB| CSV| TXT| AES]";
+
+ public static final String SCHEMA_FUNCTION_CONSTRAINT_ERROR ="The %s schema %s constraint functions has %s, does not exit in the list of supported aggregate functions.";
+
+ public static final String SCHEMA_FIELD_TYPE_NEED_CHECK = "The %s schema fields type need to be checked: %s.";
}
diff --git a/src/main/java/com/mesalab/network/common/Constants.java b/src/main/java/com/mesalab/qgw/constant/dsl/LiveChartConstants.java
index db7c632a..fd445c41 100644
--- a/src/main/java/com/mesalab/network/common/Constants.java
+++ b/src/main/java/com/mesalab/qgw/constant/dsl/LiveChartConstants.java
@@ -1,27 +1,27 @@
-package com.mesalab.network.common;
+package com.mesalab.qgw.constant.dsl;
-public class Constants {
+/**
+ * @author wangwei
+ */
+public class LiveChartConstants {
public static final String PROTOCOL_TREE_ROOT_NAME = "Protocols";
public static final String ENCAPSULATION_PATH_SEPARATOR = ".";
public static final String PROTOCOL_ETHERNET_NODE = "ETHERNET";
public static final String PROTOCOLS_PARENT_ID = "ROOT";
- public static final String PROTOCOL_TREE_METRIC_ENCAPSULATION_PATHS = "encapsulationPaths";
- public static final String PROTOCOL_TREE_METRIC_PROTOCOLS_AND_APPLICATIONS = "protocolsAndApplications";
+ public static final String PROTOCOL_TREE_METRIC_ENCAPSULATION_PATHS = "encapsulation_paths";
+ public static final String PROTOCOL_TREE_METRIC_PROTOCOLS_AND_APPLICATIONS = "protocols_and_applications";
public static final String PROTOCOL_TREE_METRIC_SESSIONS = "sessions";
public static final String PROTOCOL_TREE_METRIC_C2S_BYTES = "c2s_bytes";
public static final String PROTOCOL_TREE_METRIC_S2C_BYTES = "s2c_bytes";
public static final String PROTOCOL_TREE_METRIC_C2S_PKTS = "c2s_pkts";
public static final String PROTOCOL_TREE_METRIC_S2C_PKTS = "s2c_pkts";
- public static final String PROTOCOL_TREE_METRIC_SENT_PACKETS = "sentPackets";
- public static final String PROTOCOL_TREE_METRIC_RECEIVED_PACKETS = "receivedPackets";
- public static final String PROTOCOL_TREE_METRIC_TOTAL_SENT_PACKETS = "totalSentPackets";
- public static final String PROTOCOL_TREE_METRIC_TOTAL_RECEIVED_PACKETS = "totalReceivedPackets";
- public static final String PROTOCOL_TREE_METRIC_TOTAL_SENT_BYTES = "totalSentBytes";
- public static final String PROTOCOL_TREE_METRIC_TOTAL_RECEIVED_BYTES = "totalReceivedBytes";
+ public static final String PROTOCOL_TREE_METRIC_SENT_PACKETS = "sent_packets";
+ public static final String PROTOCOL_TREE_METRIC_RECEIVED_PACKETS = "received_packets";
+ public static final String PROTOCOL_TREE_METRIC_TOTAL_SENT_BYTES = "total_sent_bytes";
+ public static final String PROTOCOL_TREE_METRIC_TOTAL_RECEIVED_BYTES = "total_received_bytes";
- public static final String NETWORK_OVERVIEW_METRIC_UNIQ_CLIENT_IP = "uniq_client_ip";
public static final String NETWORK_OVERVIEW_METRIC_TOTAL_SESSIONS = "total_sessions";
public static final String NETWORK_OVERVIEW_METRIC_TOTAL_BYTES = "total_bytes";
public static final String NETWORK_OVERVIEW_METRIC_TOTAL_PACKETS = "total_packets";
@@ -35,8 +35,6 @@ public class Constants {
public static final String NETWORK_OVERVIEW_METRIC_TCP_RETRANSMISSIONS_PERCENT = "tcp_retransmissions_percent";
public static final String NETWORK_OVERVIEW_METRIC_FRAGMENTATION_PACKETS = "fragmentation_packets";
public static final String NETWORK_OVERVIEW_METRIC_FRAGMENTATION_PERCENT = "fragmentation_percent";
-
-
public static final String NETWORK_OVERVIEW_METRIC_DATA_RATE = "data_rate";
}
diff --git a/src/main/java/com/mesalab/qgw/constant/dsl/TrafficSpectrumConstants.java b/src/main/java/com/mesalab/qgw/constant/dsl/TrafficSpectrumConstants.java
new file mode 100644
index 00000000..fe19d138
--- /dev/null
+++ b/src/main/java/com/mesalab/qgw/constant/dsl/TrafficSpectrumConstants.java
@@ -0,0 +1,46 @@
+package com.mesalab.qgw.constant.dsl;
+
+/**
+ * TODO
+ *
+ * @Classname TrafficSpectrumConstants
+ * @Date 2024/5/23 16:19
+ * @Author wWei
+ */
+public class TrafficSpectrumConstants {
+ public static final String SOURCE = "source";
+ public static final String TARGET = "target";
+ public static final String DIRECTION = "direction";
+ public static final String SESSIONS = "sessions";
+ public static final String BYTES = "bytes";
+ public static final String PACKETS = "packets";
+ public static final String UNKNOWN_APP_BYTES = "unknown_app_bytes";
+ public static final String UNCATEGORIZED_PERCENT = "uncategorized_percent";
+ public static final String ASYMMETRIC_FLOWS = "asymmetric_flows";
+ public static final String ASYMMETRIC_FLOWS_PERCENT = "asymmetric_flows_percent";
+ public static final String FRAGMENTATION_PACKETS = "fragmentation_packets";
+ public static final String FRAGMENTATION_PERCENT = "fragmentation_percent";
+ public static final String INTERNAL_UNIQ_CLIENT_IP = "internal_uniq_client_ip";
+ public static final String INTERNAL_UNIQ_SERVER_IP = "internal_uniq_server_ip";
+ public static final String EXTERNAL_UNIQ_CLIENT_IP = "external_uniq_client_ip";
+ public static final String EXTERNAL_UNIQ_SERVER_IP = "external_uniq_server_ip";
+ public static final String TOP_SERVER_IPS = "top_server_ips";
+ public static final String TOP_SERVER_DOMAINS = "top_server_domains";
+ public static final String MAX_RATE = "max_rate";
+ public static final String AVG_RATE = "avg_rate";
+ public static final String INTERNAL_NODE = "internal_node";
+ public static final String EXTERNAL_NODE = "external_node";
+ public static final String NODES = "nodes";
+ public static final String LINKS = "links";
+ public static final String OBJECT_TYPE = "object_type";
+ public static final String CATEGORY = "category";
+ public static final String NAME = "name";
+ public static final String ID = "id";
+ public static final String OTHER = "Other";
+ public static final String OBJECT_TYPE_APPLICATION = "application";
+ public static final String OBJECT_TYPE_IP = "ip";
+ public static final String PRIVATE_IP = "Private IP";
+ public static final String OUTBOUND = "Outbound";
+ public static final String INBOUND = "Inbound";
+ public static final String SPLIT_HYPHEN = "-";
+}
diff --git a/src/main/java/com/mesalab/qgw/controller/DatabaseController.java b/src/main/java/com/mesalab/qgw/controller/DatabaseController.java
new file mode 100644
index 00000000..66de5887
--- /dev/null
+++ b/src/main/java/com/mesalab/qgw/controller/DatabaseController.java
@@ -0,0 +1,153 @@
+package com.mesalab.qgw.controller;
+
+import cn.hutool.core.collection.CollectionUtil;
+import cn.hutool.core.util.NumberUtil;
+import com.alibaba.fastjson2.JSON;
+import com.clearspring.analytics.util.Lists;
+import com.google.common.collect.Sets;
+import com.jayway.jsonpath.JsonPath;
+import com.mesalab.common.entity.BaseResult;
+import com.mesalab.common.entity.BaseResultGenerator;
+import com.mesalab.common.enums.MetadataType;
+import com.mesalab.qgw.constant.QGWMessageConst;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
+import com.mesalab.qgw.exception.QGWBusinessException;
+import com.mesalab.common.exception.CommonErrorCode;
+import com.mesalab.qgw.model.basic.AuditLog;
+import com.mesalab.qgw.model.job.StorageDeletionInfo;
+import com.mesalab.qgw.service.DatabaseService;
+import com.geedgenetworks.utils.StringUtil;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.web.bind.annotation.*;
+
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+@Slf4j
+@RestController
+@RequestMapping(value = "v1/database")
+public class DatabaseController {
+
+ @Autowired
+ private DatabaseService databaseService;
+
+ private final static HashSet<String> VISIBILITY = Sets.newHashSet("enabled", "hidden", "disabled");
+
+
+ @RequestMapping(value = "/table/{table_name}/schema", consumes = "application/x-www-form-urlencoded")
+ @AuditLog("DatabaseController.getSchemaByTable")
+ public BaseResult getSchemaByTable(@PathVariable("table_name") String tableName) {
+ log.debug("SCHEMA信息获取,参数为{}", tableName);
+
+ if (StringUtil.isBlank(tableName)) {
+ return BaseResultGenerator.success4Message("ok");
+ }
+ return BaseResultGenerator.success(databaseService.getSchemaInfo(MetadataType.FIELDS.getValue(), tableName, true));
+ }
+
+ @PutMapping(value = "/table/{table_name}/schema", consumes = "application/json")
+ @AuditLog("DatabaseController.updateSchema")
+ public BaseResult updateSchema(@PathVariable("table_name") String tableName, @RequestBody Map<String, Object> obj) {
+ log.info("update Schema api, params is: {}-{}", tableName, obj);
+ if (StringUtil.isBlank(tableName) || StringUtil.isEmpty(obj)) {
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.SCHEMA_PARAM_ERROR));
+ }
+ validationParam(tableName, obj);
+ return databaseService.updateSchema(tableName, obj);
+ }
+
+ @RequestMapping(value = "/{dbName}/table", consumes = "application/x-www-form-urlencoded")
+ @AuditLog("DatabaseController.getTableByDB")
+ public BaseResult getTableByDB(@PathVariable String dbName) {
+ log.debug("SCHEMA信息获取,参数为{}", dbName);
+
+ if (StringUtil.isBlank(dbName)) {
+
+ return BaseResultGenerator.success4Message("ok");
+ }
+ return BaseResultGenerator.success(databaseService.getSchemaInfo(MetadataType.TABLES.getValue(), dbName, true));
+ }
+
+ @GetMapping(value = "/storage/quota", consumes = "application/x-www-form-urlencoded")
+ @AuditLog("DatabaseController.storageQuota")
+ public BaseResult storageQuota() {
+ return databaseService.getStorageQuota();
+ }
+
+ @PutMapping(value = "/storage/quota", consumes = "application/json")
+ @AuditLog("DatabaseController.storageSetting")
+ public BaseResult storageSetting(@RequestBody List<StorageDeletionInfo> list) {
+ log.warn("数据配额设置, 参数: params is {}", list);
+ if (StringUtil.isEmpty(list)) {
+ return BaseResultGenerator.failure(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.PARAMETER_NOT_OBTAINED));
+ }
+ for (StorageDeletionInfo info : list) {
+ if (StringUtil.isBlank(info.getType()) || StringUtil.isBlank(String.valueOf(info.getMaxDays()))) {
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.LOGTYPE_AND_MAXDAYS_NOT_NULL));
+ }
+ if ("ALL".equalsIgnoreCase(info.getType()) && list.size() != 1) {
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.CANNOT_COEXIST_OTHER_TYPES));
+ }
+ if (!(info.getMaxDays() >= 0 && info.getMaxDays() <= 2000)) {
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.MAXDAYS_ERROR));
+ }
+ }
+ return databaseService.deleteStorage(list);
+ }
+
+ @RequestMapping(value = "/storage/quota", method = RequestMethod.DELETE, consumes = "application/x-www-form-urlencoded")
+ @AuditLog("DatabaseController.storageDeletion")
+ public BaseResult storageDeletion(@RequestParam String type) {
+ List<StorageDeletionInfo> deletionInfoList = Lists.newArrayList();
+ StorageDeletionInfo deletionInfo = new StorageDeletionInfo();
+ deletionInfo.setType(type);
+ deletionInfo.setMaxDays(0);
+ deletionInfoList.add(deletionInfo);
+ return databaseService.deleteStorage(deletionInfoList);
+ }
+
+ @RequestMapping(value = "/storage/quota/daily_usage", consumes = "application/x-www-form-urlencoded")
+ @AuditLog("DatabaseController.dailyTrendOfStorage")
+ public BaseResult dailyTrendOfStorage(@RequestParam Map<String, Object> param) {
+ String searchStartTime = StringUtil.stripToEmpty((String) param.get("start_time"));
+ String searchEndTime = StringUtil.stripToEmpty((String) param.get("end_time"));
+ return databaseService.dailyTrendOfStorage(searchStartTime, searchEndTime);
+ }
+
+ private void validationParam(String name, Map<String, Object> obj) {
+ if (!databaseService.getAllTable().contains(name)) {
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), name + QGWMessageConst.NOT_SUPPORT_SETTING_TTL));
+ }
+ String param = JSON.toJSONString(obj);
+ if (!VISIBILITY.containsAll(JsonPath.read(param, "$.fields[?(@.doc.visibility != null)].doc.visibility"))) {
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.SCHEMA_ERROR_VISIBILITY_VALUE));
+ }
+ List<Object> schemaDocTTL = JsonPath.read(param, "$.[?(@.doc.ttl != null)].doc.ttl");
+ if (StringUtil.isNotEmpty(schemaDocTTL.stream().filter(o -> !(NumberUtil.isLong(String.valueOf(o)))).toArray())) {
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.SCHEMA_ERROR_TTL_VALUE));
+ }
+ List<Object> fieldDocTTL = JsonPath.read(param, "$.fields[?(@.doc.ttl != null)].doc.ttl");
+ Object[] objects3 = fieldDocTTL.parallelStream().filter(o -> !NumberUtil.isLong(String.valueOf(o))).toArray();
+ if (StringUtil.isNotEmpty(objects3)) {
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.SCHEMA_ERROR_TTL_VALUE));
+ }
+ if (schemaDocTTL.size() > 0 && fieldDocTTL.size() > 0
+ && CollectionUtil.max(schemaDocTTL.stream().map(o -> Long.parseLong(o.toString())).collect(Collectors.toList())) < CollectionUtil.max(fieldDocTTL.stream().map(o -> Long.parseLong(o.toString())).collect(Collectors.toList()))) {
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.SCHEMA_ERROR_TTL_SIZE));
+ }
+ }
+}
diff --git a/src/main/java/com/mesalab/qgw/controller/DatasetController.java b/src/main/java/com/mesalab/qgw/controller/DatasetController.java
new file mode 100644
index 00000000..7fc351ac
--- /dev/null
+++ b/src/main/java/com/mesalab/qgw/controller/DatasetController.java
@@ -0,0 +1,62 @@
+package com.mesalab.qgw.controller;
+
+import cn.hutool.core.util.StrUtil;
+import cn.hutool.http.HttpStatus;
+import com.google.common.collect.Lists;
+import com.mesalab.common.entity.BaseResult;
+
+import com.mesalab.common.entity.BaseResultGenerator;
+import com.mesalab.common.enums.DBEngineType;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
+import com.mesalab.qgw.constant.QGWMessageConst;
+import com.mesalab.qgw.exception.QGWBusinessException;
+import com.mesalab.common.exception.CommonErrorCode;
+import com.mesalab.qgw.model.basic.AuditLog;
+import com.mesalab.qgw.service.DatasetService;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.web.bind.annotation.*;
+
+import java.util.*;
+
+@Slf4j
+@RestController
+@RequestMapping(value = "/v1/dataset")
+public class DatasetController {
+
+ @Autowired
+ private DatasetService datasetService;
+
+ @GetMapping(value = "/{identifier_name}")
+ @AuditLog("DatasetController.getDataset")
+ public BaseResult getDataset(@PathVariable("identifier_name") String identifierName) {
+ log.info("Get Dataset, ID is: {}", identifierName);
+ Map<String, Object> dataset = datasetService.getDataset(identifierName);
+ return BaseResultGenerator.success(Lists.newArrayList(dataset));
+ }
+
+ @GetMapping
+ @AuditLog("DatasetController.getDatasets")
+ public BaseResult getDatasets(@RequestParam(value = "identifier_names", required = false) String datasetIds, @RequestParam(required = false) String category, @RequestParam(value = "backend_engine", required = false) String backendEngine) {
+ if (StrUtil.isNotBlank(backendEngine)) {
+ if (Arrays.stream(DBEngineType.values()).noneMatch(o -> o.getValue().equalsIgnoreCase(backendEngine))) {
+ throw new QGWBusinessException(HttpStatus.HTTP_BAD_REQUEST, CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.DATASET_BACKEND_ENGINE_ERROR));
+ }
+ }
+ log.info("Get Datasets. IDs is: {}, category is: {}, backendEngine is: {}", datasetIds, category, backendEngine);
+ return BaseResultGenerator.success(datasetService.getDatasets(Arrays.asList(StrUtil.split(datasetIds, ",")), category, backendEngine));
+ }
+
+ @GetMapping(value = "/{identifier_name}/result_preview")
+ @AuditLog("DatasetController.getPreview")
+ public BaseResult getPreview(@PathVariable("identifier_name") String identifierName) {
+ return datasetService.getPreview(identifierName);
+ }
+
+ @GetMapping(value = "/global_variable")
+ @AuditLog("DatasetController.getVariable")
+ public BaseResult getVariable() {
+ return BaseResultGenerator.success(datasetService.getVariable());
+ }
+} \ No newline at end of file
diff --git a/src/main/java/com/mesalab/qgw/controller/DiagnosisController.java b/src/main/java/com/mesalab/qgw/controller/DiagnosisController.java
index 896a0287..b6028ed1 100644
--- a/src/main/java/com/mesalab/qgw/controller/DiagnosisController.java
+++ b/src/main/java/com/mesalab/qgw/controller/DiagnosisController.java
@@ -1,22 +1,15 @@
package com.mesalab.qgw.controller;
-import cn.hutool.core.util.StrUtil;
import cn.hutool.http.HttpStatus;
import com.alibaba.fastjson.JSONObject;
-import com.fasterxml.jackson.annotation.JsonProperty;
import com.mesalab.common.entity.BaseResult;
-import com.mesalab.common.enums.DBTypeEnum;
-import com.mesalab.common.enums.ResultCodeEnum;
-import com.mesalab.common.utils.StringUtil;
+import com.mesalab.common.enums.DBEngineType;
import com.mesalab.qgw.constant.QGWMessageConst;
-import com.mesalab.common.enums.DiagnosisOptionEnum;
-import com.mesalab.common.exception.BusinessException;
import com.mesalab.qgw.exception.QGWBusinessException;
-import com.mesalab.qgw.exception.QGWErrorCode;
+import com.mesalab.common.exception.CommonErrorCode;
+import com.mesalab.qgw.model.basic.AuditLog;
import com.mesalab.qgw.service.DiagnosisService;
-import com.mesalab.services.common.enums.EntityQueryType;
import lombok.extern.slf4j.Slf4j;
-import net.sf.jsqlparser.expression.StringValue;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
@@ -34,51 +27,26 @@ public class DiagnosisController {
private DiagnosisService diagnosisService;
@RequestMapping("/component_info")
+ @AuditLog("DiagnosisController.versionInfo")
public JSONObject versionInfo() {
return diagnosisService.getVersionInfo();
}
- @RequestMapping(value = "ttl_consistency_check", method = RequestMethod.POST)
- public BaseResult monitorCluster() {
- log.info("consistency check interface.");
- return diagnosisService.consistencyCheck();
- }
@RequestMapping(value = "/schema_check")
+ @AuditLog("DiagnosisController.validateSchema")
public BaseResult validateSchema() {
return diagnosisService.validateSchema();
}
- @RequestMapping(value = "/metadata_check")
- public BaseResult validateMetadata() {
- return diagnosisService.validateMetadata();
- }
-
- @RequestMapping(value = "/sql_benchmark")
- public BaseResult executePocSql(@RequestParam(value = "is_saved",required = false) @JsonProperty("isSaved") String isSaved, @RequestParam(defaultValue = "validation") String option, @RequestParam(required = false) String category) throws BusinessException {
- if (Arrays.stream(DiagnosisOptionEnum.values()).noneMatch(o -> o.getValue().equalsIgnoreCase(option))) {
- throw new QGWBusinessException(HttpStatus.HTTP_BAD_REQUEST, ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.DIAGNOSIS_OPTION_ERROR));
- }
- return diagnosisService.runPocSQL(Boolean.parseBoolean(isSaved), option, category);
- }
-
@RequestMapping({"/sql", "/sql/{dialect}/{queryNo}"})
+ @AuditLog("DiagnosisController.getPocSql")
public BaseResult getPocSql(@PathVariable String dialect, @PathVariable(required = false) Integer queryNo) {
- if (Arrays.stream(DBTypeEnum.values()).noneMatch(o -> o.getValue().equalsIgnoreCase(dialect))){
- throw new QGWBusinessException(HttpStatus.HTTP_BAD_REQUEST, ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.DIALECT_VALUE_ERROR));
+ if (Arrays.stream(DBEngineType.values()).noneMatch(o -> o.getValue().equalsIgnoreCase(dialect))){
+ throw new QGWBusinessException(HttpStatus.HTTP_BAD_REQUEST, CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.DIALECT_VALUE_ERROR));
}
return diagnosisService.getPocSQL(dialect, queryNo);
}
- @RequestMapping(value = "/entity")
- public BaseResult getMessageInfo(String option) {
- log.info("Entity recommendation proportion statistical information interface, param is: {}", option);
- if (!EntityQueryType.TOPSERVERIP.getType().equalsIgnoreCase(option) && !EntityQueryType.TOPSNI.getType().equalsIgnoreCase(option)) {
- throw new QGWBusinessException(HttpStatus.HTTP_BAD_REQUEST, ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.DIAGNOSIS_OPTION_ERROR));
- }
- return diagnosisService.getMessageInfo(option);
- }
}
diff --git a/src/main/java/com/mesalab/qgw/controller/DslController.java b/src/main/java/com/mesalab/qgw/controller/DslController.java
deleted file mode 100644
index 094c1875..00000000
--- a/src/main/java/com/mesalab/qgw/controller/DslController.java
+++ /dev/null
@@ -1,56 +0,0 @@
-package com.mesalab.qgw.controller;
-
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import com.mesalab.common.entity.BaseResult;
-import com.mesalab.common.enums.ResultCodeEnum;
-import com.mesalab.common.enums.ResultStatusEnum;
-import com.mesalab.common.exception.BusinessException;
-import com.mesalab.qgw.constant.DslIdentifierNameConst;
-import com.mesalab.qgw.model.basic.DSLProfile;
-import com.mesalab.qgw.service.DslService;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.web.bind.annotation.*;
-
-
-/**
- * TODO
- *
- * @Classname DslController
- * @Date 2023/11/25 17:39
- * @Author wWei
- */
-@RestController
-@RequestMapping(value = "/dsl")
-public class DslController {
-
- private static final Log log = LogFactory.get();
-
- @Autowired
- DslService dslService;
-
- @PostMapping(consumes = "application/json")
- public BaseResult post(@RequestBody DSLProfile dslProfile) {
- log.info("HTTP REST DSL, params is: {}", dslProfile);
- if (DslIdentifierNameConst.APPLICATION_AND_PROTOCOL_SUMMARY.equals(dslProfile.getName())) {
- return dslService.appAndProtocolSummary(dslProfile);
- } else if (DslIdentifierNameConst.APPLICATION_AND_PROTOCOL_TREE_COMPOSITION.equals(dslProfile.getName())) {
- return dslService.appAndProtocolTreeComposition(dslProfile);
- } else if (DslIdentifierNameConst.APPLICATION_AND_PROTOCOL_TREE_THROUGHPUT.equals(dslProfile.getName())) {
- return dslService.applicationAndProtocolTreeThroughput(dslProfile);
- } else if (DslIdentifierNameConst.APPLICATION_AND_PROTOCOL_TOP_APPS.equals(dslProfile.getName())) {
- return dslService.applicationAndProtocolTopApp(dslProfile);
- } else if (DslIdentifierNameConst.APPLICATION_AND_PROTOCOL_APP_RELATED_INTERNAL_IPS.equals(dslProfile.getName())) {
- return dslService.applicationAndProtocolAppRelatedInternalIps(dslProfile);
- } else if (DslIdentifierNameConst.APPLICATION_AND_PROTOCOL_APP_THROUGHPUT.equals(dslProfile.getName())) {
- return dslService.applicationAndProtocolAppThroughput(dslProfile);
- } else if (DslIdentifierNameConst.APPLICATION_AND_PROTOCOL_APP_SUMMARY.equals(dslProfile.getName())) {
- return dslService.applicationAndProtocolAppSummary(dslProfile);
- } else if (DslIdentifierNameConst.REAL_TIME_DATA_ANALYTICS_SUBSCRIBER_ID_RELATE_IP.equals(dslProfile.getName())) {
- return dslService.realTimeDataAnalyticsSubscriberIdRelateIp(dslProfile);
- } else if (DslIdentifierNameConst.REAL_TIME_DATA_ANALYTICS_MOBILE_IDENTITY_RELATE_TEID.equals(dslProfile.getName())) {
- return dslService.realTimeDataAnalyticsMobileIdentityRelateTeid(dslProfile);
- }
- throw new BusinessException(ResultStatusEnum.NOT_FOUND.getCode(), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(), String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(), dslProfile.getName()));
- }
-} \ No newline at end of file
diff --git a/src/main/java/com/mesalab/qgw/controller/HosController.java b/src/main/java/com/mesalab/qgw/controller/HosController.java
new file mode 100644
index 00000000..34f5c302
--- /dev/null
+++ b/src/main/java/com/mesalab/qgw/controller/HosController.java
@@ -0,0 +1,84 @@
+package com.mesalab.qgw.controller;
+
+import cn.hutool.core.util.StrUtil;
+import cn.hutool.log.Log;
+import cn.hutool.log.LogFactory;
+import com.mesalab.common.entity.BaseResult;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
+import com.mesalab.common.exception.BusinessException;
+import com.mesalab.qgw.constant.QGWMessageConst;
+import com.mesalab.common.exception.CommonErrorCode;
+import com.mesalab.qgw.model.basic.AuditLog;
+import com.mesalab.qgw.service.HosService;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.web.bind.annotation.*;
+import org.springframework.web.multipart.MultipartFile;
+
+import javax.servlet.http.HttpServletRequest;
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.*;
+import java.util.stream.Collectors;
+
+@RestController
+@RequestMapping(value = "/v1/hos")
+public class HosController {
+ private static final Log log = LogFactory.get();
+ private static final String X_HOS_META_PREFIX = "x-hos-meta-";
+
+ private HosService hosService;
+
+
+ @GetMapping(value = "/{bucket_name}/{file_name}", consumes = "application/x-www-form-urlencoded")
+ @AuditLog("HosController.getFile")
+ public BaseResult getFile(@PathVariable("bucket_name") String bucketName, @PathVariable("file_name") String fileName) {
+ return hosService.getFile(bucketName, fileName);
+ }
+
+ @GetMapping(value = "/{bucket_name}", consumes = "application/x-www-form-urlencoded")
+ @AuditLog("HosController.getFileList")
+ public BaseResult getFileList(@PathVariable("bucket_name") String bucketName, @RequestParam(required = false) String prefix, HttpServletRequest request) {
+ if (StrUtil.isNotBlank(bucketName)) {
+ Map<String, String> udfMetaParam = new HashMap<>();
+ Enumeration<String> parameterNames = request.getParameterNames();
+ while (parameterNames.hasMoreElements()) {
+ String paramName = parameterNames.nextElement();
+ if (paramName.startsWith(X_HOS_META_PREFIX)) {
+ String paramValue = request.getParameter(paramName);
+ udfMetaParam.put(paramName, paramValue);
+ }
+ }
+ log.info("Get File List interface,param:bucket_name is {},prefix is {},x-hos-meta-* is {}", bucketName, prefix, udfMetaParam.toString());
+ return hosService.getFileList(bucketName, prefix, udfMetaParam);
+ } else {
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.BUCKET_NAME_ERROR));
+ }
+ }
+
+ @PostMapping(value = "/{bucket_name}/{file_name}", consumes = "multipart/form-data")
+ @AuditLog("HosController.uploadFile")
+ public BaseResult uploadFile(@PathVariable("bucket_name") String bucketName, @PathVariable("file_name") String fileName, @RequestParam("file") MultipartFile file) {
+ ByteArrayInputStream byteArrayInputStream = null;
+ try {
+ byteArrayInputStream = new ByteArrayInputStream(file.getBytes());
+ } catch (IOException e) {
+ throw new BusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), e.getMessage()));
+ }
+ return hosService.uploadFile(bucketName, fileName, byteArrayInputStream, null);
+ }
+
+ @DeleteMapping(value = "/{bucket_name}", consumes = "application/x-www-form-urlencoded")
+ @AuditLog("HosController.deleteFileList")
+ public BaseResult deleteFileList(@PathVariable("bucket_name") String bucketName, @RequestParam("file_names") String fileNames) {
+ return hosService.deleteFileList(bucketName, Arrays.stream(fileNames.split(",")).distinct().collect(Collectors.toList()));
+ }
+
+
+ @Autowired
+ public void setHosService(HosService hosService) {
+ this.hosService = hosService;
+ }
+}
diff --git a/src/main/java/com/mesalab/qgw/controller/MetadataController.java b/src/main/java/com/mesalab/qgw/controller/MetadataController.java
deleted file mode 100644
index e1cc8c10..00000000
--- a/src/main/java/com/mesalab/qgw/controller/MetadataController.java
+++ /dev/null
@@ -1,95 +0,0 @@
-package com.mesalab.qgw.controller;
-
-import cn.hutool.core.collection.CollectionUtil;
-import cn.hutool.core.util.NumberUtil;
-import com.alibaba.fastjson2.JSON;
-import com.jayway.jsonpath.JsonPath;
-import com.mesalab.common.entity.BaseResult;
-import com.mesalab.common.entity.BaseResultGenerator;
-import com.mesalab.common.enums.MetadataTypeEnum;
-import com.mesalab.common.enums.ResultCodeEnum;
-import com.mesalab.qgw.constant.QGWMessageConst;
-import com.mesalab.common.enums.ResultStatusEnum;
-import com.mesalab.qgw.exception.QGWBusinessException;
-import com.mesalab.qgw.service.MetadataService;
-import com.geedgenetworks.utils.StringUtil;
-import lombok.extern.slf4j.Slf4j;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.web.bind.annotation.*;
-
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-import java.util.stream.Collectors;
-
-@Slf4j
-@RestController
-@RequestMapping(value = "v1/metadata")
-public class MetadataController {
-
- @Autowired
- private MetadataService metadataService;
-
- private final static List<String> VISIBILITY = Arrays.asList("enabled", "hidden", "disabled");
-
-
- @RequestMapping("/schema/{table_name}")
- public BaseResult getSchemaByTable(@PathVariable("table_name") String tableName) {
- log.debug("SCHEMA信息获取,参数为{} {}", tableName);
-
- if (StringUtil.isBlank(tableName)) {
- return BaseResultGenerator.success4Message("ok");
- }
- return BaseResultGenerator.success(metadataService.getSchemaInfo(MetadataTypeEnum.FIELDS.getValue(), tableName, true));
- }
-
- @PutMapping("/schema/{table_name}")
- public BaseResult updateSchema(@PathVariable("table_name") String tableName, @RequestBody Map<String, Object> obj) {
- log.info("update Schema api, params is: {}-{}", tableName, obj);
- if (StringUtil.isBlank(tableName) || StringUtil.isEmpty(obj)) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.SCHEMA_PARAM_ERROR));
- }
- validationParam(tableName, obj);
- return metadataService.updateSchema(tableName, obj);
- }
-
- @RequestMapping("/{database}/table")
- public BaseResult getTableByDB(@PathVariable String database) {
- log.debug("SCHEMA信息获取,参数为{} {}",database);
-
- if (StringUtil.isBlank(database)) {
-
- return BaseResultGenerator.success4Message("ok");
- }
- return BaseResultGenerator.success(metadataService.getSchemaInfo(MetadataTypeEnum.TABLES.getValue(), database, true));
- }
-
- private void validationParam(String name, Map<String, Object> obj) {
- if (!metadataService.getAllTable().contains(name)) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), name + QGWMessageConst.NOT_SUPPORT_SETTING_TTL));
- }
- String param = JSON.toJSONString(obj);
- if (!VISIBILITY.containsAll(JsonPath.read(param, "$.fields[?(@.doc.visibility != null)].doc.visibility"))) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.SCHEMA_ERROR_VISIBILITY_VALUE));
- }
- List<Object> schemaDocTTL = JsonPath.read(param, "$.[?(@.doc.ttl != null)].doc.ttl");
- if (StringUtil.isNotEmpty(schemaDocTTL.stream().filter(o -> !(NumberUtil.isLong(String.valueOf(o)))).toArray())) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.SCHEMA_ERROR_TTL_VALUE));
- }
- List<Object> fieldDocTTL = JsonPath.read(param, "$.fields[?(@.doc.ttl != null)].doc.ttl");
- Object[] objects3 = fieldDocTTL.parallelStream().filter(o -> !NumberUtil.isLong(String.valueOf(o))).toArray();
- if (StringUtil.isNotEmpty(objects3)) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.SCHEMA_ERROR_TTL_VALUE));
- }
- if (schemaDocTTL.size() > 0 && fieldDocTTL.size() > 0
- && CollectionUtil.max(schemaDocTTL.stream().map(o -> Long.parseLong(o.toString())).collect(Collectors.toList())) < CollectionUtil.max(fieldDocTTL.stream().map(o -> Long.parseLong(o.toString())).collect(Collectors.toList()))) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.SCHEMA_ERROR_TTL_SIZE));
- }
- }
-}
diff --git a/src/main/java/com/mesalab/qgw/controller/QueryController.java b/src/main/java/com/mesalab/qgw/controller/QueryController.java
index 689ee8b9..be534094 100644
--- a/src/main/java/com/mesalab/qgw/controller/QueryController.java
+++ b/src/main/java/com/mesalab/qgw/controller/QueryController.java
@@ -1,44 +1,266 @@
package com.mesalab.qgw.controller;
+import cn.hutool.core.util.BooleanUtil;
import com.alibaba.fastjson2.JSON;
+import com.geedgenetworks.utils.StringUtil;
import com.mesalab.common.entity.BaseResult;
import com.mesalab.common.entity.BaseResultGenerator;
-import com.mesalab.qgw.model.basic.QueryProfile;
-import com.mesalab.qgw.model.basic.AuditLog;
-import com.mesalab.qgw.model.basic.CachedSubmitCheck;
-import com.mesalab.qgw.service.QueryService;
+import com.mesalab.common.enums.ExecutionMode;
+import com.mesalab.common.enums.OutputMode;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
+import com.mesalab.qgw.exception.QGWBusinessException;
+import com.mesalab.common.exception.CommonErrorCode;
+import com.mesalab.qgw.model.basic.*;
+import com.mesalab.qgw.service.DeferredResultHolder;
+import com.mesalab.qgw.service.QueryJobService;
+import com.mesalab.qgw.service.SQLSyncQueryService;
+import com.mesalab.services.configuration.JobConfig;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
+import org.springframework.web.bind.annotation.RequestParam;
+import org.springframework.web.context.request.async.DeferredResult;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
@Slf4j
@RestController
-@RequestMapping(value = "/sql")
public class QueryController {
- @Autowired
- private QueryService queryService;
+ private SQLSyncQueryService sqlSyncQueryService;
+ private QueryJobService queryJobService;
+ private JobConfig jobConfig;
+ private DeferredResultHolder deferredResultHolder;
- @GetMapping
+ @GetMapping("/sql")
@AuditLog("QueryController.get")
- @CachedSubmitCheck
- public BaseResult get(QueryProfile queryProfile) {
- if (queryProfile.queryParamIsNull())
+ public BaseResult get(SQLQueryContext queryContext) {
+ if (queryContext.originalSQLIsNull())
return BaseResultGenerator.success4Message("ok");
- log.debug("Quest Params:{}", JSON.toJSONString(queryProfile));
- return queryService.executeQuery(queryProfile);
+ log.debug("Quest Params:{}", JSON.toJSONString(queryContext));
+ return sqlSyncQueryService.executeQuery(queryContext);
}
- @PostMapping(produces = "application/json;charset=utf-8")
+ @PostMapping(value = "/sql", produces = "application/json;charset=utf-8")
@AuditLog("QueryController.post")
- @CachedSubmitCheck
- public BaseResult post(@RequestBody QueryProfile queryProfile) {
- if (queryProfile.queryParamIsNull()) {
+ public BaseResult post(@RequestBody SQLQueryContext queryContext) {
+ if (queryContext.originalSQLIsNull()) {
return BaseResultGenerator.success4Message("ok");
}
- log.debug("Quest Params:{}", JSON.toJSONString(queryProfile));
- return queryService.executeQuery(queryProfile);
+    log.debug("Request Params:{}", JSON.toJSONString(queryContext));
+ return sqlSyncQueryService.executeQuery(queryContext);
+
+ }
+
+ @PostMapping(value = "/v1/query/sql", consumes = "application/json", produces = "application/json")
+ @AuditLog("QueryController.createSQLQuery")
+ public BaseResult createSQLQuery(@RequestBody SqlQueryRequestParam sqlQueryRequestParam) {
+ log.info("Create an SQL Query {}", JSON.toJSONString(sqlQueryRequestParam));
+ if (sqlQueryRequestParam.isSavedQuery()) {
+ if (sqlQueryRequestParam.isDryRun() || !sqlQueryRequestParam.getExecutionMode().equals(ExecutionMode.NORMAL) || !sqlQueryRequestParam.getOutputMode().equals(OutputMode.JSON)) {
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.BAD_REQUEST_PARAM_SYNTAX_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.BAD_REQUEST_PARAM_SYNTAX_EXCEPTION.getMessage(),
+ "for the Saved Query Job, is_dry_run must be 0(default), execution_mode must be normal(default), and output_mode must be JSON(default)"));
+ }
+ return queryJobService.createSQLSavedQuery(sqlQueryRequestParam);
+ } else {
+ return queryJobService.createSQLAdHocQuery(sqlQueryRequestParam);
+ }
+ }
+
+ @PostMapping(value = "/v1/query/dsl", consumes = "application/json", produces = "application/json")
+ @AuditLog("QueryController.createDSLQuery")
+ public BaseResult createDSLQuery(@RequestBody DSLQueryRequestParam dslQueryRequestParam) {
+ log.info("Create a DSL query {}", JSON.toJSONString(dslQueryRequestParam));
+ if (dslQueryRequestParam.isSavedQuery()) {
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.BAD_REQUEST_PARAM_SYNTAX_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.BAD_REQUEST_PARAM_SYNTAX_EXCEPTION.getMessage(), "Currently, the DSL Saved Query is not supported."));
+ } else {
+ if (!dslQueryRequestParam.getOutputMode().equals(OutputMode.JSON)) {
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.BAD_REQUEST_PARAM_SYNTAX_EXCEPTION.getCode(),
+                String.format(CommonErrorCode.BAD_REQUEST_PARAM_SYNTAX_EXCEPTION.getMessage(), "Currently, the DSL Query only supports output_mode JSON(default)"));
+ }
+ return queryJobService.createDSLAdHocQuery(dslQueryRequestParam);
+ }
+ }
+
+ @GetMapping(value = "/v1/query/job/{id}/result", consumes = "application/x-www-form-urlencoded")
+ @AuditLog("QueryController.getJobResultById")
+ public DeferredResult<BaseResult> getJobResultById(@PathVariable String id, @RequestParam(value = "is_saved_query", required = false, defaultValue = "0") Integer isSavedQuery) {
+ BaseResult baseResult;
+ if (BooleanUtil.toBoolean(String.valueOf(isSavedQuery))) {
+ baseResult = queryJobService.getSavedQueryResultById(id);
+ } else {
+ baseResult = queryJobService.getAdHocQueryResultById(id);
+ }
+ DeferredResult<BaseResult> deferredResult =
+ deferredResultHolder.newDeferredResult(id, jobConfig.getResponseTimeout(), baseResult);
+ if (!jobConfig.isLongPollingEnabled()) {
+ deferredResultHolder.handleDeferredData(deferredResult, baseResult);
+ return deferredResult;
+ }
+ deferredResult.onTimeout(() -> {
+ deferredResultHolder.handleDeferredData(deferredResult, BooleanUtil.toBoolean(String.valueOf(isSavedQuery)) ? queryJobService.getSavedQueryResultById(id) : queryJobService.getAdHocQueryResultById(id));
+ });
+
+ if (StringUtil.isNotEmpty(baseResult)
+ && StringUtil.isNotEmpty(baseResult.getJob())
+ && Boolean.valueOf(String.valueOf(baseResult.getJob().get(JobConfig.IS_DONE)))) {
+ deferredResultHolder.handleDeferredData(deferredResult, baseResult);
+ }
+ return deferredResult;
+ }
+
+ @GetMapping(value = "/v1/query/job/{id}", consumes = "application/x-www-form-urlencoded")
+ @AuditLog("QueryController.getJobStatusById")
+ public DeferredResult<BaseResult> getJobStatusById(@PathVariable String id, @RequestParam(value = "is_saved_query", required = false, defaultValue = "0") Integer isSavedQuery) {
+ BaseResult baseResult;
+ if (BooleanUtil.toBoolean(String.valueOf(isSavedQuery))) {
+ baseResult = queryJobService.getSavedQueryStatusById(id);
+ } else {
+ baseResult = queryJobService.getAdHocQueryStatusById(id);
+ }
+ DeferredResult<BaseResult> deferredResult =
+ deferredResultHolder.newDeferredResult(id, jobConfig.getResponseTimeout(), baseResult);
+ if (!jobConfig.isLongPollingEnabled()) {
+ deferredResultHolder.handleDeferredData(deferredResult, baseResult);
+ return deferredResult;
+ }
+ deferredResult.onTimeout(() -> {
+ deferredResultHolder.handleDeferredData(deferredResult, BooleanUtil.toBoolean(String.valueOf(isSavedQuery)) ? queryJobService.getSavedQueryStatusById(id) : queryJobService.getAdHocQueryStatusById(id));
+ });
+ if (StringUtil.isNotEmpty(baseResult)
+ && StringUtil.isNotEmpty(baseResult.getJob())
+ && Boolean.valueOf(String.valueOf(baseResult.getJob().get(JobConfig.IS_DONE)))) {
+ deferredResultHolder.handleDeferredData(deferredResult, baseResult);
+ }
+ return deferredResult;
+ }
+
+ @GetMapping(value = "/v1/query/job/result", consumes = "application/x-www-form-urlencoded")
+ @AuditLog("QueryController.getJobResult")
+ public DeferredResult<BaseResult> getJobResult(@RequestParam(value = "job_ids") String jobIds
+ , @RequestParam(value = "is_saved_query", required = false, defaultValue = "0") Integer isSavedQueryInt) {
+ boolean isSavedQuery = BooleanUtil.toBoolean(String.valueOf(isSavedQueryInt));
+ List<String> ids = Arrays.stream(jobIds.split(",")).distinct().collect(Collectors.toList());
+ BaseResult baseResult;
+ if (isSavedQuery) {
+ baseResult = queryJobService.getSavedQueryResult(ids);
+ } else {
+ baseResult = queryJobService.getAdHocQueryResult(ids);
+ }
+
+ DeferredResult<BaseResult> deferredResult =
+ deferredResultHolder.newDeferredResult(String.valueOf(ids), jobConfig.getResponseTimeout(), baseResult);
+ if (!jobConfig.isLongPollingEnabled()) {
+ deferredResultHolder.handleDeferredData(deferredResult, baseResult);
+ return deferredResult;
+ }
+ deferredResult.onTimeout(() -> {
+ deferredResultHolder.handleDeferredData(deferredResult, BooleanUtil.toBoolean(String.valueOf(isSavedQuery)) ? queryJobService.getSavedQueryResult(ids) : queryJobService.getAdHocQueryResult(ids));
+ });
+
+ if (jobsIsDoneOfResult(baseResult)) {
+ deferredResultHolder.handleDeferredData(deferredResult, baseResult);
+ }
+ return deferredResult;
+ }
+
+ @GetMapping(value = "/v1/query/job", consumes = "application/x-www-form-urlencoded")
+ @AuditLog("QueryController.getJobStatus")
+ public DeferredResult<BaseResult> getJobStatus(@RequestParam(value = "job_ids") String jobIds
+ , @RequestParam(value = "is_saved_query", required = false, defaultValue = "0") Integer isSavedQueryInt) {
+ boolean isSavedQuery = BooleanUtil.toBoolean(String.valueOf(isSavedQueryInt));
+ List<String> ids = Arrays.stream(jobIds.split(",")).distinct().collect(Collectors.toList());
+ BaseResult baseResult;
+ if (isSavedQuery) {
+ baseResult = queryJobService.getSavedQueryStatus(ids);
+ } else {
+ baseResult = queryJobService.getAdHocQueryStatus(ids);
+ }
+
+ DeferredResult<BaseResult> deferredResult =
+ deferredResultHolder.newDeferredResult(String.valueOf(ids), jobConfig.getResponseTimeout(), baseResult);
+ if (!jobConfig.isLongPollingEnabled()) {
+ deferredResultHolder.handleDeferredData(deferredResult, baseResult);
+ return deferredResult;
+ }
+ deferredResult.onTimeout(() -> {
+ deferredResultHolder.handleDeferredData(deferredResult, BooleanUtil.toBoolean(String.valueOf(isSavedQuery)) ? queryJobService.getSavedQueryStatus(ids) : queryJobService.getAdHocQueryStatus(ids));
+ });
+
+ if (jobsIsDoneOfStatus(baseResult)) {
+ deferredResultHolder.handleDeferredData(deferredResult, baseResult);
+ }
+ return deferredResult;
+ }
+
+ @DeleteMapping(value = "/v1/query/job/{id}", consumes = "application/x-www-form-urlencoded")
+ @AuditLog("QueryController.deleteJobById")
+ public BaseResult deleteJobById(@PathVariable String id, @RequestParam(value = "is_saved_query", required = false, defaultValue = "0") Integer isSavedQueryInt) {
+ boolean isSavedQuery = BooleanUtil.toBoolean(String.valueOf(isSavedQueryInt));
+ if (isSavedQuery) {
+ return queryJobService.deleteSavedQueryById(id);
+ }
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.BAD_REQUEST_PARAM_SYNTAX_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.BAD_REQUEST_PARAM_SYNTAX_EXCEPTION.getMessage(), "The AdHoc Query Job does not yet support deletion."));
+ }
+
+ private boolean jobsIsDoneOfResult(BaseResult baseResult) {
+ if (StringUtil.isEmpty(baseResult)) {
+ return false;
+ }
+ if (StringUtil.isEmpty(baseResult.getData())) {
+ return false;
+ }
+ List<Map<String, Object>> data = (List<Map<String, Object>>) baseResult.getData();
+ for (Map<String, Object> datum : data) {
+ Map<String, Object> job = (Map<String, Object>) datum.get(JobConfig.JOB);
+ if (!BooleanUtil.toBoolean(String.valueOf(job.get(JobConfig.IS_DONE)))) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ private boolean jobsIsDoneOfStatus(BaseResult baseResult) {
+ if (StringUtil.isEmpty(baseResult)) {
+ return false;
+ }
+ if (StringUtil.isEmpty(baseResult.getData())) {
+ return false;
+ }
+ List<Map<String, Object>> data = (List<Map<String, Object>>) baseResult.getData();
+ for (Map<String, Object> datum : data) {
+ if (!BooleanUtil.toBoolean(String.valueOf(datum.get(JobConfig.IS_DONE)))) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ @Autowired
+ public void setQueryJobService(QueryJobService queryJobService) {
+ this.queryJobService = queryJobService;
+ }
+
+ @Autowired
+ public void setSqlSyncQueryService(SQLSyncQueryService sqlSyncQueryService) {
+ this.sqlSyncQueryService = sqlSyncQueryService;
+ }
+
+ @Autowired
+ public void setJobConfig(JobConfig jobConfig) {
+ this.jobConfig = jobConfig;
+ }
+
+ @Autowired
+ public void setDeferredResultHolder(DeferredResultHolder deferredResultHolder) {
+ this.deferredResultHolder = deferredResultHolder;
}
}
diff --git a/src/main/java/com/mesalab/qgw/controller/SystemController.java b/src/main/java/com/mesalab/qgw/controller/SystemController.java
index 23fa01e2..db8d78fa 100644
--- a/src/main/java/com/mesalab/qgw/controller/SystemController.java
+++ b/src/main/java/com/mesalab/qgw/controller/SystemController.java
@@ -2,93 +2,19 @@ package com.mesalab.qgw.controller;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
-import com.clearspring.analytics.util.Lists;
import com.mesalab.common.entity.BaseResult;
import com.mesalab.common.entity.BaseResultGenerator;
-import com.mesalab.common.enums.ResultCodeEnum;
-import com.mesalab.common.enums.ResultStatusEnum;
-import com.mesalab.common.exception.BusinessException;
-import com.mesalab.qgw.constant.QGWMessageConst;
-import com.mesalab.qgw.exception.QGWBusinessException;
-import com.mesalab.qgw.exception.QGWErrorCode;
-import com.mesalab.qgw.model.job.EncryptionInfo;
-import com.mesalab.qgw.model.job.StorageDeletionInfo;
-import com.mesalab.qgw.service.SystemService;
import com.mesalab.services.configuration.ThreadPoolMonitor;
-import com.geedgenetworks.utils.StringUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
-import java.util.List;
-import java.util.Map;
-
@RestController
@RequestMapping(value = "/v1/admin")
public class SystemController {
private static final Log log = LogFactory.get();
@Autowired
- private SystemService systemService;
- @Autowired
ThreadPoolMonitor threadPoolMonitor;
-
- @GetMapping ("setting/storage/quota")
- public BaseResult storageQuota() {
- return systemService.getStorageQuota();
- }
-
- @PutMapping(value = "setting/storage/quota")
- public BaseResult storageSetting(@RequestBody List<StorageDeletionInfo> list) {
- log.warn("数据配额设置, 参数: params is {}", list);
- if (StringUtil.isEmpty(list)) {
- return BaseResultGenerator.failure(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),QGWMessageConst.PARAMETER_NOT_OBTAINED));
- }
- for (StorageDeletionInfo info : list) {
- if (StringUtil.isBlank(info.getType()) || StringUtil.isBlank(String.valueOf(info.getMaxDays()))) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),QGWMessageConst.LOGTYPE_AND_MAXDAYS_NOT_NULL));
- }
- if ("ALL".equalsIgnoreCase(info.getType()) && list.size() != 1) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),QGWMessageConst.CANNOT_COEXIST_OTHER_TYPES));
- }
- if (!(info.getMaxDays() >= 0 && info.getMaxDays() <= 2000)) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),QGWMessageConst.MAXDAYS_ERROR));
- }
- }
- return systemService.deleteStorage(list);
- }
-
- @RequestMapping(value = "setting/storage/quota",method = RequestMethod.DELETE)
- public BaseResult storageDeletion(@RequestParam String type){
- List<StorageDeletionInfo> deletionInfoList = Lists.newArrayList();
- StorageDeletionInfo deletionInfo = new StorageDeletionInfo();
- deletionInfo.setType(type);
- deletionInfo.setMaxDays(0);
- deletionInfoList.add(deletionInfo);
- return systemService.deleteStorage(deletionInfoList);
- }
-
- @RequestMapping("setting/storage/quota/daily_trend")
- public BaseResult dailyTrendOfStorage(@RequestParam Map<String, Object> param) {
- String searchStartTime = StringUtil.stripToEmpty((String) param.get("start_time"));
- String searchEndTime = StringUtil.stripToEmpty((String) param.get("end_time"));
- return systemService.dailyTrendOfStorage(searchStartTime, searchEndTime);
- }
-
-
- @PostMapping(value = "setting/tool/password_encryption")
- public BaseResult getCiphertext(EncryptionInfo param) {
- log.info("Plaintext encrypted,The plaintext argument is : {}", param);
- if (StringUtil.isEmpty(param.getPassword()) || StringUtil.isEmpty(param.getSalt())) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),QGWMessageConst.PASSWORD_OR_SALT_CANNOT_BE_EMPTY));
- }
- return systemService.getCiphertext(param);
- }
-
@GetMapping(value = "/clear/realJobQueue")
public BaseResult clearTaskQueue() {
log.info("Clear real job queue.");
diff --git a/src/main/java/com/mesalab/qgw/controller/TestController.java b/src/main/java/com/mesalab/qgw/controller/TestController.java
index fc2e84f2..4ce602c5 100644
--- a/src/main/java/com/mesalab/qgw/controller/TestController.java
+++ b/src/main/java/com/mesalab/qgw/controller/TestController.java
@@ -29,7 +29,7 @@ import java.util.List;
public class TestController {
@Autowired
- private DiagnosisService diagnosisService;
+ TroubleshootingController troubleshootingController;
@Autowired
private ProjectInfo projectProperties;
@Autowired
@@ -56,6 +56,7 @@ public class TestController {
* @return BaseResult
*/
@GetMapping(value = "projectProperties")
+ @AuditLog("TestController.projectProperties")
public BaseResult projectProperties() {
log.info("Project Properties: {}", projectProperties);
return BaseResultGenerator.success("ok", projectProperties);
@@ -67,6 +68,7 @@ public class TestController {
* @return 返回当前项目的环境值
*/
@GetMapping(value = "activeProfiles")
+ @AuditLog("TestController.activeProfiles")
public BaseResult activeProfiles() {
List<String> activeProfiles = Lists.newArrayList(nacosConfig.getNamespace());
log.info("Current Namespace: {}", activeProfiles);
@@ -79,6 +81,7 @@ public class TestController {
* @return 返回当前项目的运行环境
*/
@GetMapping(value = "env")
+ @AuditLog("TestController.env")
public BaseResult env() {
return BaseResultGenerator.success(Lists.newArrayList(nacosConfig.getNamespace()));
}
@@ -89,41 +92,18 @@ public class TestController {
* @return 检查授权
*/
@GetMapping(value = "checkAuthc")
+ @AuditLog("TestController.checkAuthc")
public BaseResult checkAuthc() {
return BaseResultGenerator.success();
}
-
- /**
- * 运行测试用例
- *
- * @return
- */
- @GetMapping(value = "runSql")
- @Deprecated
- public BaseResult runSql() {
- BaseResult result = diagnosisService.runPocSQL(false, "validation", null);
- return result;
- }
-
- /**
- * 校验schema
- *
- * @return
- */
- @GetMapping(value = "runSchema")
- @Deprecated
- public BaseResult runSchema() {
- BaseResult result = diagnosisService.validateSchema();
- return result;
- }
-
/**
* 检查job动态连接配置
*
* @return
*/
@GetMapping(value = "jobAdminHttpSource")
+ @AuditLog("TestController.jobAdminHttpSource")
public BaseResult jobAdminHttpSource() {
return BaseResultGenerator.success("ok", jobAdminHttpSource);
}
@@ -134,6 +114,7 @@ public class TestController {
* @return
*/
@GetMapping(value = "druidIoHttpSource")
+ @AuditLog("TestController.druidIoHttpSource")
public BaseResult druidIoHttpSource() {
return BaseResultGenerator.success("ok", druidIoHttpSource);
}
@@ -144,6 +125,7 @@ public class TestController {
* @return
*/
@GetMapping(value = "clickHouseHttpSource")
+ @AuditLog("TestController.clickHouseHttpSource")
public BaseResult clickHouseHttpSource() {
return BaseResultGenerator.success("ok", clickHouseHttpSource);
}
@@ -154,6 +136,7 @@ public class TestController {
* @return
*/
@GetMapping(value = "hBaseAPISource")
+ @AuditLog("TestController.hBaseAPISource")
public BaseResult hBaseAPISource() {
return BaseResultGenerator.success("ok", hBaseAPISource);
}
@@ -164,6 +147,7 @@ public class TestController {
* @return
*/
@GetMapping(value = "engineConfigSource")
+ @AuditLog("TestController.engineConfigSource")
public BaseResult engineConfigSource() {
return BaseResultGenerator.success("ok", engineConfigSource);
}
@@ -174,6 +158,7 @@ public class TestController {
* @return
*/
@GetMapping(value = "arangoConfig")
+ @AuditLog("TestController.arangoConfig")
public BaseResult arangoConfig() {
return BaseResultGenerator.success("ok", arangoConfig);
}
@@ -184,6 +169,7 @@ public class TestController {
* @return
*/
@GetMapping(value = "httpConfig")
+ @AuditLog("TestController.httpConfig")
public BaseResult httpConfig() {
return BaseResultGenerator.success("ok", httpConfig);
}
diff --git a/src/main/java/com/mesalab/qgw/controller/TroubleshootingController.java b/src/main/java/com/mesalab/qgw/controller/TroubleshootingController.java
new file mode 100644
index 00000000..0489d4d5
--- /dev/null
+++ b/src/main/java/com/mesalab/qgw/controller/TroubleshootingController.java
@@ -0,0 +1,67 @@
+package com.mesalab.qgw.controller;
+
+import cn.hutool.core.util.BooleanUtil;
+import cn.hutool.core.util.StrUtil;
+import cn.hutool.http.HttpStatus;
+import com.alibaba.fastjson2.JSONObject;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.mesalab.common.entity.BaseResult;
+import com.mesalab.common.enums.*;
+import com.mesalab.qgw.constant.QGWMessageConst;
+import com.mesalab.common.exception.BusinessException;
+import com.mesalab.qgw.exception.QGWBusinessException;
+import com.mesalab.common.exception.CommonErrorCode;
+import com.mesalab.qgw.model.basic.AuditLog;
+import com.mesalab.qgw.service.TroubleshootingService;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.web.bind.annotation.*;
+
+import java.util.Arrays;
+
+/**
+ * Galaxy日志自动诊断接口
+ */
+@Slf4j
+@RestController
+@RequestMapping(value = "/v1/troubleshooting")
+public class TroubleshootingController {
+
+ @Autowired
+ private TroubleshootingService troubleshootingService;
+ @RequestMapping("/component/status")
+ @AuditLog("TroubleshootingController.componentStatus")
+ public JSONObject componentStatus() {
+ return troubleshootingService.getComponentStatus();
+ }
+
+ @RequestMapping(value = "sanity", method = RequestMethod.GET)
+ @AuditLog("TroubleshootingController.sanity")
+ public BaseResult sanity(String test) {
+ if (Arrays.stream(ConsistencyOption.values()).noneMatch(o -> o.getValue().equalsIgnoreCase(test))) {
+ throw new QGWBusinessException(HttpStatus.HTTP_BAD_REQUEST, CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.CONSISTENCY_OPTION_ERROR));
+ }
+ if (ConsistencyOption.TTL_CONSISTENCY.getValue().equalsIgnoreCase(test)) {
+ log.info("ttl consistency check interface.");
+ return troubleshootingService.consistencyCheck();
+ } else if (ConsistencyOption.DATASET_VERIFICATION.getValue().equalsIgnoreCase(test)) {
+ log.info("dataset verification interface. ");
+ return troubleshootingService.datesetVerification();
+ } else {
+ log.info("schema consistency check interface.");
+ return troubleshootingService.validateMetadata();
+ }
+ }
+
+ @RequestMapping(value = "/benchmark", method = RequestMethod.GET)
+ @AuditLog("TroubleshootingController.benchmark")
+ public BaseResult benchmark(@RequestParam String test, @RequestParam(value = "is_saved", required = false, defaultValue = "0") @JsonProperty("isSaved") Integer isSaved) throws BusinessException {
+ if (StrUtil.isBlankIfStr(test)) {
+ throw new QGWBusinessException(HttpStatus.HTTP_BAD_REQUEST, CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.BENCHMARK_TEST_PARAM_CANNOT_BE_EMPTY));
+ }
+ return troubleshootingService.benchmarkTest(test, BooleanUtil.toBoolean(String.valueOf(isSaved)));
+ }
+
+}
diff --git a/src/main/java/com/mesalab/qgw/controller/UtilController.java b/src/main/java/com/mesalab/qgw/controller/UtilController.java
new file mode 100644
index 00000000..c47ecd4d
--- /dev/null
+++ b/src/main/java/com/mesalab/qgw/controller/UtilController.java
@@ -0,0 +1,41 @@
+package com.mesalab.qgw.controller;
+
+import cn.hutool.log.Log;
+import cn.hutool.log.LogFactory;
+import com.geedgenetworks.utils.StringUtil;
+import com.mesalab.common.entity.BaseResult;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
+import com.mesalab.common.exception.CommonErrorCode;
+import com.mesalab.qgw.constant.QGWMessageConst;
+import com.mesalab.qgw.exception.QGWBusinessException;
+import com.mesalab.qgw.model.basic.AuditLog;
+import com.mesalab.qgw.model.job.EncryptionInfo;
+import com.mesalab.qgw.service.UtilService;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.web.bind.annotation.*;
+
+@RestController
+@RequestMapping(value = "/v1/util")
+public class UtilController {
+
+ private static final Log log = LogFactory.get();
+ @Autowired
+ private UtilService utilService;
+
+ @GetMapping(value = "/sql_parser", consumes = "application/x-www-form-urlencoded")
+ @AuditLog("UtilController.getSQLSyntaxTree")
+ public BaseResult getSQLSyntaxTree(String sql){
+ return utilService.getSQLSyntaxTree(sql);
+ }
+
+ @PostMapping(value = "/password_generator", consumes = "application/json")
+ @AuditLog("UtilController.getCiphertext")
+ public BaseResult getCiphertext(@RequestBody EncryptionInfo param) {
+        log.info("Plaintext encrypted, the plaintext argument is: {}", param);
+ if (StringUtil.isEmpty(param.getPassword()) || StringUtil.isEmpty(param.getSalt())) {
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.PASSWORD_OR_SALT_CANNOT_BE_EMPTY));
+ }
+ return utilService.getCiphertext(param);
+ }
+}
diff --git a/src/main/java/com/mesalab/qgw/dialect/AbstractDataSourceDialect.java b/src/main/java/com/mesalab/qgw/dialect/AbstractDataSourceDialect.java
index 02b55393..6724f2ed 100644
--- a/src/main/java/com/mesalab/qgw/dialect/AbstractDataSourceDialect.java
+++ b/src/main/java/com/mesalab/qgw/dialect/AbstractDataSourceDialect.java
@@ -1,61 +1,86 @@
package com.mesalab.qgw.dialect;
+import cn.hutool.log.Log;
+import cn.hutool.log.LogFactory;
import com.alibaba.fastjson2.JSON;
import com.google.common.collect.Maps;
import com.mesalab.common.entity.BaseResult;
import com.geedgenetworks.utils.Encodes;
+import com.mesalab.qgw.model.basic.SQLQueryContext;
+import com.mesalab.qgw.model.basic.udf.TIME_FLOOR_WITH_FILL;
+import com.mesalab.qgw.model.basic.udf.UDF;
import lombok.Data;
-import java.util.Map;
-import java.util.Optional;
+import java.util.*;
@Data
public abstract class AbstractDataSourceDialect implements Dialect {
- abstract void init();
+ protected SQLQueryContext sqlQueryContext;
+ private static final Log log = LogFactory.get();
+ public AbstractDataSourceDialect(SQLQueryContext sqlQueryContext) {
+ this.sqlQueryContext = sqlQueryContext;
+ }
- /**
- * SQL查询语句转换为方言所认知的查询语句。
- * @param sql
- * @return
- */
- abstract String convertQuery(String sql);
+ abstract void init();
/**
- * 基于RBO(rule-base optimizer)获取最优化的SQL
+ * Query SQL statement to be converted the SQL that can be executed by the data source.
+ *
+ * @param sql
* @return
*/
- abstract String getBestQuery();
+ abstract String convertQuery(String sql);
/**
- * 获取原始SQL语句,不做规则转换,直接下推
+ * Generate the best query statement by RBO(Rule Based Optimization)
+ *
* @return
*/
- abstract String getOriginalQuery();
+ abstract String generateRuleBaseOptimizedQuery();
/**
- * 获取样例SQL,目标生成无Filter语句。
+ * 获取样例SQL,目标生成空执行语句。
+ *
* @return
*/
- abstract String getSampleQuery();
+ abstract String getSampleQuery();
/**
* get Explain Statement
+ *
* @return
*/
abstract String getExplainQuery();
/**
- * 简化规范异常信息展示。
- * @return
+ * Desc: 简化规范异常信息展示。
+ *
+ * @return {@link String}
*/
abstract String simplyErrorMessage(Object object);
+ /**
+ * Desc: 构建样例数据 meta 信息
+ *
+ * @created by wWei
+ * @date 2024/3/7 09:54
+ */
+ abstract List<Map<String, String>> buildExampleMeta();
+
+ /**
+ * Desc: 样例数据
+ *
+ * @return {@link null}
+ * @created by wWei
+ * @date 2024/3/7 10:15
+ */
+ abstract List<Object> buildExampleData();
@Override
final public BaseResult executeQuery() {
init();
- String transformedSQL = convertQuery(getBestQuery());
+ String transformedSQL = convertQuery(generateRuleBaseOptimizedQuery());
return generateBaseResult(transformedSQL, Optional.of("OK"));
}
@@ -63,45 +88,51 @@ public abstract class AbstractDataSourceDialect implements Dialect {
final public BaseResult executeSampleQuery() {
init();
String transformedSQL = getSampleQuery();
-
return generateBaseResult(transformedSQL, Optional.of("OK"));
}
/**
* 解析并生成最终结果
+ *
* @param sql
* @param message
* @return
*/
- abstract BaseResult generateBaseResult(String sql, Optional<String> message);
+ abstract BaseResult generateBaseResult(String sql, Optional<String> message);
/**
* shows the execution plan of a select statement.
+ *
* @return
*/
final public BaseResult executeExplainPlan() {
init();
String transformedSQL = getExplainQuery();
Map sqlMap = Maps.newHashMap();
- sqlMap.put("OriginalSQL", Encodes.encodeBase64(getOriginalQuery().getBytes()));
- sqlMap.put("transformedSQL", Encodes.encodeBase64(transformedSQL.getBytes()));
+ sqlMap.put("OriginalSQL", Encodes.encodeBase64(sqlQueryContext.getOriginalSQL().getBytes()));
+ sqlMap.put("TransformedSQL", Encodes.encodeBase64(transformedSQL.getBytes()));
return generateBaseResult(transformedSQL, Optional.of(JSON.toJSONString(sqlMap)));
}
-
@Override
- final public BaseResult executeSyntaxCheck() {
+ final public BaseResult executeSyntaxValidation() {
init();
String transformedSQL = getSampleQuery();
- return generateBaseResult(transformedSQL, Optional.of(getOriginalQuery()));
+ BaseResult baseResult = generateBaseResult(transformedSQL, Optional.of(sqlQueryContext.getOriginalSQL()));
+ try {
+ baseResult.setMeta(buildExampleMeta());
+ baseResult.setData(buildExampleData());
+ } catch (Exception e) {
+ log.error("Build Example Data Error: {} ", e.toString());
+ }
+ return baseResult;
}
-
@Override
final public BaseResult executeAdministrativeQuery() {
init();
- String sql = getOriginalQuery();
- return generateBaseResult(sql, Optional.of("OK"));
+ return generateBaseResult(sqlQueryContext.getOriginalSQL(), Optional.of("OK"));
}
+
}
diff --git a/src/main/java/com/mesalab/qgw/dialect/AbstractEngineDialect.java b/src/main/java/com/mesalab/qgw/dialect/AbstractEngineDialect.java
index be1a3e01..e52248db 100644
--- a/src/main/java/com/mesalab/qgw/dialect/AbstractEngineDialect.java
+++ b/src/main/java/com/mesalab/qgw/dialect/AbstractEngineDialect.java
@@ -15,12 +15,12 @@ public abstract class AbstractEngineDialect implements Dialect {
/**
* Desc: 执行自定义函数操作
*
- * @param
+ * @param isDryRun
* @return
* @created by wWei
* @date 2022/4/27 5:25 下午
*/
- abstract void executeUDF();
+ abstract void executeUDF(boolean isDryRun);
/**
* Desc: 获取联合查询SQL
@@ -62,14 +62,14 @@ public abstract class AbstractEngineDialect implements Dialect {
@Override
final public BaseResult executeQuery() {
init();
- executeUDF();
+ executeUDF(false);
return generateBaseResult(getFederateQuery());
}
@Override
final public BaseResult executeSampleQuery() {
init();
- executeUDF();
+ executeUDF(false);
return generateBaseResult(getSampleQuery());
}
@@ -79,10 +79,10 @@ public abstract class AbstractEngineDialect implements Dialect {
}
@Override
- final public BaseResult executeSyntaxCheck() {
+ final public BaseResult executeSyntaxValidation() {
init();
- executeUDF();
- return generateBaseResult(getSyntaxCheckQuery());
+ executeUDF(true);
+ return generateBaseResult(getSampleQuery());
}
@Override
diff --git a/src/main/java/com/mesalab/qgw/dialect/ClickHouseDialect.java b/src/main/java/com/mesalab/qgw/dialect/ClickHouseDialect.java
index 2597f3f7..4873b559 100644
--- a/src/main/java/com/mesalab/qgw/dialect/ClickHouseDialect.java
+++ b/src/main/java/com/mesalab/qgw/dialect/ClickHouseDialect.java
@@ -1,13 +1,13 @@
package com.mesalab.qgw.dialect;
-import cn.hutool.core.util.BooleanUtil;
import cn.hutool.core.util.StrUtil;
import cn.hutool.core.util.URLUtil;
import cn.hutool.crypto.digest.DigestUtil;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.fastjson2.JSON;
+import com.alibaba.fastjson2.JSONPath;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.jayway.jsonpath.JsonPath;
@@ -17,26 +17,29 @@ import com.mesalab.common.entity.DataTypeMapping;
import com.mesalab.common.enums.*;
import com.mesalab.common.utils.*;
import com.mesalab.common.utils.sqlparser.*;
+import com.mesalab.qgw.constant.MetaConst;
import com.mesalab.qgw.exception.QGWBusinessException;
-import com.mesalab.qgw.exception.QGWErrorCode;
+import com.mesalab.common.exception.CommonErrorCode;
+import com.mesalab.qgw.model.basic.udf.ROLLUP;
import com.mesalab.qgw.service.RewriteTable;
import com.mesalab.qgw.model.basic.HttpConfig;
import com.mesalab.qgw.model.basic.*;
-import com.mesalab.qgw.service.MetadataService;
-import com.mesalab.qgw.service.impl.HttpClientService;
+import com.mesalab.qgw.service.DatabaseService;
import com.geedgenetworks.utils.Encodes;
import com.geedgenetworks.utils.StringUtil;
+import com.mesalab.qgw.service.impl.HttpClientServiceV2;
+import lombok.SneakyThrows;
import net.sf.jsqlparser.JSQLParserException;
import net.sf.jsqlparser.expression.Expression;
import net.sf.jsqlparser.parser.CCJSqlParserUtil;
import net.sf.jsqlparser.statement.select.*;
import net.sf.jsqlparser.util.deparser.ExpressionDeParser;
-import org.apache.avro.Schema;
import org.apache.http.NameValuePair;
import org.apache.http.client.utils.URLEncodedUtils;
import org.springframework.util.CollectionUtils;
import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@@ -44,6 +47,7 @@ import java.util.regex.Pattern;
public class ClickHouseDialect extends AbstractDataSourceDialect {
private static final Log log = LogFactory.get();
private static final Pattern pNullable = Pattern.compile("\\bNullable\\((.*?)\\)", Pattern.CASE_INSENSITIVE);
+ private static final Pattern pDateTime = Pattern.compile("\\b(DateTime|DateTime64|Date)\\((.*?)\\)", Pattern.CASE_INSENSITIVE);
private static final Pattern pIn = Pattern.compile("(?<!\\bnot)\\s+in\\s*\\(\\s*SELECT\\b", Pattern.CASE_INSENSITIVE);
private static final Pattern pNotIn = Pattern.compile("\\bnot\\s+in\\s*\\(\\s*SELECT\\b", Pattern.CASE_INSENSITIVE);
private static final Pattern pUniq = Pattern.compile("count\\s*\\(\\s*distinct\\s*\\((.*?)\\)", Pattern.CASE_INSENSITIVE);
@@ -51,16 +55,14 @@ public class ClickHouseDialect extends AbstractDataSourceDialect {
private static final Pattern pWhere = Pattern.compile("where\\s*", Pattern.CASE_INSENSITIVE);
private static final Pattern pGroupBy = Pattern.compile("\\bgroup\\s+by\\s+", Pattern.CASE_INSENSITIVE);
private static final Pattern pDistinct = Pattern.compile("\\b(DISTINCT)(\\(|\\s+)", Pattern.CASE_INSENSITIVE);
- private static final Pattern pAggregateFunSpecificOfCK = Pattern.compile("\\b(anyHeavy|anyLast|argMin|argMax|avgWeighted|topK|topKWeighted|groupArray|groupUniqArray|groupArrayInsertAt|groupArrayMovingAvg|groupArrayMovingSum|groupBitAnd|groupBitOr|groupBitXor|groupBitmap|groupBitmapAnd|groupBitmapOr|groupBitmapXor|sumWithOverflow|sumMap|minMap|maxMap|skewSamp|skewPop|kurtSamp|kurtPop|uniq|uniqExact|uniqCombined|uniqCombined64|uniqHLL12|quantile|quantiles|quantileExact|quantileExactLow|quantileExactHigh|quantileExactWeighted|quantileTiming|quantileTimingWeighted|quantileDeterministic|quantileTDigest|quantileTDigestWeighted|quantileBFloat16|quantileBFloat16Weighted|simpleLinearRegression|stochasticLinearRegression|stochasticLogisticRegression|categoricalInformationValue)\\s*\\(", Pattern.CASE_INSENSITIVE);
+ public static final Pattern pAggregateFunSpecificOfCK = Pattern.compile("\\b(anyHeavy|anyLast|argMin|argMax|avgWeighted|topK|topKWeighted|groupArray|groupUniqArray|groupArrayInsertAt|groupArrayMovingAvg|groupArrayMovingSum|groupBitAnd|groupBitOr|groupBitXor|groupBitmap|groupBitmapAnd|groupBitmapOr|groupBitmapXor|sumWithOverflow|sumMap|minMap|maxMap|skewSamp|skewPop|kurtSamp|kurtPop|uniq|uniqExact|uniqCombined|uniqCombined64|uniqHLL12|quantile|quantiles|quantileExact|quantileExactLow|quantileExactHigh|quantileExactWeighted|quantileTiming|quantileTimingWeighted|quantileDeterministic|quantileTDigest|quantileTDigestWeighted|quantileBFloat16|quantileBFloat16Weighted|simpleLinearRegression|stochasticLinearRegression|stochasticLogisticRegression|categoricalInformationValue)\\s*\\(", Pattern.CASE_INSENSITIVE);
private static final Pattern pCount1 = Pattern.compile("count\\(\\s*1\\s*\\)", Pattern.CASE_INSENSITIVE);
- private ClickHouseHttpSource clickHouseHttpSource = (ClickHouseHttpSource) SpringContextUtil.getBean("clickHouseHttpSource");
- private HttpClientService httpClientService = (HttpClientService) SpringContextUtil.getBean("httpClientService");
- private MetadataService metadataService = (MetadataService) SpringContextUtil.getBean("metadataService");
- private EngineConfigSource engineConfigSource = (EngineConfigSource) SpringContextUtil.getBean("engineConfigSource");
- private HttpConfig httpConfig = (HttpConfig) SpringContextUtil.getBean("httpConfig");
- public QueryProfile param;
+ private final ClickHouseHttpSource clickHouseHttpSource = (ClickHouseHttpSource) SpringContextUtil.getBean("clickHouseHttpSource");
+ private final HttpClientServiceV2 httpClientService = (HttpClientServiceV2) SpringContextUtil.getBean("httpClientServiceV2");
+ private final DatabaseService databaseService = (DatabaseService) SpringContextUtil.getBean("databaseService");
+ private final EngineConfigSource engineConfigSource = (EngineConfigSource) SpringContextUtil.getBean("engineConfigSource");
+ private final HttpConfig httpConfig = (HttpConfig) SpringContextUtil.getBean("httpConfig");
public BaseResult baseResult;
-
private final static double PERCENT_UP_LINE = 0.98;
private final static double PERCENT_DOWN_LINE = 0.01;
private final static String DB_EXCEPTION_START = "e.displayText() = DB::Exception: ";
@@ -86,19 +88,19 @@ public class ClickHouseDialect extends AbstractDataSourceDialect {
metaMap.put("DateTime", DataTypeMapping.TIMESTAMP);
}
- public ClickHouseDialect(QueryProfile param) {
- this.param = param;
+ public ClickHouseDialect(SQLQueryContext queryContext) {
+ super(queryContext);
}
@Override
public void init() {
- if (StringUtil.isNotEmpty(param.getDbQuerySource())) {
- param.getDbQuerySource().setExecSQL(param.getDbQuerySource().getSqlBody());
+ if (StringUtil.isNotEmpty(sqlQueryContext.getDbSelectStatement())) {
+ sqlQueryContext.getDbSelectStatement().setExecSQL(sqlQueryContext.getDbSelectStatement().getSqlBody());
} else {
- SQLQuerySource sqlQuerySource = new SQLQuerySource();
- sqlQuerySource.setExecSQL(param.getQuery());
- param.setDbQuerySource(sqlQuerySource);
+ SelectStatement selectStatement = new SelectStatement();
+ selectStatement.setExecSQL(sqlQueryContext.getOriginalSQL());
+ sqlQueryContext.setDbSelectStatement(selectStatement);
}
baseResult = BaseResultGenerator.success();
}
@@ -116,16 +118,11 @@ public class ClickHouseDialect extends AbstractDataSourceDialect {
}
@Override
- public String getBestQuery() {
+ public String generateRuleBaseOptimizedQuery() {
return new Optimizer().generateQueryLimit().
generateSQL().generateSubQuery().generateTableAlias().generateApproximation().build();
}
- @Override
- public String getOriginalQuery() {
- return param.getQuery();
- }
-
/**
* 获取语法检测sql
*
@@ -133,50 +130,48 @@ public class ClickHouseDialect extends AbstractDataSourceDialect {
*/
@Override
public String getSampleQuery() {
- String sampleSQL = param.getDbQuerySource().getSqlBody();
+ String sampleSQL = sqlQueryContext.getDbSelectStatement().getSqlBody();
try {
sampleSQL = convertQuery(convertSampleSql(sampleSQL));
} catch (JSQLParserException e) {
log.error("Sample sql error {}, execute original sql: {}, error is: {} ",
- sampleSQL, sampleSQL = param.getDbQuerySource().getSqlBody(), e.getMessage() == null ? e.getCause() : e.getMessage());
+ sampleSQL, sampleSQL = sqlQueryContext.getDbSelectStatement().getSqlBody(), e.getMessage() == null ? e.getCause() : e.getMessage());
}
return sampleSQL;
}
@Override
String getExplainQuery() {
- return "explain " + convertQuery(getBestQuery());
+ return "explain " + convertQuery(generateRuleBaseOptimizedQuery());
}
@Override
public BaseResult executeKillQuery(String queryId) {
- String killSql = StrUtil.format("kill query where query_id ='{}'", queryId);
+ String killSql = StrUtil.format("kill query on cluster ck_cluster where is_initial_query = 1 and query_id ='{}'", queryId);
String queryURL = URLUtil.normalize(clickHouseHttpSource.getUrl() + "/?");
StringBuilder queryParamBuilder = new StringBuilder(getLongTermAccount()).append("&")
- .append("database=").append(clickHouseHttpSource.getDbName()).append("&")
- .append("query=").append(killSql)
- .append(" FORMAT ").append(QueryFormatEnum.JSON.getValue().toUpperCase()).append(" ;");
- List<NameValuePair> values = URLEncodedUtils.parse(queryParamBuilder.toString(), Charset.forName("UTF-8"));
-
- Map<String, String> map = httpClientService.httpGet(queryURL + URLEncodedUtils.format(values, "utf-8"), 0);
- if (Integer.parseInt(map.get("status")) == ResultStatusEnum.SUCCESS.getCode()) {
- return BaseResultGenerator.generate(ResultStatusEnum.ACCEPTED.getCode(), null, "ok", null, null, null, null);
+ .append("database=").append(clickHouseHttpSource.getDbName());
+ List<NameValuePair> values = URLEncodedUtils.parse(queryParamBuilder.toString(), StandardCharsets.UTF_8);
+
+ HttpResponseResult responseResult = httpClientService.post(queryURL + URLEncodedUtils.format(values, StandardCharsets.UTF_8.toString()), killSql);
+ if (responseResult.getStatusCode() == HttpStatusCodeEnum.SUCCESS.getCode()) {
+ return BaseResultGenerator.generate(HttpStatusCodeEnum.ACCEPTED.getCode(), null, true, "ok", null, null, null, null);
} else {
- return BaseResultGenerator.failure(Integer.parseInt(map.get("status")), map.get("result"));
+ return BaseResultGenerator.failure(responseResult.getStatusCode(), responseResult.getErrorMessage());
}
}
@Override
public BaseResult getProcesses(String queryId) {
List<String> tableNames = Lists.newArrayList();
- SQLQuerySource sqlQuerySource = new SQLQuerySource();
- String tableName = "processes";
+ SelectStatement selectStatement = new SelectStatement();
+ String tableName = "processes_cluster";
tableNames.add(tableName);
- String sql = StrUtil.format("select elapsed,total_rows_approx,read_rows from system.{} where query_id='{}'", tableName, queryId);
- param.setQuery(sql);
- sqlQuerySource.setExecSQL(param.getQuery());
- sqlQuerySource.setTableNames(tableNames);
- param.setDbQuerySource(sqlQuerySource);
+ String sql = StrUtil.format("select elapsed,total_rows_approx,read_rows from system.{} where is_initial_query = 1 and query_id='{}'", tableName, queryId);
+ sqlQueryContext.setOriginalSQL(sql);
+ selectStatement.setExecSQL(sqlQueryContext.getOriginalSQL());
+ selectStatement.setTableNames(tableNames);
+ sqlQueryContext.setDbSelectStatement(selectStatement);
BaseResult baseResult = generateBaseResult(sql, Optional.of("ok"));
Map<String, Object> result = new HashMap<>(16);
if (!baseResult.isSuccess()) {
@@ -220,6 +215,10 @@ public class ClickHouseDialect extends AbstractDataSourceDialect {
if (matcher.find()) {
type = matcher.group(1);
}
+ matcher = pDateTime.matcher(type);
+ if (matcher.find()) {
+ type = matcher.group(1);
+ }
x.put("type", metaMap.get(type) == null ? DataTypeMapping.STRING : metaMap.get(type));
});
return metaList;
@@ -238,7 +237,7 @@ public class ClickHouseDialect extends AbstractDataSourceDialect {
* 1.不支持 count(1) 需转为count(*)
* 2.针对非聚合SQL,非分组sql in/not in 替换为 Global in/not
*
- * @return
+ * @return Converter
*/
public Converter generateStandard() {
sql = generateCount(sql);
@@ -285,7 +284,10 @@ public class ClickHouseDialect extends AbstractDataSourceDialect {
}
public Converter generateDateFunction() {
- sql = SQLFunctionUtil.generateDateFunction(sql, DBTypeEnum.CLICKHOUSE.getValue());
+ sql = SQLFunctionUtil.generateDateFunction(sql, DBEngineType.CLICKHOUSE.getValue());
+ if (sqlQueryContext.getDbSelectStatement().getUdfSet().stream().noneMatch(udf -> udf instanceof ROLLUP)) {
+ sql = SQLFunctionUtil.replaceRollup(sql);
+ }
return this;
}
@@ -331,19 +333,19 @@ public class ClickHouseDialect extends AbstractDataSourceDialect {
* @return
*/
public Optimizer generateQueryLimit() {
- if (StringUtil.isEmpty(param.getEngineQuerySource())) {
- setDefaultResultRows(param.getDbQuerySource(), engineConfigSource.getDefaultResultNum());
- setMaxCacheResultRows(param.getDbQuerySource(), engineConfigSource.getMaxCacheNum());
+ if (StringUtil.isEmpty(sqlQueryContext.getFederationSelectStatement())) {
+ setDefaultResultRows(sqlQueryContext.getDbSelectStatement(), engineConfigSource.getDefaultResultNum());
+ setMaxCacheResultRows(sqlQueryContext.getDbSelectStatement(), engineConfigSource.getMaxCacheNum());
} else {
- setDefaultResultRows(param.getEngineQuerySource(), engineConfigSource.getDefaultResultNum());
- setMaxCacheResultRows(param.getDbQuerySource(), engineConfigSource.getMaxCacheNum());
- setMaxCacheResultRows(param.getEngineQuerySource(), engineConfigSource.getMaxCacheNum());
+ setDefaultResultRows(sqlQueryContext.getFederationSelectStatement(), engineConfigSource.getDefaultResultNum());
+ setMaxCacheResultRows(sqlQueryContext.getDbSelectStatement(), engineConfigSource.getMaxCacheNum());
+ setMaxCacheResultRows(sqlQueryContext.getFederationSelectStatement(), engineConfigSource.getMaxCacheNum());
}
return this;
}
- private void setDefaultResultRows(SQLQuerySource sqlQuerySource, int defaultResultRows) {
+ private void setDefaultResultRows(SelectStatement sqlQuerySource, int defaultResultRows) {
if (StringUtil.isBlank(sqlQuerySource.getLimit())) {
sqlQuerySource.setSqlBody(sqlQuerySource.getSqlBody() + " limit " + defaultResultRows);
sqlQuerySource.setLimit(String.valueOf(defaultResultRows));
@@ -357,7 +359,7 @@ public class ClickHouseDialect extends AbstractDataSourceDialect {
* @param sqlQuerySource
* @return
*/
- private void setMaxCacheResultRows(SQLQuerySource sqlQuerySource, int maxCacheResultRows) {
+ private void setMaxCacheResultRows(SelectStatement sqlQuerySource, int maxCacheResultRows) {
if (!sqlQuerySource.isEnableLimit()) {
return;
}
@@ -382,8 +384,8 @@ public class ClickHouseDialect extends AbstractDataSourceDialect {
}
public Optimizer generateSQL() {
- SQLQuerySource sqlQuerySource = param.getDbQuerySource();
- sql = sqlQuerySource.getSqlBody();
+ SelectStatement dbSelectStatement = sqlQueryContext.getDbSelectStatement();
+ sql = dbSelectStatement.getSqlBody();
return this;
}
@@ -405,8 +407,8 @@ public class ClickHouseDialect extends AbstractDataSourceDialect {
sql = buffer.toString();
} catch (JSQLParserException | RuntimeException e) {
log.error("SQL Rewrite add tableName and Alias Error: ", e);
- throw new QGWBusinessException(ResultStatusEnum.SERVER_ERROR.getCode(),
- QGWErrorCode.SQL_BUILDER_EXCEPTION.getCode(), String.format(QGWErrorCode.SQL_BUILDER_EXCEPTION.getMessage(), e.getMessage()));
+ throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(),
+ CommonErrorCode.SQL_REWRITE_AND_TRANSFORMATION_EXCEPTION.getCode(), String.format(CommonErrorCode.SQL_REWRITE_AND_TRANSFORMATION_EXCEPTION.getMessage(), e.getMessage()));
}
return this;
}
@@ -418,7 +420,7 @@ public class ClickHouseDialect extends AbstractDataSourceDialect {
*/
public Optimizer generateUniq() {
Matcher m = pUniq.matcher(sql);
- StringBuffer sb = new StringBuffer();
+ StringBuilder sb = new StringBuilder();
while (m.find()) {
if (m.groupCount() == 1) {
String replaceValue = m.group(1);
@@ -486,10 +488,10 @@ public class ClickHouseDialect extends AbstractDataSourceDialect {
*/
public Optimizer generateSubQuery() {
- if (StringUtil.isNotEmpty(param.getDbQuerySource().getJoins()) || param.getDbQuerySource().getTableNames().size() > 1) {
+ if (StringUtil.isNotEmpty(sqlQueryContext.getDbSelectStatement().getJoins()) || sqlQueryContext.getDbSelectStatement().getTableNames().size() > 1) {
return this;
}
- if (StringUtil.isEmpty(param.getDbQuerySource().getWhereExpression())) {
+ if (StringUtil.isEmpty(sqlQueryContext.getDbSelectStatement().getWhereExpression())) {
return this;
}
if (containAggregateFunction(sql) || containGroupBy(sql) || containDistinct(sql)) {
@@ -532,21 +534,21 @@ public class ClickHouseDialect extends AbstractDataSourceDialect {
* @return
*/
private String getAvailableIndexTable() throws JSQLParserException {
- String str = metadataService.getValueByKeyInSchemaDoc(param.getDbQuerySource().getTableNames().get(0), "index_table");
+ String str = databaseService.getValueByKeyInSchemaDoc(sqlQueryContext.getDbSelectStatement().getTableNames().get(0), "index_table");
String[] indexTables = StringUtil.split(str, ",");
- if (StringUtil.isEmpty(indexTables) || StringUtil.isEmpty(metadataService.getIndexKey(param.getDbQuerySource().getTableNames().get(0)))) {
+ if (StringUtil.isEmpty(indexTables) || StringUtil.isEmpty(databaseService.getIndexKey(sqlQueryContext.getDbSelectStatement().getTableNames().get(0)))) {
return StringUtil.EMPTY;
}
- Expression expression = CondExpressionHelper.rewireCondExprBaseOnCK(param.getDbQuerySource().getWhereExpression().toString(), param.getDbQuerySource().getAliasFields(), false, null);
+ Expression expression = CondExpressionHelper.rewireCondExprBaseOnCK(sqlQueryContext.getDbSelectStatement().getWhereExpression().toString(), sqlQueryContext.getDbSelectStatement().getAliasFields(), false, null);
List<String> actualFieldOfWhere = CondExpressionHelper.getDistinctFields(expression.toString());
table:
for (String tempIndexTable : indexTables) {
- List<String> indexKey = metadataService.getIndexKey(tempIndexTable);
+ List<String> indexKey = databaseService.getIndexKey(tempIndexTable);
//判断一: where 是否包含索引键
if (StringUtil.isEmpty(indexKey) || !actualFieldOfWhere.contains(indexKey.get(0))) {
continue;
}
- Schema indexTableSchema = metadataService.getSchemaByName(tempIndexTable);
+ LinkedHashMap<String, Object> indexTableSchema = databaseService.getSchemaByName(tempIndexTable);
//判断二: where 条件列是否都在索引表中
for (String field : actualFieldOfWhere) {
if (!isFieldOfSchema(indexTableSchema, field)) {
@@ -557,7 +559,7 @@ public class ClickHouseDialect extends AbstractDataSourceDialect {
if (!isValidOfOrderElementsInSchema(indexTableSchema)) {
continue;
}
- return indexTableSchema.getName();
+ return String.valueOf(indexTableSchema.get("name"));
}
return StringUtil.EMPTY;
}
@@ -570,25 +572,25 @@ public class ClickHouseDialect extends AbstractDataSourceDialect {
*/
private String getSubQueryByIndexTable(String indexTable) throws JSQLParserException {
- String primaryKey = metadataService.getValueByKeyInSchemaDoc(param.getDbQuerySource().getTableNames().get(0), "primary_key");
- List<String> indexKey = metadataService.getIndexKey(param.getDbQuerySource().getTableNames().get(0));
+ String primaryKey = databaseService.getValueByKeyInSchemaDoc(sqlQueryContext.getDbSelectStatement().getTableNames().get(0), "primary_key");
+ List<String> indexKey = databaseService.getIndexKey(sqlQueryContext.getDbSelectStatement().getTableNames().get(0));
indexKey = indexKey.subList(0, indexKey.contains(primaryKey) ? indexKey.indexOf(primaryKey) + 1 : indexKey.size());
List<String> indexKeyMaster = Lists.newArrayList();
- String nameMaster = metadataService.getSchemaByName(param.getDbQuerySource().getTableNames().get(0)).getName();
+ String nameMaster = String.valueOf(databaseService.getSchemaByName(sqlQueryContext.getDbSelectStatement().getTableNames().get(0)).get("name"));
indexKey.forEach(o -> indexKeyMaster.add(nameMaster.concat(".").concat(o)));
String subQuery = getSubquerySql(String.join(",", indexKey), indexTable);
StringBuffer sb = new StringBuffer();
- sb.append("select ").append(param.getDbQuerySource().getSelect()).append(" from ").append(param.getDbQuerySource().getTableNames().get(0)).append(" where ( ").append(String.join(",", indexKeyMaster)).append(" ) in ( ").append(subQuery).append(")");
- if (StringUtil.isNotEmpty(param.getDbQuerySource().getWhereExpression())) {
- sb.append(" and (").append(param.getDbQuerySource().getWhereExpression().toString()).append(")");
+ sb.append("select ").append(sqlQueryContext.getDbSelectStatement().getSelect()).append(" from ").append(sqlQueryContext.getDbSelectStatement().getTableNames().get(0)).append(" where ( ").append(String.join(",", indexKeyMaster)).append(" ) in ( ").append(subQuery).append(")");
+ if (StringUtil.isNotEmpty(sqlQueryContext.getDbSelectStatement().getWhereExpression())) {
+ sb.append(" and (").append(sqlQueryContext.getDbSelectStatement().getWhereExpression().toString()).append(")");
}
- if (!param.getDbQuerySource().getOrderByElements().isEmpty()) {
- sb.append(" order by ").append(param.getDbQuerySource().getOrderBy());
+ if (!sqlQueryContext.getDbSelectStatement().getOrderByElements().isEmpty()) {
+ sb.append(" order by ").append(sqlQueryContext.getDbSelectStatement().getOrderBy());
}
- if (StringUtil.isNotBlank(param.getDbQuerySource().getLimit())) {
- sb.append(" limit ").append(param.getDbQuerySource().getLimit());
+ if (StringUtil.isNotBlank(sqlQueryContext.getDbSelectStatement().getLimit())) {
+ sb.append(" limit ").append(sqlQueryContext.getDbSelectStatement().getLimit());
}
return sb.toString();
}
@@ -600,18 +602,18 @@ public class ClickHouseDialect extends AbstractDataSourceDialect {
* @return
*/
private String getSubQueryByPartitionKey(String sql) throws JSQLParserException {
- String partitionKey = metadataService.getPartitionKey(param.getDbQuerySource().getTableNames().get(0));
+ String partitionKey = databaseService.getPartitionKey(sqlQueryContext.getDbSelectStatement().getTableNames().get(0));
if (StringUtil.isBlank(partitionKey)) {
- log.warn("Partition Key is Empty. Focus in tableName {} schema", param.getDbQuerySource().getTableNames().get(0));
+ log.warn("Partition Key is Empty. Focus in tableName {} schema", sqlQueryContext.getDbSelectStatement().getTableNames().get(0));
return sql;
}
- String subQuery = getSubquerySql(partitionKey, param.getDbQuerySource().getTableNames().get(0));
+ String subQuery = getSubquerySql(partitionKey, sqlQueryContext.getDbSelectStatement().getTableNames().get(0));
subQuery = subQuery.replaceAll("\\\\", "$0$0");
subQuery = subQuery.replaceAll("\\$", "\\\\\\$");
Matcher m = pWhere.matcher(sql);
StringBuffer sb = new StringBuffer();
while (m.find()) {
- m.appendReplacement(sb, "where " + metadataService.getSchemaByName(param.getDbQuerySource().getTableNames().get(0)).getName().concat(".").concat(partitionKey) + " in ( " + subQuery + ") and ");
+ m.appendReplacement(sb, "where " + String.valueOf(databaseService.getSchemaByName(sqlQueryContext.getDbSelectStatement().getTableNames().get(0)).get("name")).concat(".").concat(partitionKey) + " in ( " + subQuery + ") and ");
}
m.appendTail(sb);
return sb.toString();
@@ -628,19 +630,16 @@ public class ClickHouseDialect extends AbstractDataSourceDialect {
StringBuilder subQuery = new StringBuilder();
subQuery.append("select " + columnName + " from ").append(tableName)
- .append(" where ").append(CondExpressionHelper.rewireCondExprBaseOnCK(param.getDbQuerySource().getWhereExpression().toString(), param.getDbQuerySource().getAliasFields(), true, tableName));
+ .append(" where ").append(CondExpressionHelper.rewireCondExprBaseOnCK(sqlQueryContext.getDbSelectStatement().getWhereExpression().toString(), sqlQueryContext.getDbSelectStatement().getAliasFields(), true, tableName));
//order by中元素存在别名时做替换处理
- List<OrderByElement> listOrderElement = param.getDbQuerySource().getOrderByElements();
+ List<OrderByElement> listOrderElement = sqlQueryContext.getDbSelectStatement().getOrderByElements();
if (StringUtil.isNotEmpty(listOrderElement)) {
- String orderBy = param.getDbQuerySource().getOrderBy();
+ String orderBy = sqlQueryContext.getDbSelectStatement().getOrderBy();
for (OrderByElement order : listOrderElement) {
String orderField = order.getExpression().toString();
- if ((orderField.startsWith("\"") && orderField.endsWith("\""))
- || (orderField.startsWith("`") && orderField.endsWith("`"))) {
- orderField = orderField.substring(1, orderField.length() - 1);
- }
- String originalField = param.getDbQuerySource().getAliasFields().get(orderField);
+ orderField = SQLHelper.removeQuotesAndBackticks(orderField);
+ String originalField = sqlQueryContext.getDbSelectStatement().getAliasFields().get(orderField);
if (StringUtil.isNotBlank(originalField)) {
orderBy = orderBy.replaceAll("(\"" + orderField + "\")|(\\b" + orderField + "\\b)", originalField);
}
@@ -650,13 +649,13 @@ public class ClickHouseDialect extends AbstractDataSourceDialect {
}
- if (StringUtil.isNotBlank(param.getDbQuerySource().getLimit())) {
- String[] limits = param.getDbQuerySource().getLimit().split(",");
+ if (StringUtil.isNotBlank(sqlQueryContext.getDbSelectStatement().getLimit())) {
+ String[] limits = sqlQueryContext.getDbSelectStatement().getLimit().split(",");
if (limits.length == 2) {
int rows = Integer.valueOf(limits[0].trim()) + Integer.valueOf(limits[1].trim());
subQuery.append(" limit ").append(rows);
} else {
- subQuery.append(" limit ").append(param.getDbQuerySource().getLimit());
+ subQuery.append(" limit ").append(sqlQueryContext.getDbSelectStatement().getLimit());
}
} else {
@@ -668,21 +667,21 @@ public class ClickHouseDialect extends AbstractDataSourceDialect {
/**
* 判断order by 元素是否属于schema字段中
*
- * @param schema
+ * @param schemaMap
* @return
*/
- private boolean isValidOfOrderElementsInSchema(Schema schema) throws JSQLParserException {
- List<OrderByElement> listOrderElement = param.getDbQuerySource().getOrderByElements();
+ private boolean isValidOfOrderElementsInSchema(Map<String, Object> schemaMap) throws JSQLParserException {
+ List<OrderByElement> listOrderElement = sqlQueryContext.getDbSelectStatement().getOrderByElements();
if (StringUtil.isEmpty(listOrderElement)) {
return true;
}
for (OrderByElement order : listOrderElement) {
OrderByElement orderByElement = new OrderByElement();
- Expression expression = CondExpressionHelper.rewireCondExprBaseOnCK(String.valueOf(order.getExpression()), param.getDbQuerySource().getAliasFields(), false, null);
+ Expression expression = CondExpressionHelper.rewireCondExprBaseOnCK(String.valueOf(order.getExpression()), sqlQueryContext.getDbSelectStatement().getAliasFields(), false, null);
orderByElement.setExpression(expression);
List<String> fields = CondExpressionHelper.getDistinctFieldsOfOrderBy(orderByElement);
for (String field : fields) {
- if (!isFieldOfSchema(schema, field)) {
+ if (!isFieldOfSchema(schemaMap, field)) {
return false;
}
}
@@ -693,16 +692,18 @@ public class ClickHouseDialect extends AbstractDataSourceDialect {
/**
* 判断schema中是否存在field列
*
- * @param schema
+ * @param schemaMap
* @param fieldName
* @return
*/
- private boolean isFieldOfSchema(Schema schema, String fieldName) {
- Schema.Field field = schema.getField(fieldName);
- if (StringUtil.isEmpty(field)) {
- return false;
+ private boolean isFieldOfSchema(Map<String, Object> schemaMap, String fieldName) {
+ List<Map> fields = (List<Map>) schemaMap.get("fields");
+ for (Map next : fields) {
+ if (next.get("name").equals(fieldName)) {
+ return true;
+ }
}
- return true;
+ return false;
}
public String build() {
@@ -724,23 +725,27 @@ public class ClickHouseDialect extends AbstractDataSourceDialect {
@Override
public BaseResult generateBaseResult(String sql, Optional<String> message) {
- Map<String, String> results = executeHttpPost(sql);
- int status = Integer.parseInt(results.get("status"));
- if (status == ResultStatusEnum.SUCCESS.getCode()) {
+ HttpResponseResult responseResult = executeHttpPost(sql);
+ if (responseResult.getStatusCode() == HttpStatusCodeEnum.SUCCESS.getCode()) {
baseResult = BaseResultGenerator.generate(
- Integer.parseInt(results.get("status")),
+ responseResult.getStatusCode(),
ResultCodeEnum.SUCCESS.getCode(),
+ true,
message.get(),
- JSON.parseObject(results.get("result"), Map.class),
+ JSON.parseObject(responseResult.getResponseBody(), Map.class),
null,
null,
- QueryFormatEnum.JSON.getValue());
+ OutputMode.JSON.getValue());
} else {
- baseResult = BaseResultGenerator.generate(Integer.parseInt(results.get("status")),
-
- ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(), String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(), message.get()),
- StringUtil.isNotEmpty(results.get("message")) ? results.get("message") : results.get("result"), null, null, param.getFormat());
+ baseResult = BaseResultGenerator.generate(responseResult.getStatusCode(),
+ CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
+ false,
+ String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), message.get()),
+ responseResult.getErrorMessage(),
+ null,
+ null,
+ sqlQueryContext.getFormat());
}
return build();
}
@@ -759,15 +764,16 @@ public class ClickHouseDialect extends AbstractDataSourceDialect {
statistics.put("rows_read", statisticMap.get("rows_read"));
statistics.put("bytes_read", statisticMap.get("bytes_read"));
baseResult.setData(resultMap.get("data"));
- baseResult.setMeta(unifyMeta(resultMap.get("meta")));
+ baseResult.setMeta(ColumnCategoryHelper.expandMetaCategory(unifyMeta(resultMap.get("meta")), sqlQueryContext.getDbSelectStatement(), sqlQueryContext.getDbEngine()));
baseResult.setStatistics(statistics);
} else {
- if (baseResult.getStatus() >= ResultStatusEnum.BAD_REQUEST.getCode() && baseResult.getStatus() < ResultStatusEnum.SERVER_ERROR.getCode()) {
- throw new QGWBusinessException(baseResult.getStatus(), QGWErrorCode.SQL_EXECUTION_BAD_REQUEST_EXCEPTION.getCode(),
- String.format(QGWErrorCode.SQL_EXECUTION_BAD_REQUEST_EXCEPTION.getMessage(), simplyErrorMessage(dataObject)));
+ if (baseResult.getStatus() >= HttpStatusCodeEnum.BAD_REQUEST.getCode()
+ && baseResult.getStatus() < HttpStatusCodeEnum.SERVER_ERROR.getCode()) {
+ throw new QGWBusinessException(baseResult.getStatus(), CommonErrorCode.BAD_REQUEST_SQL_EXECUTION_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.BAD_REQUEST_SQL_EXECUTION_EXCEPTION.getMessage(), simplyErrorMessage(dataObject)));
}
- throw new QGWBusinessException(baseResult.getStatus(), QGWErrorCode.SQL_EXECUTION_SERVER_EXCEPTION.getCode(),
- String.format(QGWErrorCode.SQL_EXECUTION_SERVER_EXCEPTION.getMessage(), simplyErrorMessage(dataObject)));
+ throw new QGWBusinessException(baseResult.getStatus(), CommonErrorCode.SQL_EXECUTION_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.SQL_EXECUTION_EXCEPTION.getMessage(), simplyErrorMessage(dataObject)));
}
return baseResult;
}
@@ -787,6 +793,61 @@ public class ClickHouseDialect extends AbstractDataSourceDialect {
return StrUtil.removeAllLineBreaks(message).trim();
}
+ @SneakyThrows
+ @Override
+ List<Map<String, String>> buildExampleMeta() {
+ if (baseResult.getMeta() == null) {
+ return Lists.newArrayList();
+ }
+ List<Map<String, String>> metaList = (List<Map<String, String>>) baseResult.getMeta();
+ Map<String, SelectItemHelper.AliasObject> selectItem = SelectItemHelper.getSelectItem(sqlQueryContext.getDbSelectStatement().getSqlBody());
+ for (Map<String, String> meta : metaList) {
+ toPerfectMeta(selectItem, meta);
+ }
+ return metaList;
+ }
+
+ private void toPerfectMeta(Map<String, SelectItemHelper.AliasObject> aliasObjects, Map<String, String> meta) {
+ if (aliasObjects == null) {
+ return;
+ }
+ String name = meta.get(MetaConst.META_NAME);
+ SelectItemHelper.AliasObject aliasObject = aliasObjects.get(name);
+ if (aliasObject == null) {
+ return;
+ }
+ if (aliasObject instanceof SelectItemHelper.AliasColumn) {
+ SelectItemHelper.AliasColumn aliasColumn = (SelectItemHelper.AliasColumn) aliasObject;
+ String fieldName = aliasColumn.getFieldName();
+ meta.put(MetaConst.META_FIELD_NAME, fieldName);
+ Map schemaInfo = databaseService.getSchemaInfo(MetadataType.FIELDS.getValue(), sqlQueryContext.getDbSelectStatement().getTableNames().get(0), false);
+ Object dateType = JSONPath.extract(JSON.toJSONString(schemaInfo), "$.fields[?(@.name == \"" + fieldName + "\")].doc.constraints.type");
+ if (dateType instanceof List && ((List<?>) dateType).size() > 0) {
+ dateType = ((List<?>) dateType).get(0);
+ meta.put(MetaConst.META_DATA_TYPE, dateType.toString());
+ }
+ } else if (aliasObject instanceof SelectItemHelper.AliasFunExpr) {
+ SelectItemHelper.AliasFunExpr aliasDatetime = (SelectItemHelper.AliasFunExpr) aliasObject;
+ String dateType = aliasDatetime.getDateType();
+ if (dateType != null) {
+ meta.put(MetaConst.META_DATA_TYPE, dateType);
+ }
+ }
+ }
+
+ @Override
+ List<Object> buildExampleData() {
+ if (baseResult.getMeta() == null) {
+ return Lists.newArrayList();
+ }
+ List<Map<String, String>> metaList = (List<Map<String, String>>) baseResult.getMeta();
+ Map<String, List<Object>> schemaDataDict = databaseService.getSchemaDataDict(sqlQueryContext.getDbSelectStatement().getTableNames().get(0));
+ Long rowCount = SQLHelper.getRowCount(sqlQueryContext.getDbSelectStatement().getSqlBody());
+ String queryType = QueryTypeHelper.determineQueryType(sqlQueryContext.getDbSelectStatement());
+ Object param = QueryTypeHelper.determineParam(sqlQueryContext.getDbSelectStatement(), queryType);
+ return ExampleDataHelper.buildExampleData(queryType, metaList, rowCount, schemaDataDict, param);
+ }
+
private boolean containAggregateFunction(String sql) {
return (SQLFunctionUtil.pAggregateFunStandard.matcher(sql).find()
|| pAggregateFunSpecificOfCK.matcher(sql).find());
@@ -802,20 +863,14 @@ public class ClickHouseDialect extends AbstractDataSourceDialect {
/**
- * 通过递归方式将原SQL转换为样例SQL
- * 做数据集限制:
- * 1. 将最内层sql的表名, 替换成(select * from tableName limit XXX) as alias
- * 2. 可对where、having添加:OR 1=1
+ * 将目标SQL转换为Dry Run SQL。使其不扫描数据,仅做执行
*
- * @param sql 原始sql
- * @return 经转换后的样例SQL
+ * @param sql
+ * @return
* @throws JSQLParserException
*/
private String convertSampleSql(String sql) throws JSQLParserException {
- boolean isSampled = BooleanUtil.isTrue(param.getSampled());
- String dataSet = StrUtil.format("SELECT * FROM {}",
- isSampled ? " {} WHERE rand() % 100 = 0 LIMIT 1000 " : " {} LIMIT 0 ");
- return new ClickhouseSampleSQLHelper(sql, dataSet, isSampled).build();
+ return new SampleSQLHelper(sql, " 1 != 1 ", false, null).build();
}
/**
@@ -824,38 +879,54 @@ public class ClickHouseDialect extends AbstractDataSourceDialect {
* @param sql SQL语句
* @return
*/
- private Map<String, String> executeHttpPost(String sql) {
+ private HttpResponseResult executeHttpPost(String sql) {
String queryURL = URLUtil.normalize(clickHouseHttpSource.getUrl() + "/?");
- ClickHouseHttpQuery clickHouseHttpQuery = buildHttpQueryParameter();
- log.info("DB engine is :{},execute http query is :{}", DBTypeEnum.CLICKHOUSE.getValue(), queryURL +
- Encodes.urlDecode(clickHouseHttpQuery.getQueryParameter()).replaceAll("password=(.*?)&", "password=XXX&") + ". body sql is:" + sql);
- List<NameValuePair> values = URLEncodedUtils.parse(clickHouseHttpQuery.getQueryParameter(), Charset.forName("UTF-8"));
- return httpClientService.httpPost(queryURL + URLEncodedUtils.format(values, "utf-8"), sql.concat(" FORMAT ").concat(QueryFormatEnum.JSON.getValue().toUpperCase())
- , clickHouseHttpQuery.getSocketTimeOut());
+ String queryParameter = enrichContextAndBuildQueryParameter();
+ log.info("Query ID: {}, SQL: {}", sqlQueryContext.getQueryId(), sql);
+ if (log.isDebugEnabled()) {
+ log.debug("DB engine is: {}, : {}", DBEngineType.CLICKHOUSE.getValue(), queryURL +
+ Encodes.urlDecode(queryParameter).replaceAll("password=(.*?)&", "password=XXX&") + ". body sql is:" + sql);
+ }
+ List<NameValuePair> values = URLEncodedUtils.parse(queryParameter, StandardCharsets.UTF_8);
+ long start = System.currentTimeMillis();
+ HttpResponseResult responseResult = httpClientService.post(queryURL + URLEncodedUtils.format(values, "utf-8"),
+ sqlQueryContext.getTimeout(),
+ sql.concat(" FORMAT ").concat(OutputMode.JSON.getValue().toUpperCase()).concat(" settings output_format_json_quote_64bit_integers=0"));
+ long elapsed = System.currentTimeMillis() - start;
+ if (elapsed > engineConfigSource.getHighLatencyThreshold()) {
+ log.warn("DB engine is: {}, queryId: {}, elapsed: {} ms, sql: {}", DBEngineType.CLICKHOUSE.getValue(), sqlQueryContext.getQueryId(), elapsed, sql);
+ }
+ return responseResult;
}
- private ClickHouseHttpQuery buildHttpQueryParameter() {
- ClickHouseHttpQuery clickHouseHttpQuery = new ClickHouseHttpQuery();
+ private String enrichContextAndBuildQueryParameter() {
StringBuilder queryParameterBuilder = new StringBuilder(1024);
- String dbName = StringUtil.isNotEmpty(param.getDbQuerySource().getTableNames())
- ? metadataService.getDBNameByTableName(param.getDbQuerySource().getTableNames().get(0)) : clickHouseHttpSource.getDbName();
+
+ String dbName = StringUtil.isNotEmpty(sqlQueryContext.getDbSelectStatement().getTableNames())
+ ? databaseService.getDBNameByTableName(sqlQueryContext.getDbSelectStatement().getTableNames().get(0)) : clickHouseHttpSource.getDbName();
+
if (dbName.equalsIgnoreCase(clickHouseHttpSource.getSystemDBName())) {
- param.setOption(QueryOptionEnum.LONG_TERM.getValue());
+ sqlQueryContext.setOption(QueryOption.LONG_TERM.getValue());
+ }
+
+ int timeout = (QueryOption.LONG_TERM.getValue().equalsIgnoreCase(sqlQueryContext.getOption())) ?
+ httpConfig.getCkLongTermAccountSocketTimeOut() : httpConfig.getCkRealTimeAccountSocketTimeOut();
+ sqlQueryContext.setTimeout(timeout);
+
+ if (StringUtil.isBlank(sqlQueryContext.getQueryId())) {
+ sqlQueryContext.setQueryId(DigestUtil.md5Hex(StringUtil.createUUID()));
}
- if (QueryOptionEnum.LONG_TERM.getValue().equalsIgnoreCase(param.getOption())) {
- clickHouseHttpQuery.setSocketTimeOut(httpConfig.getCkLongTermAccountSocketTimeOut());
+ //QueryContext set queryParameter
+ if (QueryOption.LONG_TERM.getValue().equalsIgnoreCase(sqlQueryContext.getOption())) {
queryParameterBuilder.append(getLongTermAccount()).append("&")
.append("database=").append(dbName);
} else {
- clickHouseHttpQuery.setSocketTimeOut(httpConfig.getCkRealTimeAccountSocketTimeOut());
queryParameterBuilder.append(getRealTimeAccount()).append("&")
.append("database=").append(dbName);
}
- queryParameterBuilder.append("&").append("query_id=")
- .append(StringUtil.isNotBlank(param.getQueryId()) ? param.getQueryId() : DigestUtil.md5Hex(StringUtil.createUUID()));
- clickHouseHttpQuery.setQueryParameter(queryParameterBuilder.toString());
- return clickHouseHttpQuery;
+ queryParameterBuilder.append("&").append("query_id=").append(sqlQueryContext.getQueryId());
+ return queryParameterBuilder.toString();
}
private String getRealTimeAccount() {
diff --git a/src/main/java/com/mesalab/qgw/dialect/Dialect.java b/src/main/java/com/mesalab/qgw/dialect/Dialect.java
index 21e6903b..5e0eb02b 100644
--- a/src/main/java/com/mesalab/qgw/dialect/Dialect.java
+++ b/src/main/java/com/mesalab/qgw/dialect/Dialect.java
@@ -16,7 +16,7 @@ public interface Dialect {
* 执行SQL语法检查
* @return
*/
- BaseResult executeSyntaxCheck();
+ BaseResult executeSyntaxValidation();
/**
* 执行查询操作
diff --git a/src/main/java/com/mesalab/qgw/dialect/DruidDialect.java b/src/main/java/com/mesalab/qgw/dialect/DruidDialect.java
index 9d260fd0..76ce1ed1 100644
--- a/src/main/java/com/mesalab/qgw/dialect/DruidDialect.java
+++ b/src/main/java/com/mesalab/qgw/dialect/DruidDialect.java
@@ -1,67 +1,92 @@
package com.mesalab.qgw.dialect;
-import cn.hutool.core.util.BooleanUtil;
import cn.hutool.core.util.StrUtil;
import cn.hutool.core.util.URLUtil;
import cn.hutool.crypto.digest.DigestUtil;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.fastjson2.JSON;
+import com.alibaba.fastjson2.JSONPath;
import com.google.common.base.Stopwatch;
+import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.jayway.jsonpath.JsonPath;
import com.jayway.jsonpath.JsonPathException;
-import com.mesalab.common.utils.sqlparser.DruidSampleSQLHelper;
+import com.mesalab.common.utils.sqlparser.*;
import com.mesalab.common.entity.BaseResult;
import com.mesalab.common.entity.BaseResultGenerator;
import com.mesalab.common.entity.DataTypeMapping;
import com.mesalab.common.enums.*;
import com.mesalab.common.utils.*;
-import com.mesalab.common.utils.sqlparser.SQLFunctionUtil;
-import com.mesalab.common.utils.sqlparser.SQLHelper;
+import com.mesalab.qgw.constant.MetaConst;
import com.mesalab.qgw.exception.QGWBusinessException;
-import com.mesalab.qgw.exception.QGWErrorCode;
+import com.mesalab.common.exception.CommonErrorCode;
import com.mesalab.qgw.model.basic.*;
-import com.mesalab.qgw.model.basic.QueryProfile;
-import com.mesalab.qgw.service.impl.HttpClientService;
+import com.mesalab.qgw.model.basic.SQLQueryContext;
import com.geedgenetworks.utils.StringUtil;
+import com.mesalab.qgw.service.DatabaseService;
+import com.mesalab.qgw.service.impl.HttpClientServiceV2;
import lombok.Data;
+import lombok.SneakyThrows;
import net.sf.jsqlparser.JSQLParserException;
-import org.apache.commons.compress.utils.Lists;
import java.util.*;
import java.util.concurrent.TimeUnit;
+import java.util.regex.Pattern;
+
public class DruidDialect extends AbstractDataSourceDialect {
private static final Log log = LogFactory.get();
+ public static final Pattern pAggregateFunSpecificOfDruid = Pattern.compile("\\b(QUANTILE_HDR|HDR_HISTOGRAM|HLLD|APPROX_COUNT_DISTINCT_HLLD|PERCENTILES_HDR)\\s*\\(", Pattern.CASE_INSENSITIVE);
private DruidIoHttpSource druidIoHttpSource = (DruidIoHttpSource) SpringContextUtil.getBean("druidIoHttpSource");
- private HttpClientService httpClientService = (HttpClientService) SpringContextUtil.getBean("httpClientService");
+ private HttpClientServiceV2 httpClientService = (HttpClientServiceV2) SpringContextUtil.getBean("httpClientServiceV2");
private EngineConfigSource engineConfigSource = (EngineConfigSource) SpringContextUtil.getBean("engineConfigSource");
private HttpConfig httpConfig = (HttpConfig) SpringContextUtil.getBean("httpConfig");
+ private DatabaseService databaseService = (DatabaseService) SpringContextUtil.getBean("databaseService");
private int pageOffset;
private static final String CLAZZ_NAME_CALCITE_EXCEPTION = "com.zdjizhi.calcite.runtime.CalciteContextException: ";
private Stopwatch watch = Stopwatch.createUnstarted();
- public QueryProfile param;
+
public BaseResult baseResult;
- public DruidDialect(QueryProfile param) {
- this.param = param;
+ private final static Map<String, String> metaMap = Maps.newHashMap();
+
+ static {
+ metaMap.put("CHAR", DataTypeMapping.STRING);
+ metaMap.put("VARCHAR", DataTypeMapping.STRING);
+ metaMap.put("FLOAT", DataTypeMapping.FLOAT);
+ metaMap.put("DECIMAL", DataTypeMapping.DOUBLE);
+ metaMap.put("REAL", DataTypeMapping.DOUBLE);
+ metaMap.put("DOUBLE", DataTypeMapping.DOUBLE);
+ metaMap.put("BOOLEAN", DataTypeMapping.BOOLEAN);
+ metaMap.put("TINYINT", DataTypeMapping.INT);
+ metaMap.put("SMALLINT", DataTypeMapping.INT);
+ metaMap.put("INTEGER", DataTypeMapping.INT);
+ metaMap.put("BIGINT", DataTypeMapping.LONG);
+ metaMap.put("TIMESTAMP", DataTypeMapping.DATE);
+ metaMap.put("DATE", DataTypeMapping.DATE);
+
+ metaMap.put("ARRAY", null);
+ metaMap.put("OTHER", null);
+ }
+
+ public DruidDialect(SQLQueryContext queryContext) {
+ super(queryContext);
}
@Override
void init() {
- if (StringUtil.isNotEmpty(param.getDbQuerySource())) {
- param.getDbQuerySource().setExecSQL(param.getDbQuerySource().getSqlBody());
+ if (StringUtil.isNotEmpty(sqlQueryContext.getDbSelectStatement())) {
+ sqlQueryContext.getDbSelectStatement().setExecSQL(sqlQueryContext.getDbSelectStatement().getSqlBody());
} else {
- SQLQuerySource sqlQuerySource = new SQLQuerySource();
- sqlQuerySource.setExecSQL(param.getQuery());
- param.setDbQuerySource(sqlQuerySource);
+ SelectStatement sqlQuerySource = new SelectStatement();
+ sqlQuerySource.setExecSQL(sqlQueryContext.getOriginalSQL());
+ sqlQueryContext.setDbSelectStatement(sqlQuerySource);
}
baseResult = BaseResultGenerator.success();
}
-
@Override
public boolean supportsLimit() {
return true;
@@ -70,42 +95,38 @@ public class DruidDialect extends AbstractDataSourceDialect {
@Override
public String convertQuery(String sql) {
- return new Converter(sql).generateStandard().generateDateFunction().build();
+ return new Converter(sql).generateStandard().mergeFunctions().generateDateFunction().build();
}
@Override
- public String getBestQuery() {
+ public String generateRuleBaseOptimizedQuery() {
return new Optimizer().generateQueryLimit().generateSQL().build();
}
- @Override
- public String getOriginalQuery() {
- return param.getQuery();
- }
-
/**
* 获取语法检测sql
*
* @return
*/
+ @SneakyThrows
@Override
public String getSampleQuery() {
- String sampleSQL = param.getDbQuerySource().getSqlBody();
+ String sampleSQL = sqlQueryContext.getDbSelectStatement().getSqlBody();
try {
sampleSQL = convertQuery(convertSampleSql(sampleSQL));
} catch (JSQLParserException e) {
log.error("syntax-check sql error {}, execute original sql: {}, error is: {} ",
- sampleSQL, sampleSQL = param.getDbQuerySource().getSqlBody(), e.getMessage() == null ? e.getCause() : e.getMessage());
+ sampleSQL, sampleSQL = sqlQueryContext.getDbSelectStatement().getSqlBody(), e.getMessage() == null ? e.getCause() : e.getMessage());
}
return sampleSQL;
}
@Override
String getExplainQuery() {
- return "EXPLAIN PLAN FOR " + convertQuery(getBestQuery());
+ return "EXPLAIN PLAN FOR " + convertQuery(generateRuleBaseOptimizedQuery());
}
@Override
@@ -113,12 +134,12 @@ public class DruidDialect extends AbstractDataSourceDialect {
String url = druidIoHttpSource.getUrl();
url = url.substring(0, url.indexOf("/"));
String deleteUrl = URLUtil.normalize(url).concat("/druid/v2/").concat(queryId);
- Map<String, String> map = httpClientService.httpDelete(deleteUrl, 1000);
- if (Integer.parseInt(map.get("status")) == ResultStatusEnum.SUCCESS.getCode()
- || Integer.parseInt(map.get("status")) == ResultStatusEnum.ACCEPTED.getCode()) {
- return BaseResultGenerator.generate(ResultStatusEnum.ACCEPTED.getCode(), null, "ok", null, null, null, null);
+ HttpResponseResult httpResponseResult = httpClientService.delete(deleteUrl);
+ if (httpResponseResult.getStatusCode() == HttpStatusCodeEnum.SUCCESS.getCode()
+ || httpResponseResult.getStatusCode() == HttpStatusCodeEnum.ACCEPTED.getCode()) {
+ return BaseResultGenerator.generate(HttpStatusCodeEnum.ACCEPTED.getCode(), null, true, "ok", null, null, null, null);
} else {
- return BaseResultGenerator.failure(Integer.parseInt(map.get("status")), map.get("result"));
+ return BaseResultGenerator.failure(httpResponseResult.getStatusCode(), httpResponseResult.getErrorMessage());
}
}
@@ -133,20 +154,37 @@ public class DruidDialect extends AbstractDataSourceDialect {
* @param sql SQL语句
* @return
*/
- private Map<String, String> executeHttpPost(String sql) {
+ private HttpResponseResult executeHttpPost(String sql) {
String queryURL = URLUtil.normalize(druidIoHttpSource.getUrl());
- DruidQueryParam druidQueryParam = getDruidQueryParam(sql);
- log.info("DB engine is :{},execute query is: {}", DBTypeEnum.DRUID.getValue(), JSON.toJSONString(druidQueryParam));
- int socketTimeOut = httpConfig.getDruidSocketTimeOut();
- return httpClientService.httpPost(queryURL, JSON.toJSONString(druidQueryParam), socketTimeOut);
+ DruidQueryParam druidQueryParam = enrichContextAndBuildQueryParameter(sql);
+ log.info("Query ID: {}, SQL: {}", sqlQueryContext.getQueryId(), sql);
+ if (log.isDebugEnabled()) {
+ log.debug("DB engine is: {}, : {}", DBEngineType.DRUID.getValue(), JSON.toJSONString(druidQueryParam)
+ + ". body sql is:" + sql);
+ }
+ long start = System.currentTimeMillis();
+ HttpResponseResult responseResult = httpClientService.post(queryURL, sqlQueryContext.getTimeout(), JSON.toJSONString(druidQueryParam));
+ long elapsed = System.currentTimeMillis() - start;
+ if (elapsed > engineConfigSource.getHighLatencyThreshold()) {
+ log.warn("DB engine is: {}, queryId: {}, elapsed: {} ms, sql: {}", DBEngineType.DRUID.getValue(), sqlQueryContext.getQueryId(), elapsed, sql);
+ }
+ return responseResult;
}
- private DruidQueryParam getDruidQueryParam(String sql) {
+ private DruidQueryParam enrichContextAndBuildQueryParameter(String sql) {
+ sqlQueryContext.setTimeout(httpConfig.getDruidSocketTimeOut());
+ if (StringUtil.isBlank(sqlQueryContext.getQueryId())) {
+ sqlQueryContext.setQueryId(DigestUtil.md5Hex(StringUtil.createUUID()));
+ }
DruidQueryParam druidQueryParam = new DruidQueryParam();
druidQueryParam.setQuery(sql);
druidQueryParam.getContext().put("skipEmptyBuckets", druidIoHttpSource.getSkipEmptyBuckets());
- druidQueryParam.getContext().put("sqlQueryId", StringUtil.isNotBlank(param.getQueryId()) ? param.getQueryId() : DigestUtil.md5Hex(StringUtil.createUUID()));
+ druidQueryParam.getContext().put("sqlQueryId", sqlQueryContext.getQueryId());
+ druidQueryParam.getContext().put("maxSubqueryRows", druidIoHttpSource.getMaxSubqueryRows());
druidQueryParam.setResultFormat("object");
+ druidQueryParam.setHeader(true);
+ druidQueryParam.setTypesHeader(true);
+ druidQueryParam.setSqlTypesHeader(true);
return druidQueryParam;
}
@@ -154,17 +192,15 @@ public class DruidDialect extends AbstractDataSourceDialect {
public BaseResult generateBaseResult(String sql, Optional<String> message) {
watch.start();
- Map<String, String> results = executeHttpPost(sql);
+ HttpResponseResult responseResult = executeHttpPost(sql);
- int status = Integer.parseInt(results.get("status"));
- if (status == ResultStatusEnum.SUCCESS.getCode()) {
- baseResult = BaseResultGenerator.generate(Integer.valueOf(results.get("status")), ResultCodeEnum.SUCCESS.getCode(), message.get(),
- (List<Object>) JSON.parseObject(results.get("result"), Object.class), null, null, QueryFormatEnum.JSON.getValue());
+ if (responseResult.getStatusCode() == HttpStatusCodeEnum.SUCCESS.getCode()) {
+ baseResult = BaseResultGenerator.generate(responseResult.getStatusCode(), ResultCodeEnum.SUCCESS.getCode(), true, message.get(),
+ (List<Object>) JSON.parseObject(responseResult.getResponseBody(), Object.class), null, null, OutputMode.JSON.getValue());
} else {
- baseResult = BaseResultGenerator.generate(Integer.parseInt(results.get("status")),
-
- ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(), String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(), message.get()),
- StringUtil.isNotEmpty(results.get("message")) ? results.get("message") : results.get("result"), null, null, param.getFormat());
+ baseResult = BaseResultGenerator.generate(responseResult.getStatusCode(),
+ CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), false, String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), message.get()),
+ responseResult.getErrorMessage(), null, null, sqlQueryContext.getFormat());
}
return build();
}
@@ -179,6 +215,8 @@ public class DruidDialect extends AbstractDataSourceDialect {
baseResult.setMessage("ok");
}
List<Object> allResults = (List<Object>) dataObject;
+ Object metaObj = allResults.remove(0);
+ baseResult.setMeta(ColumnCategoryHelper.expandMetaCategory(unifyMeta(JSON.parseObject(JSON.toJSONString(metaObj), Map.class)), sqlQueryContext.getDbSelectStatement(), sqlQueryContext.getDbEngine()));
if (pageOffset > 0 && StringUtil.isNotEmpty(allResults)) {
if (pageOffset >= allResults.size()) {
allResults = new ArrayList<>();
@@ -190,15 +228,14 @@ public class DruidDialect extends AbstractDataSourceDialect {
statistics.put("elapsed", watch.elapsed(TimeUnit.MILLISECONDS));
statistics.put("rows_read", allResults.size());
baseResult.setData(allResults);
- baseResult.setMeta(unifyMeta(allResults));
baseResult.setStatistics(statistics);
} else {
- if (baseResult.getStatus() >= ResultStatusEnum.BAD_REQUEST.getCode() && baseResult.getStatus() < ResultStatusEnum.SERVER_ERROR.getCode()) {
- throw new QGWBusinessException(baseResult.getStatus(), QGWErrorCode.SQL_EXECUTION_BAD_REQUEST_EXCEPTION.getCode(),
- String.format(QGWErrorCode.SQL_EXECUTION_BAD_REQUEST_EXCEPTION.getMessage(), simplyErrorMessage(dataObject)));
+ if (baseResult.getStatus() >= HttpStatusCodeEnum.BAD_REQUEST.getCode() && baseResult.getStatus() < HttpStatusCodeEnum.SERVER_ERROR.getCode()) {
+ throw new QGWBusinessException(baseResult.getStatus(), CommonErrorCode.BAD_REQUEST_SQL_EXECUTION_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.BAD_REQUEST_SQL_EXECUTION_EXCEPTION.getMessage(), simplyErrorMessage(dataObject)));
}
- throw new QGWBusinessException(baseResult.getStatus(), QGWErrorCode.SQL_EXECUTION_SERVER_EXCEPTION.getCode(),
- String.format(QGWErrorCode.SQL_EXECUTION_SERVER_EXCEPTION.getMessage(), simplyErrorMessage(dataObject)));
+ throw new QGWBusinessException(baseResult.getStatus(), CommonErrorCode.SQL_EXECUTION_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.SQL_EXECUTION_EXCEPTION.getMessage(), simplyErrorMessage(dataObject)));
}
return baseResult;
}
@@ -216,45 +253,87 @@ public class DruidDialect extends AbstractDataSourceDialect {
return StrUtil.removeAllLineBreaks(message).trim();
}
+ @SneakyThrows
+ @Override
+ List<Map<String, String>> buildExampleMeta() {
+ if (baseResult.getMeta() == null) {
+ return Lists.newArrayList();
+ }
+ List<Map<String, String>> metaList = (List<Map<String, String>>) baseResult.getMeta();
+ Map<String, SelectItemHelper.AliasObject> selectItem = SelectItemHelper.getSelectItem(sqlQueryContext.getDbSelectStatement().getSqlBody());
+ for (Map<String, String> meta : metaList) {
+ toPerfectMeta(selectItem, meta);
+ }
+ return metaList;
+ }
+
+ @Override
+ public List<Object> buildExampleData() {
+ if (baseResult.getMeta() == null) {
+ return Lists.newArrayList();
+ }
+ List<Map<String, String>> metaList = (List<Map<String, String>>) baseResult.getMeta();
+ Map<String, List<Object>> schemaDataDict = databaseService.getSchemaDataDict(sqlQueryContext.getDbSelectStatement().getTableNames().get(0));
+ Long rowCount = SQLHelper.getRowCount(sqlQueryContext.getDbSelectStatement().getSqlBody());
+ String queryType = QueryTypeHelper.determineQueryType(sqlQueryContext.getDbSelectStatement());
+ Object param = QueryTypeHelper.determineParam(sqlQueryContext.getDbSelectStatement(), queryType);
+ return ExampleDataHelper.buildExampleData(queryType, metaList, rowCount, schemaDataDict, param);
+ }
/**
- * 做时间范围条件过滤: 去除原有最内层条件, 重新添加时间范围
+ * 将目标SQL转换为Dry Run SQL。使其不扫描数据,仅做执行
*
* @param sql
* @return
* @throws JSQLParserException
*/
private String convertSampleSql(String sql) throws JSQLParserException {
- boolean isSampled = BooleanUtil.isTrue(param.getSampled());
- String filterExpr = String.format("%s >= CURRENT_TIMESTAMP - INTERVAL %s AND %s < CURRENT_TIMESTAMP",
- param.getDbQuerySource().getPartitionKey(), isSampled ? "'12' HOUR " : "'1' SECOND", param.getDbQuerySource().getPartitionKey());
- return new DruidSampleSQLHelper(sql, filterExpr, isSampled).build();
+ return new SampleSQLHelper(sql, " 1 != 1 ", false, null).build();
}
- private List<Map<String, String>> unifyMeta(List<Object> list) {
- List<Map<String, String>> metaList = Lists.newArrayList();
- if (StringUtil.isEmpty(list)) {
- return metaList;
+ private List<Map<String, String>> unifyMeta(Map<String, Map<String, String>> metaMap) {
+ List<Map<String, String>> result = new ArrayList<>();
+ if (metaMap == null || metaMap.isEmpty()) {
+ return result;
}
- Map<String, Object> data = JsonPath.read(list.get(0), "$");
- for (String key : data.keySet()) {
- Map<String, String> temp = Maps.newHashMap();
- temp.put("name", key);
- Object value = data.get(key);
- if (value instanceof Integer) {
- temp.put("type", DataTypeMapping.INT);
- } else if (value instanceof Long) {
- temp.put("type", DataTypeMapping.LONG);
- } else if (value instanceof Float) {
- temp.put("type", DataTypeMapping.FLOAT);
- } else if (value instanceof Double) {
- temp.put("type", DataTypeMapping.DOUBLE);
- } else {
- temp.put("type", DataTypeMapping.STRING);
+ for (Map.Entry<String, Map<String, String>> entry : metaMap.entrySet()) {
+ String metaName = entry.getKey();
+ Map<String, String> typeMap = entry.getValue();
+ Map<String, String> meta = Maps.newHashMap();
+ meta.put(MetaConst.META_NAME, metaName);
+ String type = DruidDialect.metaMap.get(typeMap.get("sqlType"));
+ meta.put(MetaConst.META_TYPE, type == null ? DataTypeMapping.STRING : type);
+ result.add(meta);
+ }
+ return result;
+ }
+
+ private void toPerfectMeta(Map<String, SelectItemHelper.AliasObject> aliasObjects, Map<String, String> meta) {
+ if (aliasObjects == null) {
+ return;
+ }
+ String name = meta.get(MetaConst.META_NAME);
+ SelectItemHelper.AliasObject aliasObject = aliasObjects.get(name);
+ if (aliasObject == null) {
+ return;
+ }
+ if (aliasObject instanceof SelectItemHelper.AliasColumn) {
+ SelectItemHelper.AliasColumn aliasColumn = (SelectItemHelper.AliasColumn) aliasObject;
+ String fieldName = aliasColumn.getFieldName();
+ meta.put(MetaConst.META_FIELD_NAME, fieldName);
+ Map schemaInfo = databaseService.getSchemaInfo(MetadataType.FIELDS.getValue(), sqlQueryContext.getDbSelectStatement().getTableNames().get(0), false);
+ Object dateType = JSONPath.extract(JSON.toJSONString(schemaInfo), "$.fields[?(@.name == \"" + fieldName + "\")].doc.constraints.type");
+ if (dateType instanceof List && ((List<?>) dateType).size() > 0) {
+ dateType = ((List<?>) dateType).get(0);
+ meta.put(MetaConst.META_DATA_TYPE, dateType.toString());
+ }
+ } else if (aliasObject instanceof SelectItemHelper.AliasFunExpr) {
+ SelectItemHelper.AliasFunExpr aliasDatetime = (SelectItemHelper.AliasFunExpr) aliasObject;
+ String dateType = aliasDatetime.getDateType();
+ if (dateType != null) {
+ meta.put(MetaConst.META_DATA_TYPE, dateType);
}
- metaList.add(temp);
}
- return metaList;
}
class Converter {
@@ -271,7 +350,7 @@ public class DruidDialect extends AbstractDataSourceDialect {
* @return
*/
public Converter generateStandard() {
- SQLQuerySource dbQuerySource = param.getDbQuerySource();
+ SelectStatement dbQuerySource = sqlQueryContext.getDbSelectStatement();
if (StringUtil.isNotBlank(dbQuerySource.getLimit()) &&
dbQuerySource.getLimit().split(",").length >= 2) {
@@ -284,9 +363,14 @@ public class DruidDialect extends AbstractDataSourceDialect {
return this;
}
+ @SneakyThrows
+ public Converter mergeFunctions() {
+ sql = FunctionsMergeHelper.build(sql);
+ return this;
+ }
public Converter generateDateFunction() {
- sql = SQLFunctionUtil.generateDateFunction(sql, DBTypeEnum.DRUID.getValue());
+ sql = SQLFunctionUtil.generateDateFunction(sql, DBEngineType.DRUID.getValue());
return this;
}
@@ -307,19 +391,19 @@ public class DruidDialect extends AbstractDataSourceDialect {
* @return
*/
public Optimizer generateQueryLimit() {
- if (StringUtil.isEmpty(param.getEngineQuerySource())) {
- setDefaultResultRows(param.getDbQuerySource(), engineConfigSource.getDefaultResultNum());
- setMaxCacheResultRows(param.getDbQuerySource(), engineConfigSource.getMaxCacheNum());
+ if (StringUtil.isEmpty(sqlQueryContext.getFederationSelectStatement())) {
+ setDefaultResultRows(sqlQueryContext.getDbSelectStatement(), engineConfigSource.getDefaultResultNum());
+ setMaxCacheResultRows(sqlQueryContext.getDbSelectStatement(), engineConfigSource.getMaxCacheNum());
} else {
- setDefaultResultRows(param.getEngineQuerySource(), engineConfigSource.getDefaultResultNum());
- setMaxCacheResultRows(param.getDbQuerySource(), engineConfigSource.getMaxCacheNum());
- setMaxCacheResultRows(param.getEngineQuerySource(), engineConfigSource.getMaxCacheNum());
+ setDefaultResultRows(sqlQueryContext.getFederationSelectStatement(), engineConfigSource.getDefaultResultNum());
+ setMaxCacheResultRows(sqlQueryContext.getDbSelectStatement(), engineConfigSource.getMaxCacheNum());
+ setMaxCacheResultRows(sqlQueryContext.getFederationSelectStatement(), engineConfigSource.getMaxCacheNum());
}
return this;
}
- private void setDefaultResultRows(SQLQuerySource sqlQuerySource, int defaultResultRows) {
+ private void setDefaultResultRows(SelectStatement sqlQuerySource, int defaultResultRows) {
if (StringUtil.isBlank(sqlQuerySource.getLimit())) {
sqlQuerySource.setSqlBody(sqlQuerySource.getSqlBody() + " limit " + defaultResultRows);
sqlQuerySource.setLimit(String.valueOf(defaultResultRows));
@@ -333,7 +417,7 @@ public class DruidDialect extends AbstractDataSourceDialect {
* @param sqlQuerySource
* @return
*/
- private void setMaxCacheResultRows(SQLQuerySource sqlQuerySource, int maxCacheResultRows) {
+ private void setMaxCacheResultRows(SelectStatement sqlQuerySource, int maxCacheResultRows) {
if (!sqlQuerySource.isEnableLimit()) {
return;
}
@@ -358,7 +442,7 @@ public class DruidDialect extends AbstractDataSourceDialect {
}
public Optimizer generateSQL() {
- SQLQuerySource sqlQuerySource = param.getDbQuerySource();
+ SelectStatement sqlQuerySource = sqlQueryContext.getDbSelectStatement();
sql = sqlQuerySource.getSqlBody();
return this;
}
@@ -374,8 +458,11 @@ public class DruidDialect extends AbstractDataSourceDialect {
@Data
class DruidQueryParam {
private String query;
- private Map<String, String> context = Maps.newHashMap();
+ private Map<String, Object> context = Maps.newHashMap();
private String resultFormat;
+ private boolean header;
+ private boolean typesHeader;
+ private boolean sqlTypesHeader;
}
diff --git a/src/main/java/com/mesalab/qgw/dialect/FederationDialect.java b/src/main/java/com/mesalab/qgw/dialect/FederationDialect.java
index e58fbd91..81c877f6 100644
--- a/src/main/java/com/mesalab/qgw/dialect/FederationDialect.java
+++ b/src/main/java/com/mesalab/qgw/dialect/FederationDialect.java
@@ -8,41 +8,41 @@ import com.google.common.base.Stopwatch;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.jayway.jsonpath.JsonPath;
-import com.mesalab.calcite.CalciteMemoryUtils;
-import com.mesalab.common.enums.DBTypeEnum;
-import com.mesalab.common.enums.ResultCodeEnum;
-import com.mesalab.qgw.constant.QGWMessageConst;
+import com.mesalab.calcite.CalciteConnectionProcessor;
+import com.mesalab.common.enums.DBEngineType;
+import com.mesalab.common.utils.SpringContextUtil;
+import com.mesalab.common.utils.sqlparser.*;
import com.mesalab.common.entity.BaseResult;
import com.mesalab.common.entity.DataTypeMapping;
-import com.mesalab.common.enums.ResultStatusEnum;
-import com.mesalab.common.enums.QueryFormatEnum;
-import com.mesalab.common.utils.sqlparser.SQLHelper;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
+import com.mesalab.common.enums.OutputMode;
+import com.mesalab.qgw.constant.MetaConst;
import com.mesalab.qgw.exception.QGWBusinessException;
-import com.mesalab.qgw.exception.QGWErrorCode;
-import com.mesalab.qgw.model.basic.QueryProfile;
-import com.mesalab.qgw.model.basic.SQLQuerySource;
+import com.mesalab.common.exception.CommonErrorCode;
+import com.mesalab.qgw.model.basic.EngineConfigSource;
+import com.mesalab.qgw.model.basic.SQLQueryContext;
+import com.mesalab.qgw.model.basic.SelectStatement;
import com.mesalab.qgw.model.basic.udf.UDF;
import com.geedgenetworks.utils.StringUtil;
import lombok.extern.slf4j.Slf4j;
import net.sf.jsqlparser.JSQLParserException;
-import net.sf.jsqlparser.expression.Expression;
-import net.sf.jsqlparser.parser.CCJSqlParserUtil;
-import net.sf.jsqlparser.statement.Statement;
-import net.sf.jsqlparser.statement.select.*;
import javax.annotation.Nullable;
import java.util.*;
import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
/**
* 联邦查询-基于Calcite支持多源联合查询
*/
@Slf4j
-public class FederationDialect extends AbstractEngineDialect{
+public class FederationDialect extends AbstractEngineDialect {
private Stopwatch watch = Stopwatch.createUnstarted();
+ private final EngineConfigSource engineConfigSource = (EngineConfigSource) SpringContextUtil.getBean("engineConfigSource");
+
private static final Map<String, String> metaMap = Maps.newHashMap();
- public QueryProfile param;
+ public SQLQueryContext sqlQueryContext;
public BaseResult baseResult;
static {
@@ -51,45 +51,45 @@ public class FederationDialect extends AbstractEngineDialect{
metaMap.put("VARCHAR", DataTypeMapping.STRING);
}
- public FederationDialect(QueryProfile param, BaseResult result) {
- this.param = param;
+ public FederationDialect(SQLQueryContext sqlQueryContext, BaseResult result) {
+ this.sqlQueryContext = sqlQueryContext;
this.baseResult = result;
}
@Override
public void init() {
- if (StringUtil.isNotEmpty(param.getEngineQuerySource())) {
- param.getEngineQuerySource().setExecSQL(param.getEngineQuerySource().getSqlBody());
+ if (StringUtil.isNotEmpty(sqlQueryContext.getFederationSelectStatement())) {
+ sqlQueryContext.getFederationSelectStatement().setExecSQL(sqlQueryContext.getFederationSelectStatement().getSqlBody());
} else {
- SQLQuerySource sqlQuerySource = new SQLQuerySource();
- sqlQuerySource.setExecSQL(null);
- param.setEngineQuerySource(sqlQuerySource);
+ SelectStatement fdSelectStatement = new SelectStatement();
+ fdSelectStatement.setExecSQL(null);
+ sqlQueryContext.setFederationSelectStatement(fdSelectStatement);
}
}
@Override
public String getFederateQuery() {
- return param.getEngineQuerySource().getExecSQL();
+ return sqlQueryContext.getFederationSelectStatement().getExecSQL();
}
@Override
public String getSampleQuery() {
- String sql = param.getEngineQuerySource().getExecSQL();
+ String sql = sqlQueryContext.getFederationSelectStatement().getExecSQL();
if (!isExecute()) {
return sql;
}
try {
- sql = String.valueOf(convertSampleSql(sql));
+ sql = new SampleSQLHelper(sql, null, false, engineConfigSource.getDefaultResultNum()).build();
} catch (JSQLParserException e) {
log.error("syntax-check sql error {}, execute original sql: {}, error is: {} ",
- sql, sql = param.getDbQuerySource().getSqlBody(), e.getMessage() == null ? e.getCause() : e.getMessage());
+ sql, sql = sqlQueryContext.getDbSelectStatement().getSqlBody(), e.getMessage() == null ? e.getCause() : e.getMessage());
}
return sql;
}
@Override
public String getSyntaxCheckQuery() {
- String sql = param.getEngineQuerySource().getExecSQL();
+ String sql = sqlQueryContext.getFederationSelectStatement().getExecSQL();
if (!isExecute()) {
return sql;
}
@@ -107,13 +107,13 @@ public class FederationDialect extends AbstractEngineDialect{
}
@Override
- final public BaseResult getProcesses(String queryId){
+ final public BaseResult getProcesses(String queryId) {
return null;
}
@Override
public boolean isExecute() {
- return StringUtil.isNotEmpty(param.getEngineQuerySource().getExecSQL());
+ return StringUtil.isNotEmpty(sqlQueryContext.getFederationSelectStatement().getExecSQL());
}
@@ -122,63 +122,73 @@ public class FederationDialect extends AbstractEngineDialect{
try {
watch.start();
if (isExecute()) {
- log.info("DB engine is :{},execute query is: {}", DBTypeEnum.ENGINE.getValue(), JSON.toJSONString(sql));
- SQLQuerySource sqlQuerySource = param.getEngineQuerySource();
- Map resultMap = CalciteMemoryUtils.executeMemoryQuery(sqlQuerySource.getTableNames().get(0),
+ log.info("DB engine is :{}, execute query is: {}", DBEngineType.QGW.getValue(), JSON.toJSONString(sql));
+ SelectStatement fdSelectStatement = sqlQueryContext.getFederationSelectStatement();
+ Map resultMap = CalciteConnectionProcessor.getInstance().executeMemoryQuery(fdSelectStatement.getTableNames().get(0),
(List<Map<String, String>>) baseResult.getMeta(),
(List<Map<String, Object>>) baseResult.getData(), sql);
baseResult.setData(resultMap.get("data"));
baseResult.setMeta(unifyMeta(resultMap.get("meta")));
}
- log.debug("Calcite execute time :" + watch.elapsed().toMillis() / 1000 + "s");
-
+ long elapsed = watch.elapsed(TimeUnit.MILLISECONDS);
+ if (elapsed > engineConfigSource.getHighLatencyThreshold()) {
+ log.warn("DB engine is: {}, elapsed: {} ms, sql: {}", DBEngineType.QGW.getValue(), elapsed, sql);
+ }
} catch (RuntimeException ex) {
- log.error("Engine execute memory-query error:{}", ex);
- throw new QGWBusinessException(ResultStatusEnum.SERVER_ERROR.getCode(),
- QGWErrorCode.SQL_QUERY_FEDERATION_EXCEPTION.getCode(),
- String.format(QGWErrorCode.SQL_QUERY_FEDERATION_EXCEPTION.getMessage(),ex.getMessage()));
+ log.error("Federation query error:{}", ex.getMessage());
+ throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(),
+ CommonErrorCode.SQL_FEDERATION_QUERY_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.SQL_FEDERATION_QUERY_EXCEPTION.getMessage(), ex.getMessage()));
}
return build();
}
-
private BaseResult build() {
-
- long executeTime = watch.elapsed(TimeUnit.MILLISECONDS);
Map<String, Object> statisticMap = (Map<String, Object>) baseResult.getStatistics();
- statisticMap.put("elapsed", Long.parseLong(String.valueOf(statisticMap.get("elapsed"))) + executeTime);
if (StringUtil.isEmpty(baseResult.getData())) {
statisticMap.put("result_bytes", 0);
statisticMap.put("result_rows", 0);
} else {
statisticMap.put("result_bytes", JSON.toJSONBytes(baseResult.getData(), JSONWriter.Feature.LargeObject).length);
- statisticMap.put("result_rows", ((List) baseResult.getData()).size());
+ statisticMap.put("result_rows", ((List<?>) baseResult.getData()).size());
}
- extendMetaInfo(baseResult.getMeta());
- if (param.getFormat().equalsIgnoreCase(QueryFormatEnum.CSV.getValue())) {
- convertJsonToCSV();
+ baseResult.setMeta(ColumnCategoryHelper.expandMetaCategory(baseResult.getMeta(), sqlQueryContext.getFederationSelectStatement(), null));
+ if (baseResult.getMeta() != null
+ && baseResult.getData() != null
+ && ((List<Map<String, Object>>) baseResult.getMeta()).stream().anyMatch(x -> DataTypeMapping.LONG.equalsIgnoreCase(String.valueOf(x.get("type"))))) {
+
+ List<Map<String, Object>> data = (List<Map<String, Object>>) baseResult.getData();
+ for (int i = 0; i < data.size(); i++) {
+ Map<String, Object> map = data.get(i);
+ String jsonString = JSON.toJSONString(map, JSONWriter.Feature.WriteNulls, JSONWriter.Feature.BrowserCompatible);
+ Map parsedMap = JSON.parseObject(jsonString, LinkedHashMap.class);
+ map.clear();
+ map.putAll(parsedMap);
+ }
+ baseResult.setData(data);
}
- baseResult.setFormatType(param.getFormat());
- log.debug("Execute build Time : {}", watch.elapsed(TimeUnit.MILLISECONDS) / 1000.0);
- return baseResult;
- }
+ if (baseResult.getMeta() != null) {
+ List<Map<String, Object>> metaList = (List<Map<String, Object>>) baseResult.getMeta();
+ metaList.forEach(x -> {
+ x.remove(MetaConst.META_CATEGORY);
+ x.remove(MetaConst.META_DATA_TYPE);
+ x.remove(MetaConst.META_FIELD_NAME);
+ });
- private void extendMetaInfo(Object meta) {
- if (StringUtil.isEmpty(meta)) {
- return;
}
- List<Map<String, Object>> data = JsonPath.read(meta, "$");
- SQLQuerySource querySource = StringUtil.isEmpty(param.getEngineQuerySource()) || StringUtil.isEmpty(param.getEngineQuerySource().getExecSQL()) ? param.getDbQuerySource() : param.getEngineQuerySource();
- Map<String, String> groupDimension = querySource.getGroupDimension();
- for (Map<String, Object> map : data) {
- String field = String.valueOf(map.get("name"));
- if (StringUtil.isNotEmpty(groupDimension) && ((groupDimension.containsKey(field) || groupDimension.containsValue(field)))) {
- map.put("category", "Dimension");
- } else {
- map.put("category", "Metric");
- }
+ if (sqlQueryContext.getFormat().equalsIgnoreCase(OutputMode.CSV.getValue())) {
+ convertJsonToCSV();
+ } else if (sqlQueryContext.getFormat().equalsIgnoreCase(OutputMode.JSON.getValue())) {
+ baseResult.setMeta(null);
+ } else if (sqlQueryContext.getFormat().equalsIgnoreCase(OutputMode.JSON_COMPACT.getValue())) {
+ convertToJsonCompact();
}
+ long executeTime = watch.elapsed(TimeUnit.MILLISECONDS);
+ statisticMap.put("elapsed", Long.parseLong(String.valueOf(statisticMap.get("elapsed"))) + executeTime);
+ baseResult.setOutputMode(sqlQueryContext.getFormat());
+ log.debug("Federation execute and build result time : {} ms", executeTime);
+ return baseResult;
}
private List<Map<String, String>> unifyMeta(Object meta) {
@@ -196,12 +206,11 @@ public class FederationDialect extends AbstractEngineDialect{
}
@Override
- public void executeUDF() {
- SQLQuerySource sqlQuerySource = param.getDbQuerySource();
+ public void executeUDF(boolean isDryRun) {
+ SelectStatement sqlQuerySource = sqlQueryContext.getDbSelectStatement();
Set<UDF> udfSet = sqlQuerySource.getUdfSet();
for (UDF udf : udfSet) {
- List<Map<String, String>> targetResult = (List<Map<String, String>>) udf.execute(sqlQuerySource,
- baseResult);
+ List<Map<String, String>> targetResult = (List<Map<String, String>>) udf.execute(sqlQuerySource, isDryRun, baseResult);
baseResult.setData(targetResult);
}
}
@@ -209,7 +218,7 @@ public class FederationDialect extends AbstractEngineDialect{
private void convertJsonToCSV() {
- List<String> results = Lists.transform( (List<Map<String, Object>>) baseResult.getData(), new Function<Map<String, Object>, String>() {
+ List<String> results = Lists.transform((List<Map<String, Object>>) baseResult.getData(), new Function<Map<String, Object>, String>() {
@Override
public String apply(@Nullable Map<String, Object> input) {
@@ -229,55 +238,12 @@ public class FederationDialect extends AbstractEngineDialect{
baseResult.setMeta(Joiner.on(",").useForNull("").join(metas));
}
- /**
- * Desc: where、having至无效:添加 OR 1=1
- *
- * @param sql
- * @return {@link SelectBody}
- * @created by wWei
- * @date 2022/4/27 5:18 下午
- */
- private SelectBody convertSampleSql(String sql) throws JSQLParserException {
- Statement parse = CCJSqlParserUtil.parse(sql);
- if (parse instanceof Select) {
- Select select = (Select) parse;
- SelectBody selectBody = select.getSelectBody();
- if (selectBody instanceof PlainSelect) {
- PlainSelect plainSelect = (PlainSelect) select.getSelectBody();
- FromItem fromItem = plainSelect.getFromItem();
- if (fromItem instanceof SubSelect) {
- SubSelect subSelect = (SubSelect) plainSelect.getFromItem();
- subSelect.setSelectBody(convertSampleSql(subSelect.getSelectBody().toString()));
- }
- if (StringUtil.isNotEmpty(plainSelect.getWhere())) {
- plainSelect.setWhere(generateDatasetWhereSql(plainSelect.getWhere().toString()));
- }
- if (StringUtil.isNotEmpty(plainSelect.getHaving())) {
- plainSelect.setHaving(generateDatasetWhereSql(plainSelect.getHaving().toString()));
- }
- return plainSelect;
- } else if (selectBody instanceof SetOperationList) {
- SetOperationList setOperationList = (SetOperationList) selectBody;
- List<SelectBody> selects = setOperationList.getSelects();
- for (int i = 0; i < selects.size(); i++) {
- selects.set(i, convertSampleSql(selects.get(i).toString()));
- }
- return setOperationList;
- } else {
- throw new QGWBusinessException(ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.SQL_PARSE_ONLY_SUPPORT_SELECT));
- }
- }
- throw new QGWBusinessException(ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.SQL_PARSE_ONLY_SUPPORT_SELECT));
- }
-
- private Expression generateDatasetWhereSql(String where) throws JSQLParserException {
- String sql = String.format("SELECT * FROM tableName WHERE %s %s ", where, "OR 1=1");
- Statement parse = CCJSqlParserUtil.parse(sql);
- Select select = (Select) parse;
- PlainSelect selectBody = (PlainSelect) select.getSelectBody();
- return selectBody.getWhere();
+ private void convertToJsonCompact() {
+ List<Map<String, Object>> dataList = (List<Map<String, Object>>) baseResult.getData();
+ List<Object> result = dataList.stream()
+ .map(Map::values)
+ .collect(Collectors.toList());
+ baseResult.setData(result);
}
}
diff --git a/src/main/java/com/mesalab/qgw/dialect/HbaseDialect.java b/src/main/java/com/mesalab/qgw/dialect/HbaseDialect.java
index f8898663..d40fc74c 100644
--- a/src/main/java/com/mesalab/qgw/dialect/HbaseDialect.java
+++ b/src/main/java/com/mesalab/qgw/dialect/HbaseDialect.java
@@ -7,20 +7,21 @@ import com.google.common.collect.Maps;
import com.mesalab.common.entity.BaseResult;
import com.mesalab.common.entity.BaseResultGenerator;
import com.mesalab.common.entity.DataTypeMapping;
-import com.mesalab.common.enums.DBTypeEnum;
-import com.mesalab.common.enums.QueryFormatEnum;
+import com.mesalab.common.enums.DBEngineType;
+import com.mesalab.common.enums.OutputMode;
import com.mesalab.common.enums.ResultCodeEnum;
-import com.mesalab.common.enums.ResultStatusEnum;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
import com.mesalab.common.utils.SpringContextUtil;
+import com.mesalab.common.utils.sqlparser.ColumnCategoryHelper;
import com.mesalab.common.utils.sqlparser.SQLVisitorUtil;
import com.mesalab.common.utils.sqlparser.SQLFunctionUtil;
import com.mesalab.qgw.constant.QGWMessageConst;
import com.mesalab.qgw.exception.QGWBusinessException;
-import com.mesalab.qgw.exception.QGWErrorCode;
-import com.mesalab.qgw.model.basic.QueryProfile;
+import com.mesalab.common.exception.CommonErrorCode;
+import com.mesalab.qgw.model.basic.SQLQueryContext;
import com.mesalab.qgw.model.basic.EngineConfigSource;
import com.mesalab.qgw.model.basic.HBaseAPISource;
-import com.mesalab.qgw.model.basic.SQLQuerySource;
+import com.mesalab.qgw.model.basic.SelectStatement;
import com.geedgenetworks.utils.StringUtil;
import lombok.extern.slf4j.Slf4j;
import net.sf.jsqlparser.JSQLParserException;
@@ -40,7 +41,6 @@ import java.util.concurrent.TimeUnit;
@Slf4j
public class HbaseDialect extends AbstractDataSourceDialect {
- public QueryProfile param;
public BaseResult baseResult;
private Stopwatch watch = Stopwatch.createUnstarted();
private static final String URL_PREFIX = "jdbc:phoenix:";
@@ -49,6 +49,10 @@ public class HbaseDialect extends AbstractDataSourceDialect {
private final static Map<String, String> metaMap = Maps.newHashMap();
public static Properties PROPERTIES = new Properties();
+ public HbaseDialect(SQLQueryContext sqlQueryContext) {
+ super(sqlQueryContext);
+ }
+
static {
try {
Class.forName("org.apache.phoenix.jdbc.PhoenixDriver");
@@ -75,55 +79,47 @@ public class HbaseDialect extends AbstractDataSourceDialect {
metaMap.put("VARCHAR", DataTypeMapping.STRING);
}
- public HbaseDialect(QueryProfile param) {
- this.param = param;
- }
@Override
void init() {
- if (StringUtil.isNotEmpty(param.getDbQuerySource())) {
- param.getDbQuerySource().setExecSQL(param.getDbQuerySource().getSqlBody());
+ if (StringUtil.isNotEmpty(sqlQueryContext.getDbSelectStatement())) {
+ sqlQueryContext.getDbSelectStatement().setExecSQL(sqlQueryContext.getDbSelectStatement().getSqlBody());
} else {
- SQLQuerySource sqlQuerySource = new SQLQuerySource();
- sqlQuerySource.setExecSQL(param.getQuery());
- param.setDbQuerySource(sqlQuerySource);
+ SelectStatement dbSelectStatement = new SelectStatement();
+ dbSelectStatement.setExecSQL(sqlQueryContext.getOriginalSQL());
+ sqlQueryContext.setDbSelectStatement(dbSelectStatement);
}
baseResult = BaseResultGenerator.success();
}
@Override
String convertQuery(String sql) {
- sql = SQLFunctionUtil.generateDateFunction(sql, DBTypeEnum.HBASE.getValue());
+ sql = SQLFunctionUtil.generateDateFunction(sql, DBEngineType.HBASE.getValue());
try {
Select select = (Select) CCJSqlParserUtil.parse(sql);
select.getSelectBody().accept(SQLVisitorUtil.getVisitorOfEscapeMetadataWithDoubleQuote());
return select.toString();
} catch (JSQLParserException | RuntimeException e) {
log.error("SQL Syntax Error: Converter SQL Syntax Error, SQL is:{}, Error is:{}", sql, e);
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
- String.format(QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getMessage(), e.getMessage()));
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getMessage(), e.getMessage()));
}
}
@Override
- String getBestQuery() {
+ String generateRuleBaseOptimizedQuery() {
return new Optimizer().generateQueryLimit().generateSQL().build();
}
@Override
- String getOriginalQuery() {
- return param.getQuery();
- }
-
- @Override
String getSampleQuery() {
- return convertQuery(param.getQuery());
+ return convertQuery(sqlQueryContext.getOriginalSQL());
}
@Override
String getExplainQuery() {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
- String.format(QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getMessage(), QGWMessageConst.HBASE_DIALECT_UNSUPPORTED_EXPLAIN_SYNTAX));
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getMessage(), QGWMessageConst.HBASE_DIALECT_UNSUPPORTED_EXPLAIN_SYNTAX));
}
@Override
@@ -136,17 +132,28 @@ public class HbaseDialect extends AbstractDataSourceDialect {
}
@Override
+ List<Map<String, String>> buildExampleMeta() {
+ return baseResult.getMeta() != null ? (List<Map<String, String>>) baseResult.getMeta() : null;
+ }
+
+ @Override
+ List<Object> buildExampleData() {
+ return baseResult.getData() != null ? (List<Object>) baseResult.getData() : null;
+ }
+
+ @Override
public BaseResult generateBaseResult(String sql, Optional<String> message) {
watch.start();
Map<String, Object> results = executeQuery(sql);
int status = Integer.parseInt(String.valueOf(results.get("status")));
- if (status == ResultStatusEnum.SUCCESS.getCode()) {
- baseResult = BaseResultGenerator.generate(status, ResultCodeEnum.SUCCESS.getCode(), message.get(),
- results.get("data"), results.get("meta"), (Map<String, Object>) results.get("statistics"), QueryFormatEnum.JSON.getValue());
+ if (status == HttpStatusCodeEnum.SUCCESS.getCode()) {
+ baseResult = BaseResultGenerator.generate(status, ResultCodeEnum.SUCCESS.getCode(), true, message.get(),
+ results.get("data"), results.get("meta"), (Map<String, Object>) results.get("statistics"), OutputMode.JSON.getValue());
} else {
- baseResult = BaseResultGenerator.generate(status, QGWErrorCode.SQL_BUILDER_EXCEPTION.getCode(), String.valueOf(results.get("message")),
- results.get("data"), results.get("meta"), (Map<String, Object>) results.get("statistics"), param.getFormat());
+ baseResult = BaseResultGenerator.generate(status, CommonErrorCode.SQL_REWRITE_AND_TRANSFORMATION_EXCEPTION.getCode(), false, String.valueOf(results.get("message")),
+ results.get("data"), results.get("meta"), (Map<String, Object>) results.get("statistics"), sqlQueryContext.getFormat());
}
+ baseResult.setMeta(ColumnCategoryHelper.expandMetaCategory(baseResult.getMeta(), sqlQueryContext.getDbSelectStatement(), sqlQueryContext.getDbEngine()));
return baseResult;
}
@@ -155,7 +162,7 @@ public class HbaseDialect extends AbstractDataSourceDialect {
java.sql.Statement statement = null;
ResultSet resultSet = null;
try {
- log.info("{} engine execute query: {}", DBTypeEnum.HBASE.getValue(), sql);
+ log.info("{} engine execute query: {}", DBEngineType.HBASE.getValue(), sql);
PROPERTIES.setProperty("hbase.rpc.timeout", hBaseAPISource.getRpcTimeout());
PROPERTIES.setProperty("hbase.client.scanner.timeout.period", hBaseAPISource.getRpcTimeout());
PROPERTIES.setProperty("phoenix.query.timeoutMs", hBaseAPISource.getRpcTimeout());
@@ -165,9 +172,9 @@ public class HbaseDialect extends AbstractDataSourceDialect {
resultSet = statement.executeQuery(sql);
return buildResult(resultSet);
} catch (RuntimeException | SQLException e) {
- log.error("{} engine error: {}", DBTypeEnum.HBASE.getValue(), e);
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
- String.format(QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getMessage(),simplyErrorMessage(e.getMessage())));
+ log.error("{} engine error: {}", DBEngineType.HBASE.getValue(), e);
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getMessage(), simplyErrorMessage(e.getMessage())));
} finally {
close(resultSet, statement, conn);
}
@@ -197,7 +204,7 @@ public class HbaseDialect extends AbstractDataSourceDialect {
statistics.put("elapsed", watch.elapsed(TimeUnit.MILLISECONDS));
statistics.put("result_bytes", dataList.toString().getBytes().length);
statistics.put("result_rows", dataList.size());
- result.put("status", ResultStatusEnum.SUCCESS.getCode());
+ result.put("status", HttpStatusCodeEnum.SUCCESS.getCode());
result.put("statistics", statistics);
result.put("meta", metaDataList);
result.put("data", dataList);
@@ -211,12 +218,12 @@ public class HbaseDialect extends AbstractDataSourceDialect {
@Override
public BaseResult executeKillQuery(String queryId) {
- return BaseResultGenerator.failure(ResultCodeEnum.PARAMETER_ERROR.getCode(), String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),"not support"));
+ return BaseResultGenerator.failure(CommonErrorCode.PARAMETER_ERROR.getCode(), String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), "not support"));
}
@Override
public BaseResult getProcesses(String queryId) {
- return BaseResultGenerator.failure(ResultCodeEnum.PARAMETER_ERROR.getCode(), String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),"not support"));
+ return BaseResultGenerator.failure(CommonErrorCode.PARAMETER_ERROR.getCode(), String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), "not support"));
}
class Optimizer {
@@ -229,11 +236,11 @@ public class HbaseDialect extends AbstractDataSourceDialect {
*/
public Optimizer generateQueryLimit() {
- setDefaultResultRows(StringUtil.isEmpty(param.getEngineQuerySource()) ? param.getDbQuerySource() : param.getEngineQuerySource(), engineConfigSource.getDefaultResultNum());
+ setDefaultResultRows(StringUtil.isEmpty(sqlQueryContext.getFederationSelectStatement()) ? sqlQueryContext.getDbSelectStatement() : sqlQueryContext.getFederationSelectStatement(), engineConfigSource.getDefaultResultNum());
return this;
}
- private void setDefaultResultRows(SQLQuerySource sqlQuerySource, int defaultResultRows) {
+ private void setDefaultResultRows(SelectStatement sqlQuerySource, int defaultResultRows) {
if (StringUtil.isBlank(sqlQuerySource.getLimit())) {
sqlQuerySource.setSqlBody(sqlQuerySource.getSqlBody() + " limit " + defaultResultRows);
sqlQuerySource.setLimit(String.valueOf(defaultResultRows));
@@ -242,7 +249,7 @@ public class HbaseDialect extends AbstractDataSourceDialect {
}
public Optimizer generateSQL() {
- SQLQuerySource sqlQuerySource = param.getDbQuerySource();
+ SelectStatement sqlQuerySource = sqlQueryContext.getDbSelectStatement();
sql = sqlQuerySource.getSqlBody();
return this;
}
diff --git a/src/main/java/com/mesalab/qgw/exception/QGWBusinessException.java b/src/main/java/com/mesalab/qgw/exception/QGWBusinessException.java
index b4932405..bbec18d8 100644
--- a/src/main/java/com/mesalab/qgw/exception/QGWBusinessException.java
+++ b/src/main/java/com/mesalab/qgw/exception/QGWBusinessException.java
@@ -1,7 +1,7 @@
package com.mesalab.qgw.exception;
-import com.mesalab.common.enums.ResultCodeEnum;
-import com.mesalab.common.enums.ResultStatusEnum;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
+import com.mesalab.common.exception.CommonErrorCode;
import lombok.*;
/**
@@ -20,12 +20,12 @@ public class QGWBusinessException extends RuntimeException {
/**
* 状态码
*/
- private int status = ResultStatusEnum.SERVER_ERROR.getCode();
+ private int status = HttpStatusCodeEnum.SERVER_ERROR.getCode();
/**
* 业务码
*/
- private String code = ResultCodeEnum.UNKNOWN_EXCEPTION.getCode();
+ private String code = CommonErrorCode.UNKNOWN_EXCEPTION.getCode();
/**
@@ -33,14 +33,14 @@ public class QGWBusinessException extends RuntimeException {
*/
private String message;
- public QGWBusinessException(int status, QGWErrorCode errorCode) {
+ public QGWBusinessException(int status, CommonErrorCode errorCode) {
super(errorCode.getMessage());
this.status = status;
this.code = errorCode.getCode();
this.message = errorCode.getMessage();
}
- public QGWBusinessException(QGWErrorCode errorCode) {
+ public QGWBusinessException(CommonErrorCode errorCode) {
super(errorCode.getMessage());
this.code = errorCode.getCode();
this.message = errorCode.getMessage();
diff --git a/src/main/java/com/mesalab/qgw/exception/QGWErrorCode.java b/src/main/java/com/mesalab/qgw/exception/QGWErrorCode.java
deleted file mode 100644
index 1024fa9a..00000000
--- a/src/main/java/com/mesalab/qgw/exception/QGWErrorCode.java
+++ /dev/null
@@ -1,37 +0,0 @@
-package com.mesalab.qgw.exception;
-
-import lombok.Getter;
-
-/**
- * @Author wxs
- * @Date 2022/11/3
- */
-@Getter
-public enum QGWErrorCode {
-
- /**
- * 1-3位:异常类型(HTTP协议状态码)
- * 3-4位:模块 01
- * 5-7:自然排序
- */
- // SQL解析错误
- SQL_SYNTAX_PARSE_EXCEPTION("40001300", "Syntax error in SQL statement: %s"),
- // 数据库引擎错误
- SQL_EXECUTION_BAD_REQUEST_EXCEPTION("40001301", "Invalid SQL statement error in database execution engine: %s"),
- //Schema与DB不一致
- SCHEMA_WITCH_DB_INCONSISTENTY("50001010", "Schema inconsistent with DB: %s"),
- // SQL构建错误
- SQL_BUILDER_EXCEPTION("50001100", "Error in SQL query builder and optimizer: %s"),
- // SQL在数据库查询中执行错误
- SQL_EXECUTION_SERVER_EXCEPTION("50001300", "Error in database execution engine: %s"),
- // SQL在联邦查询中执行错误
- SQL_QUERY_FEDERATION_EXCEPTION("50001500", "Error in query federation: %s"),
- ;
- private String code;
- private String message;
-
- QGWErrorCode(String code, String message) {
- this.code = code;
- this.message = message;
- }
-}
diff --git a/src/main/java/com/mesalab/qgw/filter/ControllerFilter.java b/src/main/java/com/mesalab/qgw/filter/ControllerFilter.java
index dc981837..4646b027 100644
--- a/src/main/java/com/mesalab/qgw/filter/ControllerFilter.java
+++ b/src/main/java/com/mesalab/qgw/filter/ControllerFilter.java
@@ -6,16 +6,12 @@ import javax.servlet.*;
import javax.servlet.annotation.WebFilter;
import javax.servlet.http.HttpServletRequest;
import java.io.IOException;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Set;
@Component
@WebFilter(urlPatterns = "/*")
-public class ControllerFilter implements Filter {
-
- private static final Set<String> NOT_FILTER_PATHS = Collections.unmodifiableSet(new HashSet<>(
- Collections.singletonList("/knowledge_base/v1")));
+public class ControllerFilter implements Filter {
+ private static final String NOT_FILTER_PATHS_KB = "/v1/knowledge_base";
+ private static final String NOT_FILTER_PATHS_HOS = "/v1/hos";
@Override
public void init(FilterConfig filterConfig) throws ServletException {
@@ -26,7 +22,7 @@ public class ControllerFilter implements Filter {
public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain) throws IOException, ServletException {
HttpServletRequest httpServletRequest = ((HttpServletRequest) servletRequest);
String servletPath = httpServletRequest.getServletPath();
- if (NOT_FILTER_PATHS.contains(servletPath)) {
+ if (String.valueOf(servletPath).startsWith(NOT_FILTER_PATHS_HOS) || String.valueOf(servletPath).startsWith(NOT_FILTER_PATHS_KB)) {
filterChain.doFilter(servletRequest, servletResponse);
} else {
filterChain.doFilter(new RequestParamWrapper(httpServletRequest), servletResponse);
diff --git a/src/main/java/com/mesalab/qgw/filter/RequestParamWrapper.java b/src/main/java/com/mesalab/qgw/filter/RequestParamWrapper.java
index ea1c6cc6..82780646 100644
--- a/src/main/java/com/mesalab/qgw/filter/RequestParamWrapper.java
+++ b/src/main/java/com/mesalab/qgw/filter/RequestParamWrapper.java
@@ -3,7 +3,7 @@ package com.mesalab.qgw.filter;
import com.google.common.collect.Lists;
-import com.mesalab.common.enums.QueryParamEnum;
+import com.mesalab.common.enums.QueryParam;
import com.mesalab.common.utils.HttpHelper;
import org.apache.http.NameValuePair;
import org.apache.http.client.utils.URLEncodedUtils;
@@ -34,12 +34,12 @@ public class RequestParamWrapper extends HttpServletRequestWrapper {
@Override
public String[] getParameterValues(String name) {
- if (QueryParamEnum.QUERY.getValue().equals(name)) {
+ if (QueryParam.QUERY.getValue().equals(name)) {
queryString = queryString.replaceAll("\\+", "%2B");
- List<NameValuePair> values = URLEncodedUtils.parse(queryString, Charset.forName("UTF-8"));
+ List<NameValuePair> values = URLEncodedUtils.parse(queryString, StandardCharsets.UTF_8);
List<String> valueList = Lists.newArrayList();
for (NameValuePair nameValuePair: values) {
- if (nameValuePair.getName().equals(QueryParamEnum.QUERY.getValue())) {
+ if (nameValuePair.getName().equals(QueryParam.QUERY.getValue())) {
valueList.add(nameValuePair.getValue());
}
}
diff --git a/src/main/java/com/mesalab/qgw/interceptor/GlobalExceptionHandler.java b/src/main/java/com/mesalab/qgw/interceptor/GlobalExceptionHandler.java
index e30f24fb..111e4b19 100644
--- a/src/main/java/com/mesalab/qgw/interceptor/GlobalExceptionHandler.java
+++ b/src/main/java/com/mesalab/qgw/interceptor/GlobalExceptionHandler.java
@@ -1,18 +1,22 @@
package com.mesalab.qgw.interceptor;
import cn.hutool.core.exceptions.ExceptionUtil;
+import cn.hutool.core.util.StrUtil;
import cn.hutool.crypto.digest.DigestUtil;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.mesalab.common.entity.BaseResult;
import com.mesalab.common.entity.BaseResultGenerator;
-import com.mesalab.common.enums.ResultStatusEnum;
-import com.mesalab.common.exception.BusinessException;;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
+import com.mesalab.common.exception.BusinessException;
+import com.mesalab.common.utils.MDCUtil;
+import com.mesalab.common.utils.RandomUtil;
import com.mesalab.qgw.exception.QGWBusinessException;
import com.geedgenetworks.utils.StringUtil;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.RestControllerAdvice;
import org.springframework.web.context.request.async.AsyncRequestTimeoutException;
+
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
@@ -23,6 +27,7 @@ public class GlobalExceptionHandler {
/**
* 捕获特定异常
+ *
* @param e
* @param request
*/
@@ -34,29 +39,28 @@ public class GlobalExceptionHandler {
@ExceptionHandler(Exception.class)
public BaseResult handleException(Exception e, HttpServletRequest request, HttpServletResponse response) {
- response.setStatus(ResultStatusEnum.BAD_REQUEST.getCode());
- String queryKey = DigestUtil.md5Hex(StringUtil.createUUID());
- log.error("queryKey:{}, message:{}, stackTrace:{}", queryKey, ExceptionUtil.getRootCauseMessage(e), getStackTrace(e));
+ response.setStatus(HttpStatusCodeEnum.BAD_REQUEST.getCode());
+ String queryKey = StrUtil.isEmpty(MDCUtil.getTraceId()) ? RandomUtil.getUUID() : MDCUtil.getTraceId();
+ log.error("requestId:{}, message:{}, stackTrace:{}", queryKey, ExceptionUtil.getRootCauseMessage(e), getStackTrace(e));
return BaseResultGenerator.failure(e.getMessage(), queryKey);
}
-
@ExceptionHandler({QGWBusinessException.class})
public BaseResult handleBusinessException(QGWBusinessException e, HttpServletRequest request, HttpServletResponse response) {
response.setStatus(e.getStatus());
- String queryKey = DigestUtil.md5Hex(StringUtil.createUUID());
+ String queryKey = StrUtil.isEmpty(MDCUtil.getTraceId()) ? RandomUtil.getUUID() : MDCUtil.getTraceId();
String message = (null != e.getMessage()) ? e.getMessage() : ExceptionUtil.getRootCauseMessage(e);
- log.error("queryKey:{}.message:{}.stackTrace:{}", queryKey, message, getStackTrace(e));
- return BaseResultGenerator.failure(e.getStatus(), e.getCode(), queryKey, message);
+ log.error("requestId:{}.message:{}.stackTrace:{}", queryKey, message, getStackTrace(e));
+ return BaseResultGenerator.failure(e.getStatus(), e.getCode(), queryKey, message);
}
@ExceptionHandler({BusinessException.class})
public BaseResult handleBusinessException(BusinessException e, HttpServletRequest request, HttpServletResponse response) {
response.setStatus(e.getStatus());
- String queryKey = DigestUtil.md5Hex(StringUtil.createUUID());
+ String queryKey = StrUtil.isEmpty(MDCUtil.getTraceId()) ? RandomUtil.getUUID() : MDCUtil.getTraceId();
String message = (null != e.getMessage()) ? e.getMessage() : ExceptionUtil.getRootCauseMessage(e);
- log.error("queryKey:{}.message:{}.stackTrace:{}", queryKey, message, getStackTrace(e));
+ log.error("requestId:{}.message:{}.stackTrace:{}", queryKey, message, getStackTrace(e));
return BaseResultGenerator.failure(e.getStatus(), e.getCode(), queryKey, message);
}
@@ -64,6 +68,4 @@ public class GlobalExceptionHandler {
return StringUtil.isNotEmpty(e.getStackTrace()) ? e.getStackTrace()[0].toString() : "";
}
-
-
- }
+}
diff --git a/src/main/java/com/mesalab/qgw/interceptor/QuerySubmitInterceptor.java b/src/main/java/com/mesalab/qgw/interceptor/QuerySubmitInterceptor.java
index 1f5dcfac..97a5575b 100644
--- a/src/main/java/com/mesalab/qgw/interceptor/QuerySubmitInterceptor.java
+++ b/src/main/java/com/mesalab/qgw/interceptor/QuerySubmitInterceptor.java
@@ -7,10 +7,12 @@ import com.alibaba.fastjson2.JSON;
import com.alibaba.fastjson2.JSONWriter;
import com.google.common.cache.CacheStats;
import com.mesalab.common.utils.HttpHelper;
+import com.mesalab.common.utils.MDCUtil;
import com.mesalab.common.utils.QueryCacheUtils;
import com.mesalab.qgw.model.basic.CachedSubmitCheck;
import com.geedgenetworks.utils.StringUtil;
import lombok.Data;
+import org.slf4j.MDC;
import org.springframework.stereotype.Component;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.method.HandlerMethod;
@@ -24,9 +26,12 @@ public class QuerySubmitInterceptor extends HandlerInterceptorAdapter {
private static final Log log = LogFactory.get();
@Override
public boolean preHandle(HttpServletRequest request,
- HttpServletResponse response, Object handler) throws Exception
- {
-
+ HttpServletResponse response, Object handler) throws Exception {
+ String traceId = request.getHeader("Request-Id");
+ if (traceId == null || traceId.isEmpty()) {
+ traceId = MDCUtil.generateTraceId();
+ }
+ MDC.put(MDCUtil.TRACE_ID, traceId);
if (handler instanceof HandlerMethod) {
HandlerMethod handlerMethod = (HandlerMethod) handler;
CachedSubmitCheck submitCheck = handlerMethod.getMethodAnnotation(CachedSubmitCheck.class);
diff --git a/src/main/java/com/mesalab/qgw/listener/KnowledgeListener.java b/src/main/java/com/mesalab/qgw/listener/KnowledgeListener.java
deleted file mode 100644
index af68b7da..00000000
--- a/src/main/java/com/mesalab/qgw/listener/KnowledgeListener.java
+++ /dev/null
@@ -1,185 +0,0 @@
-package com.mesalab.qgw.listener;
-
-import cn.hutool.core.io.IoUtil;
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import com.alibaba.fastjson2.JSON;
-import com.alibaba.nacos.api.config.ConfigService;
-import com.alibaba.nacos.api.config.listener.AbstractListener;
-import com.alibaba.nacos.api.exception.NacosException;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import com.mesalab.common.nacos.NacosConst;
-import com.mesalab.common.utils.IPUtil;
-import com.mesalab.qgw.model.basic.HttpConfig;
-import com.mesalab.qgw.service.impl.HttpClientService;
-import com.mesalab.services.common.entity.KnowledgeConfig;
-import com.mesalab.services.common.entity.KnowledgeBase;
-import com.mesalab.services.common.entity.KnowledgeConstant;
-import com.mesalab.services.configuration.HosConfig;
-import com.geedgenetworks.utils.StringUtil;
-import org.apache.http.Header;
-import org.apache.http.message.BasicHeader;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.stereotype.Component;
-
-import javax.annotation.PostConstruct;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-/**
- * @Author wxs
- * @Date 2022/7/7
- */
-@Component
-public class KnowledgeListener {
-
- private static final Log log = LogFactory.get();
-
-
- @Autowired
- private HttpConfig httpConfig;
- @Autowired
- KnowledgeConfig knowledgeConfig;
- @Autowired
- private HttpClientService httpClientService;
- @Autowired
- private HosConfig hosConfig;
-
- @Autowired
- private ConfigService pubConfigService;
-
- @Autowired
- private ConfigService systemConfigService;
-
- @Value("${nacos.config.group}")
- private String groupId;
-
- @Value("${nacos.config.data-id}")
- private String dataId;
- private static Map<String, String> updateMap = new HashMap<>();
-
- @PostConstruct
- private void initKnowledge() {
- try {
- loadIpLocation(pubConfigService.getConfig(NacosConst.KNOWLEDGE_BASE_DATA_ID, NacosConst.DEFAULT_GROUP, httpConfig.getServerRequestTimeOut()));
- } catch (RuntimeException | NacosException e) {
- log.error("loadIpLocation error: {}", e.getMessage());
- }
- }
-
- @PostConstruct
- private void registerKnowledgeBaseListener() {
- try {
- pubConfigService.addListener(NacosConst.KNOWLEDGE_BASE_DATA_ID, NacosConst.DEFAULT_GROUP, new AbstractListener() {
- @Override
- public void receiveConfigInfo(String content) {
- loadIpLocation(content);
- }
- });
- } catch (RuntimeException | NacosException e) {
- log.error("register knowledgeBase listener error: {}", e.getMessage());
- }
- }
-
- @PostConstruct
- private void registerSystemConfigListener() {
- try {
- systemConfigService.addListener(dataId, groupId, new AbstractListener() {
- @Override
- public void receiveConfigInfo(String versionInfo) {
- try {
- loadIpLocation(pubConfigService.getConfig(NacosConst.KNOWLEDGE_BASE_DATA_ID, NacosConst.DEFAULT_GROUP, httpConfig.getServerRequestTimeOut()));
- } catch (NacosException e) {
- log.error("loadIpLocation error: {}", e.getMessage());
- }
- }
- });
- } catch (RuntimeException | NacosException e) {
- log.error("register systemConfig listener error: {}", e.getMessage());
- }
- }
-
- private void loadIpLocation(String knowledgeMetadata) {
- if (isLoadIpLocation(knowledgeMetadata)) {
- IPUtil.ipLookupBuild();
- log.info("ip location build success");
- } else {
- log.info("ip location no change");
- }
- }
-
- private boolean isLoadIpLocation(String knowledgeMetadata) {
- Map<String, KnowledgeBase> map = buildIPKnowledgeInfo(getKnowledgeBases(knowledgeMetadata));
- boolean isUpdate = false;
- for (Map.Entry<String, KnowledgeBase> entry : map.entrySet()) {
- KnowledgeBase knowledgeBase = entry.getValue();
- if (StringUtil.equals(knowledgeBase.getSha256(), updateMap.get(knowledgeBase.getId()))) {
- continue;
- }
- log.info("{}.{} version: {}, knowledge updated.", knowledgeBase.getName(), knowledgeBase.getFormat(), knowledgeBase.getVersion());
- download(knowledgeBase);
- updateMap.put(knowledgeBase.getId(), knowledgeBase.getSha256());
- isUpdate = true;
- }
- return isUpdate;
- }
-
- private void download(KnowledgeBase knowledgeBase) {
- FileOutputStream outputStream = null;
- InputStream inputStream = null;
- try {
- Header header = new BasicHeader(KnowledgeConstant.TOKEN, hosConfig.getToken());
- inputStream = httpClientService.httpGetInputStream(knowledgeBase.getPath(), httpConfig.getServerResponseTimeOut(), header);
- if (inputStream == null) {
- log.error("download file error, skip this update, knowledge is: {}", knowledgeBase);
- return;
- }
- outputStream = new FileOutputStream(KnowledgeConstant.DAT.concat(File.separator).concat(knowledgeBase.getName()).concat(".").concat(knowledgeBase.getFormat()));
- IoUtil.copy(inputStream, outputStream);
- log.info("knowledge download name: {}, version: {}", knowledgeBase.getName(), knowledgeBase.getVersion());
- } catch (IOException e) {
- log.error("download file error, message: {}", e.getMessage());
- } finally {
- IoUtil.close(inputStream);
- IoUtil.close(outputStream);
- }
- }
-
- private List<KnowledgeBase> getKnowledgeBases(String knowledgeMetadata) {
- log.info("knowledge base meta is: {}", knowledgeMetadata);
- if (StringUtil.isEmpty(knowledgeMetadata)) {
- return Lists.newArrayList();
- }
- return JSON.parseArray(knowledgeMetadata, KnowledgeBase.class);
- }
-
- private Map<String, KnowledgeBase> buildIPKnowledgeInfo(List<KnowledgeBase> knowledgeBaseList) {
- Map<String, KnowledgeBase> map = Maps.newHashMap();
- String ipBuiltIn = knowledgeConfig.getIpBuiltIn();
- String ipUserDefined = knowledgeConfig.getIpUserDefined();
- String ipAsn = knowledgeConfig.getIpAsn();
- for (KnowledgeBase knowledgeBase : knowledgeBaseList) {
- String name = knowledgeBase.getName();
- String version = knowledgeBase.getVersion();
- String concat = name.concat(":").concat(version);
- if (StringUtil.equals(concat, ipBuiltIn)
- || (StringUtil.equals(name, ipBuiltIn) && StringUtil.equals(version, KnowledgeConstant.LATEST))) {
- map.put(KnowledgeConstant.IP_BUILTIN, knowledgeBase);
- } else if (StringUtil.equals(concat, ipUserDefined)
- || (StringUtil.equals(name, ipUserDefined) && StringUtil.equals(version, KnowledgeConstant.LATEST))) {
- map.put(KnowledgeConstant.IP_USER_DEFINED, knowledgeBase);
- } else if (StringUtil.equals(concat, ipAsn)
- || (StringUtil.equals(name, ipUserDefined) && StringUtil.equals(version, KnowledgeConstant.LATEST))) {
- map.put(KnowledgeConstant.ASN_BUILTIN, knowledgeBase);
- }
- }
- return map;
- }
-}
diff --git a/src/main/java/com/mesalab/qgw/listener/KnowledgeScheduler.java b/src/main/java/com/mesalab/qgw/listener/KnowledgeScheduler.java
new file mode 100644
index 00000000..38c3d8fe
--- /dev/null
+++ b/src/main/java/com/mesalab/qgw/listener/KnowledgeScheduler.java
@@ -0,0 +1,117 @@
+package com.mesalab.qgw.listener;
+
+import cn.hutool.core.io.IoUtil;
+import cn.hutool.log.Log;
+import cn.hutool.log.LogFactory;
+import com.google.common.collect.Maps;
+import com.mesalab.common.entity.BaseResult;
+import com.mesalab.common.utils.IPUtil;
+import com.mesalab.qgw.model.basic.HttpConfig;
+import com.mesalab.qgw.service.impl.HttpClientServiceV2;
+import com.mesalab.services.common.entity.KnowledgeConfig;
+import com.mesalab.services.common.entity.KnowledgeConstant;
+import com.mesalab.services.service.KBService;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.scheduling.annotation.Scheduled;
+import org.springframework.stereotype.Component;
+
+import javax.annotation.PostConstruct;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * @Author wxs
+ * @Date 2022/7/7
+ */
+@Component
+public class KnowledgeScheduler {
+
+ private static final Log log = LogFactory.get();
+
+
+ @Autowired
+ private HttpConfig httpConfig;
+ @Autowired
+ KnowledgeConfig knowledgeConfig;
+ @Autowired
+ private HttpClientServiceV2 httpClientServiceV2;
+ @Autowired
+ private KBService kbService;
+ private static final Map<String, String> updateMap = new HashMap<>();
+
+ @PostConstruct
+ private void loadKnowledge() {
+ try {
+ BaseResult baseResult = kbService.getList(null, null);
+            if (!baseResult.isSuccess()) {
+                log.error("get KB list error: {}", baseResult.getMessage());
+                return;
+            }
+ List<Map<String, Object>> knowledgeList = (List<Map<String, Object>>) baseResult.getData();
+ for (Map<String, Object> knowledge : knowledgeList) {
+ String kbId = String.valueOf(knowledge.get("kb_id"));
+ String version = String.valueOf(knowledge.get("version"));
+ String sha256 = String.valueOf(knowledge.get("sha256"));
+ String path = String.valueOf(knowledge.get("path"));
+ if (knowledgeConfig.getIpUserDefined().equals(kbId.concat("_").concat(version))
+ && !sha256.equals(updateMap.get(kbId))) {
+ boolean downloadSucceed = downloadSucceed(path, KnowledgeConstant.DAT + File.separator + KnowledgeConstant.IP_USER_DEFINED + KnowledgeConstant.MMDB);
+ if (downloadSucceed) {
+ rebuildIpLookup();
+ updateMap.put(kbId, sha256);
+ }
+ continue;
+ }
+ if (knowledgeConfig.getIpBuiltIn().equals(kbId.concat("_").concat(version))
+ && !sha256.equals(updateMap.get(kbId))) {
+ boolean downloadSucceed = downloadSucceed(path, KnowledgeConstant.DAT + File.separator + KnowledgeConstant.IP_BUILTIN + KnowledgeConstant.MMDB);
+ if (downloadSucceed) {
+ rebuildIpLookup();
+ updateMap.put(kbId, sha256);
+ }
+ continue;
+ }
+ if (knowledgeConfig.getIpAsn().equals(kbId.concat("_").concat(version))
+ && !sha256.equals(updateMap.get(kbId))) {
+ boolean downloadSucceed = downloadSucceed(path, KnowledgeConstant.DAT + File.separator + KnowledgeConstant.ASN_BUILTIN + KnowledgeConstant.MMDB);
+ if (downloadSucceed) {
+ rebuildIpLookup();
+ updateMap.put(kbId, sha256);
+ }
+ }
+ }
+ } catch (RuntimeException e) {
+ log.error("loadIpLocation error: {}", e.getMessage());
+ }
+ }
+
+ private static void rebuildIpLookup() {
+ IPUtil.ipLookupBuild();
+ }
+
+ private boolean downloadSucceed(String url, String filePath) {
+ FileOutputStream outputStream = null;
+ InputStream inputStream = null;
+ try {
+ inputStream = httpClientServiceV2.getInputStream(url, httpConfig.getServerResponseTimeOut(), Maps.newHashMap());
+ if (inputStream == null) {
+            log.error("download file error, skip this update, url is: {}", url);
+ return false;
+ }
+ outputStream = new FileOutputStream(filePath);
+ IoUtil.copy(inputStream, outputStream);
+ log.info("knowledge download path is: {}", filePath);
+ return true;
+ } catch (IOException e) {
+ log.error("download file error, message: {}", e.getMessage());
+ return false;
+ } finally {
+ IoUtil.close(inputStream);
+ IoUtil.close(outputStream);
+ }
+ }
+}
diff --git a/src/main/java/com/mesalab/qgw/model/basic/AuditServiceLog.java b/src/main/java/com/mesalab/qgw/model/basic/AuditServiceLog.java
index aa5748b6..0188743d 100644
--- a/src/main/java/com/mesalab/qgw/model/basic/AuditServiceLog.java
+++ b/src/main/java/com/mesalab/qgw/model/basic/AuditServiceLog.java
@@ -8,7 +8,7 @@ public class AuditServiceLog {
private String annotation;
- private String queryKey;
+ private String requestId;
private String cacheKey;
@@ -16,11 +16,11 @@ public class AuditServiceLog {
private String clientIp;
- private long exeTime;
+ private long elapsed;
- private String dbType;
+ private String dbEngine;
- private String param;
+ private String requestParam;
private String url;
diff --git a/src/main/java/com/mesalab/qgw/model/basic/ClickHouseHttpSource.java b/src/main/java/com/mesalab/qgw/model/basic/ClickHouseHttpSource.java
index d27f290e..7ef769f8 100644
--- a/src/main/java/com/mesalab/qgw/model/basic/ClickHouseHttpSource.java
+++ b/src/main/java/com/mesalab/qgw/model/basic/ClickHouseHttpSource.java
@@ -25,6 +25,11 @@ public class ClickHouseHttpSource implements Serializable {
@NacosValue(value = "${clickhouse.enableApproximateOptimizer}", autoRefreshed = true)
private boolean enableApproximateOptimizer;
/**
+ * Whether the default timezone should be converted into UTC
+ */
+ @NacosValue(value = "${clickhouse.convertDefaultTimezoneIntoUTC}", autoRefreshed = true)
+ private boolean convertDefaultTimezoneIntoUTC;
+ /**
* realTime的用户名
*/
@NacosValue(value = "${clickhouse.realTimeAccount.username}")
diff --git a/src/main/java/com/mesalab/qgw/model/basic/ClickHouseHttpQuery.java b/src/main/java/com/mesalab/qgw/model/basic/ClickHouseQueryParam.java
index 55e84898..f6610cf7 100644
--- a/src/main/java/com/mesalab/qgw/model/basic/ClickHouseHttpQuery.java
+++ b/src/main/java/com/mesalab/qgw/model/basic/ClickHouseQueryParam.java
@@ -9,8 +9,9 @@ import lombok.Data;
* @Description : ClickHouseHttpQuery
*/
@Data
-public class ClickHouseHttpQuery {
- //查询参数
+@Deprecated
+public class ClickHouseQueryParam {
+ @Deprecated
private String queryParameter;
//响应超时时间
private int socketTimeOut;
diff --git a/src/main/java/com/mesalab/qgw/model/basic/CommonRequestParam.java b/src/main/java/com/mesalab/qgw/model/basic/CommonRequestParam.java
new file mode 100644
index 00000000..11d8055b
--- /dev/null
+++ b/src/main/java/com/mesalab/qgw/model/basic/CommonRequestParam.java
@@ -0,0 +1,57 @@
+package com.mesalab.qgw.model.basic;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.collect.EnumMultiset;
+import com.mesalab.common.enums.ExecutionMode;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
+import com.mesalab.common.enums.OutputMode;
+import com.mesalab.common.exception.CommonErrorCode;
+import com.mesalab.qgw.constant.QGWMessageConst;
+import com.mesalab.qgw.exception.QGWBusinessException;
+import lombok.Data;
+
+import java.io.Serializable;
+import java.util.Arrays;
+
+/**
+ * Common request parameters for all query types, including: SQL and DSL.
+ * @Date 2023/12/6 15:30
+ * @Author wWei
+ */
+@Data
+public class CommonRequestParam implements Serializable {
+ private String id;
+ @JsonProperty("output_mode")
+ private OutputMode outputMode = OutputMode.JSON;
+ @JsonProperty("execution_mode")
+ private ExecutionMode executionMode = ExecutionMode.NORMAL;
+ @JsonProperty("is_dry_run")
+ private boolean isDryRun = false;
+ @JsonProperty("is_saved_query")
+ private boolean isSavedQuery = false;
+ @JsonProperty("timeout")
+ private Integer timeout;
+
+ public void setOutputMode(String outputMode) {
+ OutputMode[] values = OutputMode.values();
+ for (OutputMode value : values) {
+ if(value.getValue().equalsIgnoreCase(outputMode)){
+ outputMode = value.name();
+ break;
+ }
+ }
+ this.outputMode = OutputMode.valueOf(outputMode.toUpperCase());
+ }
+
+ public void setExecutionMode(String executionMode) {
+ this.executionMode = ExecutionMode.valueOf(executionMode.toUpperCase());
+ }
+
+ public void setIsDryRun(Integer isDryRun) {
+ this.isDryRun = isDryRun != 0;
+ }
+
+ public void setIsSavedQuery(Integer isSavedQuery) {
+ this.isSavedQuery = isSavedQuery != 0;
+ }
+} \ No newline at end of file
diff --git a/src/main/java/com/mesalab/qgw/model/basic/DSLProfile.java b/src/main/java/com/mesalab/qgw/model/basic/DSLQueryContext.java
index bad149fc..ce3a7894 100644
--- a/src/main/java/com/mesalab/qgw/model/basic/DSLProfile.java
+++ b/src/main/java/com/mesalab/qgw/model/basic/DSLQueryContext.java
@@ -18,7 +18,7 @@ import java.util.Objects;
@Builder
@NoArgsConstructor
@AllArgsConstructor
-public class DSLProfile implements Serializable {
+public class DSLQueryContext implements Serializable {
private String name;
private String granularity;
@@ -38,6 +38,8 @@ public class DSLProfile implements Serializable {
sql = StrUtil.isNotEmpty(table) ? sql.replace("$table", table) : sql;
+ sql = !CollectionUtils.isEmpty(this.intervals) ? sql.replace("$interval_seconds", Long.toString(getIntervalSeconds(this.intervals))) : sql;
+
if (StrUtil.isEmpty(this.filter) && CollectionUtils.isEmpty(this.intervals)) {
sql = sql.replace("$intervals_and_filter", " 1= 1 ");
} else if (StrUtil.isNotEmpty(this.filter) && !CollectionUtils.isEmpty(this.intervals)) {
@@ -54,6 +56,16 @@ public class DSLProfile implements Serializable {
return sql;
}
+ private long getIntervalSeconds(List<String> intervals) {
+ if (CollectionUtils.isEmpty(intervals) || !intervals.get(0).contains("/")) {
+ throw new IllegalArgumentException("intervals must contain a 'start/end' range separated by '/'");
+ }
+ String[] split = intervals.get(0).split("/");
+ long start = DateUtil.parse(split[0]).getTime() / 1000;
+ long end = DateUtil.parse(split[1]).getTime() / 1000;
+ return end - start;
+ }
+
private String parseIntervals(List<String> intervals, String timeField, String timeFieldType) {
if (CollectionUtils.isEmpty(intervals) || !intervals.get(0).contains("/")) {
return null;
@@ -64,7 +76,7 @@ public class DSLProfile implements Serializable {
StringBuffer whereOfTime = new StringBuffer();
if ("unix_timestamp".equalsIgnoreCase(timeFieldType)) {
return whereOfTime.append(timeField).append(">= ").append(DateUtil.parse(start).getTime() / 1000).append(" AND ").append(timeField).append("< ").append(DateUtil.parse(end).getTime() / 1000).toString();
- } else{
+ } else {
return whereOfTime.append(timeField).append(">= '").append(start).append("' AND ").append(timeField).append("< '").append(end).append("'").toString();
}
}
diff --git a/src/main/java/com/mesalab/qgw/model/basic/DSLQueryRequestParam.java b/src/main/java/com/mesalab/qgw/model/basic/DSLQueryRequestParam.java
new file mode 100644
index 00000000..8dbcad35
--- /dev/null
+++ b/src/main/java/com/mesalab/qgw/model/basic/DSLQueryRequestParam.java
@@ -0,0 +1,47 @@
+package com.mesalab.qgw.model.basic;
+
+import com.fasterxml.jackson.annotation.JsonAnySetter;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.collect.Maps;
+import lombok.AllArgsConstructor;
+import lombok.Builder;
+import lombok.Data;
+import lombok.NoArgsConstructor;
+
+import java.io.Serializable;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * DSL query request parameters submitted by clients; extends the common request parameters.
+ *
+ * @Classname DslQueryRequest
+ * @Date 2023/12/6 15:32
+ * @Author wWei
+ */
+@Data
+@Builder
+@AllArgsConstructor
+@NoArgsConstructor
+public class DSLQueryRequestParam extends CommonRequestParam implements Serializable {
+ @JsonProperty("name")
+ private String name;
+ @JsonProperty("data_source")
+ private String dataSource;
+ @JsonProperty("granularity")
+ private String granularity;
+ @JsonProperty("filter")
+ private String filter;
+ @JsonProperty("order_by")
+ private String orderBy;
+ @JsonProperty("intervals")
+ private List<String> intervals;
+ @JsonProperty("limit")
+ private String limit;
+ private Map<String, Object> customRequestParam = Maps.newHashMap();
+
+ @JsonAnySetter
+ public void setCustomAttributes(String key, Object value) {
+ customRequestParam.put(key, value);
+ }
+}
diff --git a/src/main/java/com/mesalab/qgw/model/basic/DruidIoHttpSource.java b/src/main/java/com/mesalab/qgw/model/basic/DruidIoHttpSource.java
index 7dfc18ac..2230d8c0 100644
--- a/src/main/java/com/mesalab/qgw/model/basic/DruidIoHttpSource.java
+++ b/src/main/java/com/mesalab/qgw/model/basic/DruidIoHttpSource.java
@@ -14,4 +14,5 @@ public class DruidIoHttpSource implements Serializable {
private String url;
private String dbname;
private String skipEmptyBuckets = "true";
+ private Integer maxSubqueryRows;
}
diff --git a/src/main/java/com/mesalab/qgw/model/basic/EngineConfigSource.java b/src/main/java/com/mesalab/qgw/model/basic/EngineConfigSource.java
index 025d8ca4..134e36b2 100644
--- a/src/main/java/com/mesalab/qgw/model/basic/EngineConfigSource.java
+++ b/src/main/java/com/mesalab/qgw/model/basic/EngineConfigSource.java
@@ -1,7 +1,6 @@
package com.mesalab.qgw.model.basic;
-import com.alibaba.nacos.api.config.ConfigType;
-import com.alibaba.nacos.api.config.annotation.NacosConfigurationProperties;
+import com.alibaba.nacos.api.config.annotation.NacosValue;
import lombok.Data;
import org.springframework.stereotype.Component;
@@ -9,10 +8,18 @@ import java.io.Serializable;
@Data
@Component
-@NacosConfigurationProperties(prefix = "engine", dataId = "${nacos.config.data-id}", groupId = "${nacos.config.group}", type = ConfigType.YAML, autoRefreshed = true)
public class EngineConfigSource implements Serializable {
+ @NacosValue(value = "${engine.maxCacheNum}", autoRefreshed = true)
private int maxCacheNum;
+ @NacosValue(value = "${engine.defaultResultNum}", autoRefreshed = true)
private int defaultResultNum;
-
+ @NacosValue(value = "${engine.fieldDiscovery.topK}", autoRefreshed = true)
+ private int fieldDiscoveryTopK;
+ @NacosValue(value = "${engine.packetCombine.delaySeconds}", autoRefreshed = true)
+ private int packetCombineDelaySeconds;
+ @NacosValue(value = "${engine.trafficSpectrum.clientIPAppResultNum}", autoRefreshed = true)
+ private int trafficSpectrumClientIPAppResultNum;
+ @NacosValue(value = "${engine.highLatency.threshold}", autoRefreshed = true)
+ private int highLatencyThreshold;
}
diff --git a/src/main/java/com/mesalab/qgw/model/basic/EntityConfigSource.java b/src/main/java/com/mesalab/qgw/model/basic/EntityConfigSource.java
deleted file mode 100644
index 1c5c2d7e..00000000
--- a/src/main/java/com/mesalab/qgw/model/basic/EntityConfigSource.java
+++ /dev/null
@@ -1,66 +0,0 @@
-package com.mesalab.qgw.model.basic;
-
-import com.alibaba.nacos.api.config.annotation.NacosValue;
-import lombok.Data;
-import org.springframework.stereotype.Component;
-
-/**
- * 实体推荐默认num
- *
- * @Classname EntityConfigSource
- * @Date 2021/11/29 10:02 下午
- * @Author wWei
- */
-@Data
-@Component
-public class EntityConfigSource {
-
- /**
- * 默认活跃客户端IP数量
- */
- @NacosValue(value = "${service.entity.activeClientIP}", autoRefreshed = true)
- private int activeClientIP;
-
- /**
- * 默认活跃服务端IP数量(TCPBySession)
- */
- @NacosValue(value = "${service.entity.topServerIP.TCPBySession}", autoRefreshed = true)
- private int topServerIPByTCPSession;
-
- /**
- * 默认活跃服务端IP数量(TCPByUniqClientIP)
- */
- @NacosValue(value = "${service.entity.topServerIP.TCPByUniqClientIP}", autoRefreshed = true)
- private int topServerIPByTCPUniqClientIP;
-
- /**
- * 默认活跃服务端IP数量(UDPBySession)
- */
- @NacosValue(value = "${service.entity.topServerIP.UDPBySession}", autoRefreshed = true)
- private int topServerIPByUDPBySession;
-
- /**
- * 默认活跃服务端IP数量(UDPByUniqClientIP)
- */
- @NacosValue(value = "${service.entity.topServerIP.UDPByUniqClientIP}", autoRefreshed = true)
- private int topServerIPByUDPUniqClientIP;
-
- /**
- * 默认访问量最高SNI数量
- */
- @NacosValue(value = "${service.entity.topSNI.defaultSize}", autoRefreshed = true)
- private int topSNIDefaultSize;
-
- /**
- * 待推荐SNI数据集
- */
- @NacosValue(value = "${service.entity.topSNI.dataset}", autoRefreshed = true)
- private int topSNIDataset;
-
- /**
- * 待推荐GTP-C
- */
- @NacosValue(value = "${service.entity.gtpc.defaultSize}", autoRefreshed = true)
- private int topGTPCDefaultSize;
-
-}
diff --git a/src/main/java/com/mesalab/qgw/model/basic/HttpResponseResult.java b/src/main/java/com/mesalab/qgw/model/basic/HttpResponseResult.java
new file mode 100644
index 00000000..0f2ad442
--- /dev/null
+++ b/src/main/java/com/mesalab/qgw/model/basic/HttpResponseResult.java
@@ -0,0 +1,22 @@
+package com.mesalab.qgw.model.basic;
+
+import lombok.AllArgsConstructor;
+import lombok.Data;
+import lombok.EqualsAndHashCode;
+import lombok.NoArgsConstructor;
+
+import java.io.Serializable;
+import java.util.Map;
+
+@Data
+@NoArgsConstructor
+@AllArgsConstructor
+@EqualsAndHashCode
+public class HttpResponseResult implements Serializable {
+ private Integer statusCode;
+ private String errorMessage;
+ private Map<String, String> responseHeaders;
+ private String responseBody;
+
+
+}
diff --git a/src/main/java/com/mesalab/qgw/model/basic/QueryCache.java b/src/main/java/com/mesalab/qgw/model/basic/QueryCache.java
new file mode 100644
index 00000000..aed03fd1
--- /dev/null
+++ b/src/main/java/com/mesalab/qgw/model/basic/QueryCache.java
@@ -0,0 +1,40 @@
+package com.mesalab.qgw.model.basic;
+
+import com.google.common.collect.Maps;
+import com.mesalab.common.entity.BaseResult;
+import com.mesalab.services.configuration.JobConfig;
+import lombok.Data;
+
+import java.io.Serializable;
+import java.util.Map;
+
+/**
+ * Cached state and result holder for an asynchronous query job.
+ *
+ * @Classname QueryJobInfo
+ * @Date 2023/12/6 18:20
+ * @Author wWei
+ */
+@Data
+public class QueryCache implements Serializable {
+ private String type;
+ private Long latestQueryTimeMs = System.currentTimeMillis();
+ private BaseResult<Object> baseResult;
+ public QueryCache(String jobId) {
+ this.baseResult = new BaseResult<>();
+ Map<String, Object> job = Maps.newLinkedHashMap();
+ job.put(JobConfig.JOB_ID, jobId);
+ job.put(JobConfig.IS_DONE, false);
+ job.put(JobConfig.DONE_PROGRESS, 0.0);
+ job.put(JobConfig.IS_CANCELED, false);
+ job.put(JobConfig.IS_FAILED, false);
+ job.put(JobConfig.REASON, null);
+ job.put(JobConfig.START_TIME, null);
+ job.put(JobConfig.END_TIME, null);
+ Map<String, Object> links = Maps.newLinkedHashMap();
+ links.put(JobConfig.LINKS_STATUS, "/v1/query/job/" + jobId);
+ links.put(JobConfig.LINKS_RESULT, "/v1/query/job/" + jobId + "/result");
+ job.put(JobConfig.LINKS, links);
+ this.baseResult.setJob(job);
+ }
+}
diff --git a/src/main/java/com/mesalab/qgw/model/basic/QueryProfile.java b/src/main/java/com/mesalab/qgw/model/basic/QueryProfile.java
deleted file mode 100644
index 0c256fbd..00000000
--- a/src/main/java/com/mesalab/qgw/model/basic/QueryProfile.java
+++ /dev/null
@@ -1,42 +0,0 @@
-package com.mesalab.qgw.model.basic;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.geedgenetworks.utils.StringUtil;
-import com.mesalab.common.enums.DBTypeEnum;
-import lombok.*;
-import java.io.Serializable;
-@Data
-@Builder
-@NoArgsConstructor
-@AllArgsConstructor
-public class QueryProfile implements Serializable {
-
- private String query;
- private String option;
- private Boolean sampled;
- private String format;
- private String queryId;
- private String resultId;
- /**
- * @see DBTypeEnum
- */
- private String dbType;
- /**
- * 方言数据库类型,用于SQL格式解析校验
- */
- private String dialectDBType;
- private SQLQuerySource engineQuerySource;
- private SQLQuerySource dbQuerySource;
-
- public String getResultId() {
- return resultId;
- }
-
- public boolean queryParamIsNull() {
- return StringUtil.isBlank(query);
- }
-
- public void setResult_id(String resultId) {
- this.resultId = resultId;
- }
-}
diff --git a/src/main/java/com/mesalab/qgw/model/basic/SQLQueryContext.java b/src/main/java/com/mesalab/qgw/model/basic/SQLQueryContext.java
new file mode 100644
index 00000000..634ff1a6
--- /dev/null
+++ b/src/main/java/com/mesalab/qgw/model/basic/SQLQueryContext.java
@@ -0,0 +1,35 @@
+package com.mesalab.qgw.model.basic;
+
+import com.geedgenetworks.utils.StringUtil;
+import lombok.*;
+
+import java.io.Serializable;
+@Data
+@Builder
+@NoArgsConstructor
+@AllArgsConstructor
+public class SQLQueryContext implements Serializable {
+
+ private String originalSQL;
+ private String option;
+ private Boolean sampled;
+ private String format;
+ private String queryId;
+ private String resultId;
+ private Integer timeout;
+ private String dbEngine;
+ private String sqlDialect; // SQL Dialect,such as: Mysql,Hive, etc. It used syntax and semantic rules to transform SQL query.
+ private SelectStatement federationSelectStatement;
+ private SelectStatement dbSelectStatement;
+ public String getResultId() {
+ return resultId;
+ }
+
+ public boolean originalSQLIsNull() {
+ return StringUtil.isBlank(originalSQL);
+ }
+
+ public void setResultId(String resultId) {
+ this.resultId = resultId;
+ }
+}
diff --git a/src/main/java/com/mesalab/qgw/model/basic/SQLQuerySource.java b/src/main/java/com/mesalab/qgw/model/basic/SQLQuerySource.java
deleted file mode 100644
index d125b47a..00000000
--- a/src/main/java/com/mesalab/qgw/model/basic/SQLQuerySource.java
+++ /dev/null
@@ -1,82 +0,0 @@
-package com.mesalab.qgw.model.basic;
-
-import com.google.common.base.Joiner;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
-import com.mesalab.common.utils.sqlparser.SQLHelper;
-import com.mesalab.qgw.model.basic.udf.UDF;
-import com.geedgenetworks.utils.StringUtil;
-import lombok.Data;
-import net.sf.jsqlparser.expression.Expression;
-import net.sf.jsqlparser.statement.select.*;
-
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-/**
- * SQL Parser properties
- */
-@Data
-public class SQLQuerySource {
-
- private String execSQL;
- private String sqlBody;
- private List<String> tableNames = Lists.newArrayList();
- private String partitionKey;
- private List<Join> joins;
- private FromItem fromItem;
- private List<SelectItem> selectItems;
- private Map<String, String> aliasFields = Maps.newHashMap();
- private Expression whereExpression;
- private GroupByElement groupByElement;
- private List<OrderByElement> orderByElements;
- private Map<String, String> groupDimension = Maps.newHashMap();
- private String limit;
- private boolean isEnableLimit;
-
- private SubSelect subSelect;
- private List<SQLQuerySource> subSqlQuerySources = Lists.newArrayList();
- private Set<UDF> udfSet = Sets.newHashSet();
-
- public boolean isEnableLimit() {
- return !SQLHelper.INVALID_LIMIT_DESC.equalsIgnoreCase(limit);
- }
-
- public String getSelect() {
- return Joiner.on(",").join(this.selectItems);
- }
-
- public String getOrderBy() {
- return Joiner.on(",").join(this.orderByElements);
- }
- public Map<String, String> getGroupDimension() {
- if (groupByElement == null) {
- return null;
- }
- groupBy:
- for (Expression groupByExpression : groupByElement.getGroupByExpressions()) {
-
- String groupBy = groupByExpression.toString();
- if ((groupBy.startsWith("\"") && groupBy.endsWith("\""))
- || ((groupBy.startsWith("`") && groupBy.endsWith("`")))) {
- groupBy = groupBy.substring(1, groupBy.length() - 1);
- }
- if (StringUtil.isNotBlank(aliasFields.get(groupBy))) {
- groupDimension.put(groupBy, aliasFields.get(groupBy));
- } else {
- //非别名,确定实际的别名
- for (String key : aliasFields.keySet()) {
- if (aliasFields.get(key).equalsIgnoreCase(groupBy)) {
- groupDimension.put(key, groupBy);
- continue groupBy;
- }
- }
- groupDimension.put(groupBy, groupBy);
- }
-
- }
- return groupDimension;
- }
-}
diff --git a/src/main/java/com/mesalab/qgw/model/basic/SelectStatement.java b/src/main/java/com/mesalab/qgw/model/basic/SelectStatement.java
new file mode 100644
index 00000000..c3d1c82f
--- /dev/null
+++ b/src/main/java/com/mesalab/qgw/model/basic/SelectStatement.java
@@ -0,0 +1,103 @@
+package com.mesalab.qgw.model.basic;
+
+import com.google.common.base.Joiner;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
+import com.mesalab.common.utils.sqlparser.SQLFunctionUtil;
+import com.mesalab.common.utils.sqlparser.SQLHelper;
+import com.mesalab.qgw.model.basic.udf.UDF;
+import com.geedgenetworks.utils.StringUtil;
+import lombok.Data;
+import net.sf.jsqlparser.expression.Expression;
+import net.sf.jsqlparser.expression.Function;
+import net.sf.jsqlparser.expression.operators.relational.ExpressionList;
+import net.sf.jsqlparser.statement.select.*;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * A parsed SQL statement
+ */
+@Data
+public class SelectStatement {
+ private String execSQL;
+ private String sqlBody;
+ private List<String> tableNames = Lists.newArrayList();
+ private String partitionKey;
+ private List<Join> joins;
+ private FromItem fromItem;
+ private List<SelectItem> selectItems;
+ private Map<String, String> aliasFields = Maps.newHashMap();
+ private Expression whereExpression;
+ private GroupByElement groupByElement;
+ private List<OrderByElement> orderByElements;
+ private Map<String, String> groupDimension = Maps.newHashMap();
+ private String limit;
+ private boolean isEnableLimit;
+
+ private SubSelect subSelect;
+ private List<SelectStatement> subSqlQuerySources = Lists.newArrayList();
+ private Set<UDF> udfSet = Sets.newHashSet();
+
+ public boolean isEnableLimit() {
+ return !SQLHelper.INVALID_LIMIT_DESC.equalsIgnoreCase(limit);
+ }
+
+ public String getSelect() {
+ return Joiner.on(",").join(this.selectItems);
+ }
+
+ public String getOrderBy() {
+ return Joiner.on(",").join(this.orderByElements);
+ }
+
+ public Map<String, String> getGroupDimension() {
+ if (groupByElement == null) {
+ return Maps.newHashMap();
+ }
+ ExpressionList groupByExpressionList = groupByElement.getGroupByExpressionList();
+
+ groupBy:
+ for (Expression expression : groupByExpressionList.getExpressions()) {
+ String groupBy = expression.toString();
+ groupBy = SQLHelper.removeQuotesAndBackticks(groupBy);
+ if (expression instanceof Function && SQLFunctionUtil.ROLLUP.equalsIgnoreCase(((Function) expression).getName())) {
+ List<Expression> funParamList = ((Function) expression).getParameters().getExpressions();
+ paramGroupBy:
+ for (Expression param : funParamList) {
+ String paramGroupBy = param.toString();
+ paramGroupBy = SQLHelper.removeQuotesAndBackticks(paramGroupBy);
+ if (StringUtil.isNotBlank(aliasFields.get(paramGroupBy))) {
+ groupDimension.put(paramGroupBy, aliasFields.get(paramGroupBy));
+ } else {
+ // Not an alias itself; resolve the real alias it maps to
+ for (String key : aliasFields.keySet()) {
+ if (aliasFields.get(key).equalsIgnoreCase(paramGroupBy)) {
+ groupDimension.put(key, paramGroupBy);
+ continue paramGroupBy;
+ }
+ }
+ groupDimension.put(paramGroupBy, paramGroupBy);
+ }
+ }
+ } else {
+ if (StringUtil.isNotBlank(aliasFields.get(groupBy))) {
+ groupDimension.put(groupBy, aliasFields.get(groupBy));
+ } else {
+ // Not an alias itself; resolve the real alias it maps to
+ for (String key : aliasFields.keySet()) {
+ if (aliasFields.get(key).equalsIgnoreCase(groupBy)) {
+ groupDimension.put(key, groupBy);
+ continue groupBy;
+ }
+ }
+ groupDimension.put(groupBy, groupBy);
+ }
+ }
+ }
+ return groupDimension;
+ }
+}
diff --git a/src/main/java/com/mesalab/qgw/model/basic/SqlQueryRequestParam.java b/src/main/java/com/mesalab/qgw/model/basic/SqlQueryRequestParam.java
new file mode 100644
index 00000000..a58063be
--- /dev/null
+++ b/src/main/java/com/mesalab/qgw/model/basic/SqlQueryRequestParam.java
@@ -0,0 +1,31 @@
+package com.mesalab.qgw.model.basic;
+
+import com.fasterxml.jackson.annotation.JsonAnySetter;
+import com.google.common.collect.Maps;
+import lombok.AllArgsConstructor;
+import lombok.Builder;
+import lombok.Data;
+import lombok.NoArgsConstructor;
+
+import java.io.Serializable;
+import java.util.Map;
+
+/**
+ *
+ * HTTP Rest API request parameters for SQL query.
+ * @Date 2023/12/6 15:32
+ * @Author wWei
+ */
+@Data
+@Builder
+@AllArgsConstructor
+@NoArgsConstructor
+public class SqlQueryRequestParam extends CommonRequestParam implements Serializable {
+ private String statement;
+ private Map<String, Object> customRequestParam = Maps.newHashMap();
+
+ @JsonAnySetter
+ public void setCustomAttributes(String key, Object value) {
+ customRequestParam.put(key, value);
+ }
+}
diff --git a/src/main/java/com/mesalab/qgw/model/basic/udf/IP_TO_ASN.java b/src/main/java/com/mesalab/qgw/model/basic/udf/IP_TO_ASN.java
index dc2c05e9..063d6a00 100644
--- a/src/main/java/com/mesalab/qgw/model/basic/udf/IP_TO_ASN.java
+++ b/src/main/java/com/mesalab/qgw/model/basic/udf/IP_TO_ASN.java
@@ -3,7 +3,7 @@ package com.mesalab.qgw.model.basic.udf;
import com.google.common.collect.Lists;
import com.mesalab.common.entity.BaseResult;
import com.mesalab.common.utils.IPUtil;
-import com.mesalab.qgw.model.basic.SQLQuerySource;
+import com.mesalab.qgw.model.basic.SelectStatement;
import com.geedgenetworks.utils.StringUtil;
import lombok.AllArgsConstructor;
import lombok.Data;
@@ -28,15 +28,15 @@ public class IP_TO_ASN implements UDF {
}
@Override
- public Object execute(SQLQuerySource sqlQuerySource, BaseResult baseResult) {
+ public Object execute(SelectStatement selectStatement, boolean isDryRun, BaseResult baseResult) {
List<Object> results = (List<Object>)baseResult.getData();
List<Map<String, Object>> targetResult = Lists.newArrayList();
String addressLabel = StringUtil.EMPTY;
- for (String alias : sqlQuerySource.getAliasFields().keySet()) {
- if (sqlQuerySource.getAliasFields().get(alias).contains(address) &&
- sqlQuerySource.getAliasFields().get(alias).contains(name) ) {
+ for (String alias : selectStatement.getAliasFields().keySet()) {
+ if (selectStatement.getAliasFields().get(alias).contains(address) &&
+ selectStatement.getAliasFields().get(alias).contains(name) ) {
addressLabel = alias;
break;
}
diff --git a/src/main/java/com/mesalab/qgw/model/basic/udf/IP_TO_ASN_DETAIL.java b/src/main/java/com/mesalab/qgw/model/basic/udf/IP_TO_ASN_DETAIL.java
index 7cfd4f48..f98fa450 100644
--- a/src/main/java/com/mesalab/qgw/model/basic/udf/IP_TO_ASN_DETAIL.java
+++ b/src/main/java/com/mesalab/qgw/model/basic/udf/IP_TO_ASN_DETAIL.java
@@ -3,7 +3,7 @@ package com.mesalab.qgw.model.basic.udf;
import com.google.common.collect.Lists;
import com.mesalab.common.entity.BaseResult;
import com.mesalab.common.utils.IPUtil;
-import com.mesalab.qgw.model.basic.SQLQuerySource;
+import com.mesalab.qgw.model.basic.SelectStatement;
import com.geedgenetworks.utils.StringUtil;
import lombok.AllArgsConstructor;
import lombok.Data;
@@ -28,15 +28,15 @@ public class IP_TO_ASN_DETAIL implements UDF {
}
@Override
- public Object execute(SQLQuerySource sqlQuerySource, BaseResult baseResult) {
+ public Object execute(SelectStatement selectStatement, boolean isDryRun, BaseResult baseResult) {
List<Object> results = (List<Object>)baseResult.getData();
List<Map<String, Object>> targetResult = Lists.newArrayList();
String addressLabel = StringUtil.EMPTY;
- for (String alias : sqlQuerySource.getAliasFields().keySet()) {
- if (sqlQuerySource.getAliasFields().get(alias).contains(address) &&
- sqlQuerySource.getAliasFields().get(alias).contains(name) ) {
+ for (String alias : selectStatement.getAliasFields().keySet()) {
+ if (selectStatement.getAliasFields().get(alias).contains(address) &&
+ selectStatement.getAliasFields().get(alias).contains(name) ) {
addressLabel = alias;
break;
}
diff --git a/src/main/java/com/mesalab/qgw/model/basic/udf/IP_TO_ASN_ORG.java b/src/main/java/com/mesalab/qgw/model/basic/udf/IP_TO_ASN_ORG.java
index 442bd417..9d55afbc 100644
--- a/src/main/java/com/mesalab/qgw/model/basic/udf/IP_TO_ASN_ORG.java
+++ b/src/main/java/com/mesalab/qgw/model/basic/udf/IP_TO_ASN_ORG.java
@@ -3,7 +3,7 @@ package com.mesalab.qgw.model.basic.udf;
import com.google.common.collect.Lists;
import com.mesalab.common.entity.BaseResult;
import com.mesalab.common.utils.IPUtil;
-import com.mesalab.qgw.model.basic.SQLQuerySource;
+import com.mesalab.qgw.model.basic.SelectStatement;
import com.geedgenetworks.utils.StringUtil;
import lombok.AllArgsConstructor;
import lombok.Data;
@@ -28,15 +28,15 @@ public class IP_TO_ASN_ORG implements UDF {
}
@Override
- public Object execute(SQLQuerySource sqlQuerySource, BaseResult baseResult) {
+ public Object execute(SelectStatement selectStatement, boolean isDryRun, BaseResult baseResult) {
List<Object> results = (List<Object>)baseResult.getData();
List<Map<String, Object>> targetResult = Lists.newArrayList();
String addressLabel = StringUtil.EMPTY;
- for (String alias : sqlQuerySource.getAliasFields().keySet()) {
- if (sqlQuerySource.getAliasFields().get(alias).contains(address) &&
- sqlQuerySource.getAliasFields().get(alias).contains(name) ) {
+ for (String alias : selectStatement.getAliasFields().keySet()) {
+ if (selectStatement.getAliasFields().get(alias).contains(address) &&
+ selectStatement.getAliasFields().get(alias).contains(name) ) {
addressLabel = alias;
break;
}
diff --git a/src/main/java/com/mesalab/qgw/model/basic/udf/IP_TO_CITY.java b/src/main/java/com/mesalab/qgw/model/basic/udf/IP_TO_CITY.java
index f0678427..8bbc0e0a 100644
--- a/src/main/java/com/mesalab/qgw/model/basic/udf/IP_TO_CITY.java
+++ b/src/main/java/com/mesalab/qgw/model/basic/udf/IP_TO_CITY.java
@@ -3,7 +3,7 @@ package com.mesalab.qgw.model.basic.udf;
import com.google.common.collect.Lists;
import com.mesalab.common.entity.BaseResult;
import com.mesalab.common.utils.IPUtil;
-import com.mesalab.qgw.model.basic.SQLQuerySource;
+import com.mesalab.qgw.model.basic.SelectStatement;
import com.geedgenetworks.utils.StringUtil;
import lombok.AllArgsConstructor;
import lombok.Data;
@@ -28,14 +28,14 @@ public class IP_TO_CITY implements UDF {
}
@Override
- public Object execute(SQLQuerySource sqlQuerySource, BaseResult baseResult) {
+ public Object execute(SelectStatement selectStatement, boolean isDryRun, BaseResult baseResult) {
List<Object> results = (List<Object>)baseResult.getData();
List<Map<String, Object>> targetResult = Lists.newArrayList();
String addressLabel = StringUtil.EMPTY;
- for (String alias : sqlQuerySource.getAliasFields().keySet()) {
- if (sqlQuerySource.getAliasFields().get(alias).contains(address) &&
- sqlQuerySource.getAliasFields().get(alias).contains(name) ) {
+ for (String alias : selectStatement.getAliasFields().keySet()) {
+ if (selectStatement.getAliasFields().get(alias).contains(address) &&
+ selectStatement.getAliasFields().get(alias).contains(name) ) {
addressLabel = alias;
break;
}
diff --git a/src/main/java/com/mesalab/qgw/model/basic/udf/IP_TO_COUNTRY.java b/src/main/java/com/mesalab/qgw/model/basic/udf/IP_TO_COUNTRY.java
index 042ac858..7b919a97 100644
--- a/src/main/java/com/mesalab/qgw/model/basic/udf/IP_TO_COUNTRY.java
+++ b/src/main/java/com/mesalab/qgw/model/basic/udf/IP_TO_COUNTRY.java
@@ -3,7 +3,7 @@ package com.mesalab.qgw.model.basic.udf;
import com.google.common.collect.Lists;
import com.mesalab.common.entity.BaseResult;
import com.mesalab.common.utils.IPUtil;
-import com.mesalab.qgw.model.basic.SQLQuerySource;
+import com.mesalab.qgw.model.basic.SelectStatement;
import com.geedgenetworks.utils.StringUtil;
import lombok.AllArgsConstructor;
import lombok.Data;
@@ -28,13 +28,13 @@ public class IP_TO_COUNTRY implements UDF {
}
@Override
- public Object execute(SQLQuerySource sqlQuerySource, BaseResult baseResult) {
+ public Object execute(SelectStatement selectStatement, boolean isDryRun, BaseResult baseResult) {
List<Object> results = (List<Object>)baseResult.getData();
List<Map<String, Object>> targetResult = Lists.newArrayList();
String addressLabel = StringUtil.EMPTY;
- for (String alias : sqlQuerySource.getAliasFields().keySet()) {
- if (sqlQuerySource.getAliasFields().get(alias).contains(address) &&
- sqlQuerySource.getAliasFields().get(alias).contains(name) ) {
+ for (String alias : selectStatement.getAliasFields().keySet()) {
+ if (selectStatement.getAliasFields().get(alias).contains(address) &&
+ selectStatement.getAliasFields().get(alias).contains(name) ) {
addressLabel = alias;
break;
}
diff --git a/src/main/java/com/mesalab/qgw/model/basic/udf/IP_TO_GEO.java b/src/main/java/com/mesalab/qgw/model/basic/udf/IP_TO_GEO.java
index caabeac2..7f5107fb 100644
--- a/src/main/java/com/mesalab/qgw/model/basic/udf/IP_TO_GEO.java
+++ b/src/main/java/com/mesalab/qgw/model/basic/udf/IP_TO_GEO.java
@@ -3,7 +3,7 @@ package com.mesalab.qgw.model.basic.udf;
import com.google.common.collect.Lists;
import com.mesalab.common.entity.BaseResult;
import com.mesalab.common.utils.IPUtil;
-import com.mesalab.qgw.model.basic.SQLQuerySource;
+import com.mesalab.qgw.model.basic.SelectStatement;
import com.geedgenetworks.utils.StringUtil;
import lombok.AllArgsConstructor;
import lombok.Data;
@@ -27,15 +27,15 @@ public class IP_TO_GEO implements UDF {
}
@Override
- public Object execute(SQLQuerySource sqlQuerySource, BaseResult baseResult) {
+ public Object execute(SelectStatement selectStatement, boolean isDryRun, BaseResult baseResult) {
List<Object> results = (List<Object>)baseResult.getData();
List<Map<String, Object>> targetResult = Lists.newArrayList();
String addressLabel = StringUtil.EMPTY;
- for (String alias : sqlQuerySource.getAliasFields().keySet()) {
- if (sqlQuerySource.getAliasFields().get(alias).contains(address) &&
- sqlQuerySource.getAliasFields().get(alias).contains(name) ) {
+ for (String alias : selectStatement.getAliasFields().keySet()) {
+ if (selectStatement.getAliasFields().get(alias).contains(address) &&
+ selectStatement.getAliasFields().get(alias).contains(name) ) {
addressLabel = alias;
break;
}
diff --git a/src/main/java/com/mesalab/qgw/model/basic/udf/IP_TO_ISP.java b/src/main/java/com/mesalab/qgw/model/basic/udf/IP_TO_ISP.java
index 59c28e61..e73d3f98 100644
--- a/src/main/java/com/mesalab/qgw/model/basic/udf/IP_TO_ISP.java
+++ b/src/main/java/com/mesalab/qgw/model/basic/udf/IP_TO_ISP.java
@@ -3,7 +3,7 @@ package com.mesalab.qgw.model.basic.udf;
import com.google.common.collect.Lists;
import com.mesalab.common.entity.BaseResult;
import com.mesalab.common.utils.IPUtil;
-import com.mesalab.qgw.model.basic.SQLQuerySource;
+import com.mesalab.qgw.model.basic.SelectStatement;
import com.geedgenetworks.utils.StringUtil;
import lombok.AllArgsConstructor;
import lombok.Data;
@@ -28,15 +28,15 @@ public class IP_TO_ISP implements UDF {
}
@Override
- public Object execute(SQLQuerySource sqlQuerySource, BaseResult baseResult) {
+ public Object execute(SelectStatement selectStatement, boolean isDryRun, BaseResult baseResult) {
List<Object> results = (List<Object>)baseResult.getData();
List<Map<String, Object>> targetResult = Lists.newArrayList();
String addressLabel = StringUtil.EMPTY;
- for (String alias : sqlQuerySource.getAliasFields().keySet()) {
- if (sqlQuerySource.getAliasFields().get(alias).contains(address) &&
- sqlQuerySource.getAliasFields().get(alias).contains(name) ) {
+ for (String alias : selectStatement.getAliasFields().keySet()) {
+ if (selectStatement.getAliasFields().get(alias).contains(address) &&
+ selectStatement.getAliasFields().get(alias).contains(name) ) {
addressLabel = alias;
break;
}
diff --git a/src/main/java/com/mesalab/qgw/model/basic/udf/ROLLUP.java b/src/main/java/com/mesalab/qgw/model/basic/udf/ROLLUP.java
new file mode 100644
index 00000000..23ec8e84
--- /dev/null
+++ b/src/main/java/com/mesalab/qgw/model/basic/udf/ROLLUP.java
@@ -0,0 +1,114 @@
+package com.mesalab.qgw.model.basic.udf;
+
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
+import com.mesalab.common.entity.BaseResult;
+import com.mesalab.common.entity.DataTypeMapping;
+import com.mesalab.common.utils.sqlparser.SQLHelper;
+import com.mesalab.qgw.constant.MetaConst;
+import com.mesalab.qgw.model.basic.SelectStatement;
+import lombok.AllArgsConstructor;
+import lombok.Data;
+import lombok.NoArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+
+import java.util.*;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+@Slf4j
+@Data
+@AllArgsConstructor
+@NoArgsConstructor
+public class ROLLUP implements UDF {
+ private String name;
+ private String regularDimension;
+ private String drillDownDimension;
+
+
+ public ROLLUP(UDFElements udfElements) {
+ this.name = udfElements.getName();
+ this.regularDimension = SQLHelper.removeQuotesAndBackticks(udfElements.getParams().get(0).toString());
+ this.drillDownDimension = SQLHelper.removeQuotesAndBackticks(udfElements.getParams().get(1).toString());
+ }
+
+ @Override
+ public Object execute(SelectStatement selectStatement, boolean isDryRun, BaseResult baseResult) {
+ List<Map<String, String>> meta = (List<Map<String, String>>) baseResult.getMeta();
+ for (Map<String, String> stringStringMap : meta) {
+ if (stringStringMap.get(MetaConst.META_CATEGORY).equals(MetaConst.META_CATEGORY_DIMENSION)) {
+ stringStringMap.put(MetaConst.META_TYPE, DataTypeMapping.STRING);
+ }
+ }
+ List<Map<String, Object>> data = (List<Map<String, Object>>) baseResult.getData();
+
+ // TODO: for dry-run sample data, subtotal rows still need to be supplemented
+ if (isDryRun) {
+ List<Map<String, Object>> newData = new ArrayList<>();
+ newData.add(Maps.newLinkedHashMap());
+ for (Map<String, Object> element : data) {
+ Map<String, Object> newElement = new LinkedHashMap<>(element);
+ newElement.forEach((k, v) -> {
+ if (drillDownDimension.equals(k) || drillDownDimension.equals(selectStatement.getAliasFields().get(k))) {
+ newElement.put(k, null);
+ }
+ });
+ newData.add(newElement);
+ newData.add(element);
+ }
+ data = newData;
+ }
+ // TODO: iterate over data, mark rollup result rows as "Subtotal", and drop the grand-total row
+ Iterator<Map<String, Object>> iterator = data.iterator();
+ HashSet<Object> drillDownDimensionSets = Sets.newHashSet();
+ AtomicBoolean hasRemoveRegularDimension = new AtomicBoolean(false);
+
+ while (iterator.hasNext()) {
+ Map<String, Object> next = iterator.next();
+ if (!hasRemoveRegularDimension.get()) {
+ // TODO: remove the grand-total row
+ if (meta.stream().filter(o -> MetaConst.META_CATEGORY_DIMENSION.equalsIgnoreCase(o.get(MetaConst.META_CATEGORY))).allMatch(o -> getRollupRowsDefaultValues().contains(next.get(o.get(MetaConst.META_NAME))))) {
+ iterator.remove();
+ hasRemoveRegularDimension.set(true);
+ continue;
+ }
+ }
+ next.forEach((k, v) -> {
+ if (drillDownDimension.equals(k) || drillDownDimension.equals(selectStatement.getAliasFields().get(k))) {
+ Object regularDimensionValue = next.get(regularDimension);
+ if (!next.containsKey(regularDimension)) {
+ Set<String> aliasSet = selectStatement.getAliasFields().keySet();
+ for (String alias : aliasSet) {
+ if (selectStatement.getAliasFields().get(alias).equals(regularDimension)) {
+ regularDimensionValue = next.get(alias);
+ break;
+ }
+ }
+ }
+ if (getRollupRowsDefaultValues().contains(next.get(k)) && (!drillDownDimensionSets.contains(regularDimensionValue))) {
+ next.put(k, "Subtotal");
+ drillDownDimensionSets.add(regularDimensionValue);
+ }
+ }
+ if (meta.stream().anyMatch(o -> o.get(MetaConst.META_NAME).equals(k)
+ && MetaConst.META_CATEGORY_DIMENSION.equals(o.get(MetaConst.META_CATEGORY)))
+ && next.get(k) != null
+ && !(next.get(k) instanceof List)) {
+ next.put(k, String.valueOf(next.get(k)));
+ }
+ });
+ }
+ return data;
+ }
+
+ private static List<Object> getRollupRowsDefaultValues() {
+ List<Object> rollupValues = new ArrayList<>();
+ rollupValues.add("");
+ rollupValues.add(0);
+ rollupValues.add(null);
+ rollupValues.add(Lists.newArrayList());
+ rollupValues.add("1970-01-01 00:00:00");
+ rollupValues.add("1970-01-01 00:00:00.000");
+ return rollupValues;
+ }
+}
diff --git a/src/main/java/com/mesalab/qgw/model/basic/udf/TIME_FLOOR_WITH_FILL.java b/src/main/java/com/mesalab/qgw/model/basic/udf/TIME_FLOOR_WITH_FILL.java
index 43eb0015..3a144420 100644
--- a/src/main/java/com/mesalab/qgw/model/basic/udf/TIME_FLOOR_WITH_FILL.java
+++ b/src/main/java/com/mesalab/qgw/model/basic/udf/TIME_FLOOR_WITH_FILL.java
@@ -15,8 +15,8 @@ import com.mesalab.common.utils.sqlparser.SQLFunctionUtil;
import com.mesalab.common.utils.SpringContextUtil;
import com.mesalab.common.utils.sqlparser.SQLHelper;
import com.mesalab.qgw.model.basic.EngineConfigSource;
-import com.mesalab.qgw.model.basic.SQLQuerySource;
-import com.mesalab.qgw.service.MetadataService;
+import com.mesalab.qgw.model.basic.SelectStatement;
+import com.mesalab.qgw.service.DatabaseService;
import com.geedgenetworks.utils.DateUtils;
import com.geedgenetworks.utils.StringUtil;
import lombok.AllArgsConstructor;
@@ -39,7 +39,7 @@ import java.util.*;
@NoArgsConstructor
public class TIME_FLOOR_WITH_FILL implements UDF{
private static final Log log = LogFactory.get();
- private MetadataService metadataService = (MetadataService) SpringContextUtil.getBean("metadataService");
+ private DatabaseService databaseService = (DatabaseService) SpringContextUtil.getBean("databaseService");
private EngineConfigSource engineConfigSource = (EngineConfigSource) SpringContextUtil.getBean("engineConfigSource");
private String name;
private String timestamp;
@@ -90,13 +90,16 @@ public class TIME_FLOOR_WITH_FILL implements UDF{
@Override
- public Object execute(SQLQuerySource sqlQuerySource, BaseResult baseResult) {
+ public Object execute(SelectStatement selectStatement, boolean isDryRun, BaseResult baseResult) {
List<Object> results = (List<Object>)baseResult.getData();
+ if(isDryRun) {
+ return results;
+ }
List<Map<String, Object>> targetResult = Lists.newArrayList();
List<String> groupLabels = Lists.newArrayList();
String timeGranLabel = null;
try {
- Map<String, String> groupDimension = sqlQuerySource.getGroupDimension();
+ Map<String, String> groupDimension = selectStatement.getGroupDimension();
if (results.isEmpty() || StringUtil.isBlank(fill) || StringUtil.isEmpty(groupDimension)) {
return results;
}
@@ -125,7 +128,7 @@ public class TIME_FLOOR_WITH_FILL implements UDF{
return results;
}
- WhereTimeRange whereTimeRange = getWhereTimeRange(sqlQuerySource);
+ WhereTimeRange whereTimeRange = getWhereTimeRange(selectStatement);
Date benchmarkDate = getBenchmarkDate(resultsFirst, timeGranLabel);
targetResult = fillResultBasedOnTimeSeries(results, groupLabels, timeGranLabel);
@@ -168,7 +171,7 @@ public class TIME_FLOOR_WITH_FILL implements UDF{
groupLabels, timeGranLabel);
if (StringUtil.isNotEmpty(dateRangeList)) {
for (String key : targetTimeSeriesMap.keySet()) {
- targetTimeSeriesMap.put(key, ConvertUtil.completeTimeseries((TreeMap<String, Map<String, Object>>) targetTimeSeriesMap.get(key), dateRangeList, fill.toString()));
+ targetTimeSeriesMap.put(key, ConvertUtil.completeTimeseries((TreeMap<String, Map<String, Object>>) targetTimeSeriesMap.get(key), dateRangeList, fill));
}
}
targetResult = ConvertUtil.convertTimeSeriesToList(targetTimeSeriesMap, groupLabels, timeGranLabel);
@@ -176,13 +179,13 @@ public class TIME_FLOOR_WITH_FILL implements UDF{
log.error("Parser function : TIME_FLOOR_WITH_FILL error :{} ", e.toString());
return results;
} finally {
- if (StringUtil.isNotBlank(sqlQuerySource.getLimit())
- && !sqlQuerySource.getLimit().equalsIgnoreCase(SQLHelper.INVALID_LIMIT_DESC)
- && targetResult.size() > Integer.parseInt(sqlQuerySource.getLimit())) {
- targetResult = targetResult.subList(0, Integer.parseInt(sqlQuerySource.getLimit()));
+ if (StringUtil.isNotBlank(selectStatement.getLimit())
+ && !selectStatement.getLimit().equalsIgnoreCase(SQLHelper.INVALID_LIMIT_DESC)
+ && targetResult.size() > Integer.parseInt(selectStatement.getLimit())) {
+ targetResult = targetResult.subList(0, Integer.parseInt(selectStatement.getLimit()));
}
}
- sortByTime(sqlQuerySource, targetResult, timeGranLabel);
+ sortByTime(selectStatement, targetResult, timeGranLabel);
List<String> objects = Lists.newArrayList();
objects.add(timeGranLabel);
objects.addAll(groupLabels);
@@ -231,7 +234,7 @@ public class TIME_FLOOR_WITH_FILL implements UDF{
});
}
- private void sortByTime(SQLQuerySource sqlQuerySource, List<Map<String, Object>> targetResult, String timeGranLabel) {
+ private void sortByTime(SelectStatement sqlQuerySource, List<Map<String, Object>> targetResult, String timeGranLabel) {
if(StringUtil.isEmpty(sqlQuerySource.getOrderByElements())){
return;
}
@@ -302,8 +305,8 @@ public class TIME_FLOOR_WITH_FILL implements UDF{
* @param sqlQuerySource
* @return
*/
- private WhereTimeRange getWhereTimeRange(SQLQuerySource sqlQuerySource) {
- String dbType = metadataService.getDBTypeByTableName(sqlQuerySource.getTableNames().get(0));
+ private WhereTimeRange getWhereTimeRange(SelectStatement sqlQuerySource) {
+ String dbType = databaseService.getDBEngineByTableName(sqlQuerySource.getTableNames().get(0));
WhereTimeRange whereTimeRange = new WhereTimeRange();
List<FillParam> outToInnerList = getFillParams(sqlQuerySource);
for (FillParam fillParam : outToInnerList) {
@@ -315,9 +318,9 @@ public class TIME_FLOOR_WITH_FILL implements UDF{
return whereTimeRange;
}
- private List<FillParam> getFillParams(SQLQuerySource sqlQuerySource) {
+ private List<FillParam> getFillParams(SelectStatement sqlQuerySource) {
List<FillParam> result = Lists.newArrayList();
- SQLQuerySource currentSource = sqlQuerySource;
+ SelectStatement currentSource = sqlQuerySource;
boolean isOriginalDataSource = false;
while (!isOriginalDataSource) {
FillParam currentParam = new FillParam();
diff --git a/src/main/java/com/mesalab/qgw/model/basic/udf/UDF.java b/src/main/java/com/mesalab/qgw/model/basic/udf/UDF.java
index 6af3dd10..052f8a70 100644
--- a/src/main/java/com/mesalab/qgw/model/basic/udf/UDF.java
+++ b/src/main/java/com/mesalab/qgw/model/basic/udf/UDF.java
@@ -1,9 +1,9 @@
package com.mesalab.qgw.model.basic.udf;
import com.mesalab.common.entity.BaseResult;
-import com.mesalab.qgw.model.basic.SQLQuerySource;
+import com.mesalab.qgw.model.basic.SelectStatement;
public interface UDF {
- Object execute(SQLQuerySource sqlQuerySource, BaseResult baseResult);
+ Object execute(SelectStatement selectStatement, boolean isDryRun, BaseResult baseResult);
}
diff --git a/src/main/java/com/mesalab/network/model/protocol/ProtocolTree.java b/src/main/java/com/mesalab/qgw/model/dsl/LiveChartProtocol.java
index 27657305..43122b1d 100644
--- a/src/main/java/com/mesalab/network/model/protocol/ProtocolTree.java
+++ b/src/main/java/com/mesalab/qgw/model/dsl/LiveChartProtocol.java
@@ -1,40 +1,52 @@
-package com.mesalab.network.model.protocol;
+package com.mesalab.qgw.model.dsl;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
-import com.mesalab.network.common.Constants;
+import com.mesalab.qgw.constant.dsl.LiveChartConstants;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
+import java.io.Serializable;
import java.util.List;
import java.util.Map;
import java.util.Objects;
+/**
+ * @author wangwei
+ */
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
-public class ProtocolTree {
+@JsonPropertyOrder({"id", "name", "parentId", "childrens", "metrics", "sentBytes", "receivedBytes", "totalSentBytes", "totalReceivedBytes"})
+public class LiveChartProtocol implements Serializable {
private String id;
private String name;
+ @JsonProperty("parent_id")
private String parentId;
- private List<ProtocolTree> childrens = Lists.newArrayList();
+ private List<LiveChartProtocol> childrens = Lists.newArrayList();
private Map<String, Object> metrics = Maps.newLinkedHashMap();
+ @JsonProperty("sent_bytes")
private long sentBytes;
+ @JsonProperty("received_bytes")
private long receivedBytes;
+ @JsonProperty("total_sent_bytes")
private long totalSentBytes;
+ @JsonProperty("total_received_bytes")
private long totalReceivedBytes;
- public ProtocolTree(String id, String name, String parentId) {
+ public LiveChartProtocol(String id, String name, String parentId) {
this.id = id;
this.name = name;
this.parentId = parentId;
}
- public ProtocolTree(String id, String name, String parentId, long sentBytes, long receivedBytes) {
+ public LiveChartProtocol(String id, String name, String parentId, long sentBytes, long receivedBytes) {
this.id = id;
this.name = name;
this.parentId = parentId;
@@ -48,8 +60,8 @@ public class ProtocolTree {
public String getParentId() {
- return id.lastIndexOf(Constants.ENCAPSULATION_PATH_SEPARATOR) > 0 ?
- id.substring(0, id.lastIndexOf(Constants.ENCAPSULATION_PATH_SEPARATOR)) : null;
+ return id.lastIndexOf(LiveChartConstants.ENCAPSULATION_PATH_SEPARATOR) > 0 ?
+ id.substring(0, id.lastIndexOf(LiveChartConstants.ENCAPSULATION_PATH_SEPARATOR)) : null;
}
public void setParentId(String parentId) {
@@ -60,7 +72,7 @@ public class ProtocolTree {
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
- ProtocolTree that = (ProtocolTree) o;
+ LiveChartProtocol that = (LiveChartProtocol) o;
return Objects.equals(id, that.id);
}
diff --git a/src/main/java/com/mesalab/qgw/monitor/ArangoHealthIndicator.java b/src/main/java/com/mesalab/qgw/monitor/ArangoHealthIndicator.java
index 6b769bdc..1b373112 100644
--- a/src/main/java/com/mesalab/qgw/monitor/ArangoHealthIndicator.java
+++ b/src/main/java/com/mesalab/qgw/monitor/ArangoHealthIndicator.java
@@ -57,6 +57,7 @@ public class ArangoHealthIndicator extends AbstractHealthIndicator {
StringUtil.EMPTY, new BasicHeader(HttpHeaders.AUTHORIZATION, String.valueOf(jwt)));
Map map = JSON.parseObject(result, Map.class);
if (StringUtil.isEmpty(map) || StringUtil.isEmpty(map.get("version"))) {
+ JwtCache.remove(Constant.ARANGO_CACHE_JWT);
builder.down()
.withDetail("app", APP)
.withDetail("message", "连接Arango 失败: " + result);
@@ -64,9 +65,11 @@ public class ArangoHealthIndicator extends AbstractHealthIndicator {
builder.up()
.withDetail("app", APP)
.withDetail("url", arangoConfig.getServer())
- .withDetail("message", "ok");
+ .withDetail("message", "ok")
+ .withDetail("version", map.get("version"));
}
} catch (RuntimeException e) {
+ JwtCache.remove(Constant.ARANGO_CACHE_JWT);
builder.down()
.withDetail("app", APP)
.withDetail("message", "连接Arango 失败");
diff --git a/src/main/java/com/mesalab/qgw/monitor/ClickHouseHealthIndicator.java b/src/main/java/com/mesalab/qgw/monitor/ClickHouseHealthIndicator.java
index 1b652f8b..758fb86d 100644
--- a/src/main/java/com/mesalab/qgw/monitor/ClickHouseHealthIndicator.java
+++ b/src/main/java/com/mesalab/qgw/monitor/ClickHouseHealthIndicator.java
@@ -2,7 +2,7 @@ package com.mesalab.qgw.monitor;
import cn.hutool.core.util.StrUtil;
import cn.hutool.core.util.URLUtil;
-import com.mesalab.common.enums.ResultStatusEnum;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
import com.mesalab.qgw.model.basic.ClickHouseHttpSource;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.actuate.health.AbstractHealthIndicator;
@@ -38,22 +38,24 @@ public class ClickHouseHealthIndicator extends AbstractHealthIndicator {
.append(clickHouseHttpSource.getRealTimeAccountUserName()).append("&")
.append("password=").append(clickHouseHttpSource.getRealTimeAccountPin()).append("&")
.append("database=").append(clickHouseHttpSource.getDbName()).append("&")
- .append("query=").append("select timezone() as timeZone, now() as curTimestamp")
+ .append("query=").append("select version() as version,timezone() as timeZone, now() as timestamp")
.append(" FORMAT JSON ;");
String url = queryURL + queryParamBuilder;
try {
ResponseEntity responseEntity = restTemplate.getForEntity(url, Map.class);
- if (responseEntity.getStatusCodeValue() == ResultStatusEnum.SUCCESS.getCode()) {
+ if (responseEntity.getStatusCodeValue() == HttpStatusCodeEnum.SUCCESS.getCode()) {
Map body = (Map) responseEntity.getBody();
Map data = (Map) ((List) body.get("data")).get(0);
String dbTimeZone = data.get("timeZone").toString();
- String curDatetime = data.get("curTimestamp").toString();
+ String curDatetime = data.get("timestamp").toString();
+ String version = StrUtil.subBefore(data.get("version").toString(),".", true);
builder.up()
.withDetail("app", APP)
.withDetail("url", clickHouseHttpSource.getUrl())
.withDetail("message", "ok")
.withDetail("timeZone", dbTimeZone)
- .withDetail("curTimestamp", curDatetime);
+ .withDetail("timestamp", curDatetime)
+ .withDetail("version",version);
} else {
builder.down()
.withDetail("app", APP)
diff --git a/src/main/java/com/mesalab/qgw/monitor/DruidHealthIndicator.java b/src/main/java/com/mesalab/qgw/monitor/DruidHealthIndicator.java
index 6ecf528f..061c8ce7 100644
--- a/src/main/java/com/mesalab/qgw/monitor/DruidHealthIndicator.java
+++ b/src/main/java/com/mesalab/qgw/monitor/DruidHealthIndicator.java
@@ -4,7 +4,7 @@ import cn.hutool.core.util.StrUtil;
import cn.hutool.core.util.URLUtil;
import com.alibaba.fastjson2.JSON;
import com.google.common.collect.Maps;
-import com.mesalab.common.enums.ResultStatusEnum;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
import com.mesalab.qgw.model.basic.DruidIoHttpSource;
import lombok.Data;
import org.springframework.beans.factory.annotation.Autowired;
@@ -49,7 +49,7 @@ public class DruidHealthIndicator extends AbstractHealthIndicator {
HttpEntity request = new HttpEntity(JSON.toJSONString(druidQueryParam), headers);
ResponseEntity responseEntity = restTemplate.postForEntity(queryURL, request, Object.class);
- if (responseEntity.getStatusCodeValue() == ResultStatusEnum.SUCCESS.getCode()) {
+ if (responseEntity.getStatusCodeValue() == HttpStatusCodeEnum.SUCCESS.getCode()) {
List body = (List) responseEntity.getBody();
String timeZone = String.valueOf(((Map) (body.get(0))).get("timeZone"));
String curDatetime = String.valueOf(((Map) (body.get(0))).get("curTimestamp"));
@@ -58,7 +58,7 @@ public class DruidHealthIndicator extends AbstractHealthIndicator {
.withDetail("url", druidIoHttpSource.getUrl())
.withDetail("message", "ok")
.withDetail("timeZone", timeZone)
- .withDetail("curTimestamp", curDatetime);
+ .withDetail("timestamp", curDatetime);
} else {
builder.down()
.withDetail("app", APP)
diff --git a/src/main/java/com/mesalab/qgw/monitor/HbaseHealthIndicator.java b/src/main/java/com/mesalab/qgw/monitor/HbaseHealthIndicator.java
index 43ff4ccd..5cfba23b 100644
--- a/src/main/java/com/mesalab/qgw/monitor/HbaseHealthIndicator.java
+++ b/src/main/java/com/mesalab/qgw/monitor/HbaseHealthIndicator.java
@@ -39,14 +39,11 @@ public class HbaseHealthIndicator extends AbstractHealthIndicator {
HbaseDialect.PROPERTIES.setProperty("hbase.rpc.timeout", hBaseAPISource.getRpcTimeout());
HbaseDialect.PROPERTIES.setProperty("hbase.client.scanner.timeout.period", hBaseAPISource.getRpcTimeout());
HbaseDialect.PROPERTIES.setProperty("phoenix.query.timeoutMs", hBaseAPISource.getRpcTimeout());
- Connection conn = null;
- PreparedStatement ps = null;
- try {
- String url = "jdbc:phoenix:".concat(hBaseAPISource.getZookeeperQuorum()).concat(":").concat(hBaseAPISource.getZookeeperZnodeParent());
- conn = DriverManager.getConnection(url, HbaseDialect.PROPERTIES);
- String sql = "select 1";
- ps = conn.prepareStatement(sql);
- PhoenixResultSet resultSet = (PhoenixResultSet) ps.executeQuery();
+ String url = "jdbc:phoenix:".concat(hBaseAPISource.getZookeeperQuorum()).concat(":").concat(hBaseAPISource.getZookeeperZnodeParent());
+ String sql = "select 1";
+ try (Connection conn = DriverManager.getConnection(url, HbaseDialect.PROPERTIES);
+ PreparedStatement ps = conn.prepareStatement(sql);
+ ResultSet resultSet = ps.executeQuery()) {
if (resultSet.next()) {
builder.up()
.withDetail("app", APP)
@@ -61,10 +58,6 @@ public class HbaseHealthIndicator extends AbstractHealthIndicator {
builder.down()
.withDetail("app", APP)
.withDetail("message", e.getMessage());
- } finally {
- if (conn != null) {
- conn.close();
- }
}
}
-}
+} \ No newline at end of file
diff --git a/src/main/java/com/mesalab/qgw/monitor/HosHealthIndicator.java b/src/main/java/com/mesalab/qgw/monitor/HosHealthIndicator.java
index 70924044..65c3c379 100644
--- a/src/main/java/com/mesalab/qgw/monitor/HosHealthIndicator.java
+++ b/src/main/java/com/mesalab/qgw/monitor/HosHealthIndicator.java
@@ -1,7 +1,7 @@
package com.mesalab.qgw.monitor;
import cn.hutool.core.util.StrUtil;
-import com.mesalab.common.enums.ResultStatusEnum;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
import com.mesalab.qgw.service.impl.HttpClientService;
import com.mesalab.services.configuration.HosConfig;
import org.springframework.beans.factory.annotation.Autowired;
@@ -33,7 +33,7 @@ public class HosHealthIndicator extends AbstractHealthIndicator {
Map<String, String> headers = new HashMap<>();
headers.put("token", hosConfig.getToken());
Map<String, String> resultMap = httpClientService.httpGet(url, headers, 3000);
- if (Integer.parseInt(resultMap.get("status")) == ResultStatusEnum.SUCCESS.getCode()) {
+ if (Integer.parseInt(resultMap.get("status")) == HttpStatusCodeEnum.SUCCESS.getCode()) {
builder.up()
.withDetail("app", APP)
.withDetail("url", url)
diff --git a/src/main/java/com/mesalab/qgw/monitor/JVMHealthIndicator.java b/src/main/java/com/mesalab/qgw/monitor/JVMHealthIndicator.java
index d9f81141..11152db4 100644
--- a/src/main/java/com/mesalab/qgw/monitor/JVMHealthIndicator.java
+++ b/src/main/java/com/mesalab/qgw/monitor/JVMHealthIndicator.java
@@ -21,6 +21,6 @@ public class JVMHealthIndicator extends AbstractHealthIndicator {
builder.up()
.withDetail("app", "JVM")
.withDetail("timeZone", TimeZone.getDefault().getID())
- .withDetail("curTimestamp", DateUtils.getCurrentDate("yyyy-MM-dd'T'HH:mm:ss:SSS'Z'"));
+ .withDetail("timestamp", DateUtils.getCurrentDate("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"));
}
}
diff --git a/src/main/java/com/mesalab/qgw/monitor/JobAdminHealthIndicator.java b/src/main/java/com/mesalab/qgw/monitor/JobAdminHealthIndicator.java
index 73925236..e99738c4 100644
--- a/src/main/java/com/mesalab/qgw/monitor/JobAdminHealthIndicator.java
+++ b/src/main/java/com/mesalab/qgw/monitor/JobAdminHealthIndicator.java
@@ -1,7 +1,7 @@
package com.mesalab.qgw.monitor;
import cn.hutool.core.util.StrUtil;
-import com.mesalab.common.enums.ResultStatusEnum;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
import com.mesalab.qgw.model.basic.JobAdminHttpSource;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.actuate.health.AbstractHealthIndicator;
@@ -45,10 +45,10 @@ public class JobAdminHealthIndicator extends AbstractHealthIndicator {
try {
ResponseEntity responseEntity = restTemplate.postForEntity(jobAdminHttpSource.getUrl() + "/login?", request, Map.class);
- if (responseEntity.getStatusCodeValue() == ResultStatusEnum.SUCCESS.getCode()) {
+ if (responseEntity.getStatusCodeValue() == HttpStatusCodeEnum.SUCCESS.getCode()) {
Map body = (HashMap) responseEntity.getBody();
int code = Integer.parseInt(String.valueOf(body.get("code")));
- if (code == ResultStatusEnum.SUCCESS.getCode()) {
+ if (code == HttpStatusCodeEnum.SUCCESS.getCode()) {
builder.up()
.withDetail("app", APP)
.withDetail("url", jobAdminHttpSource.getUrl())
diff --git a/src/main/java/com/mesalab/qgw/monitor/MariaDBHealthIndicator.java b/src/main/java/com/mesalab/qgw/monitor/MariaDBHealthIndicator.java
index da4fb023..de866fc7 100644
--- a/src/main/java/com/mesalab/qgw/monitor/MariaDBHealthIndicator.java
+++ b/src/main/java/com/mesalab/qgw/monitor/MariaDBHealthIndicator.java
@@ -4,6 +4,7 @@ import cn.hutool.core.util.StrUtil;
import com.alibaba.nacos.api.config.annotation.NacosValue;
import com.google.common.base.Splitter;
import com.jfinal.plugin.activerecord.Db;
+import lombok.Data;
import org.springframework.boot.actuate.health.AbstractHealthIndicator;
import org.springframework.boot.actuate.health.Health;
import org.springframework.stereotype.Component;
@@ -34,17 +35,19 @@ public class MariaDBHealthIndicator extends AbstractHealthIndicator {
return;
}
try {
- List<Object> query = Db.query("SELECT CURRENT_TIMESTAMP AS curTimestamp");
+ List<Object> query = Db.query("SELECT CURRENT_TIMESTAMP AS timestamp,version() AS version");
if (!CollectionUtils.isEmpty(query)) {
String substring = url.substring(url.indexOf("?") + 1);
Map<String, String> map = Splitter.on("&").withKeyValueSeparator("=").split(substring);
+ Object[] row = (Object[]) query.get(0);
String timeZone = map.get("serverTimezone");
builder.up()
.withDetail("app", APP)
.withDetail("url", url)
.withDetail("message", "ok")
+ .withDetail("version",StrUtil.subBefore(String.valueOf(row[1]), "-", false))
.withDetail("timeZone", timeZone)
- .withDetail("curTimestamp", query.get(0));
+ .withDetail("timestamp", row[0]);
} else {
builder.down()
diff --git a/src/main/java/com/mesalab/qgw/monitor/NacosHealthIndicator.java b/src/main/java/com/mesalab/qgw/monitor/NacosHealthIndicator.java
index b501a2e3..7666ebd9 100644
--- a/src/main/java/com/mesalab/qgw/monitor/NacosHealthIndicator.java
+++ b/src/main/java/com/mesalab/qgw/monitor/NacosHealthIndicator.java
@@ -4,7 +4,7 @@ import cn.hutool.core.util.StrUtil;
import cn.hutool.core.util.URLUtil;
import cn.hutool.json.JSONUtil;
import com.jayway.jsonpath.JsonPath;
-import com.mesalab.common.enums.ResultStatusEnum;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
import com.mesalab.common.nacos.NacosConfig;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.actuate.health.AbstractHealthIndicator;
@@ -39,13 +39,15 @@ public class NacosHealthIndicator extends AbstractHealthIndicator {
return;
}
try {
-
ResponseEntity responseEntity = restTemplate.getForEntity(URLUtil.normalize(nacosConfig.getServerAddr() + "/nacos/actuator/health"), Map.class);
- if (responseEntity.getStatusCodeValue() == ResultStatusEnum.SUCCESS.getCode()
+ ResponseEntity<Map> resMap = restTemplate.getForEntity(URLUtil.normalize(nacosConfig.getServerAddr() + "/nacos/v1/console/server/state"), Map.class);
+ if (resMap.getStatusCodeValue() == HttpStatusCodeEnum.SUCCESS.getCode()
+ && responseEntity.getStatusCodeValue() == HttpStatusCodeEnum.SUCCESS.getCode()
&& "UP".equalsIgnoreCase(JsonPath.read(JSONUtil.toJsonStr(responseEntity.getBody()), "$.status"))) {
builder.up()
.withDetail("app", APP)
.withDetail("url", nacosConfig.getServerAddr())
+ .withDetail("version",resMap.getBody().get("version"))
.withDetail("message", "ok");
} else {
builder.down()
diff --git a/src/main/java/com/mesalab/qgw/service/DslService.java b/src/main/java/com/mesalab/qgw/service/DSLService.java
index 91d7333a..66ca4baa 100644
--- a/src/main/java/com/mesalab/qgw/service/DslService.java
+++ b/src/main/java/com/mesalab/qgw/service/DSLService.java
@@ -1,8 +1,8 @@
package com.mesalab.qgw.service;
import com.mesalab.common.entity.BaseResult;
-import com.mesalab.network.model.protocol.ProtocolTree;
-import com.mesalab.qgw.model.basic.DSLProfile;
+import com.mesalab.qgw.model.dsl.LiveChartProtocol;
+import com.mesalab.qgw.model.basic.DSLQueryContext;
import java.util.List;
import java.util.Map;
@@ -15,7 +15,17 @@ import java.util.Map;
* @Date 2023/11/27 10:36
* @Author wWei
*/
-public interface DslService {
+public interface DSLService {
+
+ /**
+ * Desc: exec DSL
+ *
+ * @param dslProfile
+ * @return {@link BaseResult}
+ * @created by wWei
+ * @date 2023/12/11 15:54
+ */
+ BaseResult execDsl(DSLQueryContext dslProfile, boolean isDryRun);
/**
* Desc: Application and Protocol Summary
@@ -25,7 +35,7 @@ public interface DslService {
* @created by wWei
* @date 2023/11/27 10:40
*/
- BaseResult appAndProtocolSummary(DSLProfile dslProfile);
+ BaseResult appAndProtocolSummary(DSLQueryContext dslProfile, boolean isDryRun);
/**
* Desc: application and protocol tree composition
@@ -35,7 +45,7 @@ public interface DslService {
* @created by wWei
* @date 2023/11/28 10:38
*/
- BaseResult appAndProtocolTreeComposition(DSLProfile dslProfile);
+ BaseResult appAndProtocolTreeComposition(DSLQueryContext dslProfile, boolean isDryRun);
/**
* Desc: application and protocol tree throughput
@@ -45,7 +55,7 @@ public interface DslService {
* @created by wWei
* @date 2023/11/28 10:38
*/
- BaseResult applicationAndProtocolTreeThroughput(DSLProfile dslProfile);
+ BaseResult applicationAndProtocolTreeThroughput(DSLQueryContext dslProfile, boolean isDryRun);
/**
* Desc: top apps
@@ -55,7 +65,7 @@ public interface DslService {
* @created by wWei
* @date 2023/11/28 10:39
*/
- BaseResult applicationAndProtocolTopApp(DSLProfile dslProfile);
+ BaseResult applicationAndProtocolTopApp(DSLQueryContext dslProfile, boolean isDryRun);
/**
* Desc: app-related internal ips
@@ -65,7 +75,7 @@ public interface DslService {
* @created by wWei
* @date 2023/11/28 10:40
*/
- BaseResult applicationAndProtocolAppRelatedInternalIps(DSLProfile dslProfile);
+ BaseResult applicationAndProtocolAppRelatedInternalIps(DSLQueryContext dslProfile, boolean isDryRun);
/**
* Desc: app-throughput
@@ -75,7 +85,7 @@ public interface DslService {
* @created by wWei
* @date 2023/11/28 10:40
*/
- BaseResult applicationAndProtocolAppThroughput(DSLProfile dslProfile);
+ BaseResult applicationAndProtocolAppThroughput(DSLQueryContext dslProfile, boolean isDryRun);
/**
* Desc: app summary
@@ -85,30 +95,30 @@ public interface DslService {
* @created by wWei
* @date 2023/11/28 10:41
*/
- BaseResult applicationAndProtocolAppSummary(DSLProfile dslProfile);
+ BaseResult applicationAndProtocolAppSummary(DSLQueryContext dslProfile, boolean isDryRun);
/**
- * Desc: Get Subscriber ID Relate Client IPs
+ * Desc: IP Learning (FQDN-IP)
*
* @param dslProfile
* @return {@link BaseResult}
* @created by wWei
- * @date 2023/11/30 11:31
+ * @date 2023/12/1 11:04
*/
- BaseResult realTimeDataAnalyticsSubscriberIdRelateIp(DSLProfile dslProfile);
+ BaseResult ipLearningFqdnRelateIp(DSLQueryContext dslProfile, boolean isDryRun);
/**
- * Desc: GTP-C Mobile Identities AND APNs Relate TEID
+ * Desc: IP Learning Active IP
*
* @param dslProfile
* @return {@link BaseResult}
* @created by wWei
- * @date 2023/11/30 11:35
+ * @date 2023/12/1 11:04
*/
- BaseResult realTimeDataAnalyticsMobileIdentityRelateTeid(DSLProfile dslProfile);
+ BaseResult ipLearningActiveIp(DSLQueryContext dslProfile, boolean isDryRun);
- List<ProtocolTree> buildFlatStructure(List<Map> protocolData);
+ List<LiveChartProtocol> buildFlatStructure(List<Map> protocols);
- List<ProtocolTree> buildHierarchicalStructure(List<ProtocolTree> nodes);
+ List<LiveChartProtocol> buildHierarchicalStructure(List<LiveChartProtocol> nodes);
}
diff --git a/src/main/java/com/mesalab/qgw/service/MetadataService.java b/src/main/java/com/mesalab/qgw/service/DatabaseService.java
index 3e235592..63de4cb2 100644
--- a/src/main/java/com/mesalab/qgw/service/MetadataService.java
+++ b/src/main/java/com/mesalab/qgw/service/DatabaseService.java
@@ -2,11 +2,14 @@ package com.mesalab.qgw.service;
import com.mesalab.common.entity.BaseResult;
-import org.apache.avro.Schema;
+import com.mesalab.qgw.model.job.EncryptionInfo;
+import com.mesalab.qgw.model.job.StorageDeletionInfo;
+
+import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
-public interface MetadataService {
+public interface DatabaseService {
/**
* 查询schema信息
@@ -17,6 +20,8 @@ public interface MetadataService {
*/
Map getSchemaInfo(String type, String name, boolean displayStorageSize);
+ Map<String, List<Object>> getSchemaDataDict(String name);
+
/**
* 查询字典信息
*
@@ -57,7 +62,7 @@ public interface MetadataService {
* @param tableName
* @return
*/
- String getDBTypeByTableName(String tableName);
+ String getDBEngineByTableName(String tableName);
/**
@@ -80,12 +85,11 @@ public interface MetadataService {
* Desc: 通过表名获取Schema
*
* @param name
- * @return {@link Schema}
+ * @return {@link }
* @created by wWei
* @date 2021/7/27 2:41 下午
*/
- Schema getSchemaByName(String name);
-
+ LinkedHashMap<String, Object> getSchemaByName(String name);
/**
@@ -97,5 +101,57 @@ public interface MetadataService {
*/
BaseResult updateSchema(String name, Map<String, Object> schemaMap);
+ /**
+ * 系统存储配额
+ *
+ * @return
+ */
+ BaseResult getStorageQuota();
+
+ /**
+ * 获取每天日志存储变化
+ *
+ * @param searchStartTime
+ * @param searchEndTime
+ * @return
+ */
+ BaseResult dailyTrendOfStorage(String searchStartTime, String searchEndTime);
+
+ /**
+ * 数据配额设置:
+ * 1. 调度任务
+ * 2. schema TTL设置
+ *
+ * @param list
+ * @return
+ */
+ BaseResult deleteStorage(List<StorageDeletionInfo> list);
+
+ /**
+ * 数据配额设置状态
+ *
+ * @param logType
+ * @return
+ */
+ BaseResult getDeleteStorageStatus(String logType);
+
+ /**
+ * 获取日志储存配额利用率
+ *
+ * @return
+ */
+ BaseResult getStorageUsageStatus();
+
+ /**
+ * 自定义查询ID: 由 Catalog(数据库类型): resultID+query 组成。
+ *
+ * @param resultId
+ * @param query
+ * @return {@link String}
+ * @created by wWei
+ * @date 2021/1/7 6:48 下午
+ */
+ String getCustomQueryId(String resultId, String query);
+
}
diff --git a/src/main/java/com/mesalab/qgw/service/DatasetService.java b/src/main/java/com/mesalab/qgw/service/DatasetService.java
new file mode 100644
index 00000000..80666408
--- /dev/null
+++ b/src/main/java/com/mesalab/qgw/service/DatasetService.java
@@ -0,0 +1,57 @@
+package com.mesalab.qgw.service;
+
+import com.google.common.collect.Lists;
+import com.mesalab.common.entity.BaseResult;
+
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * 数据集市管理服务
+ *
+ * @Classname DatasetService
+ */
+public interface DatasetService
+{
+ /**
+ * Desc: 获取动态变量
+ *
+ * @param
+ * @return {@link List<LinkedHashMap>}
+ */
+ List<LinkedHashMap> getVariable();
+
+ /**
+ * Desc: 获取单个SQL模版
+ *
+ * @param datasetId
+ * @return {@link Map<String, Object>}
+ */
+ Map<String, Object> getDataset(String datasetId);
+
+ /**
+ * Desc: 批量获取SQL模版
+ *
+ * @param
+ * @return {@link List<Map<String, String>>}
+ */
+ List<Map<String, Object>> getDatasets(List<String> ids, String category, String backendEngine);
+
+ /**
+ * Desc: 结果预览
+ *
+ * @param datasetId
+ * @return {@link BaseResult}
+ */
+ BaseResult getPreview(String datasetId);
+
+ /**
+ * Desc: 获取执行SQL
+ *
+ * @param
+ * @return
+ */
+ String buildExecSQL(List<LinkedHashMap> variables, String sql);
+
+}
diff --git a/src/main/java/com/mesalab/qgw/service/DiagnosisService.java b/src/main/java/com/mesalab/qgw/service/DiagnosisService.java
index fd64a76a..7df569b5 100644
--- a/src/main/java/com/mesalab/qgw/service/DiagnosisService.java
+++ b/src/main/java/com/mesalab/qgw/service/DiagnosisService.java
@@ -7,24 +7,8 @@ public interface DiagnosisService {
BaseResult validateSchema();
- BaseResult validateMetadata();
-
- BaseResult runPocSQL(boolean isSaveResult, String option, String category);
-
BaseResult getPocSQL(String dialect, Integer queryNo);
- BaseResult getMessageInfo(String param);
JSONObject getVersionInfo();
-
-
- /**
- * Desc: 一致性校验
- *
- * @param
- * @return {@link BaseResult}
- * @created by wWei
- * @date 2022/6/5 9:24 下午
- */
- BaseResult consistencyCheck();
}
diff --git a/src/main/java/com/mesalab/qgw/service/HosService.java b/src/main/java/com/mesalab/qgw/service/HosService.java
new file mode 100644
index 00000000..eb191901
--- /dev/null
+++ b/src/main/java/com/mesalab/qgw/service/HosService.java
@@ -0,0 +1,20 @@
+package com.mesalab.qgw.service;
+
+import com.mesalab.common.entity.BaseResult;
+import org.springframework.web.multipart.MultipartFile;
+
+import java.io.InputStream;
+import java.util.List;
+import java.util.Map;
+
+
+public interface HosService {
+
+ BaseResult getFileList(String bucketName, String prefix, Map<String, String> udfMetaParam);
+
+ BaseResult uploadFile(String bucketName, String fileName, InputStream inputStream, Map<String, String> headers);
+
+ BaseResult deleteFileList(String bucketName, List<String> collect);
+
+ BaseResult getFile(String bucketName, String fileName);
+}
diff --git a/src/main/java/com/mesalab/qgw/service/PacketCombineDslService.java b/src/main/java/com/mesalab/qgw/service/PacketCombineDslService.java
new file mode 100644
index 00000000..15c1b198
--- /dev/null
+++ b/src/main/java/com/mesalab/qgw/service/PacketCombineDslService.java
@@ -0,0 +1,26 @@
+package com.mesalab.qgw.service;
+
+import com.mesalab.common.entity.BaseResult;
+import com.mesalab.qgw.model.basic.DSLQueryRequestParam;
+
+import java.io.InputStream;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * packet combine service
+ *
+ * @Classname PacketCombineDslService
+ * @Date 2024/4/19 15:29
+ * @Author wWei
+ */
+public interface PacketCombineDslService {
+
+ BaseResult run(DSLQueryRequestParam dslQueryRequestParam);
+
+ List<Map<String, Object>> getPackets(String sql);
+
+ byte[] packetCombine(List<Map<String, Object>> records);
+
+ Object uploadFile(String fileName, InputStream inputStream);
+}
diff --git a/src/main/java/com/mesalab/qgw/service/QueryJobService.java b/src/main/java/com/mesalab/qgw/service/QueryJobService.java
new file mode 100644
index 00000000..ecb99f61
--- /dev/null
+++ b/src/main/java/com/mesalab/qgw/service/QueryJobService.java
@@ -0,0 +1,66 @@
+package com.mesalab.qgw.service;
+
+import com.mesalab.common.entity.BaseResult;
+import com.mesalab.qgw.model.basic.DSLQueryRequestParam;
+import com.mesalab.qgw.model.basic.SqlQueryRequestParam;
+
+import java.util.List;
+
+/**
+ *
+ * @Date 2023/12/6 18:09
+ * @Author wWei
+ */
+public interface QueryJobService {
+
+ /**
+ * Create an SQL saved query and return the query job id. It will storage the query job and results in the database.
+ * The query job will be executed in the background through the SAVED-QUERY-SCHEDULER.
+ * @param sqlQueryRequestParam
+ * @return {@link BaseResult}
+ * @created by wWei
+ * @date 2023/12/11 11:02
+ */
+ BaseResult createSQLSavedQuery(SqlQueryRequestParam sqlQueryRequestParam);
+
+ /**
+ * Create an SQL ad-hoc query and return the query job id. The query will be executed immediately.
+ * It Support three modes of execution: NORMAL, BLOCKING, and ONESHOT.
+ * NORMAL: The query will be executed in the asynchronous thread pool.
+ * BLOCKING: The query will return the jobID when the query is completed (Sync Mode).
+ * ONESHOT: The query will return the results in the same call (Sync Mode). Does not return the job ID.
+ * @param sqlQueryRequestParam
+ * @return {@link BaseResult}
+ * @created by wWei
+ * @date 2023/12/11 11:28
+ */
+ BaseResult createSQLAdHocQuery(SqlQueryRequestParam sqlQueryRequestParam);
+
+ /**
+ * Create a DSL ad-hoc query and return the query job id. The query will be executed immediately.
+ * It Support three modes of execution: NORMAL, BLOCKING, and ONESHOT[Recommend].
+ * @param dslQueryRequestParam
+ * @return {@link BaseResult}
+ * @return
+ */
+ BaseResult createDSLAdHocQuery(DSLQueryRequestParam dslQueryRequestParam);
+
+ BaseResult getAdHocQueryResultById(String id);
+
+ BaseResult getSavedQueryResultById(String id);
+
+ BaseResult getAdHocQueryStatusById(String id);
+
+ BaseResult getSavedQueryStatusById(String id);
+
+
+ BaseResult getSavedQueryResult(List<String> ids);
+
+ BaseResult getAdHocQueryResult(List<String> ids);
+
+ BaseResult getSavedQueryStatus(List<String> ids);
+
+ BaseResult getAdHocQueryStatus(List<String> ids);
+
+ BaseResult deleteSavedQueryById(String id);
+}
diff --git a/src/main/java/com/mesalab/qgw/service/QueryService.java b/src/main/java/com/mesalab/qgw/service/QueryService.java
deleted file mode 100644
index 63e90323..00000000
--- a/src/main/java/com/mesalab/qgw/service/QueryService.java
+++ /dev/null
@@ -1,10 +0,0 @@
-package com.mesalab.qgw.service;
-
-import com.mesalab.common.entity.BaseResult;
-import com.mesalab.qgw.model.basic.QueryProfile;
-
-public interface QueryService {
-
- BaseResult executeQuery(QueryProfile param);
-
-} \ No newline at end of file
diff --git a/src/main/java/com/mesalab/qgw/service/RewriteTable.java b/src/main/java/com/mesalab/qgw/service/RewriteTable.java
index 48d08beb..2a91b570 100644
--- a/src/main/java/com/mesalab/qgw/service/RewriteTable.java
+++ b/src/main/java/com/mesalab/qgw/service/RewriteTable.java
@@ -1,7 +1,6 @@
package com.mesalab.qgw.service;
import com.mesalab.common.utils.SpringContextUtil;
-import com.mesalab.qgw.service.MetadataService;
import com.geedgenetworks.utils.StringUtil;
import net.sf.jsqlparser.expression.Alias;
import net.sf.jsqlparser.expression.ExpressionVisitor;
@@ -18,7 +17,7 @@ import net.sf.jsqlparser.util.deparser.SelectDeParser;
*/
public class RewriteTable extends SelectDeParser {
- private static MetadataService metadataService = (MetadataService) SpringContextUtil.getBean("metadataService");
+ private static DatabaseService databaseService = (DatabaseService) SpringContextUtil.getBean("databaseService");
public RewriteTable(ExpressionVisitor expressionVisitor, StringBuilder buffer) {
super(expressionVisitor, buffer);
@@ -27,7 +26,7 @@ public class RewriteTable extends SelectDeParser {
@Override
public void visit(Table table) {
if (StringUtil.isBlank(table.getSchemaName())) {
- table.setSchemaName(metadataService.getDBNameByTableName(table.getName()));
+ table.setSchemaName(databaseService.getDBNameByTableName(table.getName()));
}
if (((table.getAlias() == null)) || (StringUtil.isBlank(table.getAlias().getName()))) {
Alias alias = new Alias(table.getName(), true);
diff --git a/src/main/java/com/mesalab/qgw/service/SQLSyncQueryService.java b/src/main/java/com/mesalab/qgw/service/SQLSyncQueryService.java
new file mode 100644
index 00000000..c9287a19
--- /dev/null
+++ b/src/main/java/com/mesalab/qgw/service/SQLSyncQueryService.java
@@ -0,0 +1,10 @@
+package com.mesalab.qgw.service;
+
+import com.mesalab.common.entity.BaseResult;
+import com.mesalab.qgw.model.basic.SQLQueryContext;
+
+public interface SQLSyncQueryService {
+
+ BaseResult executeQuery(SQLQueryContext param);
+
+} \ No newline at end of file
diff --git a/src/main/java/com/mesalab/qgw/service/SystemService.java b/src/main/java/com/mesalab/qgw/service/SystemService.java
deleted file mode 100644
index 8cea8433..00000000
--- a/src/main/java/com/mesalab/qgw/service/SystemService.java
+++ /dev/null
@@ -1,64 +0,0 @@
-package com.mesalab.qgw.service;
-
-import com.mesalab.common.entity.BaseResult;
-import com.mesalab.qgw.model.job.EncryptionInfo;
-import com.mesalab.qgw.model.job.StorageDeletionInfo;
-
-import java.util.List;
-
-public interface SystemService {
-
- /**
- * 系统存储配额
- * @return
- */
- BaseResult getStorageQuota();
-
- /**
- * 获取每天日志存储变化
- * @param searchStartTime
- * @param searchEndTime
- * @return
- */
- BaseResult dailyTrendOfStorage(String searchStartTime, String searchEndTime);
-
- /**
- * 数据配额设置:
- * 1. 调度任务
- * 2. schema TTL设置
- * @param list
- * @return
- */
- BaseResult deleteStorage(List<StorageDeletionInfo> list);
-
- /**
- * 数据配额设置状态
- * @param logType
- * @return
- */
- BaseResult getDeleteStorageStatus(String logType);
-
-
- /**
- * 自定义查询ID: 由 Catalog(数据库类型): resultID+query 组成。
- *
- * @param resultId
- * @param query
- * @return {@link String}
- * @created by wWei
- * @date 2021/1/7 6:48 下午
- */
- String getCustomQueryId(String resultId, String query);
-
- /**
- * Desc: 通过queryId查询SQL任务执行状态
- *
- * @param param
- * @return {@link BaseResult}
- * @created by wWei
- * @date 2021/1/7 6:49 下午
- */
- BaseResult getCiphertext(EncryptionInfo param);
-
-
-}
diff --git a/src/main/java/com/mesalab/qgw/service/TrafficSpectrumDslService.java b/src/main/java/com/mesalab/qgw/service/TrafficSpectrumDslService.java
new file mode 100644
index 00000000..e1393ddc
--- /dev/null
+++ b/src/main/java/com/mesalab/qgw/service/TrafficSpectrumDslService.java
@@ -0,0 +1,39 @@
+package com.mesalab.qgw.service;
+
+import com.mesalab.common.entity.BaseResult;
+import com.mesalab.qgw.model.basic.DSLQueryRequestParam;
+
+import java.util.List;
+import java.util.Map;
+
+/**
+ * TODO
+ *
+ * @Classname TrafficSpectrumDslService
+ * @Date 2024/5/10 10:13
+ * @Author wWei
+ */
+public interface TrafficSpectrumDslService {
+ BaseResult run(DSLQueryRequestParam dslQueryRequestParam);
+
+ List<Map<String, Object>> summary(DSLQueryRequestParam dslQueryRequestParam);
+
+ List<Map<String, Object>> uniqIpsStat(DSLQueryRequestParam dslQueryRequestParam);
+
+ List<Map<String, Object>> topServerIpAndServerDomain(DSLQueryRequestParam dslQueryRequestParam);
+
+ List<Map<String, Object>> internalExternalDimensionMetricsStat(DSLQueryRequestParam dslQueryRequestParam);
+
+ List<Map<String, Object>> networkThroughputTrend(DSLQueryRequestParam dslQueryRequestParam);
+
+ Map<String, Object> getInternalExternalBipartiteGraph(Map<String, Object> data, Integer clientIPAppResultNum);
+
+ Map<String, Object> mergeInternalExternalBipartiteGraph(Map<String, Object> data1, Map<String, Object> data2);
+
+ List<Map<String, Object>> mergeLinks(List<Map<String, Object>> links);
+
+
+ Map<String, List<Map<String, Object>>> distinctNodes(Map<String, List<Map<String, Object>>> nodes);
+
+
+}
diff --git a/src/main/java/com/mesalab/qgw/service/TroubleshootingService.java b/src/main/java/com/mesalab/qgw/service/TroubleshootingService.java
new file mode 100644
index 00000000..bce315d0
--- /dev/null
+++ b/src/main/java/com/mesalab/qgw/service/TroubleshootingService.java
@@ -0,0 +1,16 @@
+package com.mesalab.qgw.service;
+
+import com.alibaba.fastjson2.JSONObject;
+import com.mesalab.common.entity.BaseResult;
+
+public interface TroubleshootingService {
+ JSONObject getComponentStatus();
+
+ BaseResult consistencyCheck();
+
+ BaseResult validateMetadata();
+
+ BaseResult benchmarkTest(String test, boolean isSaved);
+
+ BaseResult datesetVerification();
+}
diff --git a/src/main/java/com/mesalab/qgw/service/UtilService.java b/src/main/java/com/mesalab/qgw/service/UtilService.java
new file mode 100644
index 00000000..d535f4d7
--- /dev/null
+++ b/src/main/java/com/mesalab/qgw/service/UtilService.java
@@ -0,0 +1,10 @@
+package com.mesalab.qgw.service;
+
+import com.mesalab.common.entity.BaseResult;
+import com.mesalab.qgw.model.job.EncryptionInfo;
+
+public interface UtilService {
+ BaseResult getSQLSyntaxTree(String sql);
+
+ BaseResult getCiphertext(EncryptionInfo param);
+}
diff --git a/src/main/java/com/mesalab/qgw/service/impl/DatabaseServiceImpl.java b/src/main/java/com/mesalab/qgw/service/impl/DatabaseServiceImpl.java
new file mode 100644
index 00000000..9a00e7cc
--- /dev/null
+++ b/src/main/java/com/mesalab/qgw/service/impl/DatabaseServiceImpl.java
@@ -0,0 +1,1197 @@
+package com.mesalab.qgw.service.impl;
+
+import cn.hutool.core.collection.CollectionUtil;
+import cn.hutool.core.util.NumberUtil;
+import cn.hutool.core.util.StrUtil;
+import cn.hutool.crypto.digest.DigestUtil;
+import cn.hutool.json.JSONException;
+import cn.hutool.json.JSONUtil;
+import cn.hutool.log.Log;
+import cn.hutool.log.LogFactory;
+import com.alibaba.fastjson2.JSON;
+import com.alibaba.fastjson2.JSONWriter;
+import com.alibaba.nacos.api.config.ConfigService;
+import com.alibaba.nacos.api.config.listener.AbstractListener;
+import com.alibaba.nacos.api.exception.NacosException;
+import com.geedgenetworks.utils.DateUtils;
+import com.geedgenetworks.utils.Encodes;
+import com.google.common.base.Splitter;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.jayway.jsonpath.JsonPath;
+import com.jfinal.plugin.activerecord.Db;
+import com.jfinal.plugin.activerecord.Record;
+import com.mesalab.common.entity.BaseResult;
+import com.mesalab.common.entity.BaseResultGenerator;
+import com.mesalab.common.enums.*;
+import com.mesalab.common.utils.sqlparser.SQLHelper;
+import com.mesalab.qgw.constant.QGWMessageConst;
+import com.mesalab.common.nacos.NacosConfig;
+import com.mesalab.common.nacos.NacosConst;
+import com.mesalab.qgw.exception.QGWBusinessException;
+import com.mesalab.common.exception.CommonErrorCode;
+import com.mesalab.qgw.model.basic.HttpConfig;
+import com.mesalab.qgw.model.basic.JobAdminHttpSource;
+import com.mesalab.qgw.model.basic.SQLQueryContext;
+import com.mesalab.qgw.model.basic.ClickHouseHttpSource;
+import com.mesalab.qgw.model.job.ExecutorParam;
+import com.mesalab.qgw.model.job.StorageDeletionInfo;
+import com.mesalab.qgw.model.job.XxlJobInfo;
+import com.mesalab.qgw.model.metadata.MetadataBean;
+import com.mesalab.qgw.model.metadata.MetadataConfig;
+import com.mesalab.qgw.service.DatabaseService;
+import com.mesalab.qgw.service.SQLSyncQueryService;
+import com.geedgenetworks.utils.StringUtil;
+import com.mesalab.services.common.property.SqlPropertySourceFactory;
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.cglib.beans.BeanMap;
+import org.springframework.context.EnvironmentAware;
+import org.springframework.context.annotation.PropertySource;
+import org.springframework.core.env.Environment;
+import org.springframework.stereotype.Service;
+
+import javax.annotation.PostConstruct;
+import java.lang.reflect.AccessibleObject;
+import java.lang.reflect.Field;
+import java.time.Duration;
+import java.util.*;
+
+import static com.mesalab.services.service.impl.SQLDatasetServiceImpl.QUERY_ID_SEPARATOR;
+
+/**
+ * @Date: 2021-03-11 15:48
+ * @Author : liuyongqiang
+ * @ClassName : DatabaseServiceImpl
+ * @Description : 获取Nacos注册中心元数据实现类
+ */
+@Service("databaseService")
+@PropertySource(value = "classpath:http-sql-template.sql", factory = SqlPropertySourceFactory.class)
+public class DatabaseServiceImpl implements DatabaseService, EnvironmentAware {
+
+ private static final Log log = LogFactory.get();
+ @Autowired
+ private NacosConfig nacosConfig;
+ @Autowired
+ private ConfigService systemConfigService;
+ @Autowired
+ private ConfigService pubConfigService;
+ @Autowired
+ private ClickHouseHttpSource clickHouseHttpSource;
+ @Autowired
+ private SQLSyncQueryService sqlSyncQueryService;
+ @Autowired
+ public MetadataConfig metadataConfig;
+ @Autowired
+ private JobAdminHttpSource jobAdminHttpSource;
+ @Autowired
+ private HttpClientService httpClientService;
+ @Autowired
+ HttpConfig httpConfig;
+ @Autowired
+ Environment environment;
+
+ private Map<String, Object> schemaCache = Maps.newHashMap();
+ private Map<String, Object> originalCache = Maps.newHashMap();
+ private final static String KEY_REF = "$ref";
+ private final static String KEY = "key";
+ private final static String VALUE = "value";
+ private final static String CODE = "code";
+ private final static String INDEX_KEY = "index_key";
+ private Environment env;
+
+ private Map<String, String> headers = Maps.newHashMap();
+
+ /**
+  * Post-construction hook: registers the Nacos metadata listener so the
+  * schema cache is dropped whenever the metadata config changes.
+  */
+ @PostConstruct
+ public void init() {
+ addMetadataListener();
+ log.info("Initializing Schema Tables Complete");
+ }
+
+ /**
+  * Dispatches a schema lookup by metadata type: TABLES resolves the table
+  * list for a namespace, FIELDS resolves a single table's field schema.
+  *
+  * @param type               metadata type value (tables | fields)
+  * @param name               namespace name (tables) or table name (fields)
+  * @param displayStorageSize when true, field schemas are enriched with sizes
+  * @return the resolved schema map
+  * @throws QGWBusinessException for any unrecognized type value
+  */
+ @Override
+ public Map<String, Object> getSchemaInfo(String type, String name, boolean displayStorageSize) {
+     if (MetadataType.FIELDS.getValue().equals(type)) {
+         return loadFields(name, displayStorageSize);
+     }
+     if (MetadataType.TABLES.getValue().equals(type)) {
+         return loadTables(name);
+     }
+     throw new QGWBusinessException(HttpStatusCodeEnum.NOT_FOUND.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+             String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.SCHEMA_URL_NOT_FOUND));
+ }
+
+ /**
+  * Extracts the data-dictionary codes declared in a table's field schema:
+  * for every field whose doc carries a non-empty "data" list, maps the field
+  * name to the list of its dictionary codes.
+  *
+  * @param tableName table whose field schema is inspected
+  * @return field name -> list of dictionary codes (empty map if none)
+  */
+ @Override
+ public Map<String, List<Object>> getSchemaDataDict(String tableName) {
+     Map<String, List<Object>> dict = Maps.newHashMap();
+     Map<String, Object> schema = getSchemaInfo(MetadataType.FIELDS.getValue(), tableName, false);
+     if (schema == null) {
+         return dict;
+     }
+     List<Map<String, Object>> fieldsWithData = JsonPath.read(schema, "$.fields[?(@.doc.data != null && @.doc.data.size() > 0)]");
+     for (Map<String, Object> field : fieldsWithData) {
+         List<Object> codes = JsonPath.read(field, "$.doc.data[*].code");
+         if (codes != null && !codes.isEmpty()) {
+             dict.put(String.valueOf(field.get("name")), codes);
+         }
+     }
+     return dict;
+ }
+
+ /**
+  * Reads the "partition_key" entry from a table schema's doc section.
+  *
+  * @param tableName table whose schema is consulted
+  * @return the partition key, or the empty string when absent
+  */
+ @Override
+ public String getPartitionKey(String tableName) {
+     LinkedHashMap<String, Object> schema = getSchemaByName(tableName);
+     if (!schema.containsKey("doc")) {
+         return StringUtil.EMPTY;
+     }
+     LinkedHashMap<String, Object> doc = (LinkedHashMap<String, Object>) schema.get("doc");
+     Object partitionKey = doc.get("partition_key");
+     return StringUtil.isEmpty(partitionKey) ? StringUtil.EMPTY : partitionKey.toString();
+ }
+
+ /**
+  * Reads the "index_key" list from a table schema's doc section.
+  *
+  * @param tableName table whose field schema is consulted
+  * @return the index-key field names, or an empty list when absent
+  */
+ @Override
+ public List<String> getIndexKey(String tableName) {
+     Map schema = loadFields(tableName, false);
+     if (StringUtil.isEmpty(schema) || StringUtil.isEmpty(schema.get("doc"))) {
+         return Lists.newArrayList();
+     }
+     Object indexKey = ((LinkedHashMap) schema.get("doc")).get(INDEX_KEY);
+     if (StringUtil.isEmpty(indexKey)) {
+         return Lists.newArrayList();
+     }
+     return (List<String>) indexKey;
+ }
+
+ /**
+  * Reads an arbitrary key from a table schema's doc section.
+  *
+  * @param tableName table whose schema is consulted
+  * @param key       doc key to read
+  * @return the value as a JSON string when it is a map, its toString otherwise,
+  *         or the empty string when the doc/key is absent
+  */
+ @Override
+ public String getValueByKeyInSchemaDoc(String tableName, String key) {
+     LinkedHashMap<String, Object> schema = getSchemaByName(tableName);
+     if (!schema.containsKey("doc")) {
+         return StringUtil.EMPTY;
+     }
+     Object value = ((LinkedHashMap<String, Object>) schema.get("doc")).get(key);
+     if (value instanceof Map) {
+         return JSONUtil.toJsonStr(JSONUtil.parseObj(value));
+     }
+     return StringUtil.isEmpty(value) ? StringUtil.EMPTY : value.toString();
+ }
+
+ /**
+  * Resolves the database engine type for a table by locating the metadata
+  * group that contains it (the schema lookup also validates the table exists).
+  *
+  * @param tableName table to look up
+  * @return engine type value, or the empty string when unresolvable
+  */
+ @Override
+ public String getDBEngineByTableName(String tableName) {
+     if (StringUtil.isEmpty(getSchemaByName(tableName))) {
+         return StringUtil.EMPTY;
+     }
+     for (MetadataBean meta : metadataConfig.getMetadata()) {
+         if (meta.getTables().contains(tableName)) {
+             return getDBTypeByGroup(meta.getGroup());
+         }
+     }
+     return StringUtil.EMPTY;
+ }
+
+ /**
+  * Maps a metadata group name to its database engine type value; unknown
+  * groups map to the empty string.
+  */
+ private String getDBTypeByGroup(String group) {
+     if ("CLICKHOUSE_GROUP".equals(group)) {
+         return DBEngineType.CLICKHOUSE.getValue();
+     }
+     if ("HBASE_GROUP".equals(group)) {
+         return DBEngineType.HBASE.getValue();
+     }
+     if ("DRUID_GROUP".equals(group)) {
+         return DBEngineType.DRUID.getValue();
+     }
+     return StringUtil.EMPTY;
+ }
+
+ /**
+  * Resolves the database (namespace) a table belongs to; falls back to the
+  * configured ClickHouse database name when the schema lookup yields nothing.
+  */
+ @Override
+ public String getDBNameByTableName(String tableName) {
+ LinkedHashMap<String, Object> schemaMap = getSchemaByName(tableName);
+ return StringUtil.isNotEmpty(schemaMap) ? String.valueOf(schemaMap.get("namespace")) : clickHouseHttpSource.getDbName();
+ }
+
+ /**
+  * Collects every table name declared across all metadata groups.
+  *
+  * @return flat list of table names (possibly empty, never null)
+  */
+ @Override
+ public List<String> getAllTable() {
+     List<String> tables = new ArrayList<>();
+     for (MetadataBean meta : metadataConfig.getMetadata()) {
+         if (StringUtil.isNotEmpty(meta.getTables())) {
+             tables.addAll(meta.getTables());
+         }
+     }
+     return tables;
+ }
+
+ /**
+  * Resolves a table schema by name: serves from the in-memory cache when
+  * possible, otherwise loads it from Nacos and registers a listener that
+  * refreshes the cached entry on config change.
+  *
+  * @param tableName table whose schema is requested; must be a known table
+  * @return parsed schema map. NOTE(review): may be null when the Nacos lookup
+  *         throws NacosException, since the error is only logged — confirm
+  *         callers tolerate a null return.
+  * @throws QGWBusinessException if the table is unknown or the config is blank
+  */
+ @Override
+ public LinkedHashMap<String, Object> getSchemaByName(String tableName) {
+ if (!getAllTable().contains(tableName)) {
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(),
+ CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(),
+ tableName + ":" + QGWMessageConst.TABLE_NOT_EXIST));
+ }
+ LinkedHashMap<String, Object> map;
+ // Cache hit: return the previously parsed schema (assignment inside condition).
+ if (!StrUtil.isBlankIfStr(map = (LinkedHashMap<String, Object>) schemaCache.get(tableName))) {
+ return map;
+ }
+ try {
+ String content = systemConfigService.getConfig(tableName.concat(NacosConst.JSON_SUFFIX), nacosConfig.getGroup(), 3000);
+ if (StrUtil.isBlankIfStr(content)) {
+ throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(),
+ CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), tableName + ":" + QGWMessageConst.QUERY_SCHEMA_ERROR));
+ }
+ schemaCache.put(tableName, map = parseSchema(content));
+ systemConfigService.addListener(tableName.concat(NacosConst.JSON_SUFFIX), nacosConfig.getGroup(), new AbstractListener() {
+ @Override
+ public void receiveConfigInfo(String configInfo) {
+ log.info("ReceiveConfigInfo Schema Fields {}", configInfo);
+ schemaCache.remove(tableName);
+ schemaCache.put(tableName, parseSchema(configInfo));
+ }
+ });
+ } catch (NacosException e) {
+ // Pass the throwable first so hutool logs the full stack trace instead of
+ // formatting the exception object into the message template.
+ log.error(e, "NacosException");
+ }
+ return map;
+ }
+
+ /**
+  * Publishes a schema update to Nacos for the named table, then propagates the
+  * same parameter map to every table listed in the schema's "index_table" doc
+  * entry (comma-separated).
+  *
+  * @param name     table whose schema is updated
+  * @param paramMap doc/field overrides to merge into the stored schema
+  * @return success result on completion
+  * @throws QGWBusinessException wrapping any NacosException
+  */
+ @Override
+ public BaseResult updateSchema(String name, Map<String, Object> paramMap) {
+ try {
+ String content = systemConfigService.getConfig(name.concat(".json"), nacosConfig.getGroup(), 3000);
+ updateSchema(name, paramMap, content);
+ String indexTable = getValueByKeyInSchemaDoc(name, "index_table");
+ if (StringUtil.isNotBlank(indexTable)) {
+ // Index tables share the primary table's doc/field updates.
+ List<String> list = Splitter.on(",").omitEmptyStrings().splitToList(indexTable);
+ for (String item : list) {
+ String config = systemConfigService.getConfig(item.concat(".json"), nacosConfig.getGroup(), 3000);
+ updateSchema(item, paramMap, config);
+ }
+ }
+ return BaseResultGenerator.success4Message("ok");
+ } catch (NacosException e) {
+ throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
+ e.getMessage());
+ }
+ }
+
+ /**
+  * Builds the per-log-type storage quota report by joining three sources:
+  * quota rows from the local DB, deletion-job status (retention days) from the
+  * job admin, and usage limits parsed from each job's executor parameters.
+  *
+  * @return success result whose data is one map per concrete LogType
+  */
+ @Override
+ public BaseResult getStorageQuota() {
+ List<Record> storageData = Db.find(env.getProperty("SYSTEM_STORAGE_QUOTA"));
+ List<Map> result = Lists.newArrayList();
+ BaseResult storageUsageStatus = getStorageUsageStatus();
+ List<Map<String, Object>> usageData = Lists.newArrayList();
+ if (storageUsageStatus.isSuccess()) {
+ usageData = (List<Map<String, Object>>) storageUsageStatus.getData();
+ }
+ for (LogType value : LogType.values()) {
+ // ALL is a request-side pseudo-type; skip it in the report.
+ if (value.getValue().equals(LogType.ALL.getValue())) {
+ continue;
+ }
+ Map<String, Object> resultItem = Maps.newHashMap();
+ storageData.forEach(record -> {
+ Map<String, Object> columns = record.getColumns();
+ if (value.getValue().equals(columns.get("type"))) {
+ resultItem.putAll(columns);
+ }
+ });
+
+ BaseResult deleteStorageStatus = getDeleteStorageStatus(value.getValue());
+ if (deleteStorageStatus.isSuccess()) {
+ List<StorageDeletionInfo> data = (List<StorageDeletionInfo>) deleteStorageStatus.getData();
+ // Guard against an empty payload before dereferencing the first entry
+ // (previously an unconditional data.get(0) could throw IndexOutOfBoundsException).
+ if (CollectionUtil.isNotEmpty(data)) {
+ resultItem.put("type", value.getLabel());
+ resultItem.put("max_days", data.get(0).getMaxDays());
+ resultItem.put("default_max_days", data.get(0).getDefaultMaxDays());
+ }
+ }
+ usageData.forEach(o -> {
+ Map<String, Object> map = JSON.parseObject(String.valueOf(o.get("executorParam")), Map.class);
+ if (value.getValue().equals(map.get("logType"))) {
+ resultItem.put("max_usage", map.get("maxUsage"));
+ }
+ });
+ result.add(resultItem);
+
+ }
+ return BaseResultGenerator.success(result);
+ }
+
+ /**
+  * Returns the daily storage-usage trend between the two timestamps. When both
+  * bounds are blank, defaults to the last seven days ending today; when exactly
+  * one bound is blank, the request is rejected.
+  *
+  * @param searchStartTime inclusive start (yyyy-MM-dd HH:mm:ss) or blank
+  * @param searchEndTime   inclusive end (yyyy-MM-dd HH:mm:ss) or blank
+  * @return success result with one row per (day, log type), types labelled
+  * @throws QGWBusinessException when only one of the two bounds is supplied
+  */
+ @Override
+ public BaseResult dailyTrendOfStorage(String searchStartTime, String searchEndTime) {
+     Date today = DateUtils.convertStringToDate(DateUtils.getCurrentDate(), DateUtils.YYYY_MM_DD);
+
+     // No window supplied at all: default to the trailing week.
+     if (StringUtil.isBlank(searchStartTime) && StringUtil.isBlank(searchEndTime)) {
+         searchStartTime = DateUtils.getFormatDate(DateUtils.getSomeDate(today, -7), DateUtils.YYYY_MM_DD_HH24_MM_SS);
+         searchEndTime = DateUtils.getFormatDate(today, DateUtils.YYYY_MM_DD_HH24_MM_SS);
+     }
+
+     // After defaulting, a half-open window (one bound blank) is invalid.
+     if (StringUtil.isBlank(searchStartTime) || StringUtil.isBlank(searchEndTime)) {
+         throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+                 String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.START_TIME_AND_END_TIME_NOT_NULL));
+     }
+
+     String sql = String.format(Objects.requireNonNull(env.getProperty("SYSTEM_DAILY_TREND_OF_STORAGE")), searchStartTime, searchEndTime);
+     List<Map<String, Object>> rows = Lists.newArrayList();
+     for (Record record : Db.find(sql)) {
+         Map<String, Object> columns = record.getColumns();
+         columns.put("type", LogType.getLabelByValue(String.valueOf(columns.get("type"))));
+         rows.add(columns);
+     }
+     return BaseResultGenerator.success(rows);
+ }
+
+ /**
+  * Triggers storage-deletion jobs for the requested log types after validating
+  * the types, normalizing them (ALL expands to the concrete types), and
+  * checking no deletion job is already running.
+  *
+  * @param list requested deletions (type label + retention settings)
+  * @return the result of the LAST job triggered. NOTE(review): returns null
+  *         for an empty (but valid) list — confirm callers handle that.
+  */
+ @Override
+ public BaseResult deleteStorage(List<StorageDeletionInfo> list) {
+ if (!logTypeValid(list)) {
+ return BaseResultGenerator.failure(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), "Match failed, please check log type!"));
+ }
+
+ // Mutates list in place: expands ALL and maps labels to enum values.
+ preProcessOfLogType(list);
+
+ if (jobIsBusy(list)) {
+ return BaseResultGenerator.failure(HttpStatusCodeEnum.LOCKED.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), "The task is busy, please try later!"));
+ }
+
+ BaseResult baseResult = null;
+ for (StorageDeletionInfo info : list) {
+ baseResult = executeDeleteStorageJob(info);
+ }
+ return baseResult;
+ }
+
+ /**
+  * Reports deletion-job status per log type. A blank logType returns all three
+  * concrete types; otherwise only the matching type is returned. Types in the
+  * response are converted from enum values to display labels.
+  *
+  * @param logType concrete log type value, or blank for all
+  * @return success result with the StorageDeletionInfo list, or a failure
+  *         result for an unrecognized type
+  */
+ @Override
+ public BaseResult getDeleteStorageStatus(String logType) {
+ setCookie();
+ List<StorageDeletionInfo> list = new ArrayList<>();
+ if (StringUtil.isBlank(logType)) {
+
+ StorageDeletionInfo trafficInfo = getStorageDeletionInfoByHandler(LogType.TRAFFIC_LOGS.getValue(), JobHandlerOption.DELETE_TRAFFIC_DATA_JOB_HANDLER.getValue());
+ list.add(trafficInfo);
+
+ StorageDeletionInfo reportInfo = getStorageDeletionInfoByHandler(LogType.METRICS.getValue(), JobHandlerOption.DELETE_REPORT_AND_METRICS_DATA_JOB_HANDLER.getValue());
+ list.add(reportInfo);
+
+ StorageDeletionInfo fileInfo = getStorageDeletionInfoByHandler(LogType.FILES.getValue(), JobHandlerOption.DELETE_FILES_JOB_HANDLER.getValue());
+ list.add(fileInfo);
+ } else if (LogType.TRAFFIC_LOGS.getValue().equalsIgnoreCase(logType)) {
+ StorageDeletionInfo trafficInfo = getStorageDeletionInfoByHandler(logType, JobHandlerOption.DELETE_TRAFFIC_DATA_JOB_HANDLER.getValue());
+ list.add(trafficInfo);
+ } else if (LogType.METRICS.getValue().equalsIgnoreCase(logType)) {
+ StorageDeletionInfo reportInfo = getStorageDeletionInfoByHandler(logType, JobHandlerOption.DELETE_REPORT_AND_METRICS_DATA_JOB_HANDLER.getValue());
+ list.add(reportInfo);
+ } else if (LogType.FILES.getValue().equalsIgnoreCase(logType)) {
+ StorageDeletionInfo reportInfo = getStorageDeletionInfoByHandler(logType, JobHandlerOption.DELETE_FILES_JOB_HANDLER.getValue());
+ list.add(reportInfo);
+ } else {
+ return BaseResultGenerator.failure(HttpStatusCodeEnum.SERVER_ERROR.getCode(), "No matching log type: " + logType);
+ }
+ // Present display labels rather than internal enum values.
+ list.forEach(o -> o.setType(LogType.getLabelByValue(o.getType())));
+ return BaseResultGenerator.success("ok", list);
+ }
+
+ /**
+  * Fetches the raw job-info rows for the old-log deletion handler from the
+  * job admin; their executor params carry per-type usage limits.
+  */
+ @Override
+ public BaseResult getStorageUsageStatus() {
+ setCookie();
+ List<Map> trafficDate = getAllDataByHandler(JobHandlerOption.DELETE_OLD_LOG_JOB_HANDLER.getValue());
+ return BaseResultGenerator.success("ok", trafficDate);
+ }
+
+ /**
+  * Derives a stable query id: md5(engine type) + separator + md5(resultId +
+  * trimmed query), with the engine type resolved from the query's first table.
+  * NOTE(review): assumes the SQL references at least one table — get(0) throws
+  * otherwise; confirm callers pre-validate the query.
+  */
+ @Override
+ public String getCustomQueryId(String resultId, String query) {
+ String tableName = SQLHelper.getTableName(query).get(0);
+ String dbType = getDBEngineByTableName(tableName);
+ return DigestUtil.md5Hex(dbType) + QUERY_ID_SEPARATOR + DigestUtil.md5Hex(resultId + query.trim());
+ }
+
+ /**
+  * Merges the parameter map into an existing schema JSON and republishes it to
+  * Nacos. Table-level doc gets its "ttl" rewritten; per-field docs get "ttl"
+  * (forced to null for index-key fields) and "visibility" rewritten from the
+  * matching entry in paramMap.
+  *
+  * @param name    table name (config id is name + ".json")
+  * @param paramMap overrides, with a "doc" entry and a "fields" array
+  * @param content current schema JSON as stored in Nacos
+  * @return result of the Nacos publish
+  * @throws NacosException       from the publish call
+  * @throws QGWBusinessException when the current schema content is empty
+  */
+ private boolean updateSchema(String name, Map<String, Object> paramMap, String content) throws NacosException {
+ if (StringUtil.isNotEmpty(content)) {
+ Map<String, Object> data = JSON.parseObject(content, Map.class);
+ data.put("doc", rewriteDoc("ttl", data.get("doc"), paramMap.get("doc")));
+ List<Map<String, Object>> fields = (List<Map<String, Object>>) data.get("fields");
+ List<String> indexKey = getIndexKey(name);
+ for (Map<String, Object> field : fields) {
+ if (indexKey.contains(field.get("name"))) {
+ // Index-key fields never expire: force their ttl to null.
+ Map<String, Object> ttlNull = Maps.newHashMap();
+ ttlNull.put("ttl", null);
+ field.put("doc", rewriteDoc("ttl", field.get("doc"), ttlNull));
+ } else {
+ field.put("doc", rewriteDoc("ttl", field.get("doc"), JsonPath.read(paramMap, "$.fields[?(@.name == \"" + field.get("name") + "\")].doc")));
+ }
+ field.put("doc", rewriteDoc("visibility", field.get("doc"), JsonPath.read(paramMap, "$.fields[?(@.name == \"" + field.get("name") + "\")].doc")));
+ }
+ // WriteNulls keeps explicit null ttls in the published JSON.
+ content = JSON.toJSONString(data, JSONWriter.Feature.WriteNulls);
+ log.info("push Schema, content is: {}", content);
+ return systemConfigService.publishConfig(name.concat(".json"), nacosConfig.getGroup(), JSONUtil.formatJsonStr(content));
+ } else {
+ throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), QGWMessageConst.QUERY_SCHEMA_ERROR));
+ }
+ }
+
+ /**
+ * Desc: 重写[tables |field].doc
+ *
+ * @param key
+ * @param original
+ * @param param
+ * @return {@link Map< String, Object>}
+ * @created by wWei
+ * @date 2022/5/17 10:02 上午
+ */
+ private Map<String, Object> rewriteDoc(String key, Object original, Object param) {
+ Map<String, Object> doc = Maps.newHashMap();
+ if (StringUtil.isNotEmpty(original)) {
+ doc = (Map<String, Object>) original;
+ }
+ Map<String, Object> paramDoc = Maps.newHashMap();
+ if (param instanceof Map) {
+ paramDoc = (Map<String, Object>) param;
+ } else if (param instanceof List) {
+ List item = (List) param;
+ paramDoc = (Map<String, Object>) item.get(0);
+ }
+ if (paramDoc.containsKey(key)) {
+ Object value = paramDoc.get(key);
+ if (NumberUtil.isNumber(String.valueOf(value))) {
+ doc.put(key, Long.parseLong(value.toString()));
+ } else {
+ doc.put(key, value);
+ }
+ } else {
+ doc.remove(key);
+ }
+ return doc;
+ }
+
+ /**
+  * Registers a Nacos listener on the metadata config; any metadata change
+  * invalidates the whole schema cache (entries are re-fetched lazily).
+  */
+ private void addMetadataListener() {
+ try {
+ systemConfigService.addListener(NacosConst.META_DATA_ID, nacosConfig.getGroup(), new AbstractListener() {
+ @Override
+ public void receiveConfigInfo(String configInfo) {
+ log.info("ReceiveConfigInfo metadata {}", configInfo);
+ // Replace (not clear) so concurrent readers keep the old map.
+ schemaCache = Maps.newHashMap();
+ }
+ });
+ } catch (NacosException e) {
+ log.error("NacosException: ", e);
+ }
+ }
+
+ /**
+  * Loads a table's field schema, optionally enriching the table doc and each
+  * field doc with storage sizes from ClickHouse, then resolves $ref entries.
+  *
+  * @param name               table name
+  * @param displayStorageSize when true, query and attach storage sizes
+  * @return the (mutated) schema map
+  * @throws QGWBusinessException when the schema cannot be resolved
+  */
+ private Map loadFields(String name, boolean displayStorageSize) {
+ LinkedHashMap<String, Object> schemaMap = getSchemaByName(name);
+ if (StringUtil.isEmpty(schemaMap)) {
+ throw new QGWBusinessException(HttpStatusCodeEnum.NOT_FOUND.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(), QGWMessageConst.SCHEMA_URL_NOT_FOUND);
+ }
+ LinkedHashMap schemaDoc = (LinkedHashMap) schemaMap.get("doc");
+ // A non-empty doc that failed to parse into a map is logged, not fatal.
+ if (StringUtil.isEmpty(schemaDoc) &&
+ StringUtil.isNotEmpty(schemaMap.get("doc"))) {
+ log.error("{} schema's doc isn't jsonString and won't convert: {}", name, schemaMap.get("doc"));
+ }
+ List<Map> data = Lists.newArrayList();
+ if (displayStorageSize) {
+ data = getStorageSize(name);
+ schemaDoc = (LinkedHashMap) fillSchemaStorageSize(schemaDoc, data);
+ }
+ if (StringUtil.isNotEmpty(schemaDoc)) {
+ schemaMap.put("doc", schemaDoc);
+ }
+ List<Map> fields = (List<Map>) schemaMap.get("fields");
+ LinkedHashMap mapDoc;
+ for (Map next : fields) {
+ mapDoc = (LinkedHashMap) next.get("doc");
+ mapDoc = (LinkedHashMap) fillFieldStorageSize(data, mapDoc, next);
+ if (StringUtil.isNotEmpty(mapDoc)) {
+ next.put("doc", mapDoc);
+ }
+ if (StringUtil.isEmpty(mapDoc) && !StringUtil.isEmpty(next.get("doc"))) {
+ log.error("{} field's doc isn't jsonString and won't convert: {}", next.get("name"), next.get("doc"));
+ }
+ }
+ // Resolve $ref indirections last, once docs/sizes are in place.
+ fillReference(schemaMap, name);
+ return schemaMap;
+ }
+
+ /**
+  * Sums the per-column byte counts and stores the total under "size" in the
+  * table doc (creating the doc map if needed). With no size data, the doc is
+  * returned unchanged.
+  */
+ private Map fillSchemaStorageSize(LinkedHashMap schemaDoc, List<Map> data) {
+     if (CollectionUtil.isEmpty(data)) {
+         return schemaDoc;
+     }
+     LinkedHashMap doc = StringUtil.isEmpty(schemaDoc) ? new LinkedHashMap() : schemaDoc;
+     long totalBytes = 0L;
+     for (Map column : data) {
+         totalBytes += Long.parseLong(String.valueOf(column.get("bytes")));
+     }
+     doc.put("size", totalBytes);
+     return doc;
+ }
+
+ /**
+  * Stores one field's byte count under "size" in its doc (creating the doc map
+  * if needed) by matching the field name in the size data. Any lookup/parse
+  * failure is logged and the doc returned as-is.
+  */
+ private Map fillFieldStorageSize(List<Map> data, LinkedHashMap mapDoc, Map next) {
+ if (CollectionUtil.isEmpty(data)) {
+ return mapDoc;
+ }
+ if (StringUtil.isEmpty(mapDoc)) {
+ mapDoc = new LinkedHashMap();
+ }
+ try {
+ // Empty JsonPath match makes name.get(0) throw; caught and logged below.
+ List<Object> name = JsonPath.read(data, "$.[?(@.field == \"" + next.get("name") + "\")].bytes");
+ mapDoc.put("size", Long.parseLong(String.valueOf(name.get(0))));
+ } catch (RuntimeException e) {
+ log.error("fill {} field storage size error, message is: {}", next.get("name"), e.getMessage());
+ }
+ return mapDoc;
+ }
+
+ /**
+  * Queries ClickHouse for the per-column storage size of a table. Only tables
+  * registered in the ClickHouse namespace are queried; any failure is logged
+  * and an empty list returned (sizes are best-effort decoration).
+  */
+ private List<Map> getStorageSize(String name) {
+ try {
+ List<String> symbols = (List<String>) loadTables(clickHouseHttpSource.getDbName()).get("symbols");
+ String sql = null;
+ if (symbols.contains(name)) {
+ sql = String.format(Objects.requireNonNull(environment.getProperty("SCHEMA_STORAGE_SIZE")), name, clickHouseHttpSource.getDbName());
+ }
+ BaseResult baseResult = StringUtil.isBlank(sql) ? BaseResultGenerator.success(Lists.newArrayList()) : sqlSyncQueryService.executeQuery(SQLQueryContext.builder().originalSQL(sql).build());
+ if (baseResult.isSuccess()) {
+ return (List<Map>) baseResult.getData();
+ } else {
+ log.error("get {} schema storage size error on DB.", name);
+ }
+ } catch (RuntimeException ex) {
+ log.error("get {} schema storage size error on DB, message is {}", name, ex.getMessage());
+ }
+ return Lists.newArrayList();
+ }
+
+ /**
+ * @param map 需操作对象
+ * @param cfgName 当前文件名称
+ * @return void
+ * @Description $ref实际引用部分赋值
+ * @author wanghao
+ * @date 2021/9/1 17:02
+ */
+
+ private void fillReference(Map map, String cfgName) {
+ for (Object mapKey : map.keySet()) {
+ Object keyObj = map.get(mapKey);
+ if (keyObj instanceof Map) {
+ Map mapValue = (Map) keyObj;
+ if (mapValue.containsKey(KEY_REF)) {
+ if (INDEX_KEY.equals(String.valueOf(mapKey))) {
+ String refValue = String.valueOf(mapValue.get(KEY_REF));
+ mapValue.put(KEY_REF, refValue);
+ }
+ Object refValue = getRefValue(cfgName, mapValue);
+ map.put(mapKey, regexValue(refValue, mapValue));
+ } else {
+ fillReference(mapValue, cfgName);
+ }
+ } else if (keyObj instanceof List) {
+ List listValue = (List) keyObj;
+ for (Object next : listValue) {
+ if (next instanceof Map) {
+ fillReference((Map) next, cfgName);
+ }
+ }
+ }
+ }
+ }
+
+ /**
+  * Fetches a raw config by name with caching: tries the public Nacos group
+  * first, then the system group, registering listeners so updates refresh the
+  * raw cache and drop the parsed schema cache.
+  *
+  * @param cfgName config (data) id to fetch
+  * @return the raw config content; may be null when both lookups fail
+  */
+ @Override
+ public Object getCfg(String cfgName) {
+ if (originalCache.containsKey(cfgName)) {
+ return originalCache.get(cfgName);
+ }
+ String content = null;
+ try {
+ content = getPubCfg(cfgName);
+ if (!StrUtil.isBlankIfStr(content)) {
+ originalCache.put(cfgName, content);
+ return content;
+ }
+ content = systemConfigService.getConfig(cfgName, nacosConfig.getGroup(), 3000);
+ originalCache.put(cfgName, content);
+ systemConfigService.addListener(cfgName, nacosConfig.getGroup(), new AbstractListener() {
+ @Override
+ public void receiveConfigInfo(String configInfo) {
+ log.info("ReceiveConfigInfo Schema Fields {}", configInfo);
+ originalCache.put(cfgName, configInfo);
+ schemaCache.clear();
+ }
+ });
+ } catch (NacosException e) {
+ // Pass the throwable first so hutool logs the full stack trace instead of
+ // formatting the exception object into the message template.
+ log.error(e, "NacosException");
+ }
+ return content;
+ }
+
+ /**
+  * Fetches a config from the public (DEFAULT_GROUP) Nacos group and registers
+  * a listener that refreshes the raw cache and drops the schema cache on change.
+  *
+  * @throws NacosException from the Nacos client
+  */
+ private String getPubCfg(String cfgName) throws NacosException {
+ String content = pubConfigService.getConfig(cfgName, NacosConst.DEFAULT_GROUP, 3000);
+ pubConfigService.addListener(cfgName, NacosConst.DEFAULT_GROUP, new AbstractListener() {
+ @Override
+ public void receiveConfigInfo(String configInfo) {
+ log.info("ReceiveConfigInfo Schema Fields {}", configInfo);
+ originalCache.put(cfgName, configInfo);
+ schemaCache.clear();
+ }
+ });
+ return content;
+ }
+
+ /**
+  * Resolves the target of a $ref entry: splits "file#/json/pointer", loads the
+  * referenced config (defaulting to the current schema file) and extracts the
+  * pointed-at fragment via JsonPath. Failures are logged and yield null.
+  */
+ private Object getRefValue(String schemaName, Map map) {
+ Object data = null;
+ String[] split = getRefStrArray(map);
+ String jsonPath = getJsonPath(split);
+ try {
+ Object cfg = getCfg(getFileName(split, schemaName));
+ data = JsonPath.read(String.valueOf(cfg), jsonPath);
+ } catch (RuntimeException e) {
+ // Pass the throwable first so hutool keeps the stack trace instead of
+ // formatting the exception object into the message template.
+ log.warn(e, "read reference schema error");
+ }
+ return data;
+ }
+
+ /**
+  * Post-processes a resolved $ref value. When the ref map carries extra "key"/
+  * "value" JsonPath selectors, zips the two selected lists into a list of
+  * {code, value} maps; with only a $ref present, returns the value unchanged.
+  * Mismatched or unreadable selections yield an empty list.
+  */
+ private Object regexValue(Object obj, Map<String, String> map) {
+ // Only $ref present: nothing to reshape.
+ if (map.size() <= 1) {
+ return obj;
+ }
+ List<Object> key = Lists.newArrayList();
+ List<Object> value = Lists.newArrayList();
+ try {
+ key.addAll(JsonPath.read(obj, map.get(KEY)));
+ value.addAll(JsonPath.read(obj, map.get(VALUE)));
+ } catch (RuntimeException ex) {
+ log.warn("parse reference schema error: JsonPath.read error");
+ return Lists.newArrayList();
+ }
+ if (key.size() != value.size()) {
+ log.warn("parse reference schema error: key and value non-correspondence");
+ return Lists.newArrayList();
+ }
+ List<Map<String, Object>> list = Lists.newArrayList();
+ for (int i = 0; i < key.size(); i++) {
+ HashMap<String, Object> item = Maps.newLinkedHashMap();
+ item.put(CODE, key.get(i));
+ item.put(VALUE, value.get(i));
+ list.add(item);
+ }
+ return list;
+ }
+
+ /** Splits a $ref value into [file, fragment] at the first '#'. */
+ private String[] getRefStrArray(Map refMap) {
+     String reference = (String) refMap.get(KEY_REF);
+     return reference.split("#", 2);
+ }
+
+ /** Converts the '/'-separated ref fragment into a '$.'-rooted JsonPath. */
+ private String getJsonPath(String[] strArray) {
+     return "$" + strArray[1].replace("/", ".");
+ }
+
+ /** Resolves the referenced file: explicit file part, else the current schema's own json. */
+ private String getFileName(String[] strArray, String fileName) {
+     if (StrUtil.isEmpty(strArray[0])) {
+         return fileName.concat(NacosConst.JSON_SUFFIX);
+     }
+     return strArray[0];
+ }
+
+ /**
+ * @param name
+ * @Description: 从Nacos配置中心获取Tables
+ * @Author: liuyongqiang
+ * @Date: 2021/3/11 18:17
+ * @return: com.mesalab.common.base.BaseResult
+ **/
+ private Map<String, Object> loadTables(String name) {
+ Map<String, Object> date = new LinkedHashMap<>();
+ List<String> tables = new ArrayList<>();
+ metadataConfig.getMetadata().forEach(o -> {
+ if (o.getNamespace().equalsIgnoreCase(name)) {
+ tables.addAll(o.getTables());
+ }
+ }
+ );
+ if (tables.isEmpty()) {
+ throw new QGWBusinessException(HttpStatusCodeEnum.NOT_FOUND.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(), QGWMessageConst.SCHEMA_URL_NOT_FOUND);
+ }
+ date.put("type", "enum");
+ date.put("name", name);
+ date.put("symbols", tables);
+ return date;
+ }
+
+ /**
+  * Parses schema JSON into a LinkedHashMap, re-serializing the table-level and
+  * per-field "doc" values (with WriteNulls) so they become ordered maps too.
+  * Parse/cast failures are logged; the partially-parsed map (or null for blank
+  * input) is returned.
+  */
+ private LinkedHashMap parseSchema(String configInfo) {
+ if (StrUtil.isBlank(configInfo)) return null;
+ LinkedHashMap schemaMap = null;
+ try {
+ schemaMap = JSON.parseObject(configInfo, LinkedHashMap.class);
+ Object docSchema;
+ if (StringUtil.isNotEmpty(docSchema = schemaMap.get("doc"))) {
+ schemaMap.put("doc", JSON.parseObject(JSON.toJSONString(docSchema, JSONWriter.Feature.WriteNulls), LinkedHashMap.class));
+ }
+ Object fieldsObj;
+ if (StringUtil.isNotEmpty(fieldsObj = schemaMap.get("fields"))) {
+ List<LinkedHashMap> fields = (List<LinkedHashMap>) fieldsObj;
+ for (Map next : fields) {
+ Object docField;
+ if (StringUtil.isEmpty(docField = next.get("doc"))) {
+ continue;
+ }
+ next.put("doc", JSON.parseObject(JSON.toJSONString(docField, JSONWriter.Feature.WriteNulls), LinkedHashMap.class));
+ }
+ }
+ } catch (JSONException | ClassCastException e) {
+ log.error("update Schema error: {}", e);
+ }
+ return schemaMap;
+ }
+
+ /**
+  * Checks every requested deletion carries a recognizable log-type label
+  * (one that LogType.getValueByLabel can map to a value).
+  */
+ private boolean logTypeValid(List<StorageDeletionInfo> list) {
+     for (StorageDeletionInfo info : list) {
+         if (StringUtil.isEmpty(LogType.getValueByLabel(info.getType()))) {
+             return false;
+         }
+     }
+     return true;
+ }
+
+ /**
+ * @Description 涉及对ALL、LogType(value,label)处理
+ * @Param list:
+ * @return: java.util.List<com.mesalab.qgw.model.job.StorageDeletionInfo>
+ * @Created by wWei
+ */
+ private List<StorageDeletionInfo> preProcessOfLogType(List<StorageDeletionInfo> list) {
+ if (list.size() == 1) {
+ StorageDeletionInfo deletionInfo = list.get(0);
+ Integer maxDays = deletionInfo.getMaxDays();
+ Integer defaultMaxDays = deletionInfo.getDefaultMaxDays();
+ if (LogType.ALL.getValue().equalsIgnoreCase(list.get(0).getType())) {
+ list.clear();
+ list.add(new StorageDeletionInfo(LogType.TRAFFIC_LOGS.getValue(), maxDays, defaultMaxDays));
+ list.add(new StorageDeletionInfo(LogType.METRICS.getValue(), maxDays, defaultMaxDays));
+ list.add(new StorageDeletionInfo(LogType.FILES.getValue(), maxDays, defaultMaxDays));
+ return list;
+ } else if (LogType.TRAFFIC_LOGS.getValue().equalsIgnoreCase(list.get(0).getType()) |
+ LogType.FILES.getValue().equalsIgnoreCase(list.get(0).getType()) |
+ LogType.METRICS.getLabel().equalsIgnoreCase(list.get(0).getType())) {
+ list.clear();
+ list.add(new StorageDeletionInfo(LogType.getValueByLabel(deletionInfo.getType()), maxDays, defaultMaxDays));
+ return list;
+ }
+ }
+ list.forEach(o -> o.setType(LogType.getValueByLabel(o.getType())));
+ return list;
+ }
+
+ /**
+  * Returns true when any deletion job (partial or full variant) for the
+  * requested log types is currently running on the job admin.
+  */
+ private boolean jobIsBusy(List<StorageDeletionInfo> list) {
+ setCookie();
+ for (StorageDeletionInfo info : list) {
+
+ String handler = getDeletePartHandlerByLogType(info.getType());
+ if (jobIsBusyByHandler(handler)) {
+ return true;
+ }
+ // Also check the delete-all variant of the handler for the same type.
+ handler = getDeleteAllHandlerByLogType(info.getType());
+ if (jobIsBusyByHandler(handler)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ /**
+  * Maps a concrete log type to its partial-deletion job handler name; unknown
+  * types map to the empty string.
+  */
+ private String getDeletePartHandlerByLogType(String logType) {
+     if (LogType.TRAFFIC_LOGS.getValue().equalsIgnoreCase(logType)) {
+         return JobHandlerOption.DELETE_TRAFFIC_DATA_JOB_HANDLER.getValue();
+     }
+     if (LogType.METRICS.getValue().equalsIgnoreCase(logType)) {
+         return JobHandlerOption.DELETE_REPORT_AND_METRICS_DATA_JOB_HANDLER.getValue();
+     }
+     if (LogType.FILES.getValue().equalsIgnoreCase(logType)) {
+         return JobHandlerOption.DELETE_FILES_JOB_HANDLER.getValue();
+     }
+     return StringUtil.EMPTY;
+ }
+
+ /**
+  * Checks whether the job registered under the given handler is running:
+  * LOCKED means busy, SUCCESS means idle, anything else is escalated.
+  *
+  * @throws QGWBusinessException on any status other than LOCKED/SUCCESS
+  */
+ private boolean jobIsBusyByHandler(String handler) {
+ Map dataByHandler = getDataByHandler(handler);
+ String id = String.valueOf(dataByHandler.get("id"));
+ BaseResult result = queryJobStatusByJobId(Integer.parseInt(id));
+ if (result.getStatus().equals(HttpStatusCodeEnum.LOCKED.getCode())) {
+ return true;
+ } else if (!result.getStatus().equals(HttpStatusCodeEnum.SUCCESS.getCode())) {
+ throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), result.getMessage()));
+ }
+ return false;
+ }
+
+ /**
+  * Applies a new retention (ttl) to every table schema that declares one,
+  * lowering table-level and field-level ttls that exceed the requested window
+  * (index-key fields are pinned to null ttl). On any failure the previously
+  * captured schemas are restored and the deletion job is reverted.
+  *
+  * @param info          requested retention (maxDays converted to seconds)
+  * @param dataByHandler job-info row used to restore the job on rollback
+  * @throws QGWBusinessException after rollback, wrapping the original failure
+  */
+ private void updateSchema(StorageDeletionInfo info, Map dataByHandler) {
+ // Snapshot of each schema before mutation, for rollback on failure.
+ Map<String, Map> previous = Maps.newHashMap();
+ try {
+ Map schemaInfo = getSchemaInfo(MetadataType.TABLES.getValue(), clickHouseHttpSource.getDbName(), false);
+ Object tables = schemaInfo.get("symbols");
+ if (StringUtil.isEmpty(tables)) {
+ return;
+ }
+ long ttl = Duration.ofDays(info.getMaxDays()).getSeconds();
+ List<String> list = (List) tables;
+ for (String tableName : list) {
+ Map schemaMap = getSchemaInfo(MetadataType.FIELDS.getValue(), tableName, false);
+ List<Object> schemaDocTTL = JsonPath.read(schemaMap, "$.[?(@.doc.ttl != null)].doc.ttl");
+ List<Object> fieldDocTTL = JsonPath.read(schemaMap, "$.fields[?(@.doc.ttl != null)].doc.ttl");
+ // Tables without any ttl declaration are left untouched.
+ if (schemaDocTTL.isEmpty() && fieldDocTTL.isEmpty()) {
+ continue;
+ }
+ previous.put(tableName, getSchemaInfo(MetadataType.FIELDS.getValue(), tableName, false));
+ Object schemaDoc = schemaMap.get("doc");
+ Map<String, Object> map = Maps.newHashMap();
+ if (StringUtil.isNotEmpty(schemaDoc)) {
+ map = (Map<String, Object>) schemaDoc;
+ }
+ // Only shrink ttls: never extend an existing retention window.
+ if (StringUtil.isNotEmpty(map.get("ttl")) && ttl < Long.parseLong(map.get("ttl").toString())) {
+ map.put("ttl", ttl);
+ schemaMap.put("doc", map);
+ }
+ List<Map<String, Object>> fields = JsonPath.read(schemaMap, "$.fields");
+ List<String> indexKey = getIndexKey(tableName);
+ for (Map<String, Object> field : fields) {
+ Object doc = field.get("doc");
+ if (StringUtil.isEmpty(doc)) {
+ doc = Maps.newHashMap();
+ }
+ Map<String, Object> fieldDoc = (Map<String, Object>) doc;
+ if (indexKey.contains(field.get("name").toString())) {
+ // Index-key fields never expire.
+ fieldDoc.put("ttl", null);
+ field.put("doc", fieldDoc);
+ continue;
+ }
+ if (StringUtil.isNotEmpty(fieldDoc.get("ttl")) && ttl < Long.parseLong(fieldDoc.get("ttl").toString())) {
+ fieldDoc.put("ttl", ttl);
+ field.put("doc", fieldDoc);
+ }
+ }
+ updateSchema(tableName, schemaMap);
+ }
+ } catch (RuntimeException ex) {
+ // Roll back every schema touched so far, then restore the job definition.
+ for (String tableName : previous.keySet()) {
+ updateSchema(tableName, previous.get(tableName));
+ }
+ executeManageJob("update", mapToBean(dataByHandler, XxlJobInfo.class));
+ throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(),
+ CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), "up schema error in storage set ", ex.getMessage()));
+ }
+ }
+
+
+ /**
+  * Fetches the single xxl-job jobinfo record whose executorHandler matches the given value.
+  *
+  * <p>Fix: this method previously duplicated the HTTP call and error handling of
+  * {@link #getAllDataByHandler(String)} verbatim; it now delegates to it. A null "data"
+  * payload (which previously caused a NullPointerException at {@code data.size()}) is now
+  * reported as "no task information" like an empty list.
+  *
+  * @param handlerValue executor handler value registered on the scheduling platform
+  * @return the first matching jobinfo record
+  * @throws QGWBusinessException when the query fails or no matching record exists
+  */
+ private Map getDataByHandler(String handlerValue) {
+     List<Map> data = getAllDataByHandler(handlerValue);
+     if (data == null || data.isEmpty()) {
+         throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
+                 String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), "The scheduled task has no task information matching executorHandler (" + handlerValue + ")"));
+     }
+     return data.get(0);
+ }
+
+ /**
+  * Queries the scheduling platform for every jobinfo record bound to the given
+  * executor handler value.
+  *
+  * @param handlerValue executor handler value to match
+  * @return the raw "data" list from the pageList response (may be empty)
+  * @throws QGWBusinessException when the HTTP call or result parsing fails
+  */
+ private List<Map> getAllDataByHandler(String handlerValue) {
+     String url = jobAdminHttpSource.getUrl()
+             + "/jobinfo/pageList?jobGroup=-1&triggerStatus=-1&executorHandler="
+             + handlerValue;
+     Map<String, String> resultPageList = httpClientService.httpGet(url, headers, httpConfig.getServerResponseTimeOut());
+     boolean requestOk = StringUtil.isNotEmpty(resultPageList)
+             && resultPageList.get("status").equals(String.valueOf(HttpStatusCodeEnum.SUCCESS.getCode()));
+     if (!requestOk) {
+         throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
+                 String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), "Get scheduled task exception: " + JSON.toJSONString(resultPageList)));
+     }
+     Map<String, Object> maps = JSON.parseObject(resultPageList.get("result"), Map.class);
+     if (StringUtil.isEmpty(maps)) {
+         throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
+                 String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), "Get or package result exception: " + JSON.toJSONString(resultPageList)));
+     }
+     return (List) maps.get("data");
+ }
+
+ /**
+  * Queries the heartbeat status of a scheduled job on the scheduling platform.
+  *
+  * @param jobId id of the xxl-job task
+  * @return wrapped result produced by {@link #resultEncapsulationOfJob(Map)}
+  */
+ private BaseResult queryJobStatusByJobId(int jobId) {
+     String url = jobAdminHttpSource.getUrl() + "/jobinfo/jobBeat?jobId=" + jobId;
+     Map<String, String> response = httpClientService.httpGet(url, headers, httpConfig.getServerResponseTimeOut());
+     return resultEncapsulationOfJob(response);
+ }
+
+ /**
+  * Wraps a raw scheduling-platform HTTP response into a {@link BaseResult}.
+  *
+  * <p>Mapping: empty response → server-error; non-SUCCESS HTTP status → "Clear Task
+  * Failed."; payload code SUCCESS → ok; payload code LOCKED → "Clear Task is Running.";
+  * anything else → failure carrying the payload's "msg".
+  *
+  * @param resultMap raw response map ("status" and JSON "result" entries)
+  * @return encapsulated result, never null
+  */
+ private BaseResult resultEncapsulationOfJob(Map<String, String> resultMap) {
+     if (StringUtil.isEmpty(resultMap)) {
+         return BaseResultGenerator.failure(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
+                 String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), "The service is busy, please contact the scheduling platform!"));
+     }
+     if (!resultMap.get("status").equals(String.valueOf(HttpStatusCodeEnum.SUCCESS.getCode()))) {
+         return BaseResultGenerator.failure(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
+                 String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), "Clear Task Failed."));
+     }
+     Map result = JSON.parseObject(resultMap.get("result"), Map.class);
+     Object code = result.get("code");
+     if (code.equals(HttpStatusCodeEnum.SUCCESS.getCode())) {
+         return BaseResultGenerator.success("ok", null);
+     }
+     if (code.equals(HttpStatusCodeEnum.LOCKED.getCode())) {
+         return BaseResultGenerator.failure(HttpStatusCodeEnum.LOCKED.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
+                 String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), "Clear Task is Running."));
+     }
+     return BaseResultGenerator.failure(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
+             String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), result.get("msg")));
+ }
+
+ /**
+  * Maps a log type onto its "delete everything" xxl-job executor handler.
+  *
+  * @param logType one of the {@link LogType} values (case-insensitive)
+  * @return the matching handler value, or an empty string for unknown types
+  */
+ private String getDeleteAllHandlerByLogType(String logType) {
+     if (LogType.TRAFFIC_LOGS.getValue().equalsIgnoreCase(logType)) {
+         return JobHandlerOption.DELETE_ALL_TRAFFIC_DATA_JOB_HANDLER.getValue();
+     }
+     if (LogType.METRICS.getValue().equalsIgnoreCase(logType)) {
+         return JobHandlerOption.DELETE_ALL_REPORT_AND_METRICS_DATA_JOB_HANDLER.getValue();
+     }
+     if (LogType.FILES.getValue().equalsIgnoreCase(logType)) {
+         return JobHandlerOption.DELETE_ALL_FILES_JOB_HANDLER.getValue();
+     }
+     return StringUtil.EMPTY;
+ }
+
+ /**
+  * Logs in to the scheduling platform and caches the session cookie in the shared
+  * request headers used by all subsequent job-admin calls.
+  *
+  * <p>Fix: the previous code unconditionally stored
+  * {@code String.valueOf(headsMap.get("SET-COOKIE"))}, which wrote the literal string
+  * "null" as the Cookie header whenever the login response carried no SET-COOKIE
+  * header; the header is now only updated when a cookie is actually present.
+  */
+ public void setCookie() {
+     String url = jobAdminHttpSource.getUrl();
+     headers.put("Content-Type", "application/json");
+     String urlParamsByMap = getUrlParamsByMap(getObjectToMap(jobAdminHttpSource));
+     int socketTimeOut = httpConfig.getServerResponseTimeOut();
+     Map httpPostResponseHeads = httpClientService.getHttpPostResponseHeads(url + "/login?" + urlParamsByMap, headers, socketTimeOut);
+     Object cookie = httpPostResponseHeads.get("SET-COOKIE");
+     if (cookie != null) {
+         headers.put("Cookie", String.valueOf(cookie));
+     } else {
+         log.warn("Login response did not contain a SET-COOKIE header; Cookie header left unchanged.");
+     }
+ }
+
+ /**
+  * Serializes a map into a URL query string ("k1=v1&k2=v2"), URL-encoding each value.
+  *
+  * @param map parameter map; null yields an empty string
+  * @return the query string without a trailing separator
+  */
+ public static String getUrlParamsByMap(Map<String, Object> map) {
+     if (map == null) {
+         return "";
+     }
+     StringBuilder query = new StringBuilder();
+     for (Map.Entry<String, Object> entry : map.entrySet()) {
+         if (query.length() > 0) {
+             query.append("&");
+         }
+         query.append(entry.getKey()).append("=").append(Encodes.urlEncode(String.valueOf(entry.getValue())));
+     }
+     return query.toString();
+ }
+
+ /**
+  * Reflectively copies an object's declared fields (this class only, not superclasses)
+  * into an insertion-ordered map; null field values are replaced with empty strings.
+  *
+  * @param obj source bean, must not be null
+  * @return mutable LinkedHashMap of field name to value
+  */
+ public static Map<String, Object> getObjectToMap(Object obj) {
+     Map<String, Object> fieldValues = new LinkedHashMap<String, Object>();
+     Field[] declaredFields = obj.getClass().getDeclaredFields();
+     AccessibleObject.setAccessible(declaredFields, true);
+     for (Field field : declaredFields) {
+         Object value;
+         try {
+             value = field.get(obj);
+         } catch (IllegalAccessException e) {
+             log.error("Illegal Access Exception: ", e);
+             value = null;
+         }
+         fieldValues.put(field.getName(), value == null ? "" : value);
+     }
+     return fieldValues;
+ }
+
+ /**
+  * Executes a storage deletion job via the scheduling platform.
+  *
+  * <p>maxDays == 0 means "delete everything": the delete-all job is triggered first and
+  * its config updated afterwards. Otherwise the partial-delete job config is updated with
+  * the new retention, schema TTLs are adjusted for traffic logs, and the job is triggered.
+  *
+  * @param info requested deletion settings (log type and retention in days)
+  * @return the scheduling platform's result
+  * @throws QGWBusinessException when the platform reports anything but success
+  */
+ private BaseResult executeDeleteStorageJob(StorageDeletionInfo info) {
+
+ BaseResult baseResult;
+ String jobHandler;
+ String logType = info.getType();
+ ExecutorParam executorParam = new ExecutorParam();
+ if (0 == info.getMaxDays()) {
+ // Delete-all path: trigger first, then restore the job's regular retention config.
+ jobHandler = getDeleteAllHandlerByLogType(logType);
+ Map dataByHandler = getDataByHandler(jobHandler);
+
+ // Reuse the retention currently configured on the partial-delete job of this log type.
+ StorageDeletionInfo storageDeletionInfoByHandler = getStorageDeletionInfoByHandler(logType, getDeletePartHandlerByLogType(logType));
+ executorParam.setMaxDays(storageDeletionInfoByHandler.getMaxDays());
+ XxlJobInfo xxlJobInfo = setXxlJobInfoParam(dataByHandler, executorParam);
+ baseResult = executeTriggerAndUpdate(xxlJobInfo);
+ } else {
+ // Partial-delete path: update the job config first, then trigger it.
+ jobHandler = getDeletePartHandlerByLogType(info.getType());
+ executorParam.setMaxDays(info.getMaxDays());
+ Map oldData = getDataByHandler(jobHandler);
+ XxlJobInfo newData = setXxlJobInfoParam(oldData, executorParam);
+ baseResult = executeManageJob("update", newData);
+ // Traffic logs also carry TTLs in the schema metadata; keep them in sync
+ // (updateSchema rolls back and rethrows on failure, using oldData).
+ if (baseResult.isSuccess() && LogType.TRAFFIC_LOGS.getValue().equalsIgnoreCase(info.getType())) {
+ updateSchema(info, oldData);
+ }
+ if (baseResult.isSuccess()) {
+ baseResult = executeManageJob("trigger", newData);
+ }
+ }
+ if (!baseResult.getStatus().equals(HttpStatusCodeEnum.SUCCESS.getCode())) {
+ throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), QGWMessageConst.SCHEDULED_TASK_ERROR));
+ }
+ return baseResult;
+ }
+
+ /**
+  * Triggers a job on the scheduling platform and, on success, persists its updated
+  * configuration.
+  *
+  * @param xxlJobInfo job definition to trigger and then update
+  * @return success only when both the trigger and the update succeed
+  */
+ private BaseResult executeTriggerAndUpdate(XxlJobInfo xxlJobInfo) {
+     BaseResult triggerResult = executeManageJob("trigger", xxlJobInfo);
+     if (!triggerResult.getStatus().equals(HttpStatusCodeEnum.SUCCESS.getCode())) {
+         return BaseResultGenerator.failure(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
+                 String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), "Failed to execute task:" + xxlJobInfo.getExecutorHandler()));
+     }
+     BaseResult updateResult = executeManageJob("update", xxlJobInfo);
+     if (!updateResult.getStatus().equals(HttpStatusCodeEnum.SUCCESS.getCode())) {
+         return BaseResultGenerator.failure(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
+                 String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), "Update task failed:" + xxlJobInfo.getExecutorHandler()));
+     }
+     return BaseResultGenerator.success("ok", null);
+ }
+
+ /**
+  * Reads the storage deletion settings currently configured on the job bound to the
+  * given handler, tagging the result with the requested log type.
+  *
+  * @param jobHandlerValue executor handler whose executorParam is parsed
+  */
+ private StorageDeletionInfo getStorageDeletionInfoByHandler(String logType, String jobHandlerValue) {
+     XxlJobInfo jobInfo = mapToBean(getDataByHandler(jobHandlerValue), XxlJobInfo.class);
+     StorageDeletionInfo deletionInfo = JSON.parseObject(StrUtil.toUnderlineCase(jobInfo.getExecutorParam()), StorageDeletionInfo.class);
+     deletionInfo.setType(logType);
+     return deletionInfo;
+ }
+
+ public static <T> T mapToBean(Map<String, Object> map, Class<T> clazz) {
+ T bean = null;
+ try {
+ bean = clazz.newInstance();
+ } catch (InstantiationException e) {
+ log.error("Instantiation Exception: ", e);
+ } catch (IllegalAccessException e) {
+ log.error("Illegal Access Exception: ", e);
+ }
+ BeanMap beanMap = BeanMap.create(bean);
+ beanMap.putAll(map);
+ return bean;
+ }
+
+ /**
+  * Builds a job definition from the raw jobinfo map, replacing its executorParam with
+  * the supplied one while preserving the currently configured defaultMaxDays.
+  */
+ private XxlJobInfo setXxlJobInfoParam(Map data, ExecutorParam executorParam) {
+     XxlJobInfo jobInfo = mapToBean(data, XxlJobInfo.class);
+     ExecutorParam current = JSON.parseObject(jobInfo.getExecutorParam(), ExecutorParam.class);
+     executorParam.setDefaultMaxDays(current.getDefaultMaxDays());
+     jobInfo.setExecutorParam(JSON.toJSONString(executorParam));
+     return jobInfo;
+ }
+
+ /**
+  * Invokes a jobinfo management endpoint ("update", "trigger", ...) on the scheduling
+  * platform, passing the job definition as URL parameters.
+  *
+  * @param executeType endpoint name under /jobinfo/
+  * @param xxlJobInfo  job definition serialized into the query string
+  * @return encapsulated platform result
+  */
+ private BaseResult executeManageJob(String executeType, XxlJobInfo xxlJobInfo) {
+     String params = getUrlParamsByMap(getObjectToMap(xxlJobInfo));
+     String url = String.format("%s/jobinfo/%s/?%s", jobAdminHttpSource.getUrl(), executeType, params);
+     Map<String, String> resultMap = httpClientService.httpGet(url, headers, httpConfig.getServerResponseTimeOut());
+     log.warn("请求调度任务" + executeType + "接口" + url);
+     return resultEncapsulationOfJob(resultMap);
+ }
+
+ @Override
+ public void setEnvironment(Environment environment) {
+ // Injected by Spring via EnvironmentAware; exposes properties from the
+ // configured PropertySources to this service.
+ this.env = environment;
+ }
+
+ /** Storage log categories; each constant pairs an internal value with a display label. */
+ @Getter
+ @AllArgsConstructor
+ enum LogType {
+     TRAFFIC_LOGS("Traffic Logs", "Traffic Logs"),
+     METRICS("Metrics", "Metrics"),
+     FILES("Files", "Files"),
+     ALL("All", "All");
+
+     private final String value;
+     private final String label;
+
+     /** Returns the value whose label matches, or an empty string when unknown. */
+     public static String getValueByLabel(String label) {
+         for (LogType candidate : values()) {
+             if (candidate.getLabel().equals(label)) {
+                 return candidate.getValue();
+             }
+         }
+         return "";
+     }
+
+     /** Returns the label whose value matches, or an empty string when unknown. */
+     public static String getLabelByValue(String value) {
+         for (LogType candidate : values()) {
+             if (candidate.getValue().equals(value)) {
+                 return candidate.getLabel();
+             }
+         }
+         return "";
+     }
+ }
+}
diff --git a/src/main/java/com/mesalab/qgw/service/impl/DatasetServiceImp.java b/src/main/java/com/mesalab/qgw/service/impl/DatasetServiceImp.java
new file mode 100644
index 00000000..b923b28e
--- /dev/null
+++ b/src/main/java/com/mesalab/qgw/service/impl/DatasetServiceImp.java
@@ -0,0 +1,206 @@
+package com.mesalab.qgw.service.impl;
+
+import cn.hutool.core.util.StrUtil;
+import cn.hutool.log.Log;
+import cn.hutool.log.LogFactory;
+import com.alibaba.fastjson2.JSON;
+import com.geedgenetworks.utils.DateUtils;
+import com.geedgenetworks.utils.StringUtil;
+import com.google.common.collect.Lists;
+import com.jfinal.plugin.activerecord.Db;
+import com.jfinal.plugin.activerecord.Record;
+import com.mesalab.common.entity.BaseResult;
+import com.mesalab.common.enums.*;
+import com.mesalab.common.nacos.NacosConst;
+import com.mesalab.qgw.constant.QGWMessageConst;
+import com.mesalab.qgw.exception.QGWBusinessException;
+import com.mesalab.common.exception.CommonErrorCode;
+import com.mesalab.qgw.model.basic.*;
+import com.mesalab.qgw.service.DatabaseService;
+import com.mesalab.qgw.service.QueryJobService;
+import com.mesalab.qgw.service.SQLSyncQueryService;
+import com.mesalab.services.common.property.SqlPropertySourceFactory;
+import com.mesalab.qgw.service.DatasetService;
+import org.apache.commons.collections.CollectionUtils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.annotation.PropertySource;
+import org.springframework.core.env.Environment;
+import org.springframework.stereotype.Service;
+
+import java.time.Instant;
+import java.util.*;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+
+@Service("datasetService")
+@PropertySource(value = "classpath:http-sql-template.sql", factory = SqlPropertySourceFactory.class)
+public class DatasetServiceImp implements DatasetService {
+
+    private static final Log log = LogFactory.get();
+    /** Matches ${metric_xxx} / ${dimension_xxx}; group(2) is the bare field name. */
+    private static Pattern pFieldVariable = Pattern.compile("\\$\\{(metric|dimension)_(.*?)\\}", Pattern.CASE_INSENSITIVE);
+    /** Matches a [[ ... ]] optional clause. */
+    private static Pattern pLeftRightFlag = Pattern.compile("\\[\\[(.*?)\\]\\]", Pattern.CASE_INSENSITIVE);
+    private final static String TEMPLATE = "template";
+    private final static String BACKEND_ENGINE = "backend_engine";
+
+    private final static String DATASET_TYPE = "type";
+    @Autowired
+    Environment env;
+    @Autowired
+    DatabaseService databaseService;
+    @Autowired
+    SQLSyncQueryService sqlSyncQueryService;
+    @Autowired
+    private QueryJobService queryJobService;
+
+    /**
+     * Loads the configured dataset variables, filling empty start_time/end_time defaults
+     * with "one hour ago" and "now" respectively.
+     *
+     * @return variable maps (key/default/...); empty list when nothing is configured
+     */
+    @Override
+    public List<LinkedHashMap> getVariable() {
+        Object codeInfo = databaseService.getCfg(NacosConst.DATASETS_VARIABLES);
+        if (StringUtil.isEmpty(codeInfo)) {
+            return Lists.newArrayList();
+        }
+        Object json = JSON.toJSON(codeInfo);
+        List<LinkedHashMap> variables = JSON.parseArray(json.toString(), LinkedHashMap.class);
+        // Fix (perf): the reference time is loop-invariant; the old code re-formatted and
+        // re-parsed "now" once per variable.
+        Date currentDate = DateUtils.convertStringToDate(DateUtils.getCurrentDate(DateUtils.YYYY_MM_DD_HH24_MM_SS), DateUtils.YYYY_MM_DD_HH24_MM_SS);
+        for (LinkedHashMap variable : variables) {
+            String key = String.valueOf(variable.get("key"));
+            String def = String.valueOf(variable.get("default"));
+            if ("start_time".equals(key) && StringUtil.isBlank(def)) {
+                variable.put("default", DateUtils.getFormatDate(DateUtils.getSomeHour(currentDate, -1), DateUtils.YYYY_MM_DD_HH24_MM_SS));
+            }
+            if ("end_time".equals(key) && StringUtil.isBlank(def)) {
+                variable.put("default", DateUtils.getFormatDate(currentDate, DateUtils.YYYY_MM_DD_HH24_MM_SS));
+            }
+        }
+        return variables;
+    }
+
+    /**
+     * Fetches a single dataset by identifier.
+     *
+     * @param datasetId dataset identifier_name
+     * @return the dataset row with normalized timestamp columns
+     * @throws QGWBusinessException when no dataset matches the id
+     */
+    @Override
+    public Map<String, Object> getDataset(String datasetId) {
+        List<Record> records = Db.find(buildGetDatasetSQL(Lists.newArrayList(datasetId), null, null));
+        if (CollectionUtils.isEmpty(records)) {
+            throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.BAD_REQUEST_PARAM_SYNTAX_EXCEPTION.getCode(),
+                    String.format(CommonErrorCode.BAD_REQUEST_PARAM_SYNTAX_EXCEPTION.getMessage(), QGWMessageConst.DATASET_ID_NOT_EXIST));
+        }
+        Map<String, Object> result = formatResultData(records).get(0);
+        log.info("ID is: {}, Dataset is: {}", datasetId, result);
+        return result;
+    }
+
+    /**
+     * Fetches datasets filtered by ids, category and backend engine; all filters optional.
+     *
+     * @return matching dataset rows, empty list when none match
+     */
+    @Override
+    public List<Map<String, Object>> getDatasets(List<String> ids, String category, String backendEngine) {
+        List<Record> records = Db.find(buildGetDatasetSQL(ids, category, backendEngine));
+        if (CollectionUtils.isEmpty(records)) {
+            return Lists.newArrayList();
+        }
+        return formatResultData(records);
+    }
+
+    /**
+     * Converts records to maps, replacing epoch-second generated_time/last_update_time
+     * values with Instants.
+     */
+    private static List<Map<String, Object>> formatResultData(List<Record> list) {
+        List<Map<String, Object>> results = new ArrayList<>();
+        for (Record record : list) {
+            Map<String, Object> columns = record.getColumns();
+            columns.put("generated_time", Instant.ofEpochSecond(Long.parseLong(String.valueOf(columns.get("generated_time")))));
+            columns.put("last_update_time", Instant.ofEpochSecond(Long.parseLong(String.valueOf(columns.get("last_update_time")))));
+            // Fix: add the map we just mutated; the old code re-called record.getColumns(),
+            // which only preserved the edits if getColumns() returns the same backing map.
+            results.add(columns);
+        }
+        return results;
+    }
+
+    /**
+     * Renders a dataset's template with the configured variables and executes it as an
+     * ad-hoc query in ONESHOT mode.
+     *
+     * @param datasetId dataset identifier_name
+     * @return the ad-hoc query result
+     * @throws QGWBusinessException when the dataset type is neither "sql" nor "dsl"
+     */
+    @Override
+    public BaseResult getPreview(String datasetId) {
+        Map<String, Object> dataset = getDataset(datasetId);
+        String template = buildExecSQL(getVariable(), String.valueOf(dataset.get(TEMPLATE)));
+        String datasetType = String.valueOf(dataset.get(DATASET_TYPE));
+        if (datasetType.equalsIgnoreCase("sql")) {
+            SqlQueryRequestParam sqlQueryRequest = JSON.parseObject(template, SqlQueryRequestParam.class);
+            sqlQueryRequest.setExecutionMode(ExecutionMode.ONESHOT.getValue());
+            log.info("Dataset Preview, ID is: {}, Type is: {}, Exec SQL is: {}", datasetId, "sql", sqlQueryRequest.getStatement());
+            return queryJobService.createSQLAdHocQuery(sqlQueryRequest);
+        }
+        if (datasetType.equalsIgnoreCase("dsl")) {
+            DSLQueryRequestParam dslQueryRequest = JSON.parseObject(template, DSLQueryRequestParam.class);
+            dslQueryRequest.setExecutionMode(ExecutionMode.ONESHOT.getValue());
+            log.info("Dataset Preview, ID is: {}, Type is: {}, DSL is: {}", datasetId, "dsl", template);
+            return queryJobService.createDSLAdHocQuery(dslQueryRequest);
+        }
+        throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.BAD_REQUEST_PARAM_SYNTAX_EXCEPTION.getCode(),
+                String.format(CommonErrorCode.BAD_REQUEST_PARAM_SYNTAX_EXCEPTION.getMessage(), "not Supported"));
+    }
+
+    /**
+     * Renders a template: strips [[ ... ]] optional-clause markers, removes the
+     * metric_/dimension_ prefixes from field variables, then substitutes each ${key}
+     * with its default value ("filter" defaults to " 1 = 1" when blank).
+     */
+    @Override
+    public String buildExecSQL(List<LinkedHashMap> variables, String sql) {
+        // Fix: re-match against the CURRENT string each iteration; the old code kept one
+        // matcher over the original string while processOptionalClause rewrote sql,
+        // scanning stale offsets.
+        while (pLeftRightFlag.matcher(sql).find()) {
+            sql = processOptionalClause(sql, "[[", "]]");
+        }
+        sql = processFieldVariable(sql);
+        for (LinkedHashMap variable : variables) {
+            String key = String.valueOf(variable.get("key"));
+            // NOTE(review): a missing "default" entry becomes the literal string "null"
+            // here, matching the previous behavior — confirm upstream always supplies it.
+            String def = String.valueOf(variable.get("default"));
+            if ("filter".equals(key) && StringUtil.isBlank(def)) {
+                def = " 1 = 1";
+            }
+            sql = sql.replace("${".concat(key).concat("}"), def);
+        }
+        return sql;
+    }
+
+    /**
+     * Builds the dataset lookup SQL with optional id/category/backend_engine filters.
+     * All filter values are quote-escaped before being embedded.
+     */
+    private String buildGetDatasetSQL(List<String> ids, String category, String backendEngine) {
+        List<String> filterList = Lists.newArrayList();
+        if (!ids.isEmpty()) {
+            // Fix (security): escape single quotes in every id — previously only category
+            // and backend_engine were escaped, leaving the IN (...) list open to SQL injection.
+            List<String> escapedIds = Lists.newArrayList();
+            for (String id : ids) {
+                escapedIds.add(id.replace("'", "\\'"));
+            }
+            filterList.add(" identifier_name IN ( '".concat(String.join("', '", escapedIds)).concat("')"));
+        }
+        if (StrUtil.isNotBlank(category)) {
+            filterList.add(" category = '".concat(category.replace("'", "\\'")).concat("'"));
+        }
+        if (StrUtil.isNotBlank(backendEngine)) {
+            filterList.add(" backend_engine = '".concat(backendEngine.replace("'", "\\'")).concat("'"));
+        }
+        String filter = String.join(" AND ", filterList);
+        return String.format(Objects.requireNonNull(env.getProperty("SQL_DATASETS")), StrUtil.isNotBlank(filter) ? "WHERE ".concat(filter) : "");
+    }
+
+    /** Replaces each ${metric_x}/${dimension_x} occurrence with the bare field name x. */
+    private String processFieldVariable(String str) {
+        Matcher matcher = pFieldVariable.matcher(str);
+        StringBuffer sb = new StringBuffer();
+        while (matcher.find()) {
+            // Fix: quote the replacement — a field name containing '$' or '\' would
+            // otherwise be treated as a group reference by appendReplacement.
+            matcher.appendReplacement(sb, Matcher.quoteReplacement(matcher.group(2)));
+        }
+        matcher.appendTail(sb);
+        return sb.toString();
+    }
+
+    /**
+     * Removes the first leftFlag...rightFlag pair, keeping the enclosed content
+     * (e.g. "a [[b]] c" -> "a b c"). Unbalanced markers leave the input unchanged.
+     *
+     * <p>Fix: the previous index-walking implementation could throw
+     * StringIndexOutOfBoundsException when a marker prefix appeared near the end of the
+     * string, and trimmed the right marker by leftFlag's length.
+     */
+    private String processOptionalClause(String str, String leftFlag, String rightFlag) {
+        int start = str.indexOf(leftFlag);
+        if (start < 0) {
+            return str;
+        }
+        int end = str.indexOf(rightFlag, start + leftFlag.length());
+        if (end < 0) {
+            return str;
+        }
+        return str.substring(0, start)
+                + str.substring(start + leftFlag.length(), end)
+                + str.substring(end + rightFlag.length());
+    }
+}
diff --git a/src/main/java/com/mesalab/qgw/service/impl/DiagnosisServiceImpl.java b/src/main/java/com/mesalab/qgw/service/impl/DiagnosisServiceImpl.java
index 24d15708..d6f7d20e 100644
--- a/src/main/java/com/mesalab/qgw/service/impl/DiagnosisServiceImpl.java
+++ b/src/main/java/com/mesalab/qgw/service/impl/DiagnosisServiceImpl.java
@@ -1,86 +1,45 @@
package com.mesalab.qgw.service.impl;
-import cn.hutool.core.collection.CollectionUtil;
-import cn.hutool.core.io.FileUtil;
import cn.hutool.core.io.file.FileReader;
-import cn.hutool.core.io.file.FileWriter;
-import cn.hutool.core.util.IdUtil;
-import cn.hutool.core.util.NumberUtil;
-import cn.hutool.core.util.StrUtil;
-import cn.hutool.core.util.URLUtil;
-import cn.hutool.http.HttpStatus;
-import cn.hutool.json.JSONUtil;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.fastjson.JSONObject;
-import com.alibaba.fastjson2.JSON;
import com.alibaba.nacos.api.config.ConfigService;
import com.alibaba.nacos.api.exception.NacosException;
import com.google.common.base.Stopwatch;
import com.google.common.collect.Lists;
-import com.google.common.collect.MapDifference;
import com.google.common.collect.Maps;
-import com.jayway.jsonpath.JsonPath;
-import com.jfinal.plugin.activerecord.Db;
-import com.jfinal.plugin.activerecord.Record;
import com.mesalab.common.entity.BaseResult;
import com.mesalab.common.entity.BaseResultGenerator;
import com.mesalab.common.enums.*;
import com.mesalab.common.nacos.NacosConfig;
-import com.mesalab.common.nacos.NacosConst;
import com.mesalab.qgw.benchmark.DialectWriter;
-import com.mesalab.qgw.constant.QGWMessageConst;
import com.mesalab.qgw.exception.QGWBusinessException;
-import com.mesalab.qgw.exception.QGWErrorCode;
+import com.mesalab.common.exception.CommonErrorCode;
import com.mesalab.qgw.model.basic.*;
-import com.mesalab.qgw.service.QueryService;
import com.mesalab.qgw.service.DiagnosisService;
-import com.mesalab.qgw.service.MetadataService;
-import com.mesalab.services.common.enums.EntityQueryType;
+import com.mesalab.qgw.service.DatabaseService;
import com.mesalab.services.common.property.SqlPropertySourceFactory;
-import com.mesalab.services.service.EntityService;
import com.geedgenetworks.utils.*;
-import com.mesalab.services.service.SQLDatasetService;
import lombok.Data;
-import org.apache.avro.Schema;
-import org.apache.commons.collections.CollectionUtils;
-import org.apache.commons.math3.stat.StatUtils;
-import org.apache.http.NameValuePair;
-import org.apache.http.client.utils.URLEncodedUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.EnvironmentAware;
import org.springframework.context.annotation.PropertySource;
import org.springframework.core.env.Environment;
import org.springframework.stereotype.Service;
-import javax.annotation.PostConstruct;
-import javax.annotation.Resource;
import java.io.File;
import java.io.IOException;
-import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.*;
import java.util.concurrent.TimeUnit;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
import java.util.stream.Collectors;
-import java.util.stream.Stream;
@Service("diagnosisService")
@PropertySource(value = "classpath:http-sql-template.sql", factory = SqlPropertySourceFactory.class)
-public class
-DiagnosisServiceImpl implements DiagnosisService, EnvironmentAware {
+public class DiagnosisServiceImpl implements DiagnosisService, EnvironmentAware {
private static final Log log = LogFactory.get();
- private static final String AVRO_SUFFIX = ".avsc";
- private static final String METADATA_PREFIX = "Metadata/";
private static final String SCHEMA_PREFIX = "Schema/";
- private static final String HBASE_DBNAME2 = "tsg_galaxy";
- private static final String EXECUTE_ENGINE= "execute_engine";
- private static final String IDENTIFIER_NAME = "identifier_name";
- private static final String TEMPLATE = "template";
-
- @Autowired
- private QueryService queryService;
@Autowired
private DialectWriter dialectWriter;
@@ -91,36 +50,17 @@ DiagnosisServiceImpl implements DiagnosisService, EnvironmentAware {
@Autowired
private HBaseAPISource hBaseAPISource;
@Autowired
- private MetadataService metadataService;
- @Autowired
- private EntityConfigSource entityConfigSourceTemp;
- @Autowired
- private EntityService entityService;
- @Autowired
- private SQLDatasetService sqlDatasetService;
-
+ private DatabaseService databaseService;
@Autowired
private NacosConfig nacosConfig;
@Autowired
private ConfigService systemConfigService;
-
- @Autowired
- private HttpClientService httpClientService;
@Autowired
HttpConfig httpConfig;
- @PostConstruct
- public void init() {
- entityConfigSource = this.entityConfigSourceTemp;
- }
-
private Environment env;
- private static EntityConfigSource entityConfigSource;
private final static String DATA_ID = "version.json";
- private static Pattern pTTL = Pattern.compile(".*toIntervalSecond\\((\\d+)\\)", Pattern.CASE_INSENSITIVE);
- private static final String trafficPort = "8123";
-
@Override
public JSONObject getVersionInfo() {
@@ -147,132 +87,24 @@ DiagnosisServiceImpl implements DiagnosisService, EnvironmentAware {
Stopwatch watch = Stopwatch.createStarted();
Map<String, Object> statistics = Maps.newLinkedHashMap();
List<Object> data = Lists.newArrayList();
- Map<String, Map<String, String>> clickhouseResult = parseSchemaByDatasource(clickHouseHttpSource.getDbName(), metadataService.getAllTable());
- Map<String, Map<String, String>> druidResult = parseSchemaByDatasource(druidIoHttpSource.getDbname(), metadataService.getAllTable());
- Map<String, Map<String, String>> hbaseResult = parseSchemaByDatasource(hBaseAPISource.getDbName(), metadataService.getAllTable());
- Map<String, Map<String, String>> hbase2Result = parseSchemaByDatasource(HBASE_DBNAME2, metadataService.getAllTable());
+ Map<String, Map<String, String>> clickhouseResult = parseSchemaByDatasource(clickHouseHttpSource.getDbName(), databaseService.getAllTable());
+ Map<String, Map<String, String>> druidResult = parseSchemaByDatasource(druidIoHttpSource.getDbname(), databaseService.getAllTable());
+ Map<String, Map<String, String>> hbaseResult = parseSchemaByDatasource(hBaseAPISource.getDbName(), databaseService.getAllTable());
data.add(clickhouseResult.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)));
data.add(druidResult.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)));
data.add(hbaseResult.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)));
- data.add(hbase2Result.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)));
long executeTime = watch.elapsed(TimeUnit.MILLISECONDS);
statistics.put("elapsed", executeTime);
- result = BaseResultGenerator.success("Avro Schema Validation Success.", data, statistics);
- result.setFormatType(QueryFormatEnum.JSON.getValue());
+ result = BaseResultGenerator.success("Schema Validation Success.", data, statistics);
+ result.setOutputMode(OutputMode.JSON.getValue());
} catch (RuntimeException e) {
- log.error("Avro Schema Validation Fail:{}", e);
- throw new QGWBusinessException(ResultStatusEnum.SERVER_ERROR.getCode(),
- QGWErrorCode.SQL_QUERY_FEDERATION_EXCEPTION.getCode(),
- String.format(QGWErrorCode.SQL_QUERY_FEDERATION_EXCEPTION.getMessage(),e.getMessage()));
-
- }
- return result;
- }
-
- /**
- * 验证schema与数据库表结构是否一致
- *
- * @return
- */
- @Override
- public BaseResult validateMetadata() {
- BaseResult result = null;
- try {
- Stopwatch watch = Stopwatch.createStarted();
- Map<String, Object> statistics = Maps.newLinkedHashMap();
- List<Object> data = Lists.newArrayList();
- checkCKIndexKey(clickHouseHttpSource.getDbName(), metadataService.getAllTable());
- Map<String, Map<String, String>> clickhouseResult = checkMetadataByDatasource(clickHouseHttpSource.getDbName(), metadataService.getAllTable());
- Map<String, Map<String, String>> druidResult = checkMetadataByDatasource(druidIoHttpSource.getDbname(), metadataService.getAllTable());
- Map<String, Map<String, String>> hbaseResult = checkMetadataByDatasource(hBaseAPISource.getDbName(), metadataService.getAllTable());
- Map<String, Map<String, String>> hbase2Result = checkMetadataByDatasource(HBASE_DBNAME2, metadataService.getAllTable());
-
- data.add(clickhouseResult.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)));
- data.add(druidResult.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)));
- data.add(hbaseResult.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)));
- data.add(hbase2Result.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)));
-
- statistics.put("elapsed", watch.elapsed(TimeUnit.MILLISECONDS));
- result = BaseResultGenerator.success("Metadata Validation Success, Only Table Exist Fields detail.", data, statistics);
- result.setFormatType(QueryFormatEnum.JSON.getValue());
- } catch (RuntimeException e) {
- log.error("Metadata Validation Fail: {}", e.getMessage());
- throw new QGWBusinessException(ResultStatusEnum.SERVER_ERROR.getCode(),
- ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(),
- String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(), e.getMessage()));
- }
- return result;
- }
-
-
- @Override
- public BaseResult runPocSQL(boolean isSaved, String option, String category) {
- BaseResult result = null;
- Map<String, Object> statistics = Maps.newLinkedHashMap();
- Map<String, Map<String, String>> mergeResult;
- Map<String, Map<String, String>> engineResult;
- Map<String, Map<String, String>> clickhouseResult;
- Map<String, Map<String, String>> druidResult;
- Map<String, Map<String, String>> hbaseResult;
- try {
- Map<String, Object> datasetResult = generateDatasets(category);
- List<Map<String, String>> dataSetlist = (ArrayList<Map<String, String>>) datasetResult.get("list");
- List<String> druidIdList = Lists.newLinkedList();
- List<String> ckIdList = Lists.newLinkedList();
- List<String> engineIdList = Lists.newLinkedList();
- List<String> hbaseIdList = Lists.newLinkedList();
-
- List<String> druidSqlList = Lists.newLinkedList();
- List<String> ckSqlList = Lists.newLinkedList();
- List<String> engineSqlList = Lists.newLinkedList();
- List<String> hbaseSqlList = Lists.newLinkedList();
- Stopwatch watch = Stopwatch.createStarted();
- if (CollectionUtil.isNotEmpty(dataSetlist)){
- for (int i = 0; i < dataSetlist.size(); i++) {
- if (dataSetlist.get(i).get(EXECUTE_ENGINE).equalsIgnoreCase(DBTypeEnum.DRUID.getValue())) {
- druidIdList.add(dataSetlist.get(i).get(IDENTIFIER_NAME));
- druidSqlList.add(dataSetlist.get(i).get(TEMPLATE));
- } else if (dataSetlist.get(i).get(EXECUTE_ENGINE).equalsIgnoreCase(DBTypeEnum.CLICKHOUSE.getValue())) {
- ckIdList.add(dataSetlist.get(i).get(IDENTIFIER_NAME));
- ckSqlList.add(dataSetlist.get(i).get(TEMPLATE));
- } else if (dataSetlist.get(i).get(EXECUTE_ENGINE).equalsIgnoreCase(DBTypeEnum.HBASE.getValue())) {
- hbaseIdList.add(dataSetlist.get(i).get(IDENTIFIER_NAME));
- hbaseSqlList.add(dataSetlist.get(i).get(TEMPLATE));
- } else {
- engineIdList.add(dataSetlist.get(i).get(IDENTIFIER_NAME));
- engineSqlList.add(dataSetlist.get(i).get(TEMPLATE));
- }
- }
- }
-
- Optional.of(new File(dialectWriter.buildPocSQL(druidSqlList, DBTypeEnum.DRUID.getValue(), category)));
- Optional.of(new File(dialectWriter.buildPocSQL(ckSqlList, DBTypeEnum.CLICKHOUSE.getValue(), category)));
- Optional.of(new File(dialectWriter.buildPocSQL(hbaseSqlList, DBTypeEnum.HBASE.getValue(), category)));
- Optional.of(new File(dialectWriter.buildPocSQL(engineSqlList, DBTypeEnum.ENGINE.getValue(), category)));
+ log.error("Schema Validation Fail:{}", e);
+ throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(),
+ CommonErrorCode.SQL_EXECUTION_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.SQL_EXECUTION_EXCEPTION.getMessage(),e.getMessage()));
- clickhouseResult = buildResult(DBTypeEnum.CLICKHOUSE.getValue(), ckIdList, isSaved, option);
- druidResult = buildResult(DBTypeEnum.DRUID.getValue(), druidIdList, isSaved, option);
- engineResult = buildResult(DBTypeEnum.ENGINE.getValue(), engineIdList, isSaved, option);
- hbaseResult = buildResult(DBTypeEnum.HBASE.getValue(), hbaseIdList, isSaved, option);
-
- mergeResult = Stream
- .concat(clickhouseResult.entrySet().stream(), druidResult.entrySet().stream())
- .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
- mergeResult = Stream.concat(mergeResult.entrySet().stream(), engineResult.entrySet().stream())
- .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
- mergeResult = Stream.concat(mergeResult.entrySet().stream(), hbaseResult.entrySet().stream())
- .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
-
- statistics.put("elapsed", watch.elapsed(TimeUnit.MILLISECONDS));
- result = BaseResultGenerator.success("SQL Queries Success.", mergeResult, statistics);
- result.setFormatType(QueryFormatEnum.JSON.getValue());
- } catch (RuntimeException e) {
- log.error("Execute Poc SQL Fail:{}", e);
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(),
- QGWErrorCode.SQL_EXECUTION_SERVER_EXCEPTION.getCode(),
- String.format(QGWErrorCode.SQL_EXECUTION_SERVER_EXCEPTION.getMessage(),e.getMessage()));
}
return result;
}
@@ -302,380 +134,22 @@ DiagnosisServiceImpl implements DiagnosisService, EnvironmentAware {
}
statistics.put("elapsed", watch.elapsed(TimeUnit.MILLISECONDS));
result = BaseResultGenerator.success("Get SQL Success.", dialectResult, statistics);
- result.setFormatType(QueryFormatEnum.JSON.getValue());
+ result.setOutputMode(OutputMode.JSON.getValue());
} catch (RuntimeException | IOException e) {
log.error("Get Poc SQL Fail:{}", e);
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(),
- ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),e.getMessage()));
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(),
+ CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(),e.getMessage()));
}
return result;
}
- @Override
- public BaseResult consistencyCheck() {
- List<Map<String, Object>> dataList = Lists.newArrayList();
- Map<String, Object> logDiff = Maps.newHashMap();
- Map tables = metadataService.getSchemaInfo(MetadataTypeEnum.TABLES.getValue(), clickHouseHttpSource.getDbName(), false);
- List<String> symbols = (List<String>) tables.get("symbols");
- logDiff.put("logType", SystemServiceImpl.LogType.TRAFFIC_LOGS.getValue());
- Map<String, Map<String, Object>> changeTTL = Maps.newHashMap();
- List<String> ipPorts = getClusterAddressOfCK();
- try {
- for (String ipPort : ipPorts) {
- if (!changeTTL.isEmpty()) {
- break;
- }
- if (ipPort.startsWith("127.0.0.1")) {
- String url = clickHouseHttpSource.getUrl();
- ipPort = url.substring(url.indexOf("//") + 2, url.lastIndexOf(":") + 1).concat(trafficPort);
- }
- for (String tableName : symbols) {
- Map schemaMap = metadataService.getSchemaInfo("fields", tableName, false);
- Object tableTTLInSchema = getTableTTLInSchema(schemaMap);
- Map<String, Object> tableDiff = getTableDiff(ipPort, tableName, tableTTLInSchema);
- List fieldDiff = getFieldDiff(ipPort, tableName, schemaMap);
- if (CollectionUtil.isNotEmpty(fieldDiff)) {
- tableDiff.put("fields", fieldDiff);
-
- }
- if (CollectionUtil.isNotEmpty(tableDiff)) {
- changeTTL.put(tableName, tableDiff);
- logDiff.put("tables", Lists.newArrayList(changeTTL));
- logDiff.put("address", Lists.newArrayList(ipPort));
- }
- }
- }
- } catch (RuntimeException ex) {
- log.error(" schema TTL check error: {}", ex);
- throw new QGWBusinessException(ResultStatusEnum.SERVER_ERROR.getCode(), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(),
- String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(),ex.getMessage()));
- }
- if (changeTTL.isEmpty()) {
- return BaseResultGenerator.success("ok", dataList);
- }
- dataList.add(logDiff);
- return BaseResultGenerator.success("Log streaming setting task not executed or failed", dataList);
- }
-
- private Object getTableTTLInSchema(Map schemaMap) {
- Object tableTTLInSchema = null;
- if (StringUtil.isNotEmpty(schemaMap)) {
- if (schemaMap.containsKey("doc")) {
- Map<String, Object> schemaHasDoc = (Map<String, Object>) schemaMap.get("doc");
- if (schemaHasDoc.containsKey("ttl")) {
- tableTTLInSchema = StringUtil.isEmpty(schemaHasDoc.get("ttl")) ? null : schemaHasDoc.get("ttl").toString();
- }
- }
- }
- return tableTTLInSchema;
- }
-
- private Map<String, Object> getTableDiff(String ipPort, String tableName, Object tableTTLInSchema) {
- String sql = String.format(Objects.requireNonNull(env.getProperty("TABLE_TTL")), tableName, clickHouseHttpSource.getDbName());
- Map<String, String> result = executeHttpGetOfCK(ipPort, sql, clickHouseHttpSource.getSystemDBName());
- if (Integer.parseInt(result.get("status")) != ResultStatusEnum.SUCCESS.getCode()) {
- log.error(" query table TTL error: {}", result.toString());
- throw new QGWBusinessException(ResultStatusEnum.SERVER_ERROR.getCode(), QGWErrorCode.SQL_EXECUTION_SERVER_EXCEPTION.getCode(),
- String.format(QGWErrorCode.SQL_EXECUTION_SERVER_EXCEPTION.getMessage(),result.toString()));
- }
- Object tableTTLInDB = null;
- Map<String, Object> tableDiff = Maps.newHashMap();
- Map o = (Map) JSON.parseObject(result.get("result"), Map.class);
- Map<String, Object> resultMap = (Map<String, Object>) o;
- for (Map<String, Object> datum : (List<Map<String, Object>>)resultMap.get("data")) {
- Matcher matcher = pTTL.matcher(String.valueOf(datum.get("table_ttl")));
- tableTTLInDB = matcher.find() ? matcher.group(1) : null;
- }
- if (!String.valueOf(tableTTLInDB).equals(String.valueOf(tableTTLInSchema))) {
- tableDiff.put("last_ttl", StringUtil.isEmpty(tableTTLInSchema) ? null : tableTTLInSchema.toString());
- tableDiff.put("used_ttl", StringUtil.isEmpty(tableTTLInDB) ? null : tableTTLInDB.toString());
- log.warn("table ttl: {} not consistency.", tableName);
- }
- return tableDiff;
- }
-
- private List<Map<String, Object>> getFieldDiff(String ipPort, String tableName, Map<String, Object> schemaMap) {
- List<Map<String, Object>> fieldDiff = Lists.newArrayList();
- Map<String, String> result = executeHttpGetOfCK(ipPort, "describe " + tableName + "_local", clickHouseHttpSource.getDbName());
- if (Integer.parseInt(result.get("status")) != ResultStatusEnum.SUCCESS.getCode()) {
- log.error(" query table field TTL error: {}", result.toString());
- throw new QGWBusinessException(ResultStatusEnum.SERVER_ERROR.getCode(), QGWErrorCode.SQL_EXECUTION_SERVER_EXCEPTION.getCode(),
- String.format(QGWErrorCode.SQL_EXECUTION_SERVER_EXCEPTION.getMessage(),result.toString()));
- }
- List<Map<String, Object>> fields = (List<Map<String, Object>>) schemaMap.get("fields");
- for (Map<String, Object> field : fields) {
- Object read = JsonPath.read(result, "$.result");
- List<Object> fieldList = JsonPath.read(JSONUtil.toJsonStr(JSONUtil.parseObj(read)), "$.data[?(@.name == \"" + field.get("name") + "\")].ttl_expression");
- Object fieldTTLInSchema = null;
- Object fieldTTLInDB;
- if (StringUtil.isNotEmpty(field.get("doc"))) {
- Map<String, Object> doc = (Map<String, Object>) field.get("doc");
- fieldTTLInSchema = StringUtil.isEmpty(doc.get("ttl")) ? null : doc.get("ttl");
- }
- Matcher matcher = pTTL.matcher(String.valueOf(fieldList.get(0)));
- fieldTTLInDB = matcher.find() ? matcher.group(1) : null;
- if (String.valueOf(fieldTTLInSchema).equals(String.valueOf(fieldTTLInDB))) {
- continue;
- }
- log.warn("field ttl: address {} {}-{} not consistency.", ipPort, tableName, field.get("name"));
- Map<String, Object> ttlDiff = Maps.newHashMap();
- ttlDiff.put("last_ttl", StringUtil.isEmpty(fieldTTLInSchema) ? null : fieldTTLInSchema.toString());
- ttlDiff.put("used_ttl", StringUtil.isEmpty(fieldTTLInDB) ? null : fieldTTLInDB.toString());
- if (fieldDiff.stream().noneMatch(o -> o.containsKey(field.get("name").toString()))) {
- Map<String, Object> item = Maps.newHashMap();
- item.put(field.get("name").toString(), ttlDiff);
- fieldDiff.add(item);
- }
- }
- return fieldDiff;
- }
-
- private List<String> getClusterAddressOfCK() {
- List<String> endpointList = new ArrayList<>();
- BaseResult baseResult = queryService.executeQuery(QueryProfile.builder().query("SELECT DISTINCT concat(host_address,':','" + trafficPort + "') as endpoint FROM clusters where cluster = 'ck_cluster'").build());
- if (baseResult.isSuccess()) {
- List<Map<String, Object>> data = (List<Map<String, Object>>) baseResult.getData();
- for (Map<String, Object> datum : data) {
- endpointList.add(String.valueOf(datum.get("endpoint")));
- }
- }
- return endpointList;
- }
-
- private Map<String, String> executeHttpGetOfCK(String ipPort, String sql, String dbName) {
- String queryURL = URLUtil.normalize("http://" + ipPort + "/?");
- StringBuilder paramBuilder = new StringBuilder("user=")
- .append(clickHouseHttpSource.getRealTimeAccountUserName()).append("&")
- .append("password=").append(clickHouseHttpSource.getRealTimeAccountPin()).append("&")
- .append("database=").append(dbName).append("&")
- .append("query=").append(sql)
- .append(" FORMAT JSON;");
- List<NameValuePair> values = URLEncodedUtils.parse(paramBuilder.toString(), StandardCharsets.UTF_8);
- int socketTimeOut = httpConfig.getCkRealTimeAccountSocketTimeOut();
- return httpClientService.httpGet(queryURL + URLEncodedUtils.format(values, "utf-8"), socketTimeOut);
- }
- /**
- *
- * @param param
- * @Description 获取Entity推荐全网占比统计信息
- * @author wanghao
- * @date 2022/7/7 15:49
- */
- @Override
- public BaseResult getMessageInfo(String param) {
- BaseResult baseResult = null;
- Map<String, String> data = Maps.newHashMap();
- Map<String, Object> statistics = Maps.newLinkedHashMap();
- Stopwatch watch = Stopwatch.createStarted();
- String timeInterval = getTimeInterval(-24 * 60);
- String datasource = "session_record";
- if (EntityQueryType.TOPSERVERIP.getType().equalsIgnoreCase(param)) {
- String message = getTopServerIpMessage(timeInterval, datasource);
- data.put("message", message);
- } else if (EntityQueryType.TOPSNI.getType().equalsIgnoreCase(param)) {
- String message = getTopSniMessage(timeInterval, datasource);
- data.put("message", message);
- }
- statistics.put("elapsed", watch.elapsed(TimeUnit.MILLISECONDS));
- baseResult = BaseResultGenerator.success("ok", data, statistics);
- return baseResult;
- }
-
- private String getTopServerIpMessage(String timeInterval, String datasource) {
- String tcpSessionSql = String.format(Objects.requireNonNull(env.getProperty("ENTITY_TCP_SESSION")), datasource, timeInterval,
- entityConfigSource.getTopServerIPByTCPSession());
- List<Map> tcpSessionResult = getData(queryService.executeQuery(queryBuild(tcpSessionSql)));
-
- String tcpClientIpsSql = String.format(Objects.requireNonNull(env.getProperty("TOP_ENTITY_TCP_UNIQ_CLIENT_IPS")), datasource, timeInterval,
- datasource, timeInterval, entityConfigSource.getTopServerIPByTCPUniqClientIP());
- List<Map> tcpClientIpsResult = getData(queryService.executeQuery(queryBuild(tcpClientIpsSql)));
-
- String udpSessionSql = String.format(Objects.requireNonNull(env.getProperty("ENTITY_UDP_SESSION")), datasource, timeInterval,
- entityConfigSource.getTopServerIPByUDPBySession());
- List<Map> udpSessionResult = getData(queryService.executeQuery(queryBuild(udpSessionSql)));
-
- String udpClientIpsSql = String.format(Objects.requireNonNull(env.getProperty("TOP_ENTITY_UDP_UNIQ_CLIENT_IPS")), datasource, timeInterval,
- datasource, timeInterval, entityConfigSource.getTopServerIPByUDPUniqClientIP());
- List<Map> udpClientIpsResult = getData(queryService.executeQuery(queryBuild(udpClientIpsSql)));
-
- String tcpTotalSessionSql = String.format(Objects.requireNonNull(env.getProperty("TOTAL_ENTITY_TCP_SESSION")), datasource, timeInterval);
- List<Map> tcpTotalSessionResult = getData(queryService.executeQuery(queryBuild(tcpTotalSessionSql)));
-
- String tcpTotalClientIpsSql = String.format(Objects.requireNonNull(env.getProperty("TOTAL_ENTITY_TCP_UNIQ_CLIENT_IPS")), datasource, timeInterval);
- List<Map> tcpTotalClientIpResult = getData(queryService.executeQuery(queryBuild(tcpTotalClientIpsSql)));
-
- String udpTotalSessionSql = String.format(Objects.requireNonNull(env.getProperty("TOTAL_ENTITY_UDP_SESSION")), datasource, timeInterval);
- List<Map> udpTotalSessionResult = getData(queryService.executeQuery(queryBuild(udpTotalSessionSql)));
-
- String udpTotalClientIpsSql = String.format(Objects.requireNonNull(env.getProperty("TOTAL_ENTITY_UDP_UNIQ_CLIENT_IPS")), datasource, timeInterval);
- List<Map> udpTotalClientIpsResult = getData(queryService.executeQuery(queryBuild(udpTotalClientIpsSql)));
-
- long tcpSessions = 0;
- long tcpClientIps = 0;
- long udpSessions = 0;
- long udpClientIps = 0;
- long tcpTotalSessions = StringUtil.isEmpty(tcpTotalSessionResult.get(0).get("sessions")) ? 0 : Long.parseLong(tcpTotalSessionResult.get(0).get("sessions").toString());
- long tcpTotalClientIps = StringUtil.isEmpty(tcpTotalClientIpResult.get(0).get("uniq_client_ips")) ? 0 : Long.parseLong(tcpTotalClientIpResult.get(0).get("uniq_client_ips").toString());
- long udpTotalSessions = StringUtil.isEmpty(udpTotalSessionResult.get(0).get("sessions")) ? 0 : Long.parseLong(udpTotalSessionResult.get(0).get("sessions").toString());
- long udpTotalClientIps = StringUtil.isEmpty(udpTotalClientIpsResult.get(0).get("uniq_client_ips")) ? 0 : Long.parseLong(udpTotalClientIpsResult.get(0).get("uniq_client_ips").toString());
- for (Map item : tcpSessionResult) {
- tcpSessions += StringUtil.isEmpty(item.get("sessions")) ? 0 : Long.parseLong(item.get("sessions").toString());
- }
- for (Map item : tcpClientIpsResult) {
- tcpClientIps += StringUtil.isEmpty(item.get("client_ips")) ? 0 : Long.parseLong(item.get("client_ips").toString());
- }
- for (Map item : udpSessionResult) {
- udpSessions += StringUtil.isEmpty(item.get("sessions")) ? 0 : Long.parseLong(item.get("sessions").toString());
- }
- for (Map item : udpClientIpsResult) {
- udpClientIps += StringUtil.isEmpty(item.get("client_ips")) ? 0 : Long.parseLong(item.get("client_ips").toString());
- }
-
- String message = StrUtil.format("Top {} Server IPs, {} of TCP sessions; " +
- "Top {} Server IPs, {} of TCP uniq client ips; " +
- "Top {} Server IPs, {} of UDP(53,443) sessions; " +
- "Top {} Server IPs, {} of UDP uniq client ips",
- entityConfigSource.getTopServerIPByTCPSession(), tcpTotalSessions == 0 ? "-" : NumberUtil.decimalFormat("#.##%", (tcpSessions * 1.0) / tcpTotalSessions),
- entityConfigSource.getTopServerIPByTCPUniqClientIP(), tcpTotalClientIps == 0 ? "-" : NumberUtil.decimalFormat("#.##%", (tcpClientIps * 1.0) / tcpTotalClientIps),
- entityConfigSource.getTopServerIPByUDPBySession(), udpTotalSessions == 0 ? "-" : NumberUtil.decimalFormat("#.##%", (udpSessions * 1.0) / udpTotalSessions),
- entityConfigSource.getTopServerIPByUDPUniqClientIP(), udpTotalClientIps == 0 ? "-" : NumberUtil.decimalFormat("#.##%", (udpClientIps * 1.0) / udpTotalClientIps));
- return message;
- }
-
- private String getTopSniMessage(String timeInterval, String datasource) {
- String topSniSql = String.format(Objects.requireNonNull(env.getProperty("ENTITY_TOP_SNI")), datasource, timeInterval,
- entityConfigSource.getTopSNIDefaultSize());
- List<Map> topSniResult = getData(queryService.executeQuery(queryBuild(topSniSql)));
-
- String totalSniSql = String.format(Objects.requireNonNull(env.getProperty("ENTITY_SNI_TOTAL")), datasource, timeInterval);
- List<Map> totalSniResult = getData(queryService.executeQuery(queryBuild(totalSniSql)));
-
- int cdnCount = 0;
- long topSniBytes = 0;
- long topSniSessions = 0;
- long totalSniBytes = StringUtil.isEmpty(totalSniResult.get(0).get("bytes")) ? 0 : Long.parseLong(totalSniResult.get(0).get("bytes").toString());
- long totalSniSessions = StringUtil.isEmpty(totalSniResult.get(0).get("sessions")) ? 0 : Long.parseLong(totalSniResult.get(0).get("sessions").toString());
- Map<String, List<String>> cdnMap = entityService.buildDomainHashTable(getCfgCDN());
- for (Map item : topSniResult) {
- if (entityService.inCDN(cdnMap, String.valueOf(item.get("ssl_sni")))) {
- cdnCount++;
- }
- topSniBytes += StringUtil.isEmpty(item.get("bytes")) ? 0 : Long.parseLong(item.get("bytes").toString());
- topSniSessions += StringUtil.isEmpty(item.get("sessions")) ? 0 : Long.parseLong(item.get("sessions").toString());
- }
-
- String message = StrUtil.format("Top {} SNI, {} of ssl bytes, {} of ssl sessions, {} popular CDNs",
- entityConfigSource.getTopSNIDefaultSize(),
- totalSniBytes == 0 ? "-" : NumberUtil.decimalFormat("#.##%", (topSniBytes * 1.0) / totalSniBytes),
- totalSniSessions == 0 ? "-" : NumberUtil.decimalFormat("#.##%", (topSniSessions * 1.0) / totalSniSessions),
- cdnCount == 0 ? "-" : cdnCount);
-
- return message;
- }
-
-
- private String getTimeInterval(int defaultInterval) {
- Date now = new Date();
- Date someMinute = DateUtils.getSomeMinute(now, defaultInterval);
- String end = DateUtils.getFormatDate(now, DateUtils.YYYY_MM_DD_HH24_MM_SS);
- String start = DateUtils.getFormatDate(someMinute, DateUtils.YYYY_MM_DD_HH24_MM_SS);
- List<String> list = Lists.newArrayList(start.concat("/").concat(end));
- StringBuilder timeInterval = new StringBuilder();
- String[] intervals = getIntervals(list);
- timeInterval.append("recv_time >= UNIX_TIMESTAMP('").append(intervals[0]).append("') AND recv_time< UNIX_TIMESTAMP('").append(intervals[1]).append("')");
- return timeInterval.toString();
- }
-
- private String[] getIntervals(List<String> intervals) {
- return intervals.get(0).split("/");
- }
public List<Map> getData(BaseResult baseResult) {
List<Map> dataList = (List<Map>) baseResult.getData();
return dataList;
}
- private Map<String, List<String>> getCfgCDN() {
- Object codeInfo = metadataService.getCfg("public_code_info.json");
- if (StringUtil.isNotEmpty(codeInfo)) {
- Map<String, Map<String, List<String>>> data = JSON.parseObject(codeInfo.toString(), Map.class);
- return Maps.newHashMap(data.get("CDN"));
- }
- return Maps.newHashMap();
- }
-
- private Map<String, Object> generateDatasets(String category) {
- List<String> categorylist = Lists.newLinkedList();
- List<Record> list = Db.find(String.format(Objects.requireNonNull(env.getProperty("SQL_DATASETS_CATEGORY"))));
- list.forEach(record -> categorylist.add(String.valueOf(record.getColumns().get("category"))));
- if (CollectionUtil.isNotEmpty(categorylist) && StrUtil.isNotBlank(category) && !categorylist.contains(category)) {
- throw new QGWBusinessException(HttpStatus.HTTP_BAD_REQUEST, ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.DIAGNOSIS_CATEGORY_ERROR));
- }
- return sqlDatasetService.getDatasets(Lists.newArrayList(), category, null);
- }
-
- private Map<String, Map<String,String>> buildResult(String executeEngine,List<String> list,boolean isSaved, String option) {
- Map<String, Map<String, String>> resultMap = Maps.newLinkedHashMap();
- Map<String, String> infoMap = Maps.newLinkedHashMap();
- FileWriter writer = null;
- String benchFileName = executeEngine + "_queries_" + DateUtils.getCurrentDate(DateUtils.YYYYMMDD) + ".sql";
- if (isSaved && DiagnosisOptionEnum.EXECUTION.getValue().equalsIgnoreCase(option)) {
- File file = new File("benchmark" + File.separator + benchFileName + "." + IdUtil.simpleUUID() + ".dat");
- writer = FileWriter.create(file);
- }
- int index = 0;
- for (String line : list) {
- index++;
- Stopwatch watch = Stopwatch.createStarted();
- BaseResult queryResult = sqlDatasetService.getPreview(line, option);
- if (StringUtil.isEmpty(queryResult.getStatistics())) {
- Map<String, Object> statistics = Maps.newLinkedHashMap();
- statistics.put("elapsed", watch.elapsed(TimeUnit.MILLISECONDS));
- queryResult.setStatistics(statistics);
- }
- processQueryResult(benchFileName, infoMap, writer, index, queryResult);
- }
- resultMap.put(benchFileName, buildQueryTimeMetric(infoMap));
- return resultMap;
- }
-
- private Map<String, String> buildQueryTimeMetric(Map<String, String> infoMap) {
- if (StringUtil.isNotEmpty(infoMap)) {
- double[] values = infoMap.values().stream().mapToDouble(value -> Double.valueOf(value)).sorted().toArray();
- infoMap.put("Min", String.valueOf(CommonUtil.round(StatUtils.min(values),2)));
- infoMap.put("Mean", String.valueOf(CommonUtil.round(StatUtils.mean(values),2)));
- infoMap.put("Median", String.valueOf(CommonUtil.round(StatUtils.percentile(values, 50),2)));
- infoMap.put("P95", String.valueOf(CommonUtil.round(StatUtils.percentile(values, 95),2)));
- infoMap.put("P99", String.valueOf(CommonUtil.round(StatUtils.percentile(values, 99),2)));
- infoMap.put("MAX", String.valueOf(CommonUtil.round(StatUtils.max(values),2)));
- }
- return infoMap;
- }
-
- private void processQueryResult(String name, Map<String, String> infoMap, FileWriter writer, int index, BaseResult queryResult) {
- int statusCode = Integer.parseInt(queryResult.getStatus().toString());
- if (statusCode == ResultStatusEnum.SUCCESS.getCode()) {
- if (writer != null) {
- writer.append("--Query" + index + "\t" + "Elapsed(ms):" + queryResult.getStatistics().get("elapsed") + "\n");
- writer.append(queryResult.getMeta() + "\n");
- writer.append(queryResult.getData() + "\n");
- }
- infoMap.put("Query" + index, queryResult.getStatistics().get("elapsed") + "");
- } else if (statusCode == ResultStatusEnum.GATEWAY_TIMEOUT.getCode() || statusCode == ResultStatusEnum.BAD_GATEWAY.getCode()) {
- infoMap.put("Query" + index, "status:" + queryResult.getStatus() + ",message:" + queryResult.getMessage());
-
- } else {
- throw new QGWBusinessException(statusCode, QGWErrorCode.SQL_EXECUTION_SERVER_EXCEPTION.getCode(),
- String.format(QGWErrorCode.SQL_EXECUTION_SERVER_EXCEPTION.getMessage(),name
- + ",Query" + index + "," + queryResult.getStatus() + "," + queryResult.getMessage()));
- }
- }
-
- private QueryProfile queryBuild(String sql) {
- return QueryProfile.builder().format(QueryFormatEnum.JSON.getValue()).query(sql).build();
- }
private Map<String, String> getPocSqlByDatasource(Optional<File> file, Integer queryNo) {
Map<String, String> infoMap = Maps.newLinkedHashMap();
@@ -694,91 +168,6 @@ DiagnosisServiceImpl implements DiagnosisService, EnvironmentAware {
/**
- * 对指定数据源dbName下的schema 与数据库的字段进行对比检查
- *
- * @param dbName
- * @param tables
- * @return
- */
- private Map<String, Map<String, String>> checkMetadataByDatasource(String dbName, List<String> tables) {
-
- Map<String, Map<String, String>> resultMap = Maps.newLinkedHashMap();
- Map<String, String> infoMap = Maps.newHashMap();
- for (String tableName : tables) {
- Schema schema = metadataService.getSchemaByName(tableName);
- if (!dbName.equals(schema.getNamespace())) continue;
- BaseResult baseResult = getBaseResultByEngine(QueryProfile.builder().option(QueryOptionEnum.REAL_TIME.getValue()).query("describe " + tableName).build());
- if (ResultStatusEnum.SUCCESS.getCode() != baseResult.getStatus()) {
- log.error("The Table[" + tableName + "] Structure query failed" + baseResult.getMessage());
- throw new QGWBusinessException(ResultStatusEnum.SERVER_ERROR.getCode(),
- QGWErrorCode.SQL_EXECUTION_SERVER_EXCEPTION.getCode(),
- String.format(QGWErrorCode.SQL_EXECUTION_SERVER_EXCEPTION.getMessage()," The Table[" + tableName + "] Structure query failed"));
- }
- List<Map<String, Object>> dbFields = JsonPath.read(JSON.toJSONString(baseResult.getData()), "$.fields.*");
-
- Map<String, Object> dbFieldMap = Maps.newHashMap();
- dbFields.stream().forEach(objectMap -> {
- dbFieldMap.put(objectMap.get("name").toString(), objectMap.get("type"));
- });
- List<Map<String, Object>> schemaFields = JsonPath.read(schema.toString(), "$.fields.*");
- Map<String, Object> schemaFieldMap = Maps.newHashMap();
- schemaFields.stream().forEach(objectMap -> {
- schemaFieldMap.put(objectMap.get("name").toString(), objectMap.get("type"));
- });
-
- MapDifference<String, Object> difference = Maps.difference(schemaFieldMap, dbFieldMap);
-
- Map<String, Object> entriesOnlyOnLeft = difference.entriesOnlyOnLeft();
-
- if (entriesOnlyOnLeft.size() > 0) {
- log.error("Schema Fields greater than Table Fields,Schema " + tableName + " exist fields :" + entriesOnlyOnLeft);
- throw new QGWBusinessException(ResultStatusEnum.SERVER_ERROR.getCode(),
- QGWErrorCode.SQL_QUERY_FEDERATION_EXCEPTION.getCode(),
- String.format(QGWErrorCode.SQL_QUERY_FEDERATION_EXCEPTION.getMessage(),
- " Schema Fields greater than Table Fields,Schema " + tableName + " exist fields " + entriesOnlyOnLeft));
-
- }
- Map<String, Object> entriesOnlyOnRight = difference.entriesOnlyOnRight();
- if (entriesOnlyOnRight.size() > 0) {
- log.warn("键只存在于右边Map的映射项:" + entriesOnlyOnRight);
- infoMap.put(tableName, entriesOnlyOnRight.toString());
- }
-
- }
- resultMap.put(METADATA_PREFIX + dbName, infoMap);
- return resultMap;
- }
-
- private void checkCKIndexKey(String dbName, List<String> tables) {
- for (String tableName : tables) {
- Schema schema = metadataService.getSchemaByName(tableName);
- if (!dbName.equals(schema.getNamespace())) continue;
- BaseResult indexKeyResult = getBaseResultByEngine(QueryProfile.builder().option(QueryOptionEnum.REAL_TIME.getValue()).query(String.format(Objects.requireNonNull(env.getProperty("TABLE_INDEX_KEY")), tableName, clickHouseHttpSource.getDbName())).build());
- if (ResultStatusEnum.SUCCESS.getCode() != indexKeyResult.getStatus()) {
- log.error("The Table[" + tableName + "] index key query failed" + indexKeyResult.getMessage());
- throw new QGWBusinessException(ResultStatusEnum.SERVER_ERROR.getCode(),
- QGWErrorCode.SQL_EXECUTION_SERVER_EXCEPTION.getCode(),
- String.format(QGWErrorCode.SQL_EXECUTION_SERVER_EXCEPTION.getMessage(), " The Table[" + tableName + "] index key query failed"));
- }
- List<Map<String, Object>> dataList = (List<Map<String, Object>>) indexKeyResult.getData();
- List<String> dbIndexKey = StringUtil.isEmpty(dataList.get(0)) ? Lists.newArrayList() : (List<String>) dataList.get(0).get("index_key");
- List<String> schemaIndexKey = metadataService.getIndexKey(tableName);
- if (schemaIndexKey.equals(dbIndexKey)) {
- continue;
- }
- log.error("{} schema index key inconsistent with DB, schema: {}, db: {}", tableName, schemaIndexKey, dbIndexKey);
- throw new QGWBusinessException(ResultStatusEnum.SERVER_ERROR.getCode(),
- QGWErrorCode.SCHEMA_WITCH_DB_INCONSISTENTY.getCode(),
- String.format(QGWErrorCode.SCHEMA_WITCH_DB_INCONSISTENTY.getMessage(),
- String.format("%s schema index key is: %s, db index key is: %s", tableName, schemaIndexKey, dbIndexKey)));
- }
- }
- private BaseResult getBaseResultByEngine(QueryProfile queryProfile) {
- return queryService.executeQuery(queryProfile);
- }
-
-
- /**
* 对指定的数据源dbName下的schema进行静态检查
*
* @param dbName
@@ -789,9 +178,8 @@ DiagnosisServiceImpl implements DiagnosisService, EnvironmentAware {
Map<String, Map<String, String>> resultMap = Maps.newLinkedHashMap();
Map<String, String> infoMap = Maps.newHashMap();
for (String tableName : tables) {
- Schema schema = metadataService.getSchemaByName(tableName);
- Map schemaMap = JSON.parseObject(schema.toString(), Map.class);
- if (!dbName.equals(schema.getNamespace())) continue;
+ LinkedHashMap<String, Object> schemaMap = databaseService.getSchemaByName(tableName);
+ if (!dbName.equals(schemaMap.get("namespace"))) continue;
List<Map<String, Object>> fields = (List<Map<String, Object>>) schemaMap.get("fields");
infoMap.put(tableName, fields.size() + " Fields");
}
diff --git a/src/main/java/com/mesalab/qgw/service/impl/DslServiceImpl.java b/src/main/java/com/mesalab/qgw/service/impl/DslServiceImpl.java
index 294b4029..0f61db36 100644
--- a/src/main/java/com/mesalab/qgw/service/impl/DslServiceImpl.java
+++ b/src/main/java/com/mesalab/qgw/service/impl/DslServiceImpl.java
@@ -11,32 +11,43 @@ import com.geedgenetworks.utils.StringUtil;
import com.google.common.base.Splitter;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
+import com.mesalab.cn.enums.RangeTypeEnum;
import com.mesalab.common.entity.BaseResult;
import com.mesalab.common.entity.BaseResultGenerator;
-import com.mesalab.common.enums.ResultCodeEnum;
-import com.mesalab.common.enums.ResultStatusEnum;
+import com.mesalab.common.enums.OutputMode;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
+import com.mesalab.common.enums.QueryOption;
import com.mesalab.common.exception.BusinessException;
import com.mesalab.common.utils.TreeUtils;
-import com.mesalab.network.common.Constants;
-import com.mesalab.network.model.protocol.ProtocolTree;
-import com.mesalab.qgw.model.basic.DSLProfile;
-import com.mesalab.qgw.model.basic.QueryProfile;
-import com.mesalab.qgw.service.DslService;
-import com.mesalab.qgw.service.MetadataService;
-import com.mesalab.qgw.service.QueryService;
+import com.mesalab.knowledge.entity.*;
+import com.mesalab.knowledge.enums.MatchEnum;
+import com.mesalab.knowledge.enums.SortEnum;
+import com.mesalab.knowledge.service.KnowledgeService;
+import com.mesalab.qgw.constant.dsl.LiveChartConstants;
+import com.mesalab.qgw.model.dsl.LiveChartProtocol;
+import com.mesalab.qgw.constant.DslIdentifierNameConst;
+import com.mesalab.common.exception.CommonErrorCode;
+import com.mesalab.qgw.model.basic.DSLQueryContext;
+import com.mesalab.qgw.model.basic.SQLQueryContext;
+import com.mesalab.qgw.service.DSLService;
+import com.mesalab.qgw.service.DatabaseService;
+import com.mesalab.qgw.service.SQLSyncQueryService;
import com.mesalab.services.common.property.SqlPropertySourceFactory;
import net.sf.jsqlparser.JSQLParserException;
import net.sf.jsqlparser.expression.Expression;
import net.sf.jsqlparser.expression.ExpressionVisitorAdapter;
+import net.sf.jsqlparser.expression.LongValue;
import net.sf.jsqlparser.expression.StringValue;
-import net.sf.jsqlparser.expression.operators.relational.ExpressionList;
-import net.sf.jsqlparser.expression.operators.relational.InExpression;
-import net.sf.jsqlparser.expression.operators.relational.ItemsList;
+import net.sf.jsqlparser.expression.operators.relational.*;
import net.sf.jsqlparser.parser.CCJSqlParserUtil;
+import net.sf.jsqlparser.statement.Statement;
+import net.sf.jsqlparser.statement.select.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.PropertySource;
import org.springframework.core.env.Environment;
import org.springframework.stereotype.Service;
+import org.springframework.util.CollectionUtils;
+import org.springframework.web.bind.annotation.RequestBody;
import java.math.BigDecimal;
import java.util.*;
@@ -51,61 +62,80 @@ import java.util.stream.Collectors;
*/
@Service
@PropertySource(value = "classpath:dsl-sql-template.sql", factory = SqlPropertySourceFactory.class)
-public class DslServiceImpl implements DslService {
+public class DslServiceImpl implements DSLService {
private static final Log log = LogFactory.get();
@Autowired
Environment environment;
@Autowired
- MetadataService metadataService;
+ DatabaseService databaseService;
@Autowired
- QueryService queryService;
+ SQLSyncQueryService sqlSyncQueryService;
+ @Autowired
+ KnowledgeService knowledgeService;
private static final String TABLE_TRAFFIC_GENERAL_STAT = "traffic_general_stat";
private static final String TABLE_APPLICATION_PROTOCOL_STAT = "application_protocol_stat";
private static final String TABLE_SESSION_RECORD = "session_record";
- private static final String TABLE_RELATION_ACCOUNT_FRAMEDIP = "relation_account_framedip";
- private static final String TABLE_GTPC_KNOWLEDGE_BASE = "gtpc_knowledge_base";
+
private static final String LOGICAL_TYPE_TIMESTAMP = "timestamp";
private static final String LOGICAL_TYPE_UNIX_TIMESTAMP = "unix_timestamp";
+ @Override
+ public BaseResult execDsl(@RequestBody DSLQueryContext dslProfile, boolean isDryRun) {
+ log.info("HTTP REST DSL, params is: {}", dslProfile);
+ if (DslIdentifierNameConst.APPLICATION_AND_PROTOCOL_SUMMARY.equals(dslProfile.getName())) {
+ return appAndProtocolSummary(dslProfile, isDryRun);
+ } else if (DslIdentifierNameConst.APPLICATION_AND_PROTOCOL_TREE_COMPOSITION.equals(dslProfile.getName())) {
+ return appAndProtocolTreeComposition(dslProfile, isDryRun);
+ } else if (DslIdentifierNameConst.APPLICATION_AND_PROTOCOL_TREE_THROUGHPUT.equals(dslProfile.getName())) {
+ return applicationAndProtocolTreeThroughput(dslProfile, isDryRun);
+ } else if (DslIdentifierNameConst.APPLICATION_AND_PROTOCOL_TOP_APPS.equals(dslProfile.getName())) {
+ return applicationAndProtocolTopApp(dslProfile, isDryRun);
+ } else if (DslIdentifierNameConst.APPLICATION_AND_PROTOCOL_APP_RELATED_INTERNAL_IPS.equals(dslProfile.getName())) {
+ return applicationAndProtocolAppRelatedInternalIps(dslProfile, isDryRun);
+ } else if (DslIdentifierNameConst.APPLICATION_AND_PROTOCOL_APP_THROUGHPUT.equals(dslProfile.getName())) {
+ return applicationAndProtocolAppThroughput(dslProfile, isDryRun);
+ } else if (DslIdentifierNameConst.APPLICATION_AND_PROTOCOL_APP_SUMMARY.equals(dslProfile.getName())) {
+ return applicationAndProtocolAppSummary(dslProfile, isDryRun);
+ } else if (DslIdentifierNameConst.IP_LEARNING_FQDN_RELATE_IP.equals(dslProfile.getName())) {
+ return ipLearningFqdnRelateIp(dslProfile, isDryRun);
+ } else if (DslIdentifierNameConst.IP_LEARNING_ACTIVE_IP.equals(dslProfile.getName())) {
+ return ipLearningActiveIp(dslProfile, isDryRun);
+ }
+ throw new BusinessException(HttpStatusCodeEnum.NOT_FOUND.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), dslProfile.getName()));
+ }
@Override
- public BaseResult appAndProtocolSummary(DSLProfile dslProfile) {
- BaseResult asymmetricFlowsResult = queryService.executeQuery(
- QueryProfile.builder().query(dslProfile.toSql(environment.getProperty("APPLICATION_AND_PROTOCOL_SUMMARY_ASYMMETRIC_FLOWS_STAT"), TABLE_TRAFFIC_GENERAL_STAT, metadataService.getPartitionKey(TABLE_TRAFFIC_GENERAL_STAT), LOGICAL_TYPE_TIMESTAMP)).build());
+ public BaseResult appAndProtocolSummary(DSLQueryContext dslProfile, boolean isDryRun) {
+ String queryOption = isDryRun ? QueryOption.SYNTAX_VALIDATION.getValue() : QueryOption.REAL_TIME.getValue();
+ BaseResult asymmetricFlowsResult = sqlSyncQueryService.executeQuery(
+ SQLQueryContext.builder().originalSQL(dslProfile.toSql(environment.getProperty("APPLICATION_AND_PROTOCOL_SUMMARY_ASYMMETRIC_FLOWS_STAT"), TABLE_TRAFFIC_GENERAL_STAT, databaseService.getPartitionKey(TABLE_TRAFFIC_GENERAL_STAT), LOGICAL_TYPE_TIMESTAMP)).option(queryOption).build());
if (!asymmetricFlowsResult.isSuccess()) {
- throw new BusinessException(asymmetricFlowsResult.getStatus(), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(), String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(), asymmetricFlowsResult.getMessage()));
+ throw new BusinessException(asymmetricFlowsResult.getStatus(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), asymmetricFlowsResult.getMessage()));
}
- BaseResult networkTrafficStatResult = queryService.executeQuery(
- QueryProfile.builder().query(dslProfile.toSql(environment.getProperty("APPLICATION_AND_PROTOCOL_SUMMARY_TRAFFIC_STAT"), TABLE_APPLICATION_PROTOCOL_STAT, metadataService.getPartitionKey(TABLE_APPLICATION_PROTOCOL_STAT), LOGICAL_TYPE_TIMESTAMP)).build());
+ BaseResult networkTrafficStatResult = sqlSyncQueryService.executeQuery(
+ SQLQueryContext.builder().originalSQL(dslProfile.toSql(environment.getProperty("APPLICATION_AND_PROTOCOL_SUMMARY_TRAFFIC_STAT"), TABLE_APPLICATION_PROTOCOL_STAT, databaseService.getPartitionKey(TABLE_APPLICATION_PROTOCOL_STAT), LOGICAL_TYPE_TIMESTAMP)).option(queryOption).build());
if (!networkTrafficStatResult.isSuccess()) {
- throw new BusinessException(networkTrafficStatResult.getStatus(), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(), String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(), networkTrafficStatResult.getMessage()));
+ throw new BusinessException(networkTrafficStatResult.getStatus(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), networkTrafficStatResult.getMessage()));
}
- BaseResult networkUniqueClientIpStatResult = queryService.executeQuery(
- QueryProfile.builder().query(dslProfile.toSql(environment.getProperty("APPLICATION_AND_PROTOCOL_SUMMARY_UNIQUE_CLIENT_IP_STAT"), TABLE_APPLICATION_PROTOCOL_STAT, metadataService.getPartitionKey(TABLE_APPLICATION_PROTOCOL_STAT), LOGICAL_TYPE_TIMESTAMP)).build());
- if (!networkUniqueClientIpStatResult.isSuccess()) {
- throw new BusinessException(networkUniqueClientIpStatResult.getStatus(), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(), String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(), networkUniqueClientIpStatResult.getMessage()));
- }
-
- BaseResult networkTcpStatResult = queryService.executeQuery(
- QueryProfile.builder().query(dslProfile.toSql(environment.getProperty("APPLICATION_AND_PROTOCOL_SUMMARY_TCP_STAT"), TABLE_APPLICATION_PROTOCOL_STAT, metadataService.getPartitionKey(TABLE_APPLICATION_PROTOCOL_STAT), LOGICAL_TYPE_TIMESTAMP)).build());
+ BaseResult networkTcpStatResult = sqlSyncQueryService.executeQuery(
+ SQLQueryContext.builder().originalSQL(dslProfile.toSql(environment.getProperty("APPLICATION_AND_PROTOCOL_SUMMARY_TCP_STAT"), TABLE_APPLICATION_PROTOCOL_STAT, databaseService.getPartitionKey(TABLE_APPLICATION_PROTOCOL_STAT), LOGICAL_TYPE_TIMESTAMP)).option(queryOption).build());
if (!networkTcpStatResult.isSuccess()) {
- throw new BusinessException(networkTcpStatResult.getStatus(), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(), String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(), networkTcpStatResult.getMessage()));
+ throw new BusinessException(networkTcpStatResult.getStatus(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), networkTcpStatResult.getMessage()));
}
- BaseResult networkAppStatResult = queryService.executeQuery(
- QueryProfile.builder().query(dslProfile.toSql(environment.getProperty("APPLICATION_AND_PROTOCOL_SUMMARY_APP_STAT"), TABLE_APPLICATION_PROTOCOL_STAT, metadataService.getPartitionKey(TABLE_APPLICATION_PROTOCOL_STAT), LOGICAL_TYPE_TIMESTAMP)).build());
+ BaseResult networkAppStatResult = sqlSyncQueryService.executeQuery(
+ SQLQueryContext.builder().originalSQL(dslProfile.toSql(environment.getProperty("APPLICATION_AND_PROTOCOL_SUMMARY_APP_STAT"), TABLE_APPLICATION_PROTOCOL_STAT, databaseService.getPartitionKey(TABLE_APPLICATION_PROTOCOL_STAT), LOGICAL_TYPE_TIMESTAMP)).option(queryOption).build());
if (!networkAppStatResult.isSuccess()) {
- throw new BusinessException(networkAppStatResult.getStatus(), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(), String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(), networkAppStatResult.getMessage()));
+ throw new BusinessException(networkAppStatResult.getStatus(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), networkAppStatResult.getMessage()));
}
List<Map> asymmetricFlowsData = (List<Map>) asymmetricFlowsResult.getData();
List<Map> trafficStatData = (List<Map>) networkTrafficStatResult.getData();
- List<Map> uniqueClientIpStatData = (List<Map>) networkUniqueClientIpStatResult.getData();
List<Map> tcpData = (List<Map>) networkTcpStatResult.getData();
List<Map> appData = (List<Map>) networkAppStatResult.getData();
@@ -117,9 +147,6 @@ public class DslServiceImpl implements DslService {
if (!trafficStatData.isEmpty()) {
data.putAll(trafficStatData.get(0));
}
- if (!uniqueClientIpStatData.isEmpty()) {
- data.putAll(uniqueClientIpStatData.get(0));
- }
if (!tcpData.isEmpty()) {
data.putAll(tcpData.get(0));
}
@@ -128,94 +155,89 @@ public class DslServiceImpl implements DslService {
}
long asymmetricFlows = Long.parseLong(String.valueOf(
- data.get(Constants.NETWORK_OVERVIEW_METRIC_ASYMMETRIC_FLOWS_BYTES) == null ? 0 : data.get(Constants.NETWORK_OVERVIEW_METRIC_ASYMMETRIC_FLOWS_BYTES)));
+ data.get(LiveChartConstants.NETWORK_OVERVIEW_METRIC_ASYMMETRIC_FLOWS_BYTES) == null ? 0 : data.get(LiveChartConstants.NETWORK_OVERVIEW_METRIC_ASYMMETRIC_FLOWS_BYTES)));
long totalSessionUseOnAsymmetricFlows = Long.parseLong(String.valueOf(
- data.get(Constants.NETWORK_OVERVIEW_METRIC_TOTAL_BYTES_USE_ON_ASYMMETRIC_FLOWS) == null ? 0 : data.get(Constants.NETWORK_OVERVIEW_METRIC_TOTAL_BYTES_USE_ON_ASYMMETRIC_FLOWS)));
+ data.get(LiveChartConstants.NETWORK_OVERVIEW_METRIC_TOTAL_BYTES_USE_ON_ASYMMETRIC_FLOWS) == null ? 0 : data.get(LiveChartConstants.NETWORK_OVERVIEW_METRIC_TOTAL_BYTES_USE_ON_ASYMMETRIC_FLOWS)));
- long uniqueClientIp = Long.parseLong(String.valueOf(
- data.get(Constants.NETWORK_OVERVIEW_METRIC_UNIQ_CLIENT_IP) == null ? 0 : data.get(Constants.NETWORK_OVERVIEW_METRIC_UNIQ_CLIENT_IP)));
long totalSessions = Long.parseLong(String.valueOf(
- data.get(Constants.NETWORK_OVERVIEW_METRIC_TOTAL_SESSIONS) == null ? 0 : data.get(Constants.NETWORK_OVERVIEW_METRIC_TOTAL_SESSIONS)));
+ data.get(LiveChartConstants.NETWORK_OVERVIEW_METRIC_TOTAL_SESSIONS) == null ? 0 : data.get(LiveChartConstants.NETWORK_OVERVIEW_METRIC_TOTAL_SESSIONS)));
long totalBytes = Long.parseLong(String.valueOf(
- data.get(Constants.NETWORK_OVERVIEW_METRIC_TOTAL_BYTES) == null ? 0 : data.get(Constants.NETWORK_OVERVIEW_METRIC_TOTAL_BYTES)));
+ data.get(LiveChartConstants.NETWORK_OVERVIEW_METRIC_TOTAL_BYTES) == null ? 0 : data.get(LiveChartConstants.NETWORK_OVERVIEW_METRIC_TOTAL_BYTES)));
long totalPackets = Long.parseLong(String.valueOf(
- data.get(Constants.NETWORK_OVERVIEW_METRIC_TOTAL_PACKETS) == null ? 0 : data.get(Constants.NETWORK_OVERVIEW_METRIC_TOTAL_PACKETS)));
+ data.get(LiveChartConstants.NETWORK_OVERVIEW_METRIC_TOTAL_PACKETS) == null ? 0 : data.get(LiveChartConstants.NETWORK_OVERVIEW_METRIC_TOTAL_PACKETS)));
long fragmentationPackets = Long.parseLong(String.valueOf(
- data.get(Constants.NETWORK_OVERVIEW_METRIC_FRAGMENTATION_PACKETS) == null ? 0 : data.get(Constants.NETWORK_OVERVIEW_METRIC_FRAGMENTATION_PACKETS)));
+ data.get(LiveChartConstants.NETWORK_OVERVIEW_METRIC_FRAGMENTATION_PACKETS) == null ? 0 : data.get(LiveChartConstants.NETWORK_OVERVIEW_METRIC_FRAGMENTATION_PACKETS)));
long dataRate = Long.parseLong(String.valueOf(
- data.get(Constants.NETWORK_OVERVIEW_METRIC_DATA_RATE) == null ? 0 : data.get(Constants.NETWORK_OVERVIEW_METRIC_DATA_RATE)));
+ data.get(LiveChartConstants.NETWORK_OVERVIEW_METRIC_DATA_RATE) == null ? 0 : data.get(LiveChartConstants.NETWORK_OVERVIEW_METRIC_DATA_RATE)));
long tcpRetransmissionsPackets = Long.parseLong(String.valueOf(
- data.get(Constants.NETWORK_OVERVIEW_METRIC_TCP_RETRANSMISSIONS_PACKETS) == null ? 0 : data.get(Constants.NETWORK_OVERVIEW_METRIC_TCP_RETRANSMISSIONS_PACKETS)));
+ data.get(LiveChartConstants.NETWORK_OVERVIEW_METRIC_TCP_RETRANSMISSIONS_PACKETS) == null ? 0 : data.get(LiveChartConstants.NETWORK_OVERVIEW_METRIC_TCP_RETRANSMISSIONS_PACKETS)));
long tcpTotalPackets = Long.parseLong(String.valueOf(
- data.get(Constants.NETWORK_OVERVIEW_METRIC_TCP_TOTAL_PACKETS) == null ? 0 : data.get(Constants.NETWORK_OVERVIEW_METRIC_TCP_TOTAL_PACKETS)));
+ data.get(LiveChartConstants.NETWORK_OVERVIEW_METRIC_TCP_TOTAL_PACKETS) == null ? 0 : data.get(LiveChartConstants.NETWORK_OVERVIEW_METRIC_TCP_TOTAL_PACKETS)));
long unknownAppBytes = Long.parseLong(String.valueOf(
- data.get(Constants.NETWORK_OVERVIEW_METRIC_UNKNOWN_APP_BYTES) == null ? 0 : data.get(Constants.NETWORK_OVERVIEW_METRIC_UNKNOWN_APP_BYTES)));
-
+ data.get(LiveChartConstants.NETWORK_OVERVIEW_METRIC_UNKNOWN_APP_BYTES) == null ? 0 : data.get(LiveChartConstants.NETWORK_OVERVIEW_METRIC_UNKNOWN_APP_BYTES)));
- String unknownAppPercent = totalBytes == 0 ? String.valueOf(0)
- : NumberUtil.div(BigDecimal.valueOf(unknownAppBytes), BigDecimal.valueOf(totalBytes), 4).toPlainString();
- String asymmetricFlowsPercent = totalSessionUseOnAsymmetricFlows == 0 ? String.valueOf(0)
- : NumberUtil.div(BigDecimal.valueOf(asymmetricFlows), BigDecimal.valueOf(totalSessionUseOnAsymmetricFlows), 4).toPlainString();
- String tcpRetransmissionsPercent = tcpTotalPackets == 0 ? String.valueOf(0)
- : NumberUtil.div(BigDecimal.valueOf(tcpRetransmissionsPackets), BigDecimal.valueOf(tcpTotalPackets), 4).toPlainString();
- String fragmentationPacketsPercent = totalPackets == 0 ? String.valueOf(0)
- : NumberUtil.div(BigDecimal.valueOf(fragmentationPackets), BigDecimal.valueOf(totalPackets), 4).toPlainString();
+ double unknownAppPercent = totalBytes == 0 ? 0.0
+ : NumberUtil.div(BigDecimal.valueOf(unknownAppBytes), BigDecimal.valueOf(totalBytes), 4).doubleValue();
+ double asymmetricFlowsPercent = totalSessionUseOnAsymmetricFlows == 0 ? 0.0
+ : NumberUtil.div(BigDecimal.valueOf(asymmetricFlows), BigDecimal.valueOf(totalSessionUseOnAsymmetricFlows), 4).doubleValue();
+ double tcpRetransmissionsPercent = tcpTotalPackets == 0 ? 0.0
+ : NumberUtil.div(BigDecimal.valueOf(tcpRetransmissionsPackets), BigDecimal.valueOf(tcpTotalPackets), 4).doubleValue();
+ double fragmentationPacketsPercent = totalPackets == 0 ? 0.0
+ : NumberUtil.div(BigDecimal.valueOf(fragmentationPackets), BigDecimal.valueOf(totalPackets), 4).doubleValue();
List result = new ArrayList<>();
Map resultMap = new LinkedHashMap<>();
- resultMap.put(Constants.NETWORK_OVERVIEW_METRIC_UNIQ_CLIENT_IP, uniqueClientIp);
- resultMap.put(Constants.NETWORK_OVERVIEW_METRIC_TOTAL_SESSIONS, totalSessions);
- resultMap.put(Constants.NETWORK_OVERVIEW_METRIC_DATA_RATE, dataRate);
- resultMap.put(Constants.NETWORK_OVERVIEW_METRIC_TOTAL_BYTES, totalBytes);
- resultMap.put(Constants.NETWORK_OVERVIEW_METRIC_TOTAL_PACKETS, totalPackets);
- resultMap.put(Constants.NETWORK_OVERVIEW_METRIC_UNKNOWN_APP_BYTES, unknownAppBytes);
- resultMap.put(Constants.NETWORK_OVERVIEW_METRIC_UNKNOWN_APP_PERCENT, unknownAppPercent);
- resultMap.put(Constants.NETWORK_OVERVIEW_METRIC_ASYMMETRIC_FLOWS_BYTES, asymmetricFlows);
- resultMap.put(Constants.NETWORK_OVERVIEW_METRIC_ASYMMETRIC_FLOWS_PERCENT, asymmetricFlowsPercent);
- resultMap.put(Constants.NETWORK_OVERVIEW_METRIC_TCP_RETRANSMISSIONS_PACKETS, tcpRetransmissionsPackets);
- resultMap.put(Constants.NETWORK_OVERVIEW_METRIC_TCP_RETRANSMISSIONS_PERCENT, tcpRetransmissionsPercent);
- resultMap.put(Constants.NETWORK_OVERVIEW_METRIC_FRAGMENTATION_PACKETS, fragmentationPackets);
- resultMap.put(Constants.NETWORK_OVERVIEW_METRIC_FRAGMENTATION_PERCENT, fragmentationPacketsPercent);
+ resultMap.put(LiveChartConstants.NETWORK_OVERVIEW_METRIC_TOTAL_SESSIONS, totalSessions);
+ resultMap.put(LiveChartConstants.NETWORK_OVERVIEW_METRIC_DATA_RATE, dataRate);
+ resultMap.put(LiveChartConstants.NETWORK_OVERVIEW_METRIC_TOTAL_BYTES, totalBytes);
+ resultMap.put(LiveChartConstants.NETWORK_OVERVIEW_METRIC_TOTAL_PACKETS, totalPackets);
+ resultMap.put(LiveChartConstants.NETWORK_OVERVIEW_METRIC_UNKNOWN_APP_BYTES, unknownAppBytes);
+ resultMap.put(LiveChartConstants.NETWORK_OVERVIEW_METRIC_UNKNOWN_APP_PERCENT, unknownAppPercent);
+ resultMap.put(LiveChartConstants.NETWORK_OVERVIEW_METRIC_ASYMMETRIC_FLOWS_BYTES, asymmetricFlows);
+ resultMap.put(LiveChartConstants.NETWORK_OVERVIEW_METRIC_ASYMMETRIC_FLOWS_PERCENT, asymmetricFlowsPercent);
+ resultMap.put(LiveChartConstants.NETWORK_OVERVIEW_METRIC_TCP_RETRANSMISSIONS_PACKETS, tcpRetransmissionsPackets);
+ resultMap.put(LiveChartConstants.NETWORK_OVERVIEW_METRIC_TCP_RETRANSMISSIONS_PERCENT, tcpRetransmissionsPercent);
+ resultMap.put(LiveChartConstants.NETWORK_OVERVIEW_METRIC_FRAGMENTATION_PACKETS, fragmentationPackets);
+ resultMap.put(LiveChartConstants.NETWORK_OVERVIEW_METRIC_FRAGMENTATION_PERCENT, fragmentationPacketsPercent);
result.add(resultMap);
Map statistics = asymmetricFlowsResult.getStatistics();
Map networkTrafficStatistics = networkTrafficStatResult.getStatistics();
- Map networkUniqueClientIpStatistics = networkUniqueClientIpStatResult.getStatistics();
Map networkTcpStatistics = networkTcpStatResult.getStatistics();
Map networkAppStatistics = networkAppStatResult.getStatistics();
for (Object key : statistics.keySet()) {
if ("result_rows".equals(key.toString())) {
continue;
}
- statistics.put(key, Long.parseLong(networkTrafficStatistics.get(key).toString()) + Long.parseLong(networkUniqueClientIpStatistics.get(key).toString()) + Long.parseLong(networkTcpStatistics.get(key).toString()) + Long.parseLong(networkAppStatistics.get(key).toString()));
+ statistics.put(key, Long.parseLong(networkTrafficStatistics.get(key).toString()) + Long.parseLong(networkTcpStatistics.get(key).toString()) + Long.parseLong(networkAppStatistics.get(key).toString()));
}
return BaseResultGenerator.success("ok", result, statistics);
}
@Override
- public BaseResult appAndProtocolTreeComposition(DSLProfile dslProfile) {
- String sql = dslProfile.toSql(environment.getProperty("APPLICATION_AND_PROTOCOL_TREE_COMPOSITION"), TABLE_APPLICATION_PROTOCOL_STAT, metadataService.getPartitionKey(TABLE_APPLICATION_PROTOCOL_STAT), LOGICAL_TYPE_TIMESTAMP);
- BaseResult baseResult = queryService.executeQuery(QueryProfile.builder().query(sql).build());
+ public BaseResult appAndProtocolTreeComposition(DSLQueryContext dslProfile, boolean isDryRun) {
+ String sql = dslProfile.toSql(environment.getProperty("APPLICATION_AND_PROTOCOL_TREE_COMPOSITION"), TABLE_APPLICATION_PROTOCOL_STAT, databaseService.getPartitionKey(TABLE_APPLICATION_PROTOCOL_STAT), LOGICAL_TYPE_TIMESTAMP);
+ BaseResult baseResult = sqlSyncQueryService.executeQuery(queryBuild(sql, isDryRun));
if (!baseResult.isSuccess()) {
- throw new BusinessException(baseResult.getStatus(), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(), String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(), baseResult.getMessage()));
+ throw new BusinessException(baseResult.getStatus(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), baseResult.getMessage()));
}
List<Map> resultList = (List<Map>) baseResult.getData();
if (StringUtil.isEmpty(resultList)) {
return BaseResultGenerator.success("ok", resultList, baseResult.getStatistics());
}
- List<ProtocolTree> protocolTreeList = buildHierarchicalStructure(buildFlatStructure(resultList));
- return BaseResultGenerator.success("ok", protocolTreeList, baseResult.getStatistics());
+ List<LiveChartProtocol> protocols = buildHierarchicalStructure(buildFlatStructure(resultList));
+ return BaseResultGenerator.success("ok", protocols, baseResult.getStatistics());
}
@Override
- public BaseResult applicationAndProtocolTreeThroughput(DSLProfile dslProfile) {
- String sql = dslProfile.toSql(environment.getProperty("APPLICATION_AND_PROTOCOL_TREE_THROUGHPUT"), TABLE_APPLICATION_PROTOCOL_STAT, metadataService.getPartitionKey(TABLE_APPLICATION_PROTOCOL_STAT), LOGICAL_TYPE_TIMESTAMP);
- BaseResult baseResult = queryService.executeQuery(QueryProfile.builder().query(sql).build());
+ public BaseResult applicationAndProtocolTreeThroughput(DSLQueryContext dslProfile, boolean isDryRun) {
+ String sql = dslProfile.toSql(environment.getProperty("APPLICATION_AND_PROTOCOL_TREE_THROUGHPUT"), TABLE_APPLICATION_PROTOCOL_STAT, databaseService.getPartitionKey(TABLE_APPLICATION_PROTOCOL_STAT), LOGICAL_TYPE_TIMESTAMP);
+ BaseResult baseResult = sqlSyncQueryService.executeQuery(queryBuild(sql, isDryRun));
if (!baseResult.isSuccess()) {
- throw new BusinessException(baseResult.getStatus(), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(), String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(), baseResult.getMessage()));
+ throw new BusinessException(baseResult.getStatus(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), baseResult.getMessage()));
}
List<Map<String, Object>> data = (List<Map<String, Object>>) baseResult.getData();
data.forEach(o -> {
@@ -228,87 +250,361 @@ public class DslServiceImpl implements DslService {
}
@Override
- public BaseResult applicationAndProtocolTopApp(DSLProfile dslProfile) {
+ public BaseResult applicationAndProtocolTopApp(DSLQueryContext dslProfile, boolean isDryRun) {
if (StrUtil.isEmpty(dslProfile.getGranularity())) {
dslProfile.setGranularity("PT15S");
}
- String sql = dslProfile.toSql(environment.getProperty("APPLICATION_AND_PROTOCOL_TOP_APPS"), TABLE_APPLICATION_PROTOCOL_STAT, metadataService.getPartitionKey(TABLE_APPLICATION_PROTOCOL_STAT), LOGICAL_TYPE_TIMESTAMP);
- BaseResult baseResult = queryService.executeQuery(QueryProfile.builder().query(sql).build());
+ String sql = dslProfile.toSql(environment.getProperty("APPLICATION_AND_PROTOCOL_TOP_APPS"), TABLE_APPLICATION_PROTOCOL_STAT, databaseService.getPartitionKey(TABLE_APPLICATION_PROTOCOL_STAT), LOGICAL_TYPE_TIMESTAMP);
+ BaseResult baseResult = sqlSyncQueryService.executeQuery(queryBuild(sql, isDryRun));
if (!baseResult.isSuccess()) {
- throw new BusinessException(baseResult.getStatus(), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(), String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(), baseResult.getMessage()));
+ throw new BusinessException(baseResult.getStatus(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), baseResult.getMessage()));
}
return baseResult;
}
@Override
- public BaseResult applicationAndProtocolAppRelatedInternalIps(DSLProfile dslProfile) {
- String sql = dslProfile.toSql(environment.getProperty("APPLICATION_AND_PROTOCOL_APP_RELATED_INTERNAL_IPS"), TABLE_SESSION_RECORD, metadataService.getPartitionKey(TABLE_SESSION_RECORD), LOGICAL_TYPE_UNIX_TIMESTAMP);
- BaseResult baseResult = queryService.executeQuery(QueryProfile.builder().query(sql).build());
+ public BaseResult applicationAndProtocolAppRelatedInternalIps(DSLQueryContext dslProfile, boolean isDryRun) {
+ String sql = dslProfile.toSql(environment.getProperty("APPLICATION_AND_PROTOCOL_APP_RELATED_INTERNAL_IPS"), TABLE_SESSION_RECORD, databaseService.getPartitionKey(TABLE_SESSION_RECORD), LOGICAL_TYPE_UNIX_TIMESTAMP);
+ BaseResult baseResult = sqlSyncQueryService.executeQuery(queryBuild(sql, isDryRun));
if (!baseResult.isSuccess()) {
- throw new BusinessException(baseResult.getStatus(), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(), String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(), baseResult.getMessage()));
+ throw new BusinessException(baseResult.getStatus(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), baseResult.getMessage()));
}
return baseResult;
}
@Override
- public BaseResult applicationAndProtocolAppThroughput(DSLProfile dslProfile) {
- String sql = dslProfile.toSql(environment.getProperty("APPLICATION_AND_PROTOCOL_APP_THROUGHPUT"), TABLE_APPLICATION_PROTOCOL_STAT, metadataService.getPartitionKey(TABLE_APPLICATION_PROTOCOL_STAT), LOGICAL_TYPE_TIMESTAMP);
- BaseResult baseResult = queryService.executeQuery(QueryProfile.builder().query(sql).build());
+ public BaseResult applicationAndProtocolAppThroughput(DSLQueryContext dslProfile, boolean isDryRun) {
+ String sql = dslProfile.toSql(environment.getProperty("APPLICATION_AND_PROTOCOL_APP_THROUGHPUT"), TABLE_APPLICATION_PROTOCOL_STAT, databaseService.getPartitionKey(TABLE_APPLICATION_PROTOCOL_STAT), LOGICAL_TYPE_TIMESTAMP);
+ BaseResult baseResult = sqlSyncQueryService.executeQuery(queryBuild(sql, isDryRun));
if (!baseResult.isSuccess()) {
- throw new BusinessException(baseResult.getStatus(), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(), String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(), baseResult.getMessage()));
+ throw new BusinessException(baseResult.getStatus(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), baseResult.getMessage()));
}
return baseResult;
}
@Override
- public BaseResult applicationAndProtocolAppSummary(DSLProfile dslProfile) {
- String sql = dslProfile.toSql(environment.getProperty("APPLICATION_AND_PROTOCOL_APP_SUMMARY"), TABLE_APPLICATION_PROTOCOL_STAT, metadataService.getPartitionKey(TABLE_APPLICATION_PROTOCOL_STAT), LOGICAL_TYPE_TIMESTAMP);
- BaseResult baseResult = queryService.executeQuery(QueryProfile.builder().query(sql).build());
+ public BaseResult applicationAndProtocolAppSummary(DSLQueryContext dslProfile, boolean isDryRun) {
+ String sql = dslProfile.toSql(environment.getProperty("APPLICATION_AND_PROTOCOL_APP_SUMMARY"), TABLE_APPLICATION_PROTOCOL_STAT, databaseService.getPartitionKey(TABLE_APPLICATION_PROTOCOL_STAT), LOGICAL_TYPE_TIMESTAMP);
+ BaseResult baseResult = sqlSyncQueryService.executeQuery(queryBuild(sql, isDryRun));
if (!baseResult.isSuccess()) {
- throw new BusinessException(baseResult.getStatus(), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(), String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(), baseResult.getMessage()));
+ throw new BusinessException(baseResult.getStatus(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), baseResult.getMessage()));
}
return baseResult;
}
@Override
- public BaseResult realTimeDataAnalyticsSubscriberIdRelateIp(DSLProfile dslProfile) {
+ // Translates the DSL filter into a knowledge-service DSLObject (queryType "iplearning",
+ // data source "IP_LEARNING_VIEW") and delegates the query to knowledgeService.
+ // NOTE(review): isDryRun is accepted but never read in this method — knowledgeService.query
+ // has no dry-run branch here; confirm whether validation-only mode should apply.
+ public BaseResult ipLearningFqdnRelateIp(DSLQueryContext dslProfile, boolean isDryRun) {
+ String queryType = "iplearning";
+ String dataSource = "IP_LEARNING_VIEW";
+ DSLObject dslObject = getDslObject(dslProfile, queryType, dataSource);
+ log.info("DSLProfile parse to DSLObject: {}", JSON.toJSONString(dslObject));
+ return knowledgeService.query(dslObject);
+ }
+
+ @Override
+ // Same translation as ipLearningFqdnRelateIp, targeting queryType "ippool" / data source
+ // "IP_VIEW". NOTE(review): isDryRun is likewise unused here.
+ public BaseResult ipLearningActiveIp(DSLQueryContext dslProfile, boolean isDryRun) {
+ String queryType = "ippool";
+ String dataSource = "IP_VIEW";
+ DSLObject dslObject = getDslObject(dslProfile, queryType, dataSource);
+ log.info("DSLProfile parse to DSLObject: {}", JSON.toJSONString(dslObject));
+ return knowledgeService.query(dslObject);
+ }
+
+ // Converts a DSLQueryContext (intervals, limit, SQL-style filter, order-by) into the
+ // knowledge-service DSLObject envelope: the filter is parsed with JSQLParser and folded
+ // into Match/Range predicate lists by the visitor from getExpressionVisitorAdapter.
+ private static DSLObject getDslObject(DSLQueryContext dslProfile, String queryType, String dataSource) {
+ Parameters parameters = new Parameters();
+ List<String> intervals = dslProfile.getIntervals();
+ if (!CollectionUtils.isEmpty(intervals)) {
+ parameters.setIntervals(intervals);
+ }
+ String limit = dslProfile.getLimit();
+ if (StrUtil.isNotEmpty(limit)) {
+ parameters.setLimit(limit);
+ }
+ List<Match> matches = Lists.newArrayList();
+ List<Range> ranges = Lists.newArrayList();
+ ExpressionVisitorAdapter expressionVisitorAdapter = getExpressionVisitorAdapter(matches, ranges);
String filter = dslProfile.getFilter();
- if (StrUtil.isNotEmpty(filter) && filter.contains("subscriber_id")) {
- try {
- dslProfile.setFilter(inToLike(filter));
- } catch (JSQLParserException e) {
- log.error("InExpression to LikeExpression error: {}", e.getMessage());
- throw new BusinessException(ResultStatusEnum.SERVER_ERROR.getCode(), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(), String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(), e.getMessage()));
+ Expression expression = null;
+ try {
+ // Second argument 'false' disallows complex parsing; an unparsable filter maps to HTTP 400.
+ expression = CCJSqlParserUtil.parseExpression(filter, false);
+ } catch (JSQLParserException e) {
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), e.getMessage()));
+ }
+ expression.accept(expressionVisitorAdapter);
+ Iterator<Range> iterator = ranges.iterator();
+ while (iterator.hasNext()) {
+ Range next = iterator.next();
+ // Drop the placeholder "1 = 1" EQ range that a tautological WHERE clause produces.
+ if (next.getFieldKey().equals("1") && next.getType().equals(RangeTypeEnum.EQ.getType())) {
+ iterator.remove();
}
}
- String sql = dslProfile.toSql(environment.getProperty("REAL_TIME_DATA_ANALYTICS_SUBSCRIBER_ID_RELATE_IP"), TABLE_RELATION_ACCOUNT_FRAMEDIP, "last_update_time", LOGICAL_TYPE_UNIX_TIMESTAMP);
- BaseResult baseResult = queryService.executeQuery(QueryProfile.builder().query(sql).build());
- if (!baseResult.isSuccess()) {
- throw new BusinessException(baseResult.getStatus(), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(), String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(), baseResult.getMessage()));
+ parameters.setMatch(matches);
+ parameters.setRange(ranges);
+
+ List<Sort> sorts = Lists.newArrayList();
+ if (StrUtil.isNotEmpty(dslProfile.getOrderBy())) {
+ buildSorts(dslProfile.getOrderBy(), sorts);
}
- return baseResult;
+ parameters.setSort(sorts);
+ DSLObject dslObject = new DSLObject();
+ DSLObject.DSLQuery dslQuery = new DSLObject.DSLQuery();
+ dslQuery.setQueryType(queryType);
+ dslQuery.setDataSource(dataSource);
+ dslQuery.setParameters(parameters);
+ dslObject.setQuery(dslQuery);
+
+ return dslObject;
}
- @Override
- public BaseResult realTimeDataAnalyticsMobileIdentityRelateTeid(DSLProfile dslProfile) {
- return BaseResultGenerator.failure();
+ // Parses a user-supplied ORDER BY fragment by embedding it in a dummy SELECT and extracting
+ // the OrderByElements; each becomes a Sort (field key + ASC/DESC). Malformed input fails the
+ // parse and maps to HTTP 400. The fragment is only parsed, never executed as SQL.
+ private static void buildSorts(String orderByStr, List<Sort> sorts) {
+ try {
+ Statement parse = CCJSqlParserUtil.parse("select 1 from table order by " + orderByStr);
+ if (parse instanceof Select && ((Select) parse).getSelectBody() instanceof PlainSelect) {
+ List<OrderByElement> orderByElements = ((PlainSelect) ((Select) parse).getSelectBody()).getOrderByElements();
+ for (int i = 0; i < orderByElements.size(); i++) {
+ OrderByElement orderByElement = orderByElements.get(i);
+ Sort sort = new Sort();
+ sort.setFieldKey(orderByElement.getExpression().toString());
+ sort.setType(orderByElement.isAsc() ? SortEnum.ASC.getType() : SortEnum.DESC.getType());
+ sorts.add(sort);
+ }
+ }
+ } catch (JSQLParserException e) {
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), e.getMessage()));
+ }
+ }
+
+ private static ExpressionVisitorAdapter getExpressionVisitorAdapter(List<Match> matches, List<Range> ranges) {
+ ExpressionVisitorAdapter expressionVisitorAdapter = new ExpressionVisitorAdapter() {
+ @Override
+ public void visit(LikeExpression expr) {
+ String left = String.valueOf(expr.getLeftExpression());
+ String right = ((StringValue) expr.getRightExpression()).getValue();
+ String type = "";
+ if (right.startsWith("%") && right.endsWith("%")) {
+ type = MatchEnum.SUBSTRING.getType();
+ right = right.substring(1, right.length() - 1);
+ } else if (right.startsWith("%")) {
+ type = MatchEnum.SUFFIX.getType();
+ right = right.substring(1);
+ } else if (right.endsWith("%")) {
+ type = MatchEnum.PREFIX.getType();
+ right = right.substring(0, right.length() - 1);
+ } else {
+ type = MatchEnum.EXACTLY.getType();
+ }
+
+ String finalType = type;
+ if (matches.stream().anyMatch(o -> o.getFieldKey().equals(left) && o.getType().equals(finalType))) {
+ for (Match match : matches) {
+ if (match.getFieldKey().equals(left) && match.getType().equals(type)) {
+ match.getFieldValues().add(right);
+ }
+ }
+ } else {
+ Match match = new Match();
+ match.setFieldKey(left);
+ match.setType(type);
+ match.setFieldValues(Lists.newArrayList(right));
+ matches.add(match);
+ }
+ }
+
+ @Override
+ public void visit(EqualsTo expr) {
+ String left = String.valueOf(expr.getLeftExpression());
+ Object right;
+ if (expr.getRightExpression() instanceof LongValue) {
+ right = ((LongValue) expr.getRightExpression()).getValue();
+ } else {
+ right = ((StringValue) expr.getRightExpression()).getValue();
+ }
+ if (ranges.stream().anyMatch(o -> o.getFieldKey().equals(left) && o.getType().equals(RangeTypeEnum.EQ.getType()))) {
+ for (Range range : ranges) {
+ if (range.getFieldKey().equals(left) && range.getType().equals(RangeTypeEnum.EQ.getType())) {
+ range.getFieldValues().add(right);
+ }
+ }
+ } else {
+ Range range = new Range();
+ range.setFieldKey(left);
+ range.setType(RangeTypeEnum.EQ.getType());
+ range.setFieldValues(Lists.newArrayList(right));
+ ranges.add(range);
+ }
+ }
+
+ @Override
+ public void visit(InExpression expr) {
+ ItemsList rightItemsList = expr.getRightItemsList();
+ if (rightItemsList instanceof ExpressionList && ((ExpressionList) rightItemsList).getExpressions().get(0) instanceof LongValue) {
+ intParseIn(expr);
+ } else {
+ strParseIn(expr);
+ }
+ }
+
+ private void intParseIn(InExpression expr) {
+ String left = String.valueOf(expr.getLeftExpression());
+ if (ranges.stream().anyMatch(o -> o.getFieldKey().equals(left) && o.getType().equals(RangeTypeEnum.EQ.getType()))) {
+ for (Range range : ranges) {
+ if (range.getFieldKey().equals(left) && range.getType().equals(RangeTypeEnum.EQ.getType())) {
+ ItemsList rightItemsList = expr.getRightItemsList();
+ rightItemsList.accept(new ItemsListVisitorAdapter() {
+ @Override
+ public void visit(ExpressionList expressionList) {
+ for (Expression expression : expressionList.getExpressions()) {
+ if (expression instanceof StringValue) {
+ range.getFieldValues().add(((StringValue) expression).getValue());
+ } else if (expression instanceof LongValue) {
+ range.getFieldValues().add(((LongValue) expression).getValue());
+ }
+
+ }
+ }
+
+ });
+ }
+ }
+ } else {
+ Range range = new Range();
+ range.setFieldKey(left);
+ range.setFieldValues(Lists.newArrayList());
+ range.setType(RangeTypeEnum.EQ.getType());
+ ItemsList rightItemsList = expr.getRightItemsList();
+ rightItemsList.accept(new ItemsListVisitorAdapter() {
+ @Override
+ public void visit(ExpressionList expressionList) {
+ for (Expression expression : expressionList.getExpressions()) {
+ if (expression instanceof StringValue) {
+ range.getFieldValues().add(((StringValue) expression).getValue());
+ } else if (expression instanceof LongValue) {
+ range.getFieldValues().add(((LongValue) expression).getValue());
+ }
+
+ }
+ }
+ });
+ ranges.add(range);
+ }
+ }
+
+ private void strParseIn(InExpression expr) {
+ String left = String.valueOf(expr.getLeftExpression());
+ if (matches.stream().anyMatch(o -> o.getFieldKey().equals(left) && o.getType().equals(MatchEnum.EXACTLY.getType()))) {
+ for (Match match : matches) {
+ if (match.getFieldKey().equals(left) && match.getType().equals(MatchEnum.EXACTLY.getType())) {
+ ItemsList rightItemsList = expr.getRightItemsList();
+ rightItemsList.accept(new ItemsListVisitorAdapter() {
+ @Override
+ public void visit(ExpressionList expressionList) {
+ for (Expression expression : expressionList.getExpressions()) {
+ if (expression instanceof StringValue) {
+ match.getFieldValues().add(((StringValue) expression).getValue());
+ } else if (expression instanceof LongValue) {
+ match.getFieldValues().add(((LongValue) expression).getValue());
+ }
+
+ }
+ }
+
+ });
+ }
+ }
+ } else {
+ Match match = new Match();
+ match.setFieldKey(left);
+ match.setFieldValues(Lists.newArrayList());
+ match.setType(MatchEnum.EXACTLY.getType());
+ ItemsList rightItemsList = expr.getRightItemsList();
+ rightItemsList.accept(new ItemsListVisitorAdapter() {
+ @Override
+ public void visit(ExpressionList expressionList) {
+ for (Expression expression : expressionList.getExpressions()) {
+ if (expression instanceof StringValue) {
+ match.getFieldValues().add(((StringValue) expression).getValue());
+ } else if (expression instanceof LongValue) {
+ match.getFieldValues().add(((LongValue) expression).getValue());
+ }
+
+ }
+ }
+ });
+ matches.add(match);
+ }
+ }
+
+ @Override
+ public void visit(GreaterThan expr) {
+ String left = String.valueOf(expr.getLeftExpression());
+ Object right;
+ if (expr.getRightExpression() instanceof LongValue) {
+ right = ((LongValue) expr.getRightExpression()).getValue();
+ } else {
+ right = ((StringValue) expr.getRightExpression()).getValue();
+ }
+ if (ranges.stream().anyMatch(o -> o.getFieldKey().equals(left) && o.getType().equals(RangeTypeEnum.GT.getType()))) {
+ for (Range range : ranges) {
+ if (range.getFieldKey().equals(left) && range.getType().equals(RangeTypeEnum.EQ.getType())) {
+ range.getFieldValues().add(right);
+ }
+ }
+ } else {
+ Range range = new Range();
+ range.setFieldKey(left);
+ range.setType(RangeTypeEnum.GT.getType());
+ range.setFieldValues(Lists.newArrayList(right));
+ ranges.add(range);
+ }
+ }
+
+ @Override
+ public void visit(GreaterThanEquals expr) {
+ String left = String.valueOf(expr.getLeftExpression());
+ Object right;
+ if (expr.getRightExpression() instanceof LongValue) {
+ right = ((LongValue) expr.getRightExpression()).getValue();
+ } else {
+ right = ((StringValue) expr.getRightExpression()).getValue();
+ }
+ if (ranges.stream().anyMatch(o -> o.getFieldKey().equals(left) && o.getType().equals(RangeTypeEnum.EQ.getType()))) {
+ for (Range range : ranges) {
+ if (range.getFieldKey().equals(left) && range.getType().equals(RangeTypeEnum.EQ.getType())) {
+ range.getFieldValues().add(right);
+ }
+ }
+ } else {
+ Range range = new Range();
+ range.setFieldKey(left);
+ range.setType(RangeTypeEnum.EQ.getType());
+ range.setFieldValues(Lists.newArrayList(right));
+ ranges.add(range);
+ }
+ }
+ };
+ return expressionVisitorAdapter;
+ }
+
+ // Wraps a rendered SQL string in a SQLQueryContext with JSON output; a dry run selects
+ // syntax-validation mode, otherwise the query executes in real-time mode.
+ private SQLQueryContext queryBuild(String sql, boolean isDryRun) {
+ String queryOption = isDryRun ? QueryOption.SYNTAX_VALIDATION.getValue() : QueryOption.REAL_TIME.getValue();
+ return SQLQueryContext.builder().format(OutputMode.JSON.getValue()).originalSQL(sql).option(queryOption).build();
}
@Override
- public List<ProtocolTree> buildFlatStructure(List<Map> protocolData) {
- List<ProtocolTree> nodes = Lists.newArrayList();
- for (Map protocolMap : protocolData) {
- ProtocolTree protocolTree = convertStringToObject(JSON.toJSONString(protocolMap, JSONWriter.Feature.LargeObject));
- if (StringUtil.isNotEmpty(protocolTree)) {
- nodes.add(protocolTree);
+ // Converts raw result rows into LiveChartProtocol nodes and appends a synthetic root
+ // node that carries path/protocol counts and aggregated byte totals of the top-level nodes.
+ public List<LiveChartProtocol> buildFlatStructure(List<Map> protocols) {
+ List<LiveChartProtocol> nodes = Lists.newArrayList();
+ for (Map protocolMap : protocols) {
+ LiveChartProtocol protocol = convertStringToObject(JSON.toJSONString(protocolMap, JSONWriter.Feature.LargeObject));
+ if (StringUtil.isNotEmpty(protocol)) {
+ nodes.add(protocol);
}
}
- ProtocolTree root = new ProtocolTree(Constants.PROTOCOL_TREE_ROOT_NAME, Constants.PROTOCOL_TREE_ROOT_NAME, null);
- Map<String, Long> protocolsAndApplications = nodes.stream().collect(Collectors.groupingBy(ProtocolTree::getName, Collectors.counting()));
- root.addMetric(Constants.PROTOCOL_TREE_METRIC_ENCAPSULATION_PATHS, nodes.size());
- root.addMetric(Constants.PROTOCOL_TREE_METRIC_PROTOCOLS_AND_APPLICATIONS, protocolsAndApplications.size());
- List<ProtocolTree> roots = nodes.stream().filter(o -> StringUtil.isBlank(o.getParentId())).collect(Collectors.toList());
+ LiveChartProtocol root = new LiveChartProtocol(LiveChartConstants.PROTOCOL_TREE_ROOT_NAME, LiveChartConstants.PROTOCOL_TREE_ROOT_NAME, null);
+ Map<String, Long> protocolsAndApplications = nodes.stream().collect(Collectors.groupingBy(LiveChartProtocol::getName, Collectors.counting()));
+ root.addMetric(LiveChartConstants.PROTOCOL_TREE_METRIC_ENCAPSULATION_PATHS, nodes.size());
+ root.addMetric(LiveChartConstants.PROTOCOL_TREE_METRIC_PROTOCOLS_AND_APPLICATIONS, protocolsAndApplications.size());
+ List<LiveChartProtocol> roots = nodes.stream().filter(o -> StringUtil.isBlank(o.getParentId())).collect(Collectors.toList());
roots.forEach(item -> {
root.setSentBytes(root.getSentBytes() + item.getSentBytes());
root.setReceivedBytes(root.getReceivedBytes() + item.getReceivedBytes());
@@ -316,16 +612,16 @@ public class DslServiceImpl implements DslService {
root.setTotalReceivedBytes(root.getTotalReceivedBytes() + item.getReceivedBytes());
});
nodes.forEach(item -> {
- item.setId(Constants.PROTOCOL_TREE_ROOT_NAME + Constants.ENCAPSULATION_PATH_SEPARATOR + item.getId());
+ // Re-root every node id under the synthetic root's namespace.
+ item.setId(LiveChartConstants.PROTOCOL_TREE_ROOT_NAME + LiveChartConstants.ENCAPSULATION_PATH_SEPARATOR + item.getId());
});
nodes.add(root);
return nodes;
}
@Override
- public List<ProtocolTree> buildHierarchicalStructure(List<ProtocolTree> nodes) {
+ // Groups flat nodes by name, sorts by total traffic, then merges them into parent/child
+ // trees keyed by parentId; malformed subtrees are trimmed to their Ethernet children.
+ public List<LiveChartProtocol> buildHierarchicalStructure(List<LiveChartProtocol> nodes) {
- List<ProtocolTree> targetNodes = groupByName(nodes);
+ List<LiveChartProtocol> targetNodes = groupByName(nodes);
targetNodes.sort((o1, o2) -> {
long numThis = o1.getReceivedBytes() + o1.getSentBytes();
@@ -338,9 +634,9 @@ public class DslServiceImpl implements DslService {
return 0;
}
});
- Map<String, List<ProtocolTree>> protocolMap = Maps.newLinkedHashMap();
- List<ProtocolTree> roots = targetNodes.stream().filter(o -> StringUtil.isBlank(o.getParentId())).collect(Collectors.toList());
- protocolMap.put(Constants.PROTOCOLS_PARENT_ID, roots);
+ Map<String, List<LiveChartProtocol>> protocolMap = Maps.newLinkedHashMap();
+ List<LiveChartProtocol> roots = targetNodes.stream().filter(o -> StringUtil.isBlank(o.getParentId())).collect(Collectors.toList());
+ protocolMap.put(LiveChartConstants.PROTOCOLS_PARENT_ID, roots);
targetNodes.stream().filter(o -> StringUtil.isNotBlank(o.getParentId())).forEach(x -> {
if (StringUtil.isEmpty(protocolMap.get(x.getParentId()))) {
protocolMap.put(x.getParentId(), Lists.newArrayList(x));
@@ -349,18 +645,18 @@ public class DslServiceImpl implements DslService {
}
});
- List<ProtocolTree> protocolTreeList = TreeUtils.mergeTree(protocolMap, Constants.PROTOCOLS_PARENT_ID, ProtocolTree::getId, ProtocolTree::setChildrens);
- for (ProtocolTree protocolTree : protocolTreeList) {
- List<ProtocolTree> ethernetList = protocolTree.getChildrens().stream().filter(
- p -> Constants.PROTOCOL_ETHERNET_NODE.equals(p.getName())).collect(Collectors.toList());
+ List<LiveChartProtocol> protocols = TreeUtils.mergeTree(protocolMap, LiveChartConstants.PROTOCOLS_PARENT_ID, LiveChartProtocol::getId, LiveChartProtocol::setChildrens);
+ for (LiveChartProtocol protocolTree : protocols) {
+ List<LiveChartProtocol> ethernetList = protocolTree.getChildrens().stream().filter(
+ p -> LiveChartConstants.PROTOCOL_ETHERNET_NODE.equals(p.getName())).collect(Collectors.toList());
if (protocolTree.getChildrens().size() != ethernetList.size()) {
protocolTree.setChildrens(ethernetList);
- protocolTree.setReceivedBytes(StringUtil.isEmpty(ethernetList.get(0)) ? 0 : ethernetList.get(0).getReceivedBytes());
- protocolTree.setSentBytes(StringUtil.isEmpty(ethernetList.get(0)) ? 0 : ethernetList.get(0).getSentBytes());
- log.warn("Protocol Tree Exceptional data occurs and non-{} protocols are filtered out: {}", Constants.PROTOCOL_ETHERNET_NODE, protocolTree.getChildrens());
+ // isEmpty() guard added so an all-filtered child list no longer throws IndexOutOfBounds.
+ protocolTree.setReceivedBytes(ethernetList.isEmpty() || StringUtil.isEmpty(ethernetList.get(0)) ? 0 : ethernetList.get(0).getReceivedBytes());
+ protocolTree.setSentBytes(ethernetList.isEmpty() || StringUtil.isEmpty(ethernetList.get(0)) ? 0 : ethernetList.get(0).getSentBytes());
+ log.warn("Protocol Tree Exceptional data occurs and non-{} protocols are filtered out: {}", LiveChartConstants.PROTOCOL_ETHERNET_NODE, protocolTree.getChildrens());
}
}
- return protocolTreeList;
+ return protocols;
}
/**
@@ -369,23 +665,23 @@ public class DslServiceImpl implements DslService {
* @param nodes
* @return
*/
- private List<ProtocolTree> groupByName(List<ProtocolTree> nodes) {
- Map<String, List<ProtocolTree>> resultMap = nodes.stream()
- .collect(Collectors.groupingBy(ProtocolTree::getName, Collectors.collectingAndThen(Collectors.toList(), protocolTreeList -> {
+ // Groups nodes by protocol name; within each group sums per-parent byte totals and
+ // "total_"-prefixed metric aggregates, then writes the sums back onto every non-root node.
+ private List<LiveChartProtocol> groupByName(List<LiveChartProtocol> nodes) {
+ Map<String, List<LiveChartProtocol>> resultMap = nodes.stream()
+ .collect(Collectors.groupingBy(LiveChartProtocol::getName, Collectors.collectingAndThen(Collectors.toList(), protocols -> {
- List<ProtocolTree> parentNodes = protocolTreeList.stream()
+ List<LiveChartProtocol> parentNodes = protocols.stream()
.filter(p -> StringUtil.isBlank(p.getParentId()) || !Splitter.on(".").omitEmptyStrings().splitToList(p.getParentId()).contains(p.getName()))
.collect(Collectors.toList());
Map<String, Long> summedMetrics = Maps.newLinkedHashMap();
Map<String, Long> totalBytesMetrics = Maps.newLinkedHashMap();
- totalBytesMetrics.put(Constants.PROTOCOL_TREE_METRIC_TOTAL_SENT_BYTES, 0L);
- totalBytesMetrics.put(Constants.PROTOCOL_TREE_METRIC_TOTAL_RECEIVED_BYTES, 0L);
+ totalBytesMetrics.put(LiveChartConstants.PROTOCOL_TREE_METRIC_TOTAL_SENT_BYTES, 0L);
+ totalBytesMetrics.put(LiveChartConstants.PROTOCOL_TREE_METRIC_TOTAL_RECEIVED_BYTES, 0L);
- parentNodes.stream().filter(p -> !p.getName().equalsIgnoreCase(Constants.PROTOCOL_TREE_ROOT_NAME)).forEach(protocolTree -> {
- totalBytesMetrics.put(Constants.PROTOCOL_TREE_METRIC_TOTAL_SENT_BYTES, totalBytesMetrics.get(Constants.PROTOCOL_TREE_METRIC_TOTAL_SENT_BYTES) + protocolTree.getSentBytes());
- totalBytesMetrics.put(Constants.PROTOCOL_TREE_METRIC_TOTAL_RECEIVED_BYTES, totalBytesMetrics.get(Constants.PROTOCOL_TREE_METRIC_TOTAL_RECEIVED_BYTES) + protocolTree.getReceivedBytes());
+ parentNodes.stream().filter(p -> !p.getName().equalsIgnoreCase(LiveChartConstants.PROTOCOL_TREE_ROOT_NAME)).forEach(protocolTree -> {
+ totalBytesMetrics.put(LiveChartConstants.PROTOCOL_TREE_METRIC_TOTAL_SENT_BYTES, totalBytesMetrics.get(LiveChartConstants.PROTOCOL_TREE_METRIC_TOTAL_SENT_BYTES) + protocolTree.getSentBytes());
+ totalBytesMetrics.put(LiveChartConstants.PROTOCOL_TREE_METRIC_TOTAL_RECEIVED_BYTES, totalBytesMetrics.get(LiveChartConstants.PROTOCOL_TREE_METRIC_TOTAL_RECEIVED_BYTES) + protocolTree.getReceivedBytes());
for (String key : protocolTree.getMetrics().keySet()) {
- String totalKey = StrUtil.upperFirstAndAddPre(key, "total");
+ // Aggregate key naming switched from camelCase ("totalX") to snake_case ("total_x").
+ String totalKey = StrUtil.addPrefixIfNot(key, "total_");
if (StringUtil.isEmpty(summedMetrics.get(totalKey))) {
summedMetrics.put(totalKey, Long.valueOf(protocolTree.getMetrics().get(key).toString()));
} else {
@@ -395,22 +691,23 @@ public class DslServiceImpl implements DslService {
}
});
- protocolTreeList.stream().filter(p -> !p.getName().equalsIgnoreCase(Constants.PROTOCOL_TREE_ROOT_NAME)).forEach(protocolTree -> {
- protocolTree.setTotalSentBytes(totalBytesMetrics.get(Constants.PROTOCOL_TREE_METRIC_TOTAL_SENT_BYTES));
- protocolTree.setTotalReceivedBytes(totalBytesMetrics.get(Constants.PROTOCOL_TREE_METRIC_TOTAL_RECEIVED_BYTES));
+ protocols.stream().filter(p -> !p.getName().equalsIgnoreCase(LiveChartConstants.PROTOCOL_TREE_ROOT_NAME)).forEach(protocolTree -> {
+ protocolTree.setTotalSentBytes(totalBytesMetrics.get(LiveChartConstants.PROTOCOL_TREE_METRIC_TOTAL_SENT_BYTES));
+ protocolTree.setTotalReceivedBytes(totalBytesMetrics.get(LiveChartConstants.PROTOCOL_TREE_METRIC_TOTAL_RECEIVED_BYTES));
protocolTree.getMetrics().putAll(summedMetrics);
});
- return protocolTreeList;
+ return protocols;
})));
- List<ProtocolTree> protocolTreeList = Lists.newArrayList();
+ List<LiveChartProtocol> protocols = Lists.newArrayList();
resultMap.values().forEach(o -> {
- protocolTreeList.addAll(o);
+ protocols.addAll(o);
});
- return protocolTreeList;
+ return protocols;
}
+
private String inToLike(String filter) throws JSQLParserException {
List<String> params = Lists.newArrayList();
List<String> oldExpression = Lists.newArrayList();
@@ -433,7 +730,7 @@ public class DslServiceImpl implements DslService {
}
}
};
- Expression expression = CCJSqlParserUtil.parseExpression(filter);
+ Expression expression = CCJSqlParserUtil.parseExpression(filter, false);
String strExpr = expression.toString();
expression.accept(expressionVisitorAdapter);
if (params.size() > 0 && oldExpression.size() == 1) {
@@ -441,8 +738,9 @@ public class DslServiceImpl implements DslService {
}
return filter;
}
- private ProtocolTree convertStringToObject(String protocolString) {
- ProtocolTree protocolTree = null;
+
+ // Parses one JSON row into a LiveChartProtocol: id/name from "protocol_stack_id"
+ // (last path segment is the display name), bytes/packets/sessions from the metric columns.
+ // NOTE(review): String.valueOf(null) yields the literal "null" for a missing id — confirm
+ // the null/blank check a few lines below (outside this hunk) accounts for that.
+ private LiveChartProtocol convertStringToObject(String protocolString) {
+ LiveChartProtocol protocol = null;
try {
Map<String, Object> results = JSON.parseObject(protocolString, Map.class);
String protocolId = String.valueOf(results.get("protocol_stack_id"));
@@ -451,28 +749,28 @@ public class DslServiceImpl implements DslService {
return null;
}
long sessions = Long.parseLong(Optional.ofNullable(
- results.get(Constants.PROTOCOL_TREE_METRIC_SESSIONS)).orElse(0).toString());
+ results.get(LiveChartConstants.PROTOCOL_TREE_METRIC_SESSIONS)).orElse(0).toString());
long sentBytes = Long.parseLong(Optional.ofNullable(
- results.get(Constants.PROTOCOL_TREE_METRIC_C2S_BYTES)).orElse(0).toString());
+ results.get(LiveChartConstants.PROTOCOL_TREE_METRIC_C2S_BYTES)).orElse(0).toString());
long receivedBytes = Long.parseLong(Optional.ofNullable(
- results.get(Constants.PROTOCOL_TREE_METRIC_S2C_BYTES)).orElse(0).toString());
+ results.get(LiveChartConstants.PROTOCOL_TREE_METRIC_S2C_BYTES)).orElse(0).toString());
long sentPackets = Long.parseLong(Optional.ofNullable(
- results.get(Constants.PROTOCOL_TREE_METRIC_C2S_PKTS)).orElse(0).toString());
+ results.get(LiveChartConstants.PROTOCOL_TREE_METRIC_C2S_PKTS)).orElse(0).toString());
long receivedPackets = Long.parseLong(Optional.ofNullable(
- results.get(Constants.PROTOCOL_TREE_METRIC_S2C_PKTS)).orElse(0).toString());
- List<String> protocols = Splitter.on(Constants.ENCAPSULATION_PATH_SEPARATOR).omitEmptyStrings().splitToList(protocolId);
+ results.get(LiveChartConstants.PROTOCOL_TREE_METRIC_S2C_PKTS)).orElse(0).toString());
+ List<String> protocols = Splitter.on(LiveChartConstants.ENCAPSULATION_PATH_SEPARATOR).omitEmptyStrings().splitToList(protocolId);
String protocolName = StringUtil.isNotEmpty(protocols) ? protocols.get(protocols.size() - 1) : null;
- protocolTree = new ProtocolTree(protocolId, protocolName, null);
- protocolTree.setSentBytes(sentBytes);
- protocolTree.setReceivedBytes(receivedBytes);
- protocolTree.getMetrics().put(Constants.PROTOCOL_TREE_METRIC_SENT_PACKETS, sentPackets);
- protocolTree.getMetrics().put(Constants.PROTOCOL_TREE_METRIC_RECEIVED_PACKETS, receivedPackets);
- protocolTree.getMetrics().put(Constants.PROTOCOL_TREE_METRIC_SESSIONS, sessions);
+ protocol = new LiveChartProtocol(protocolId, protocolName, null);
+ protocol.setSentBytes(sentBytes);
+ protocol.setReceivedBytes(receivedBytes);
+ protocol.getMetrics().put(LiveChartConstants.PROTOCOL_TREE_METRIC_SENT_PACKETS, sentPackets);
+ protocol.getMetrics().put(LiveChartConstants.PROTOCOL_TREE_METRIC_RECEIVED_PACKETS, receivedPackets);
+ protocol.getMetrics().put(LiveChartConstants.PROTOCOL_TREE_METRIC_SESSIONS, sessions);
} catch (BusinessException e) {
+ // NOTE(review): only BusinessException is caught; Long.parseLong can throw
+ // NumberFormatException on non-numeric metric values and would propagate — confirm intended.
log.error("Convert Json String to Protocol Tree Object Error ", e);
}
- return protocolTree;
+ return protocol;
}
}
diff --git a/src/main/java/com/mesalab/qgw/service/impl/HosServiceImp.java b/src/main/java/com/mesalab/qgw/service/impl/HosServiceImp.java
new file mode 100644
index 00000000..480f272d
--- /dev/null
+++ b/src/main/java/com/mesalab/qgw/service/impl/HosServiceImp.java
@@ -0,0 +1,250 @@
+package com.mesalab.qgw.service.impl;
+
+import cn.hutool.core.collection.CollectionUtil;
+import cn.hutool.core.map.MapUtil;
+import cn.hutool.core.util.StrUtil;
+import cn.hutool.core.util.XmlUtil;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.mesalab.common.entity.BaseResult;
+import com.mesalab.common.entity.BaseResultGenerator;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
+import com.mesalab.common.exception.BusinessException;
+import com.mesalab.common.exception.CommonErrorCode;
+import com.mesalab.qgw.model.basic.HttpConfig;
+import com.mesalab.qgw.model.basic.HttpResponseResult;
+import com.mesalab.qgw.service.HosService;
+import com.mesalab.services.common.entity.KnowledgeConstant;
+import com.mesalab.services.configuration.HosConfig;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+import org.springframework.web.multipart.MultipartFile;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.MediaType;
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+import javax.xml.parsers.ParserConfigurationException;
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.*;
+
+@Service(value = "hosService")
+public class HosServiceImp implements HosService {
+
+ private HosConfig hosConfig;
+ private HttpConfig httpConfig;
+ private HttpClientServiceV2 httpClientServiceV2;
+ private HttpClientService httpClientService;
+ private static final String TOKEN = "token";
+ private static final String CONTENTS = "Contents";
+ private static final String USER_DEFINED_META = "UserDefinedMeta";
+ private static final String KEY = "Key";
+ private static final String LAST_MODIFIED = "LastModified";
+ private static final String SIZE = "Size";
+ private static final String CHUNK_COUNT = "ChunkCount";
+ private static final String REQUEST_ID = "RequestId";
+ private static final String MESSAGE = "Message";
+ private static final String KEY_COUNT = "KeyCount";
+ private static final String FILE_TYPE = "FileType";
+ private static final String TASK_ID = "TaskId";
+ private static final String RULE_ID = "RuleId";
+
+ @Override
+ public BaseResult getFile(String bucketName, String fileName) {
+ Map<String, String> hosDefaultHeaders = getHosDefaultHeaders();
+ String putFileUrl = hosConfig.getUri().concat("/").concat(bucketName).concat("/").concat(fileName);
+ HttpResponseResult responseResult = httpClientServiceV2.head(putFileUrl, httpConfig.getServerResponseTimeOut(), hosDefaultHeaders);
+ if (HttpStatusCodeEnum.NOT_FOUND.getCode() == responseResult.getStatusCode()) {
+ return BaseResultGenerator.success(HttpStatusCodeEnum.NO_CONTENT.getCode(), HttpStatusCodeEnum.NO_CONTENT.getMessage(), null);
+ }
+ if (HttpStatusCodeEnum.SUCCESS.getCode() != responseResult.getStatusCode()) {
+ throw new BusinessException(responseResult.getStatusCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), "Get file failed, message is: " + responseResult.getErrorMessage());
+ }
+ Map<String, String> responseHeaders = responseResult.getResponseHeaders();
+ Map<String, Object> result = Maps.newHashMap();
+ result.put("file_name", fileName);
+ result.put("size", StrUtil.isBlankIfStr(responseHeaders.get("File-Size")) ? null : Integer.parseInt(responseHeaders.get("File-Size")));
+ result.put("file_url", hosConfig.getUri().concat("/").concat(bucketName).concat("/").concat(fileName));
+ result.put("last_modified", responseHeaders.get("Last-Modified"));
+ return BaseResultGenerator.success(Lists.newArrayList(result));
+ }
+
+ @Override
+ public BaseResult getFileList(String bucketName, String prefix, Map<String, String> udfMetaParam) {
+ Map<String, String> map = getDataByHos(bucketName, prefix, udfMetaParam);
+ Map<String, Object> stringObjectMap = XmlUtil.xmlToMap(String.valueOf(map.get("result")));
+ if (Integer.parseInt(map.get("status")) == HttpStatusCodeEnum.SUCCESS.getCode()) {
+ List<Map<String, Object>> dataList = parseXmlData(bucketName, stringObjectMap);
+ return BaseResultGenerator.success(dataList);
+ } else {
+ return BaseResultGenerator.failure(Integer.parseInt(map.get("status")), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), String.valueOf(stringObjectMap.get(REQUEST_ID)), String.valueOf(stringObjectMap.get(MESSAGE)));
+ }
+ }
+
+ @Override
+ public BaseResult uploadFile(String bucketName, String fileName, InputStream inputStream, Map<String, String> headersT) {
+ Map<String, String> headers = getHosDefaultHeaders();
+ if (headersT != null && !headers.isEmpty()) {
+ headers.putAll(headersT);
+ }
+ String putFileUrl = hosConfig.getUri().concat("/").concat(bucketName).concat("/").concat(fileName);
+ HttpResponseResult responseResult = httpClientServiceV2.put(putFileUrl, inputStream, httpConfig.getServerResponseTimeOut(), headers);
+ if (HttpStatusCodeEnum.SUCCESS.getCode() != responseResult.getStatusCode()) {
+ throw new BusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), "Upload file failed, message is: " + responseResult.getErrorMessage());
+ }
+ return BaseResultGenerator.success();
+ }
+
+ @Override
+ public BaseResult deleteFileList(String bucketName, List<String> fileNames) {
+ Map<String, String> headers = getHosDefaultHeaders();
+ headers.put(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_XML);
+ String putFileUrl = hosConfig.getUri().concat("/").concat(bucketName).concat("/?delete");
+ try {
+ DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance();
+ DocumentBuilder docBuilder = docFactory.newDocumentBuilder();
+ Document doc = docBuilder.newDocument();
+
+ Element deleteElement = doc.createElement("Delete");
+ doc.appendChild(deleteElement);
+
+ Element quietElement = doc.createElement("Quiet");
+ quietElement.appendChild(doc.createTextNode("true"));
+
+ deleteElement.appendChild(quietElement);
+ fileNames.forEach(fileName -> deleteElement.appendChild(createElementWithKey(doc, fileName)));
+
+ HttpResponseResult responseResult = httpClientServiceV2.post(putFileUrl, httpConfig.getServerResponseTimeOut(), XmlUtil.toStr(doc, true), headers);
+ if (HttpStatusCodeEnum.SUCCESS.getCode() != responseResult.getStatusCode()) {
+ throw new BusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), "Delete file failed, message is: " + responseResult.getErrorMessage());
+ }
+ } catch (ParserConfigurationException e) {
+ throw new BusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), e.getMessage());
+ }
+ return BaseResultGenerator.success();
+ }
+
+ private Map<String, String> getHosDefaultHeaders() {
+ Map<String, String> headers = Maps.newHashMap();
+ headers.put(KnowledgeConstant.TOKEN, hosConfig.getToken());
+ return headers;
+ }
+
+ private static Element createElementWithKey(Document doc, String key) {
+ Element keyElement = doc.createElement("Key");
+ keyElement.appendChild(doc.createTextNode(key));
+ Element result = doc.createElement("Object");
+ result.appendChild(keyElement);
+ return result;
+ }
+
+ private List<Map<String, Object>> parseXmlData(String bucketName, Map<String, Object> map) {
+ List<Map<String, Object>> dataList = Lists.newArrayList();
+ int keyCount = Integer.parseInt(String.valueOf(map.get(KEY_COUNT)));
+ switch (keyCount) {
+ case 0:
+ return dataList;
+ case 1:
+ dataList = parseSingleValue(bucketName, map);
+ break;
+ default:
+ dataList = parseMultipleValue(bucketName, map);
+ }
+ return dataList;
+ }
+
+ private List<Map<String, Object>> parseMultipleValue(String bucketName, Map<String, Object> map) {
+ List<Map<String, Object>> dataList = Lists.newArrayList();
+ List<Map<String, String>> contentList = (List<Map<String, String>>) map.get(CONTENTS);
+ if (CollectionUtil.isNotEmpty(contentList)) {
+ for (Map contents : contentList) {
+ Map<String, Object> dataMap = buildData(bucketName, contents);
+ dataList.add(dataMap);
+ }
+ }
+ return dataList;
+ }
+
+ private List<Map<String, Object>> parseSingleValue(String bucketName, Map<String, Object> map) {
+ List<Map<String, Object>> dataList = Lists.newArrayList();
+ Map<String, String> contentMap = (Map<String, String>) map.get(CONTENTS);
+ Map<String, Object> stringObjectMap = buildData(bucketName, contentMap);
+ dataList.add(stringObjectMap);
+ return dataList;
+ }
+
+ private Map<String, Object> buildData(String bucketName, Map contents) {
+ Map<String, Object> dataMap = Maps.newHashMap();
+ String key = String.valueOf(contents.get(KEY));
+ dataMap.put("file_name", key);
+ dataMap.put("size", StrUtil.isBlankIfStr(contents.get(SIZE)) ? 0 : Integer.parseInt(String.valueOf(contents.get(SIZE))));
+ dataMap.put("file_url", hosConfig.getUri().concat("/").concat(bucketName).concat("/").concat(key));
+ dataMap.put("last_modified", String.valueOf(contents.get(LAST_MODIFIED)));
+ if (MapUtil.isNotEmpty((Map<?, ?>) contents.get(USER_DEFINED_META))) {
+ Map<String, String> udfMeta = (Map<String, String>) contents.get(USER_DEFINED_META);
+ Integer chunkCount = StrUtil.isBlankIfStr(udfMeta.get(CHUNK_COUNT)) ? 0 : Integer.parseInt(udfMeta.get(CHUNK_COUNT));
+ String taskId = StrUtil.isBlankIfStr(udfMeta.get(TASK_ID)) ? "" : udfMeta.get(TASK_ID);
+ String ruleId = StrUtil.isBlankIfStr(udfMeta.get(RULE_ID)) ? "" : udfMeta.get(RULE_ID);
+ dataMap.put("task_id", taskId);
+ dataMap.put("rule_id", ruleId);
+ dataMap.put("chunk_count", chunkCount);
+ } else {
+ dataMap.put("task_id", "");
+ dataMap.put("rule_id", "");
+ dataMap.put("chunk_count", 0);
+ }
+ return dataMap;
+ }
+
+ private Map<String, String> getDataByHos(String bucketName, String prefix, Map<String, String> udfMetaParam) {
+ String url;
+ if (StrUtil.isNotBlank(prefix) || MapUtil.isNotEmpty(udfMetaParam)) {
+ url = urlBuilder(bucketName, prefix, udfMetaParam);
+ } else {
+ url = hosConfig.getUri().concat("/").concat(bucketName);
+ }
+ Map<String, String> result = httpClientService.httpGet(url, getHosDefaultHeaders(), 60000);
+ return result;
+ }
+
+ private String urlBuilder(String bucketName, String prefix, Map<String, String> udfMetaParam) {
+ StringBuffer sb = new StringBuffer();
+ sb.append(hosConfig.getUri().concat("/").concat(bucketName)).append("?");
+ if (StrUtil.isNotBlank(prefix)) {
+ sb.append("prefix=").append(prefix).append("&");
+ }
+ if (MapUtil.isNotEmpty(udfMetaParam)) {
+ for (Map.Entry<String, String> entry : udfMetaParam.entrySet()) {
+ String paramKey = StrUtil.toSymbolCase(StrUtil.toCamelCase(entry.getKey()), '-');
+ String paramValue = StrUtil.toSymbolCase(entry.getValue(), '-');
+ sb.append(paramKey).append("=").append(paramValue).append("&");
+ }
+ }
+ sb.setLength(sb.length() - 1);
+ return sb.toString();
+ }
+
+ @Autowired
+ private void setHosConfig(HosConfig hosConfig) {
+ this.hosConfig = hosConfig;
+ }
+
+ @Autowired
+ private void setHttpClientService(HttpClientService httpClientService) {
+ this.httpClientService = httpClientService;
+ }
+
+ @Autowired
+ private void setHttpClientServiceV2(HttpClientServiceV2 httpClientServiceV2) {
+ this.httpClientServiceV2 = httpClientServiceV2;
+ }
+
+ @Autowired
+ private void setHttpConfig(HttpConfig httpConfig) {
+ this.httpConfig = httpConfig;
+ }
+}
diff --git a/src/main/java/com/mesalab/qgw/service/impl/HttpClientService.java b/src/main/java/com/mesalab/qgw/service/impl/HttpClientService.java
index ccf1dc90..9194d471 100644
--- a/src/main/java/com/mesalab/qgw/service/impl/HttpClientService.java
+++ b/src/main/java/com/mesalab/qgw/service/impl/HttpClientService.java
@@ -3,7 +3,7 @@ package com.mesalab.qgw.service.impl;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.google.common.collect.Maps;
-import com.mesalab.common.enums.ResultStatusEnum;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
import com.mesalab.common.exception.BusinessException;
import com.mesalab.qgw.model.basic.HttpConfig;
import com.geedgenetworks.utils.StringUtil;
@@ -45,6 +45,7 @@ import java.util.HashMap;
import java.util.Map;
@Service
+@Deprecated
public class HttpClientService {
private static final Log log = LogFactory.get();
@@ -223,19 +224,19 @@ public class HttpClientService {
resultMap.put("result", EntityUtils.toString(entity, "UTF-8"));
} catch (ClientProtocolException e) {
log.error("协议错误: {}", e.getMessage());
- resultMap.put("status", String.valueOf(ResultStatusEnum.SERVICE_UNAVAILABLE.getCode()));
+ resultMap.put("status", String.valueOf(HttpStatusCodeEnum.SERVICE_UNAVAILABLE.getCode()));
resultMap.put("message", e.getMessage());
} catch (ParseException e) {
log.error("解析错误: {}", e.getMessage());
- resultMap.put("status", String.valueOf(ResultStatusEnum.SERVICE_UNAVAILABLE.getCode()));
+ resultMap.put("status", String.valueOf(HttpStatusCodeEnum.SERVICE_UNAVAILABLE.getCode()));
resultMap.put("message", e.getMessage());
} catch (IOException e) {
log.error("IO错误: {}", e.getMessage());
- resultMap.put("status", String.valueOf(ResultStatusEnum.BAD_GATEWAY.getCode()));
+ resultMap.put("status", String.valueOf(HttpStatusCodeEnum.BAD_GATEWAY.getCode()));
resultMap.put("message", e.getMessage());
} catch (Exception e) {
log.error("其它错误: {}", e.getMessage());
- resultMap.put("status", String.valueOf(ResultStatusEnum.SERVER_ERROR.getCode()));
+ resultMap.put("status", String.valueOf(HttpStatusCodeEnum.SERVER_ERROR.getCode()));
resultMap.put("message", e.getMessage());
} finally {
if (null != response) {
@@ -271,19 +272,19 @@ public class HttpClientService {
resultMap.put("result", EntityUtils.toString(entity, "UTF-8"));
} catch (ClientProtocolException e) {
log.error("ClientProtocolException:{}", e.getMessage());
- resultMap.put("status", String.valueOf(ResultStatusEnum.SERVICE_UNAVAILABLE.getCode()));
+ resultMap.put("status", String.valueOf(HttpStatusCodeEnum.SERVICE_UNAVAILABLE.getCode()));
resultMap.put("message", e.getMessage());
} catch (ParseException e) {
log.error("ParseException:{}", e.getMessage());
- resultMap.put("status", String.valueOf(ResultStatusEnum.SERVICE_UNAVAILABLE.getCode()));
+ resultMap.put("status", String.valueOf(HttpStatusCodeEnum.SERVICE_UNAVAILABLE.getCode()));
resultMap.put("message", e.getMessage());
} catch (IOException e) {
log.error("IOException:{}", e.getMessage());
- resultMap.put("status", String.valueOf(ResultStatusEnum.BAD_GATEWAY.getCode()));
+ resultMap.put("status", String.valueOf(HttpStatusCodeEnum.BAD_GATEWAY.getCode()));
resultMap.put("message", e.getMessage());
} catch (Exception e) {
log.error("Exception:{}", e.getMessage());
- resultMap.put("status", String.valueOf(ResultStatusEnum.SERVER_ERROR.getCode()));
+ resultMap.put("status", String.valueOf(HttpStatusCodeEnum.SERVER_ERROR.getCode()));
resultMap.put("message", e.getMessage());
} finally {
if (null != response) {
@@ -323,19 +324,19 @@ public class HttpClientService {
resultMap.put("result", EntityUtils.toString(entity, "UTF-8"));
} catch (ClientProtocolException e) {
log.error("ClientProtocolException:{}", e.getMessage());
- resultMap.put("status", String.valueOf(ResultStatusEnum.SERVICE_UNAVAILABLE.getCode()));
+ resultMap.put("status", String.valueOf(HttpStatusCodeEnum.SERVICE_UNAVAILABLE.getCode()));
resultMap.put("message", e.getMessage());
} catch (ParseException e) {
log.error("ParseException:{}", e.getMessage());
- resultMap.put("status", String.valueOf(ResultStatusEnum.SERVICE_UNAVAILABLE.getCode()));
+ resultMap.put("status", String.valueOf(HttpStatusCodeEnum.SERVICE_UNAVAILABLE.getCode()));
resultMap.put("message", e.getMessage());
} catch (IOException e) {
log.error("IOException:{}", e.getMessage());
- resultMap.put("status", String.valueOf(ResultStatusEnum.BAD_GATEWAY.getCode()));
+ resultMap.put("status", String.valueOf(HttpStatusCodeEnum.BAD_GATEWAY.getCode()));
resultMap.put("message", e.getMessage());
} catch (Exception e) {
log.error("Exception:{}", e.getMessage());
- resultMap.put("status", String.valueOf(ResultStatusEnum.SERVER_ERROR.getCode()));
+ resultMap.put("status", String.valueOf(HttpStatusCodeEnum.SERVER_ERROR.getCode()));
resultMap.put("message", e.getMessage());
} finally {
if (null != response) {
@@ -372,19 +373,19 @@ public class HttpClientService {
resultMap.put("result", EntityUtils.toString(entity, "UTF-8"));
} catch (ClientProtocolException e) {
log.error("ClientProtocolException: {}", e.getMessage());
- resultMap.put("status", String.valueOf(ResultStatusEnum.SERVICE_UNAVAILABLE.getCode()));
+ resultMap.put("status", String.valueOf(HttpStatusCodeEnum.SERVICE_UNAVAILABLE.getCode()));
resultMap.put("message", e.getMessage());
} catch (ParseException e) {
log.error("ParseException: {}", e.getMessage());
- resultMap.put("status", String.valueOf(ResultStatusEnum.SERVICE_UNAVAILABLE.getCode()));
+ resultMap.put("status", String.valueOf(HttpStatusCodeEnum.SERVICE_UNAVAILABLE.getCode()));
resultMap.put("message", e.getMessage());
} catch (IOException e) {
log.error("IOException: {}", e.getMessage());
- resultMap.put("status", String.valueOf(ResultStatusEnum.BAD_GATEWAY.getCode()));
+ resultMap.put("status", String.valueOf(HttpStatusCodeEnum.BAD_GATEWAY.getCode()));
resultMap.put("message", e.getMessage());
} catch (Exception e) {
log.error("Exception: {}", e.getMessage());
- resultMap.put("status", String.valueOf(ResultStatusEnum.SERVER_ERROR.getCode()));
+ resultMap.put("status", String.valueOf(HttpStatusCodeEnum.SERVER_ERROR.getCode()));
resultMap.put("message", e.getMessage());
} finally {
if (null != response) {
@@ -457,7 +458,7 @@ public class HttpClientService {
HttpEntity entity = response.getEntity();
// 获取响应信息
msg = EntityUtils.toString(entity, "UTF-8");
- if (statusCode != ResultStatusEnum.SUCCESS.getCode()) {
+ if (statusCode != HttpStatusCodeEnum.SUCCESS.getCode()) {
throw new BusinessException("Http get content is :" + msg);
}
} catch (URISyntaxException e) {
@@ -512,7 +513,7 @@ public class HttpClientService {
HttpEntity entity = response.getEntity();
// 获取响应信息
msg = EntityUtils.toString(entity, "UTF-8");
- if (statusCode != ResultStatusEnum.SUCCESS.getCode() && statusCode != ResultStatusEnum.CREATED.getCode()) {
+ if (statusCode != HttpStatusCodeEnum.SUCCESS.getCode() && statusCode != HttpStatusCodeEnum.CREATED.getCode()) {
throw new BusinessException(msg);
}
} catch (URISyntaxException e) {
@@ -596,7 +597,7 @@ public class HttpClientService {
// 执行请求
response = httpClient.execute(httpGet);
int code = response.getStatusLine().getStatusCode();
- if (code != ResultStatusEnum.SUCCESS.getCode()) {
+ if (code != HttpStatusCodeEnum.SUCCESS.getCode()) {
log.error("current file: {}, get InputStream error, status code:{}, entity: {}", url, code, EntityUtils.toString(response.getEntity()));
} else {
// 获取响应实体
@@ -623,8 +624,8 @@ public class HttpClientService {
}
}
- return result;
}
+ return result;
}
/**
diff --git a/src/main/java/com/mesalab/qgw/service/impl/HttpClientServiceV2.java b/src/main/java/com/mesalab/qgw/service/impl/HttpClientServiceV2.java
new file mode 100644
index 00000000..5ff13fc1
--- /dev/null
+++ b/src/main/java/com/mesalab/qgw/service/impl/HttpClientServiceV2.java
@@ -0,0 +1,366 @@
+package com.mesalab.qgw.service.impl;
+
+import cn.hutool.core.exceptions.ExceptionUtil;
+import com.geedgenetworks.utils.StringUtil;
+import com.google.common.collect.Maps;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
+import com.mesalab.common.exception.BusinessException;
+import com.mesalab.common.exception.CommonErrorCode;
+import com.mesalab.qgw.model.basic.HttpConfig;
+import com.mesalab.qgw.model.basic.HttpResponseResult;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.io.IOUtils;
+import org.apache.http.*;
+import org.apache.http.client.ClientProtocolException;
+import org.apache.http.client.HttpRequestRetryHandler;
+import org.apache.http.client.config.RequestConfig;
+import org.apache.http.client.methods.*;
+import org.apache.http.client.protocol.HttpClientContext;
+import org.apache.http.config.Registry;
+import org.apache.http.config.RegistryBuilder;
+import org.apache.http.conn.ConnectTimeoutException;
+import org.apache.http.conn.ConnectionKeepAliveStrategy;
+import org.apache.http.conn.socket.ConnectionSocketFactory;
+import org.apache.http.conn.socket.PlainConnectionSocketFactory;
+import org.apache.http.conn.ssl.NoopHostnameVerifier;
+import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
+import org.apache.http.entity.ByteArrayEntity;
+import org.apache.http.entity.InputStreamEntity;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClients;
+import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
+import org.apache.http.message.BasicHeaderElementIterator;
+import org.apache.http.protocol.HTTP;
+import org.apache.http.util.EntityUtils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.stereotype.Service;
+import javax.annotation.PostConstruct;
+import javax.net.ssl.*;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InterruptedIOException;
+import java.net.SocketTimeoutException;
+import java.net.UnknownHostException;
+import java.nio.charset.StandardCharsets;
+import java.security.KeyManagementException;
+import java.security.NoSuchAlgorithmException;
+import java.security.cert.X509Certificate;
+import java.util.HashMap;
+import java.util.Map;
+@Service
+@Slf4j
+public class HttpClientServiceV2 {
+ private HttpConfig httpConfig;
+ private PoolingHttpClientConnectionManager connectionManager;
+ private static final String CONTENT_TYPE = "Content-Type";
+ private static final String APPLICATION_JSON = "application/json";
+
+ /**
+ * Init PoolingHttpClientConnectionManager, support http and https
+ */
+ @PostConstruct
+ private void initConnectionManager() {
+ try {
+ // Use TrustManager to trust all certificates
+ X509TrustManager trustManager = new X509TrustManager() {
+ @Override
+ public X509Certificate[] getAcceptedIssuers() {
+ return null;
+ }
+ @Override
+ public void checkClientTrusted(X509Certificate[] xcs, String str) {
+ }
+ @Override
+ public void checkServerTrusted(X509Certificate[] xcs, String str) {
+ }
+ };
+ SSLContext ctx = SSLContext.getInstance(SSLConnectionSocketFactory.TLS);
+ ctx.init(null, new TrustManager[]{trustManager}, null);
+ SSLConnectionSocketFactory socketFactory = new SSLConnectionSocketFactory(ctx, NoopHostnameVerifier.INSTANCE);
+ Registry<ConnectionSocketFactory> socketFactoryRegistry = RegistryBuilder.<ConnectionSocketFactory>create()
+ .register("http", PlainConnectionSocketFactory.INSTANCE)
+ .register("https", socketFactory).build();
+
+ if (connectionManager == null) {
+ connectionManager = new PoolingHttpClientConnectionManager(socketFactoryRegistry);
+ connectionManager.setMaxTotal(httpConfig.getMaxConnectionNum());
+ connectionManager.setDefaultMaxPerRoute(httpConfig.getMaxPerRoute());
+ }
+ log.info("Initializing PoolingHttpClientConnectionManager Complete");
+ } catch (KeyManagementException e) {
+ log.error("KeyManagementException", e);
+ throw new BusinessException(e.getMessage());
+ } catch (NoSuchAlgorithmException e) {
+ log.error("NoSuchAlgorithmException", e);
+ throw new BusinessException(e.getMessage());
+ }
+ }
+
+ @Autowired
+ public void setHttpConfig(@Qualifier("httpConfig") HttpConfig httpConfig) {
+ this.httpConfig = httpConfig;
+ }
+
+
+ /**
+ * Specify the timeout time for each request and returned the corresponding CloseableHttpClient
+ * @param socketTimeOut
+ * @return CloseableHttpClient instance
+ */
+ private CloseableHttpClient getHttpClient(int socketTimeOut) {
+ RequestConfig requestConfig = RequestConfig.custom()
+ .setConnectionRequestTimeout(httpConfig.getServerRequestTimeOut())
+ .setConnectTimeout(httpConfig.getServerConnectTimeOut())
+ .setSocketTimeout(socketTimeOut) // Setting the socket timeout time
+ .build();
+ //Http request for retry mechanism. if return false, no retry, otherwise retry.
+ HttpRequestRetryHandler retry = (exception, executionCount, context) -> {
+ if (executionCount >= 3) { // Maximum number of retries
+ return false;
+ }
+ // If the server has not responded, retry
+ if (exception instanceof NoHttpResponseException) {
+ return true;
+ }
+ //no retry for SSLHandshakeException
+ if (exception instanceof SSLHandshakeException) {
+ return false;
+ }
+ if(exception instanceof SocketTimeoutException) {
+ if (exception.getMessage().contains("Read timed out")) {
+ return false;
+ }
+ }
+ //No retry for UnknownHostException, indicates the host is unreachable
+ if (exception instanceof UnknownHostException) {
+ return false;
+ }
+ // No retry for ConnectTimeoutException, indicates the connection is refused
+ if (exception instanceof ConnectTimeoutException) {
+ return false;
+ }
+ // No retry for SSLException, indicates the SSL handshake error
+ if (exception instanceof SSLException) {
+ return false;
+ }
+ // If the request has been sent, but the client has been interrupted, retry
+ if (exception instanceof InterruptedIOException) {
+ return true;
+ }
+
+ HttpClientContext clientContext = HttpClientContext.adapt(context);
+ HttpRequest request = clientContext.getRequest();
+ // If the request is idempotent, retry
+ if (!(request instanceof HttpEntityEnclosingRequest)) {
+ return true;
+ }
+ return false;
+ };
+
+
+ ConnectionKeepAliveStrategy myStrategy = (response, context) -> {
+ HeaderElementIterator it = new BasicHeaderElementIterator
+ (response.headerIterator(HTTP.CONN_KEEP_ALIVE));
+ while (it.hasNext()) {
+ HeaderElement headerElement = it.nextElement();
+ String param = headerElement.getName();
+ String value = headerElement.getValue();
+ if (value != null && param.equalsIgnoreCase("timeout")) {
+ return Long.parseLong(value) * 1000;
+ }
+ }
+ return 60 * 1000; // if not specified, default keep alive is 60 seconds
+ };
+
+ //Create a CloseableHttpClient instance
+ return HttpClients.custom()
+ // Set the timeout time for each request
+ .setDefaultRequestConfig(requestConfig)
+ // Set the retry mechanism
+ .setRetryHandler(retry)
+ .setKeepAliveStrategy(myStrategy)
+ // set the connection pool
+ .setConnectionManager(connectionManager)
+ .build();
+ }
+ public HttpResponseResult get(String url) {
+ return get(url, httpConfig.getServerResponseTimeOut(), null);
+ }
+ public HttpResponseResult get(String url, int socketTimeout) {
+ return get(url, socketTimeout, null);
+ }
+
+ public HttpResponseResult get(String url, int socketTimeout, Map<String, String> headers) {
+ return executeHttpRequest(new HttpGet(url), socketTimeout, headers);
+ }
+
+ public HttpResponseResult head(String url, int socketTimeout) {
+ return executeHttpRequest(new HttpHead(url), socketTimeout, null);
+ }
+
+ public HttpResponseResult head(String url, int socketTimeout, Map<String, String> headers) {
+ return executeHttpRequest(new HttpHead(url), socketTimeout, headers);
+ }
+
+ public HttpResponseResult post(String url, int socketTimeout, String requestBody, Map<String, String> headers) {
+ if (StringUtil.isEmpty(headers)) {
+ headers = Maps.newHashMap();
+ }
+ if(headers.get(CONTENT_TYPE) == null) {
+ headers.put(CONTENT_TYPE, APPLICATION_JSON);
+ }
+ HttpPost httpPost = new HttpPost(url);
+ httpPost.setEntity(new ByteArrayEntity(requestBody.getBytes(StandardCharsets.UTF_8)));
+ return executeHttpRequest(httpPost, socketTimeout, headers);
+ }
+
+ public HttpResponseResult post(String url, int socketTimeout, String requestBody){
+ return post(url, socketTimeout, requestBody , null);
+ }
+
+ public HttpResponseResult post(String url, String requestBody, Map<String, String> headers){
+ return post(url, httpConfig.getServerResponseTimeOut(), requestBody , headers);
+ }
+
+ public HttpResponseResult post(String url, String requestBody){
+ return post(url, httpConfig.getServerResponseTimeOut(), requestBody , null);
+ }
+
+ public HttpResponseResult delete(String url) {
+ return executeHttpRequest(new HttpDelete(url), httpConfig.getServerResponseTimeOut(), null);
+ }
+
+ public HttpResponseResult delete(String url, int socketTimeout) {
+ return executeHttpRequest(new HttpDelete(url), socketTimeout, null);
+ }
+
+
+ public HttpResponseResult delete(String url, int socketTimeout, Map<String, String> headers){
+ return executeHttpRequest(new HttpDelete(url), socketTimeout, headers);
+ }
+
+
+ public HttpResponseResult put(String url, InputStream in, int socketTimeout, Map<String, String> headers) {
+ HttpPut httpPut = new HttpPut(url);
+ if (StringUtil.isEmpty(headers)) {
+ headers = Maps.newHashMap();
+ }
+ if(headers.get(CONTENT_TYPE) == null) {
+ headers.put(CONTENT_TYPE, APPLICATION_JSON);
+ }
+ if (in != null) {
+ httpPut.setEntity(new InputStreamEntity(in));
+ }
+ return executeHttpRequest(httpPut, socketTimeout, headers);
+ }
+
+ public Map<String, String> getHttpPostResponseHeader(String url, int socketTimeOut, Map<String, String> headers) {
+ CloseableHttpResponse response = null;
+ HashMap<String, String> map = Maps.newHashMap();
+ try {
+ HttpPost httpPost = new HttpPost(url);
+ headers.forEach(httpPost::setHeader);
+ response = getHttpClient(socketTimeOut).execute(httpPost);
+ Header[] Headers = response.getAllHeaders();
+ for (Header h : Headers) {
+ map.put(h.getName().toUpperCase(), h.getValue());
+ }
+ } catch (Exception e) {
+ throw new BusinessException(handleException(e), CommonErrorCode.HTTP_REQUEST_EXCEPTION.getCode(), e.getMessage());
+ } finally {
+ closeResponse(response);
+ }
+ return map;
+ }
+
+ public InputStream getInputStream(String url, int socketTimeout, Map<String, String> headers) {
+ CloseableHttpResponse response = null;
+ InputStream result = null;
+ try {
+ HttpGet httpGet = new HttpGet(url);
+ if (headers != null) {
+ headers.forEach(httpGet::setHeader);
+ }
+ response = getHttpClient(socketTimeout).execute(httpGet);
+ result = IOUtils.toBufferedInputStream(response.getEntity().getContent());
+ } catch (Exception e) {
+ throw new BusinessException(handleException(e), CommonErrorCode.HTTP_REQUEST_EXCEPTION.getCode(), e.getMessage());
+ } finally {
+ closeResponse(response);
+ }
+ return result;
+ }
+
+
+ private HttpResponseResult executeHttpRequest(HttpRequestBase request, int socketTimeout, Map<String, String> headers) {
+ HttpResponseResult responseResult = new HttpResponseResult();
+ CloseableHttpResponse response = null;
+ try {
+ if (headers != null) {
+ headers.forEach(request::setHeader);
+ }
+ response = getHttpClient(socketTimeout).execute(request);
+ HttpEntity entity = response.getEntity();
+ responseResult.setStatusCode(response.getStatusLine().getStatusCode());
+ if (entity != null) {
+ responseResult.setResponseBody(EntityUtils.toString(entity, StandardCharsets.UTF_8));
+ }
+ if (response.getAllHeaders() != null) {
+ Map<String, String> headersMap = Maps.newHashMap();
+ for (Header header : response.getAllHeaders()) {
+ headersMap.put(header.getName(), header.getValue());
+ }
+ responseResult.setResponseHeaders(headersMap);
+ }
+ if (responseResult.getStatusCode() >= 300) {
+ log.error("Http request failed, url:{}, statusCode:{}, responseBody:{}",
+ request.getURI(), responseResult.getStatusCode(), responseResult.getResponseBody());
+ responseResult.setErrorMessage(responseResult.getResponseBody());
+ }
+ } catch (Exception e) {
+ responseResult.setStatusCode(handleException(e));
+ responseResult.setErrorMessage(ExceptionUtil.getRootCauseMessage(e));
+ } finally {
+ closeResponse(response);
+ }
+ return responseResult;
+ }
+
+ private int handleException(Exception e) {
+ if (e instanceof ClientProtocolException || e instanceof ParseException) {
+ log.error("Protocol or Parsing error:{}", e.getMessage());
+ return HttpStatusCodeEnum.SERVICE_UNAVAILABLE.getCode();
+ } else if (e instanceof SocketTimeoutException) {
+ log.error("Socket timeout error:{}", e.getMessage());
+ return HttpStatusCodeEnum.GATEWAY_TIMEOUT.getCode();
+ } else if (e instanceof ConnectTimeoutException) {
+ log.error("Connect timeout error:{}", e.getMessage());
+ return HttpStatusCodeEnum.GATEWAY_TIMEOUT.getCode();
+ } else if (e instanceof UnknownHostException) {
+ log.error("Unknown host error:{}", e.getMessage());
+ return HttpStatusCodeEnum.BAD_GATEWAY.getCode();
+ } else if (e instanceof InterruptedIOException) {
+ log.error("Interrupted error:{}", e.getMessage());
+ return HttpStatusCodeEnum.BAD_GATEWAY.getCode();
+ } else if (e instanceof IOException) {
+ log.error("IO error:{}", e.getMessage());
+ return HttpStatusCodeEnum.BAD_GATEWAY.getCode();
+ } else {
+ log.error("Other error:{}", e.getMessage());
+ return HttpStatusCodeEnum.SERVER_ERROR.getCode();
+ }
+ }
+
+ private void closeResponse(CloseableHttpResponse response) {
+ if (response != null) {
+ try {
+ EntityUtils.consumeQuietly(response.getEntity());
+ response.close();
+ } catch (IOException e) {
+ log.error("Close Connection Exception: {}", e.getMessage());
+ }
+ }
+ }
+
+}
diff --git a/src/main/java/com/mesalab/qgw/service/impl/MetadataServiceImpl.java b/src/main/java/com/mesalab/qgw/service/impl/MetadataServiceImpl.java
deleted file mode 100644
index 5eff14b3..00000000
--- a/src/main/java/com/mesalab/qgw/service/impl/MetadataServiceImpl.java
+++ /dev/null
@@ -1,575 +0,0 @@
-package com.mesalab.qgw.service.impl;
-
-import cn.hutool.core.collection.CollectionUtil;
-import cn.hutool.core.util.NumberUtil;
-import cn.hutool.core.util.StrUtil;
-import cn.hutool.json.JSONException;
-import cn.hutool.json.JSONUtil;
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import com.alibaba.fastjson2.JSON;
-import com.alibaba.fastjson2.JSONWriter;
-import com.alibaba.nacos.api.config.ConfigService;
-import com.alibaba.nacos.api.config.annotation.NacosValue;
-import com.alibaba.nacos.api.config.listener.AbstractListener;
-import com.alibaba.nacos.api.exception.NacosException;
-import com.google.common.base.Splitter;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import com.jayway.jsonpath.JsonPath;
-import com.mesalab.common.entity.BaseResult;
-import com.mesalab.common.entity.BaseResultGenerator;
-import com.mesalab.qgw.constant.QGWMessageConst;
-import com.mesalab.common.enums.DBTypeEnum;
-import com.mesalab.common.enums.MetadataTypeEnum;
-import com.mesalab.common.enums.ResultCodeEnum;
-import com.mesalab.common.enums.ResultStatusEnum;
-import com.mesalab.common.nacos.NacosConfig;
-import com.mesalab.common.nacos.NacosConst;
-import com.mesalab.qgw.exception.QGWBusinessException;
-import com.mesalab.qgw.model.basic.QueryProfile;
-import com.mesalab.qgw.model.basic.ClickHouseHttpSource;
-import com.mesalab.qgw.model.metadata.MetadataBean;
-import com.mesalab.qgw.model.metadata.MetadataConfig;
-import com.mesalab.qgw.service.QueryService;
-import com.mesalab.qgw.service.MetadataService;
-import com.geedgenetworks.utils.StringUtil;
-import org.apache.avro.Schema;
-import org.apache.avro.SchemaParseException;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.core.env.Environment;
-import org.springframework.stereotype.Service;
-
-import javax.annotation.PostConstruct;
-import java.util.*;
-
-/**
- * @Date: 2021-03-11 15:48
- * @Author : liuyongqiang
- * @ClassName : NacosMetadataServiceImpl
- * @Description : 获取Nacos注册中心元数据实现类
- */
-@Service("metadataService")
-public class MetadataServiceImpl implements MetadataService {
-
- private static final Log log = LogFactory.get();
- @Autowired
- private NacosConfig nacosConfig;
- @Autowired
- private ConfigService systemConfigService;
- @Autowired
- private ConfigService pubConfigService;
- @Autowired
- private ClickHouseHttpSource clickHouseHttpSource;
- @Autowired
- private QueryService queryService;
- @Autowired
- public MetadataConfig metadataConfig;
- @Autowired
- Environment environment;
- @NacosValue(value = "${switch.version.schema}", autoRefreshed = true)
- private String schemaVersion;
-
- private Map<String, Schema> schemaCache = Maps.newHashMap();
- private Map<String, Object> originalCache = Maps.newHashMap();
- private final static String KEY_REF = "$ref";
- private final static String KEY = "key";
- private final static String VALUE = "value";
- private final static String CODE = "code";
- private final static String INDEX_KEY = "index_key";
-
- @PostConstruct
- public void init() {
- addMetadataListener();
- log.info("Initializing Schema Tables Complete");
- }
-
- @Override
- public Map<String, Object> getSchemaInfo(String type, String name, boolean displayStorageSize) {
- if (MetadataTypeEnum.TABLES.getValue().equals(type)) {
- return loadTables(name);
- } else if (MetadataTypeEnum.FIELDS.getValue().equals(type)) {
- return loadFields(name, displayStorageSize);
- }
- throw new QGWBusinessException(ResultStatusEnum.NOT_FOUND.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.SCHEMA_URL_NOT_FOUND));
- }
-
- @Override
- public String getPartitionKey(String tableName) {
- String partitionKey = StringUtil.EMPTY;
- Schema schema = getSchemaByName(tableName);
- Map doc = (Map) JSON.parseObject(schema.getDoc(), Map.class);
- if (StringUtil.isNotEmpty(doc)) {
- Object partition_key = doc.get("partition_key");
- partitionKey = StringUtil.isEmpty(partition_key) ? StringUtil.EMPTY : partition_key.toString();
- }
- return partitionKey;
- }
-
- @Override
- public List<String> getIndexKey(String tableName) {
- Map schemaJsonMap = loadFields(tableName, false);
- if (StringUtil.isEmpty(schemaJsonMap) || StringUtil.isEmpty(schemaJsonMap.get("doc"))) {
- return Lists.newArrayList();
- }
- Map doc = (Map) schemaJsonMap.get("doc");
- Object indexKey = doc.get(INDEX_KEY);
- return StringUtil.isEmpty(indexKey) ? Lists.newArrayList() : (List<String>) indexKey;
- }
-
- @Override
- public String getValueByKeyInSchemaDoc(String tableName, String key) {
- Schema schema = getSchemaByName(tableName);
- Map doc = (Map) JSON.parseObject(schema.getDoc(), Map.class);
- if (StringUtil.isNotEmpty(doc)) {
- Object value = doc.get(key);
- if (value instanceof Map) {
- return JSONUtil.toJsonStr(JSONUtil.parseObj(value));
- } else {
- return StringUtil.isEmpty(value) ? StringUtil.EMPTY : value.toString();
- }
- }
- return StringUtil.EMPTY;
- }
-
- @Override
- public String getDBTypeByTableName(String tableName) {
- Schema schema = getSchemaByName(tableName);
- String data = StringUtil.EMPTY;
- if (StringUtil.isEmpty(schema)) {
- return data;
- }
- for (MetadataBean meta : metadataConfig.getMetadata()) {
- if (meta.getTables().contains(tableName)) {
- return getDBTypeByGroup(meta.getGroup());
- }
- }
- return data;
- }
-
- private String getDBTypeByGroup(String group) {
- String type;
- switch (group) {
- case "CLICKHOUSE_GROUP":
- type = DBTypeEnum.CLICKHOUSE.getValue();
- break;
- case "HBASE_GROUP":
- type = DBTypeEnum.HBASE.getValue();
- break;
- case "DRUID_GROUP":
- type = DBTypeEnum.DRUID.getValue();
- break;
- default:
- type = StringUtil.EMPTY;
- }
- return type;
- }
-
- @Override
- public String getDBNameByTableName(String tableName) {
- Schema schema = getSchemaByName(tableName);
- return StringUtil.isNotEmpty(schema) ? schema.getNamespace() : clickHouseHttpSource.getDbName();
- }
-
- @Override
- public List<String> getAllTable() {
- List<String> list = new ArrayList<>();
- metadataConfig.getMetadata().forEach(o -> {
- if (StringUtil.isNotEmpty(o.getTables())) {
- list.addAll(o.getTables());
- }
- });
- return list;
- }
-
- @Override
- public Schema getSchemaByName(String tableName) {
- if (!getAllTable().contains(tableName)) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(),
- ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),
- tableName + ":"+QGWMessageConst.TABLE_NOT_EXIST));
- }
- Schema schema;
- if (!StrUtil.isBlankIfStr(schema = schemaCache.get(tableName))) {
- return schema;
- }
- try {
- String content = systemConfigService.getConfig(tableName.concat(NacosConst.JSON_SUFFIX), nacosConfig.getGroup(), 3000);
- if (StrUtil.isBlankIfStr(content)) {
- throw new QGWBusinessException(ResultStatusEnum.SERVER_ERROR.getCode(),
- ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),tableName + ":"+QGWMessageConst.QUERY_SCHEMA_ERROR));
- }
- schemaCache.put(tableName, schema = parseSchema(content));
- systemConfigService.addListener(tableName.concat(NacosConst.JSON_SUFFIX), nacosConfig.getGroup(), new AbstractListener() {
- @Override
- public void receiveConfigInfo(String configInfo) {
- log.info("ReceiveConfigInfo Schema Fields {}", configInfo);
- schemaCache.remove(tableName);
- schemaCache.put(tableName, parseSchema(configInfo));
- }
- });
- } catch (NacosException e) {
- log.error("NacosException:{}", e);
- }
- return schema;
- }
-
- @Override
- public BaseResult updateSchema(String name, Map<String, Object> paramMap) {
- try {
- String content = systemConfigService.getConfig(name.concat(".json"), nacosConfig.getGroup(), 3000);
- updateSchema(name, paramMap, content);
- String indexTable = getValueByKeyInSchemaDoc(name, "index_table");
- if (StringUtil.isNotBlank(indexTable)) {
- List<String> list = Splitter.on(",").omitEmptyStrings().splitToList(indexTable);
- for (String item : list) {
- String config = systemConfigService.getConfig(item.concat(".json"), nacosConfig.getGroup(), 3000);
- updateSchema(item, paramMap, config);
- }
- }
- return BaseResultGenerator.success4Message("ok");
- } catch (NacosException e) {
- throw new QGWBusinessException(ResultStatusEnum.SERVER_ERROR.getCode(), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(),
- e.getMessage());
- }
- }
-
- private boolean updateSchema(String name, Map<String, Object> paramMap, String content) throws NacosException {
- if (StringUtil.isNotEmpty(content)) {
- Map<String, Object> data = JSON.parseObject(content, Map.class);
- data.put("doc", rewriteDoc("ttl", data.get("doc"), paramMap.get("doc")));
- List<Map<String, Object>> fields = (List<Map<String, Object>>) data.get("fields");
- List<String> indexKey = getIndexKey(name);
- for (Map<String, Object> field : fields) {
- if (indexKey.contains(field.get("name"))) {
- Map<String, Object> ttlNull = Maps.newHashMap();
- ttlNull.put("ttl", null);
- field.put("doc", rewriteDoc("ttl", field.get("doc"), ttlNull));
- } else {
- field.put("doc", rewriteDoc("ttl", field.get("doc"), JsonPath.read(paramMap, "$.fields[?(@.name == \"" + field.get("name") + "\")].doc")));
- }
- field.put("doc", rewriteDoc("visibility", field.get("doc"), JsonPath.read(paramMap, "$.fields[?(@.name == \"" + field.get("name") + "\")].doc")));
- }
- content = JSON.toJSONString(data, JSONWriter.Feature.WriteNulls);
- log.info("push Schema, content is: {}", content);
- return systemConfigService.publishConfig(name.concat(".json"), nacosConfig.getGroup(), JSONUtil.formatJsonStr(content));
- } else {
- throw new QGWBusinessException(ResultStatusEnum.SERVER_ERROR.getCode(), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(),
- String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(),QGWMessageConst.QUERY_SCHEMA_ERROR));
- }
- }
-
- /**
- * Desc: 重写[tables |field].doc
- * @param key
- * @param original
- * @param param
- * @return {@link Map< String, Object>}
- * @created by wWei
- * @date 2022/5/17 10:02 上午
- */
- private Map<String, Object> rewriteDoc(String key, Object original, Object param) {
- Map<String, Object> doc = Maps.newHashMap();
- if (StringUtil.isNotEmpty(original)) {
- doc = (Map<String, Object>) original;
- }
- Map<String, Object> paramDoc = Maps.newHashMap();
- if (param instanceof Map) {
- paramDoc = (Map<String, Object>) param;
- } else if (param instanceof List) {
- List item = (List) param;
- paramDoc = (Map<String, Object>) item.get(0);
- }
- if (paramDoc.containsKey(key)) {
- Object value = paramDoc.get(key);
- if (NumberUtil.isNumber(String.valueOf(value))) {
- doc.put(key, Long.parseLong(value.toString()));
- } else {
- doc.put(key, value);
- }
- } else {
- doc.remove(key);
- }
- return doc;
- }
-
- private void addMetadataListener() {
- try {
- systemConfigService.addListener(NacosConst.META_DATA_ID, nacosConfig.getGroup(), new AbstractListener() {
- @Override
- public void receiveConfigInfo(String configInfo) {
- log.info("ReceiveConfigInfo metadata {}", configInfo);
- schemaCache = Maps.newHashMap();
- }
- });
- } catch (NacosException e) {
- log.error("NacosException: ", e);
- }
- }
-
- private Map loadFields(String name, boolean displayStorageSize) {
- Schema schema = getSchemaByName(name);
- if (StringUtil.isEmpty(schema)) {
- throw new QGWBusinessException(ResultStatusEnum.NOT_FOUND.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(), QGWMessageConst.SCHEMA_URL_NOT_FOUND);
- }
- LinkedHashMap resultMap = JSON.parseObject(String.valueOf(schema), LinkedHashMap.class);
- LinkedHashMap schemaDoc = JSON.parseObject(schema.getDoc(), LinkedHashMap.class);
- if (StringUtil.isEmpty(schemaDoc) &&
- StringUtil.isNotEmpty(schema.getDoc())) {
- log.error("{} schema's doc isn't jsonString and won't convert: {}", name, schema.getDoc());
- }
- List<Map> data = Lists.newArrayList();
- if (displayStorageSize) {
- data = getStorageSize(name);
- schemaDoc = (LinkedHashMap) fillSchemaStorageSize(schemaDoc, data);
- }
- if (StringUtil.isNotEmpty(schemaDoc)) {
- resultMap.put("doc", schemaDoc);
- }
- List<Map> fields = (List<Map>) resultMap.get("fields");
- LinkedHashMap mapDoc;
- for (Map next : fields) {
- mapDoc = JSON.parseObject((String) next.get("doc"), LinkedHashMap.class);
- mapDoc = (LinkedHashMap) fillFieldStorageSize(data, mapDoc, next);
- if (StringUtil.isNotEmpty(mapDoc)) {
- next.put("doc", mapDoc);
- }
- if (StringUtil.isEmpty(mapDoc) && !StringUtil.isEmpty(next.get("doc"))) {
- log.error("{} field's doc isn't jsonString and won't convert: {}", next.get("name"), next.get("doc"));
- }
- }
- fillReference(resultMap, name);
- return resultMap;
- }
-
- private Map fillSchemaStorageSize(LinkedHashMap schemaDoc, List<Map> data) {
- if (CollectionUtil.isEmpty(data)) {
- return schemaDoc;
- }
- if (StringUtil.isEmpty(schemaDoc)) {
- schemaDoc = new LinkedHashMap();
- }
- long total = 0;
- for (Map datum : data) {
- total += Long.parseLong(String.valueOf(datum.get("bytes")));
- }
- schemaDoc.put("size", total);
- return schemaDoc;
- }
-
- private Map fillFieldStorageSize(List<Map> data, LinkedHashMap mapDoc, Map next) {
- if (CollectionUtil.isEmpty(data)) {
- return mapDoc;
- }
- if (StringUtil.isEmpty(mapDoc)) {
- mapDoc = new LinkedHashMap();
- }
- try {
- List<Object> name = JsonPath.read(data, "$.[?(@.field == \"" + next.get("name") + "\")].bytes");
- mapDoc.put("size", Long.parseLong(String.valueOf(name.get(0))));
- } catch (RuntimeException e) {
- log.error("fill {} field storage size error, message is: {}", next.get("name"), e.getMessage());
- }
- return mapDoc;
- }
-
- private List<Map> getStorageSize(String name) {
- try {
- List<String> symbols = (List<String>) loadTables(clickHouseHttpSource.getDbName()).get("symbols");
- String sql = null;
- if (symbols.contains(name)) {
- sql = String.format(Objects.requireNonNull(environment.getProperty("SCHEMA_STORAGE_SIZE")), name, clickHouseHttpSource.getDbName());
- }
- BaseResult baseResult = StringUtil.isBlank(sql) ? BaseResultGenerator.success(Lists.newArrayList()) : queryService.executeQuery(QueryProfile.builder().query(sql).build());
- if (baseResult.isSuccess()) {
- return (List<Map>) baseResult.getData();
- } else {
- log.error("get {} schema storage size error on DB.", name);
- }
- } catch (RuntimeException ex) {
- log.error("get {} schema storage size error on DB, message is {}", name, ex.getMessage());
- }
- return Lists.newArrayList();
- }
-
- /**
- * @param map 需操作对象
- * @param cfgName 当前文件名称
- * @return void
- * @Description $ref实际引用部分赋值
- * @author wanghao
- * @date 2021/9/1 17:02
- */
-
- private void fillReference(Map map, String cfgName) {
- for (Object mapKey : map.keySet()) {
- Object keyObj = map.get(mapKey);
- if (keyObj instanceof Map) {
- Map mapValue = (Map) keyObj;
- if (mapValue.containsKey(KEY_REF)) {
- if (INDEX_KEY.equals(String.valueOf(mapKey))) {
- String s = String.valueOf(mapValue.get(KEY_REF));
- mapValue.put(KEY_REF, s.contains("public_schema_info_2311.json") ? s.concat("/" + schemaVersion) : s);
- }
- Object refValue = getRefValue(cfgName, mapValue);
- map.put(mapKey, regexValue(refValue, mapValue));
- } else {
- fillReference(mapValue, cfgName);
- }
- } else if (keyObj instanceof List) {
- List listValue = (List) keyObj;
- for (Object next : listValue) {
- if (next instanceof Map) {
- fillReference((Map) next, cfgName);
- }
- }
- }
- }
- }
-
- @Override
- public Object getCfg(String cfgName) {
- if (originalCache.containsKey(cfgName)) {
- return originalCache.get(cfgName);
- }
- String content = null;
- try {
- content = getPubCfg(cfgName);
- if (!StrUtil.isBlankIfStr(content)) {
- originalCache.put(cfgName, content);
- return content;
- }
- content = systemConfigService.getConfig(cfgName, nacosConfig.getGroup(), 3000);
- originalCache.put(cfgName, content);
- systemConfigService.addListener(cfgName, nacosConfig.getGroup(), new AbstractListener() {
- @Override
- public void receiveConfigInfo(String configInfo) {
- log.info("ReceiveConfigInfo Schema Fields {}", configInfo);
- originalCache.put(cfgName, configInfo);
- }
- });
- } catch (NacosException e) {
- log.error("NacosException:{}", e);
- }
- return content;
- }
-
- private String getPubCfg(String cfgName) throws NacosException {
- String content = pubConfigService.getConfig(cfgName, NacosConst.DEFAULT_GROUP, 3000);
- pubConfigService.addListener(cfgName, NacosConst.DEFAULT_GROUP, new AbstractListener() {
- @Override
- public void receiveConfigInfo(String configInfo) {
- log.info("ReceiveConfigInfo Schema Fields {}", configInfo);
- originalCache.put(cfgName, configInfo);
- }
- });
- return content;
- }
-
- private Object getRefValue(String schemaName, Map map) {
- Object data = null;
- String[] split = getRefStrArray(map);
- String jsonPath = getJsonPath(split);
- try {
- Object cfg = getCfg(getFileName(split, schemaName));
- data = JsonPath.read(String.valueOf(cfg), jsonPath);
- } catch (RuntimeException e) {
- log.warn("read reference schema error: {}", e);
- }
- return data;
- }
-
- private Object regexValue(Object obj, Map<String, String> map) {
- if (map.size() <= 1) {
- return obj;
- }
- List<Object> key = Lists.newArrayList();
- List<Object> value = Lists.newArrayList();
- try {
- key.addAll(JsonPath.read(obj, map.get(KEY)));
- value.addAll(JsonPath.read(obj, map.get(VALUE)));
- } catch (RuntimeException ex) {
- log.warn("parse reference schema error: JsonPath.read error");
- return Lists.newArrayList();
- }
- if (key.size() != value.size()) {
- log.warn("parse reference schema error: key and value non-correspondence");
- return Lists.newArrayList();
- }
- List<Map<String, Object>> list = Lists.newArrayList();
- for (int i = 0; i < key.size(); i++) {
- HashMap<String, Object> item = Maps.newLinkedHashMap();
- item.put(CODE, key.get(i));
- item.put(VALUE, value.get(i));
- list.add(item);
- }
- return list;
- }
-
- private String[] getRefStrArray(Map refMap) {
- String v = (String) refMap.get(KEY_REF);
- return v.split("#", 2);
- }
-
- private String getJsonPath(String[] strArray) {
- return "$".concat(strArray[1].replace("/", "."));
- }
-
- private String getFileName(String[] strArray, String fileName) {
- return StrUtil.isEmpty(strArray[0]) ? fileName.concat(NacosConst.JSON_SUFFIX) : strArray[0];
- }
-
- /**
- * @param name
- * @Description: 从Nacos配置中心获取Tables
- * @Author: liuyongqiang
- * @Date: 2021/3/11 18:17
- * @return: com.mesalab.common.base.BaseResult
- **/
- private Map<String, Object> loadTables(String name) {
- Map<String, Object> date = new LinkedHashMap<>();
- List<String> tables = new ArrayList<>();
- metadataConfig.getMetadata().forEach(o -> {
- if (o.getNamespace().equalsIgnoreCase(name)) {
- tables.addAll(o.getTables());
- }
- }
- );
- if (tables.isEmpty()) {
- throw new QGWBusinessException(ResultStatusEnum.NOT_FOUND.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(), QGWMessageConst.SCHEMA_URL_NOT_FOUND);
- }
- date.put("type", "enum");
- date.put("name", name);
- date.put("symbols", tables);
- return date;
- }
-
- private Schema parseSchema(String configInfo) {
- if (StrUtil.isBlank(configInfo)) return null;
- Map schemaMap = null;
- try {
- schemaMap = JSON.parseObject(configInfo, Map.class);
- Object docSchema;
- if (StringUtil.isNotEmpty(docSchema = schemaMap.get("doc"))) {
- schemaMap.put("doc", JSON.toJSONString(docSchema, JSONWriter.Feature.WriteNulls));
- }
- Object fieldsObj;
- if (StringUtil.isNotEmpty(fieldsObj = schemaMap.get("fields"))) {
- List<Map> fields = (List<Map>) fieldsObj;
- for (Map next : fields) {
- Object docField;
- if (StringUtil.isEmpty(docField = next.get("doc"))) {
- continue;
- }
- next.put("doc", JSON.toJSONString(docField, JSONWriter.Feature.WriteNulls));
- }
- }
- } catch (JSONException | SchemaParseException | ClassCastException e) {
- log.error("update Schema error: {}", e);
- }
- return new Schema.Parser().parse(JSON.toJSONString(schemaMap, JSONWriter.Feature.WriteNulls));
- }
-}
diff --git a/src/main/java/com/mesalab/qgw/service/impl/PacketCombineDslServiceImpl.java b/src/main/java/com/mesalab/qgw/service/impl/PacketCombineDslServiceImpl.java
new file mode 100644
index 00000000..d7d178ad
--- /dev/null
+++ b/src/main/java/com/mesalab/qgw/service/impl/PacketCombineDslServiceImpl.java
@@ -0,0 +1,268 @@
+package com.mesalab.qgw.service.impl;
+
+import cn.hutool.core.bean.BeanUtil;
+import cn.hutool.core.util.ArrayUtil;
+import cn.hutool.json.JSONArray;
+import cn.hutool.json.JSONObject;
+import cn.hutool.json.JSONUtil;
+import cn.hutool.log.Log;
+import cn.hutool.log.LogFactory;
+import com.google.common.collect.Maps;
+import com.mesalab.common.entity.BaseResult;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
+import com.mesalab.common.enums.QueryOption;
+import com.mesalab.common.exception.CommonErrorCode;
+import com.mesalab.qgw.exception.QGWBusinessException;
+import com.mesalab.qgw.model.basic.DSLQueryContext;
+import com.mesalab.qgw.model.basic.DSLQueryRequestParam;
+import com.mesalab.qgw.model.basic.EngineConfigSource;
+import com.mesalab.qgw.model.basic.SQLQueryContext;
+import com.mesalab.qgw.service.*;
+import com.mesalab.services.common.property.SqlPropertySourceFactory;
+import lombok.SneakyThrows;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.annotation.PropertySource;
+import org.springframework.core.env.Environment;
+import org.springframework.stereotype.Service;
+
+import java.io.ByteArrayInputStream;
+import java.io.InputStream;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.util.*;
+
+/**
+ * Combines individual packet records queried from the telemetry store into a
+ * single PCAPNG byte stream (a section header plus one Enhanced Packet Block
+ * per record, each carrying JSON comment options) and uploads the result to
+ * HOS object storage, returning the stored file.
+ *
+ * @Classname PacketCombineDslService
+ * @Date 2024/4/19 16:16
+ * @Author wWei
+ */
+@Service
+@PropertySource(value = "classpath:dsl-sql-template.sql", factory = SqlPropertySourceFactory.class)
+public class PacketCombineDslServiceImpl implements PacketCombineDslService {
+
+ // NOTE(review): "FILED_*" looks like a typo for "FIELD_*" — renaming the
+ // private constants would be a safe cleanup.
+ private static final String DATAPATH_TELEMETRY_RECORD = "DATAPATH_TELEMETRY_RECORD";
+ private static final String X_HOS_META_FILE_TYPE_KEY = "x-hos-meta-file-type";
+ private static final String X_HOS_META_FILE_TYPE_VALUE_PCAPNG = "pcapng";
+ private static final String TROUBLESHOOTING_FILE_BUCKET = "troubleshooting_file_bucket";
+ private static final String FILED_MEASUREMENTS = "measurements";
+ private static final String FILED_PACKET = "packet";
+ private static final String FILED_TIMESTAMP_US = "timestamp_us";
+ private static final String FILED_PACKET_LENGTH = "packet_length";
+ private static final String SLED_IP = "sled_ip";
+ private static final String DEVICE_GROUP = "device_group";
+ private static final String TRAFFIC_LINK_ID = "traffic_link_id";
+
+ private static final Log log = LogFactory.get();
+ private Environment environment;
+ private DatabaseService databaseService;
+ private SQLSyncQueryService sqlSyncQueryService;
+ private HosService hosService;
+ private EngineConfigSource engineConfigSource;
+
+ // End-to-end flow: optional configured delay, query packet rows, merge them
+ // into one PCAPNG buffer, then upload under the request id as file name.
+ @Override
+ public BaseResult run(DSLQueryRequestParam dslQueryRequestParam) {
+ if (engineConfigSource.getPacketCombineDelaySeconds() > 0) {
+ threadBlock(engineConfigSource.getPacketCombineDelaySeconds());
+ }
+ String sql = buildSQL(dslQueryRequestParam);
+ List<Map<String, Object>> packets = getPackets(sql);
+ byte[] bytes = packetCombine(packets);
+ InputStream inputStream = new ByteArrayInputStream(bytes);
+ return uploadFile(dslQueryRequestParam.getId(), inputStream);
+ }
+
+ // Blocks the calling thread for the configured delay (seconds);
+ // InterruptedException is rethrown unchecked via @SneakyThrows.
+ @SneakyThrows
+ private void threadBlock(int delaySeconds) {
+ Thread.sleep(delaySeconds * 1000L);
+ }
+
+ // Runs the packet query synchronously in REAL_TIME mode; a failed result is
+ // converted to a server-error business exception.
+ @Override
+ public List<Map<String, Object>> getPackets(String sql) {
+ BaseResult<List<Map<String, Object>>> baseResult = sqlSyncQueryService.executeQuery(
+ SQLQueryContext.builder()
+ .originalSQL(sql)
+ .option(QueryOption.REAL_TIME.getValue())
+ .build());
+ if (baseResult.isSuccess()) {
+ return baseResult.getData();
+ }
+ throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), "get packet records failed"));
+ }
+
+ // Converts each record (base64 packet bytes + timestamp + lengths +
+ // measurement comments) into an Enhanced Packet Block, then prepends the
+ // PCAPNG file header and merges everything into one byte array.
+ @Override
+ public byte[] packetCombine(List<Map<String, Object>> records) {
+ List<byte[]> packetList = new ArrayList<>();
+ for (Map<String, Object> map : records) {
+ String measurements = JSONUtil.toJsonStr(map.get(FILED_MEASUREMENTS));
+ Object sledIp = map.get(SLED_IP);
+ Object deviceGroup = map.get(DEVICE_GROUP);
+ Object trafficLinkId = map.get(TRAFFIC_LINK_ID);
+ JSONObject jsonObject = new JSONObject();
+ jsonObject.set(SLED_IP, sledIp);
+ jsonObject.set(DEVICE_GROUP, deviceGroup);
+ jsonObject.set(TRAFFIC_LINK_ID, trafficLinkId);
+ int packetLen = Integer.parseInt(String.valueOf(map.get(FILED_PACKET_LENGTH)));
+ long timestampUS = Long.parseLong(String.valueOf(map.get(FILED_TIMESTAMP_US)));
+ String base64EncodedString = String.valueOf(map.get(FILED_PACKET));
+ byte[] decodedBytes = Base64.getDecoder().decode(base64EncodedString);
+ byte[] bytes = generatePacketBlock(timestampUS, decodedBytes, packetLen, measurements, jsonObject);
+ packetList.add(bytes);
+ }
+ return mergeByte(packetList);
+ }
+
+ // Uploads the stream to the troubleshooting bucket tagged as pcapng, then
+ // fetches the stored file so the caller gets its access info.
+ @Override
+ public BaseResult uploadFile(String fileName, InputStream inputStream) {
+ Map<String, String> objectObjectHashMap = Maps.newHashMap();
+ objectObjectHashMap.put(X_HOS_META_FILE_TYPE_KEY, X_HOS_META_FILE_TYPE_VALUE_PCAPNG);
+ BaseResult baseResult = hosService.uploadFile(TROUBLESHOOTING_FILE_BUCKET, fileName, inputStream, objectObjectHashMap);
+ if (!baseResult.isSuccess()) {
+ return baseResult;
+ }
+ return hosService.getFile(TROUBLESHOOTING_FILE_BUCKET, fileName);
+ }
+
+ // Renders the DATAPATH_TELEMETRY_RECORD SQL template for the request's data
+ // source, using its partition key and a unix_timestamp time column.
+ private String buildSQL(DSLQueryRequestParam dslQueryRequestParam) {
+ String dataSource = dslQueryRequestParam.getDataSource();
+ DSLQueryContext dslQueryContext = BeanUtil.copyProperties(dslQueryRequestParam, DSLQueryContext.class);
+ return dslQueryContext.toSql(
+ environment.getProperty(DATAPATH_TELEMETRY_RECORD)
+ , dataSource
+ , databaseService.getPartitionKey(dataSource)
+ , "unix_timestamp");
+ }
+
+ private static byte[] mergeByte(List<byte[]> packetList) {
+ // concatenate the file header and all Enhanced Packet Blocks
+ List<byte[]> pcapFileDataList = new ArrayList<>();
+ // add the PCAPNG file header (section header + interface description)
+ pcapFileDataList.add(generatePCAPNGHeader());
+ // add every Enhanced Packet Block
+ pcapFileDataList.addAll(packetList);
+ // merge all chunks into a single byte array
+ byte[][] newBytes = new byte[pcapFileDataList.size()][];
+ pcapFileDataList.toArray(newBytes);
+ return ArrayUtil.addAll(newBytes);
+ }
+
+ // Builds one little-endian Enhanced Packet Block: fixed header, packet data
+ // padded to 4 bytes, then one opt_comment per measurement JSON object
+ // (prefixed with the sled/device/link metadata object), then the trailing
+ // block-length field.
+ private static byte[] generatePacketBlock(long timestamp, byte[] blockData, int packetLen, String measurements, JSONObject jsonObject) {
+ // compute the total length of all comment options
+ List<byte[]> commentsList = new ArrayList<>();
+ int commentsLength = 0;
+ // comments come from the record's measurements array, with the source
+ // metadata object inserted first
+ JSONArray measurementsJsonArray = JSONUtil.parseArray(measurements);
+ measurementsJsonArray.add(0, jsonObject);
+ for (Object measurement : measurementsJsonArray) {
+ byte[] comment = ((JSONObject) measurement).toJSONString(2).getBytes();
+ if (comment.length > 65535) {
+ comment = "The comment exceeds the length limit of 65535 characters and cannot be displayed.".getBytes();
+ }
+ // comment length padded up to a 4-byte boundary
+ int commentPadding = 0;
+ if (comment.length % 4 != 0) {
+ commentPadding = 4 - comment.length % 4;
+ }
+ int commentLength = comment.length + commentPadding;
+ commentsLength += commentLength + 4;
+ commentsList.add(comment);
+ }
+ // padding needed to align the captured packet data to 4 bytes
+ int blockDataLength = blockData.length;
+ int packetPadding = 0;
+ if (blockDataLength % 4 != 0) {
+ packetPadding = 4 - blockDataLength % 4;
+ }
+ // total Enhanced Packet Block length (32 = fixed header/trailer fields)
+ int packetBlockLength = commentsLength + blockDataLength + packetPadding + 32;
+ // build the Enhanced Packet Block
+ ByteBuffer packetBlockBuffer = ByteBuffer.allocate(packetBlockLength);
+ packetBlockBuffer.order(ByteOrder.LITTLE_ENDIAN);
+ packetBlockBuffer.putInt(0x00000006);// block type: Enhanced Packet Block
+ packetBlockBuffer.putInt(packetBlockLength);// total block length
+ packetBlockBuffer.putInt(0);// interface ID (refers to the IDB), defaults to 0
+ packetBlockBuffer.putInt((int) (timestamp >> 32));// timestamp, high 32 bits
+ packetBlockBuffer.putInt((int) (timestamp & 0xFFFFFFFFL));// timestamp, low 32 bits
+ packetBlockBuffer.putInt(blockDataLength);// captured packet length
+ packetBlockBuffer.putInt(packetLen);// original packet length
+ packetBlockBuffer.put(blockData);// packet data
+ // pad packet data to a 4-byte boundary
+ for (int i = 0; i < packetPadding; i++) {
+ packetBlockBuffer.put((byte) 0);
+ }
+ // comment options
+ for (byte[] comment : commentsList) {
+ packetBlockBuffer.putChar((char) 1);// option code: 1 = opt_comment
+ packetBlockBuffer.putChar((char) comment.length);// comment length
+ packetBlockBuffer.put(comment);// comment bytes
+ // padding for this comment
+ int commentPadding = 0;
+ if (comment.length % 4 != 0) {
+ commentPadding = 4 - comment.length % 4;
+ }
+ // pad to a 4-byte boundary
+ for (int i = 0; i < commentPadding; i++) {
+ packetBlockBuffer.put((byte) 0);
+ }
+ }
+ // NOTE(review): no opt_endofopt terminator is written before the trailing
+ // length field — verify downstream consumers accept this.
+ packetBlockBuffer.putInt(packetBlockLength);// total block length (trailer)
+ return packetBlockBuffer.array();
+ }
+
+ // Builds the 48-byte little-endian file header: a Section Header Block
+ // followed by an Interface Description Block.
+ private static byte[] generatePCAPNGHeader() {
+ ByteBuffer buffer = ByteBuffer.allocate(48);
+ buffer.order(ByteOrder.LITTLE_ENDIAN);
+ // Section Header Block type
+ buffer.putInt(0x0A0D0D0A);
+ // Section Header Block total length
+ buffer.putInt(28);
+ // byte-order magic; per the original author: 0x4D3C2B1A = little-endian,
+ // 0x1A2B3C4D = big-endian (written little-endian here)
+ buffer.putInt(0x1A2B3C4D);
+ // major version
+ buffer.putShort((short) 1);
+ // minor version
+ buffer.putShort((short) 0);
+ // section length; -1 means unspecified
+ buffer.putLong(-1);
+ // Section Header Block total length (trailer)
+ buffer.putInt(28);
+ // Interface Description Block type
+ buffer.putInt(1);
+ // Interface Description Block total length
+ buffer.putInt(20);
+ // link type
+ buffer.putInt(1);
+ // snap length: max bytes dumped per packet; anything beyond is not stored
+ // in the file (0 here)
+ buffer.putInt(0);
+ // Interface Description Block total length (trailer)
+ buffer.putInt(20);
+ return buffer.array();
+ }
+
+ @Autowired
+ public void setEnvironment(Environment environment) {
+ this.environment = environment;
+ }
+
+ @Autowired
+ public void setDatabaseService(DatabaseService databaseService) {
+ this.databaseService = databaseService;
+ }
+
+ @Autowired
+ public void setSqlSyncQueryService(SQLSyncQueryService sqlSyncQueryService) {
+ this.sqlSyncQueryService = sqlSyncQueryService;
+ }
+
+ @Autowired
+ public void setHosService(HosService hosService) {
+ this.hosService = hosService;
+ }
+
+ @Autowired
+ public void setEngineConfigSource(EngineConfigSource engineConfigSource) {
+ this.engineConfigSource = engineConfigSource;
+ }
+}
diff --git a/src/main/java/com/mesalab/qgw/service/impl/QueryJobServiceImpl.java b/src/main/java/com/mesalab/qgw/service/impl/QueryJobServiceImpl.java
new file mode 100644
index 00000000..93c5fe80
--- /dev/null
+++ b/src/main/java/com/mesalab/qgw/service/impl/QueryJobServiceImpl.java
@@ -0,0 +1,494 @@
+package com.mesalab.qgw.service.impl;
+
+import cn.hutool.core.bean.BeanUtil;
+import cn.hutool.core.date.DatePattern;
+import cn.hutool.core.util.BooleanUtil;
+import cn.hutool.core.util.StrUtil;
+import cn.hutool.json.JSONUtil;
+import cn.hutool.log.Log;
+import cn.hutool.log.LogFactory;
+import com.geedgenetworks.utils.DateUtils;
+import com.geedgenetworks.utils.StringUtil;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.jfinal.plugin.activerecord.Db;
+import com.mesalab.common.entity.BaseResult;
+import com.mesalab.common.entity.BaseResultGenerator;
+import com.mesalab.common.enums.*;
+import com.mesalab.common.utils.HazelcastInstanceMapUtil;
+import com.mesalab.common.utils.RandomUtil;
+import com.mesalab.common.utils.sqlparser.AutoPeriodHelper;
+import com.mesalab.qgw.constant.QGWMessageConst;
+import com.mesalab.qgw.exception.QGWBusinessException;
+import com.mesalab.common.exception.CommonErrorCode;
+import com.mesalab.qgw.model.basic.*;
+import com.mesalab.qgw.service.*;
+import com.mesalab.services.common.enums.MetricFunction;
+import com.mesalab.services.common.enums.MetricType;
+import com.mesalab.services.common.property.SqlPropertySourceFactory;
+import com.mesalab.services.configuration.JobConfig;
+import com.mesalab.services.service.JobService;
+import com.mesalab.services.service.impl.JobExecuteService;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.EnvironmentAware;
+import org.springframework.context.annotation.PropertySource;
+import org.springframework.core.env.Environment;
+import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
+import org.springframework.stereotype.Service;
+
+import java.util.*;
+import java.util.concurrent.*;
+import java.util.stream.Collectors;
+
+/**
+ * Creates and tracks ad-hoc and saved query jobs (SQL and DSL) across execution modes.
+ *
+ * @Classname QueryJobServiceImpl
+ * @Date 2023/12/6 18:10
+ * @Author wWei
+ */
+@Service
+@PropertySource(value = "classpath:job-sql-template.sql", factory = SqlPropertySourceFactory.class)
+public class QueryJobServiceImpl implements QueryJobService, EnvironmentAware {
+ private static final Log log = LogFactory.get();
+ private Environment env;
+ private SQLSyncQueryService sqlSyncQueryService;
+ private JobService jobService;
+ private JobExecuteService jobExecuteService;
+ private DSLService dslService;
+ private TrafficSpectrumDslService trafficSpectrumDslService;
+ private EngineConfigSource engineConfigSource;
+ private JobConfig jobConfig;
+
+
+    /**
+     * Creates a persistent "saved query" job backed by the job table.
+     *
+     * <p>Validates the id and the SQL syntax, escapes the statement so it can be
+     * embedded as a SQL string literal, then inserts the job row using the
+     * SAVED_QUERY_JOB_INIT SQL template loaded from the environment.
+     *
+     * @param request saved-query request; an id is generated when blank
+     * @return creation result carrying the job id
+     */
+    @Override
+    public BaseResult createSQLSavedQuery(SqlQueryRequestParam request) {
+        initAndValidationID(request);
+        syntaxParseSql(request.getStatement());
+        // Escape for embedding inside a single-quoted SQL literal: JSONUtil.quote
+        // handles backslashes/double quotes, single quotes are escaped by hand.
+        String sql = JSONUtil.quote(request.getStatement(), false).replace("'", "\\'");
+        // Seconds since epoch — used for both created and updated timestamps.
+        long currentTime = System.currentTimeMillis() / 1000;
+        Db.update(String.format(Objects.requireNonNull(env.getProperty("SAVED_QUERY_JOB_INIT")), request.getId(), sql, currentTime, currentTime));
+        log.info("Committed a SQL Saved Query Job, id is: {}", request.getId());
+        return BaseResultGenerator.successCreate(buildJobInfoOfCreated(request.getId()));
+    }
+
+    /**
+     * Creates a SQL ad-hoc query job and dispatches it by execution mode:
+     * ONESHOT runs synchronously and returns the payload; NORMAL registers a cache
+     * entry and runs asynchronously; BLOCKING runs synchronously but caches the
+     * result and returns only the job-id envelope.
+     *
+     * @param sqlQueryRequestParam request; an id is generated when blank
+     * @return query payload (ONESHOT) or job-id envelope (NORMAL/BLOCKING)
+     * @throws QGWBusinessException when the execution mode is unsupported
+     */
+    @Override
+    public BaseResult createSQLAdHocQuery(SqlQueryRequestParam sqlQueryRequestParam) {
+        initAndValidationID(sqlQueryRequestParam);
+        // Dry-run requests only validate syntax; otherwise execute for real.
+        String option = sqlQueryRequestParam.isDryRun()
+                ? QueryOption.SYNTAX_VALIDATION.getValue() : QueryOption.REAL_TIME.getValue();
+        long start = System.currentTimeMillis();
+        ExecutionMode execMode = sqlQueryRequestParam.getExecutionMode();
+        if (ExecutionMode.ONESHOT.equals(execMode)) {
+            return sqlSyncQueryService.executeQuery(SQLQueryContext.builder()
+                    .originalSQL(sqlQueryRequestParam.getStatement())
+                    .option(option)
+                    .format(sqlQueryRequestParam.getOutputMode().getValue())
+                    .build());
+        } else if (ExecutionMode.NORMAL.equals(execMode)) {
+            HazelcastInstanceMapUtil.put(sqlQueryRequestParam.getId(), new QueryCache(sqlQueryRequestParam.getId()));
+            jobExecuteService.addExecutorSql(sqlQueryRequestParam);
+            return BaseResultGenerator.successCreate(buildJobInfoOfCreated(sqlQueryRequestParam.getId()));
+        } else if (ExecutionMode.BLOCKING.equals(execMode)) {
+            BaseResult baseResult = sqlSyncQueryService.executeQuery(SQLQueryContext.builder().originalSQL(sqlQueryRequestParam.getStatement()).option(option).format(sqlQueryRequestParam.getOutputMode().getValue()).build());
+            QueryCache queryCache = buildQueryCacheOfDone(sqlQueryRequestParam.getId(), start, baseResult);
+            HazelcastInstanceMapUtil.put(sqlQueryRequestParam.getId(), queryCache);
+            return BaseResultGenerator.successCreate(buildJobInfoOfCreated(sqlQueryRequestParam.getId()));
+        }
+        throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.BAD_REQUEST_PARAM_SYNTAX_EXCEPTION.getCode(),
+                String.format(CommonErrorCode.BAD_REQUEST_PARAM_SYNTAX_EXCEPTION.getMessage(), "not Supported"));
+    }
+
+    /**
+     * Creates a DSL ad-hoc query job and dispatches it by execution mode:
+     * <ul>
+     *   <li>ONESHOT  — run synchronously and return the result payload directly;</li>
+     *   <li>NORMAL   — register a cache entry, run asynchronously, return the job id;</li>
+     *   <li>BLOCKING — run synchronously (bounded by the configured timeout) but
+     *       return only the job-id envelope; the result stays in the cache.</li>
+     * </ul>
+     * Special job names (field discovery, packet combine, the traffic-spectrum family)
+     * each have a dedicated dispatch path.
+     *
+     * @param request DSL request; an id is generated when blank
+     * @return result payload (ONESHOT) or job-id envelope (NORMAL/BLOCKING)
+     * @throws QGWBusinessException on unsupported mode, timeout or interruption
+     */
+    @Override
+    public BaseResult createDSLAdHocQuery(DSLQueryRequestParam request) {
+        initAndValidationID(request);
+        AutoPeriodHelper.buildDslGranularity(request);
+        long start = System.currentTimeMillis();
+        ExecutionMode execMode = request.getExecutionMode();
+        if (ExecutionMode.ONESHOT.equals(execMode)) {
+            if (JobConfig.FIELD_DISCOVERY.equals(request.getName())) {
+                execAdnWaitFieldDiscoveryDone(request.getId(), request);
+                QueryCache queryCache = rebuildFieldDiscoveryQueryCache(getAdhocQueryCacheAndRefresh(request.getId()));
+                HazelcastInstanceMapUtil.remove(request.getId());
+                BaseResult<Object> baseResult = queryCache.getBaseResult();
+                if (isJobDoneSuccessfully(baseResult.getJob())) {
+                    return BaseResultGenerator.success(baseResult.getStatistics(), null, baseResult.getOutputMode(), baseResult.getMeta(), baseResult.getData());
+                }
+                return BaseResultGenerator.error(baseResult.getMessage());
+            } else if (JobConfig.DATAPATH_PACKET_COMBINE.equals(request.getName())) {
+                return jobExecuteService.addDslExecutorPacketCombineWithoutCache(request);
+            } else if (isTrafficSpectrumJob(request.getName())) {
+                QueryCache queryCache = new QueryCache(request.getId());
+                queryCache.setType(request.getName());
+                HazelcastInstanceMapUtil.put(request.getId(), queryCache);
+                CountDownLatch countDownLatch = new CountDownLatch(1);
+                jobExecuteService.addDslExecutorTrafficSpectrumWithCache(request, countDownLatch);
+                try {
+                    boolean await = countDownLatch.await(jobConfig.getExecutionTimeout(), TimeUnit.MILLISECONDS);
+                    BaseResult baseResult = getAdHocQueryResultById(request.getId());
+                    HazelcastInstanceMapUtil.remove(request.getId());
+                    if (await && isJobDoneSuccessfully(baseResult.getJob())) {
+                        return BaseResultGenerator.success(baseResult.getStatistics(), null, baseResult.getOutputMode(), baseResult.getMeta(), baseResult.getData());
+                    }
+                    // Reached on latch timeout or on a canceled/failed/unfinished job.
+                    throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
+                            String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), "Job was timeout."));
+                } catch (InterruptedException e) {
+                    Thread.currentThread().interrupt();  // restore interrupt status
+                    throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
+                            String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), "Job was Interrupted. message: " + e.getMessage()));
+                }
+            } else {
+                // Plain DSL query: execute synchronously without caching.
+                DSLQueryContext dslQueryContext = BeanUtil.copyProperties(request, DSLQueryContext.class);
+                return dslService.execDsl(dslQueryContext, request.isDryRun());
+            }
+        } else if (ExecutionMode.NORMAL.equals(execMode)) {
+            QueryCache queryCache = new QueryCache(request.getId());
+            if (JobConfig.FIELD_DISCOVERY.equals(request.getName())) {
+                queryCache.setType(JobConfig.FIELD_DISCOVERY);
+                validFieldDiscovery(request);
+                HazelcastInstanceMapUtil.put(request.getId(), queryCache);
+                jobExecuteService.addExecutorFieldDiscovery(request);
+            } else if (JobConfig.DATAPATH_PACKET_COMBINE.equals(request.getName())) {
+                HazelcastInstanceMapUtil.put(request.getId(), queryCache);
+                jobExecuteService.addExecutorDslPacketCombineWithCache(request, null);
+            } else if (isTrafficSpectrumJob(request.getName())) {
+                queryCache.setType(request.getName());
+                HazelcastInstanceMapUtil.put(request.getId(), queryCache);
+                jobExecuteService.addDslExecutorTrafficSpectrumWithCache(request, null);
+            } else {
+                HazelcastInstanceMapUtil.put(request.getId(), queryCache);
+                jobExecuteService.addExecutorDsl(request);
+            }
+            return BaseResultGenerator.successCreate(buildJobInfoOfCreated(request.getId()));
+        } else if (ExecutionMode.BLOCKING.equals(execMode)) {
+            if (JobConfig.FIELD_DISCOVERY.equals(request.getName())) {
+                execAdnWaitFieldDiscoveryDone(request.getId(), request);
+                return BaseResultGenerator.successCreate(buildJobInfoOfCreated(request.getId()));
+            } else if (JobConfig.DATAPATH_PACKET_COMBINE.equals(request.getName())) {
+                QueryCache queryCache = new QueryCache(request.getId());
+                HazelcastInstanceMapUtil.put(request.getId(), queryCache);
+                CountDownLatch countDownLatch = new CountDownLatch(1);
+                jobExecuteService.addExecutorDslPacketCombineWithCache(request, countDownLatch);
+                awaitJobCompletion(countDownLatch);
+                return BaseResultGenerator.successCreate(buildJobInfoOfCreated(request.getId()));
+            } else if (isTrafficSpectrumJob(request.getName())) {
+                QueryCache queryCache = new QueryCache(request.getId());
+                queryCache.setType(request.getName());
+                HazelcastInstanceMapUtil.put(request.getId(), queryCache);
+                CountDownLatch countDownLatch = new CountDownLatch(1);
+                jobExecuteService.addDslExecutorTrafficSpectrumWithCache(request, countDownLatch);
+                awaitJobCompletion(countDownLatch);
+                return BaseResultGenerator.successCreate(buildJobInfoOfCreated(request.getId()));
+            } else {
+                DSLQueryContext dslQueryContext = BeanUtil.copyProperties(request, DSLQueryContext.class);
+                BaseResult baseResult = dslService.execDsl(dslQueryContext, request.isDryRun());
+                QueryCache queryCache = buildQueryCacheOfDone(request.getId(), start, baseResult);
+                HazelcastInstanceMapUtil.put(request.getId(), queryCache);
+                return BaseResultGenerator.successCreate(buildJobInfoOfCreated(request.getId()));
+            }
+        }
+        throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.BAD_REQUEST_PARAM_SYNTAX_EXCEPTION.getCode(),
+                String.format(CommonErrorCode.BAD_REQUEST_PARAM_SYNTAX_EXCEPTION.getMessage(), "not Supported"));
+    }
+
+    /** True when the job name belongs to the traffic-spectrum family of DSL jobs. */
+    private static boolean isTrafficSpectrumJob(String name) {
+        return JobConfig.TRAFFIC_SPECTRUM_SUMMARY.equals(name)
+                || JobConfig.TRAFFIC_SPECTRUM_UNIQUE_CLIENT_AND_SERVER_IPS.equals(name)
+                || JobConfig.TRAFFIC_SPECTRUM_APP_DISTRIBUTION.equals(name)
+                || JobConfig.TRAFFIC_SPECTRUM_NETWORK_THROUGHPUT_TREND.equals(name)
+                || JobConfig.TRAFFIC_SPECTRUM_CLIENT_IP_CONNECT_APPLICATION_USAGE.equals(name);
+    }
+
+    /** True when the cached job flags read: not canceled, not failed, and done. */
+    private static boolean isJobDoneSuccessfully(Map<String, Object> job) {
+        return !BooleanUtil.toBoolean(String.valueOf(job.get(JobConfig.IS_CANCELED)))
+                && !BooleanUtil.toBoolean(String.valueOf(job.get(JobConfig.IS_FAILED)))
+                && BooleanUtil.toBoolean(String.valueOf(job.get(JobConfig.IS_DONE)));
+    }
+
+    /**
+     * Waits on a blocking job's latch up to the configured timeout, translating a
+     * timeout or interruption into a QGWBusinessException (500).
+     */
+    private void awaitJobCompletion(CountDownLatch countDownLatch) {
+        try {
+            boolean await = countDownLatch.await(jobConfig.getExecutionTimeout(), TimeUnit.MILLISECONDS);
+            if (!await) {
+                throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
+                        String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), "Job was timeout."));
+            }
+        } catch (InterruptedException e) {
+            Thread.currentThread().interrupt();  // restore interrupt status
+            throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
+                    String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), "Job was Interrupted. message: " + e.getMessage()));
+        }
+    }
+
+    /**
+     * Registers a field-discovery cache entry, submits the job, and blocks until the
+     * executor finishes. The executor's outcome (success or failure) is reflected in
+     * the cache, not in this method's control flow.
+     *
+     * <p>BUGFIX: the original implementation busy-spun on {@code Future.isDone()},
+     * pegging a CPU core for the whole job duration; blocking on {@code Future.get()}
+     * waits without burning CPU and is otherwise equivalent.
+     */
+    private void execAdnWaitFieldDiscoveryDone(String id, DSLQueryRequestParam request) {
+        QueryCache queryCacheStart = new QueryCache(id);
+        queryCacheStart.setType(JobConfig.FIELD_DISCOVERY);
+        validFieldDiscovery(request);
+        HazelcastInstanceMapUtil.put(id, queryCacheStart);
+        Future<Boolean> booleanFuture = jobExecuteService.addExecutorFieldDiscovery(request);
+        try {
+            booleanFuture.get();
+        } catch (InterruptedException e) {
+            // The original spin loop was uninterruptible; preserve "return regardless"
+            // semantics but restore the interrupt flag for upstream code.
+            Thread.currentThread().interrupt();
+        } catch (ExecutionException e) {
+            // The spin loop also returned normally when the job failed (isDone() is
+            // true for exceptional completion); keep that behavior but record it.
+            log.warn("Field discovery job {} finished exceptionally: {}", id, e.getMessage());
+        }
+    }
+
+    /**
+     * Builds a finished-state QueryCache for a synchronously executed job, stamping
+     * completion flags, UTC start/end times and — on failure — the failure reason.
+     *
+     * @param id         job id the cache entry is keyed by
+     * @param start      job start time in epoch milliseconds
+     * @param baseResult the execution result to wrap
+     */
+    private QueryCache buildQueryCacheOfDone(String id, long start, BaseResult baseResult) {
+        QueryCache queryCache = new QueryCache(id);
+        Map<String, Object> jobInfo = queryCache.getBaseResult().getJob();
+        jobInfo.put(JobConfig.IS_DONE, true);
+        jobInfo.put(JobConfig.DONE_PROGRESS, 1);
+        if (!baseResult.isSuccess()) {
+            // BUGFIX: a failed result must mark the job as failed (the original put
+            // `false`, so failed jobs looked successful to status readers and their
+            // ids could never be reused via initAndValidationID).
+            jobInfo.put(JobConfig.IS_FAILED, true);
+            jobInfo.put(JobConfig.REASON, baseResult.getMessage());
+        }
+        jobInfo.put(JobConfig.START_TIME, DateUtils.convertTimestampToString(start / 1000, DatePattern.UTC_PATTERN));
+        jobInfo.put(JobConfig.END_TIME, DateUtils.convertTimestampToString(System.currentTimeMillis() / 1000, DatePattern.UTC_PATTERN));
+        baseResult.setJob(jobInfo);
+        queryCache.setBaseResult(baseResult);
+        return queryCache;
+    }
+
+    /**
+     * Fetches the cached result of an ad-hoc query and refreshes its last-access
+     * time. Returns an empty success result when the id is unknown (cache miss or
+     * expired entry).
+     */
+    @Override
+    public BaseResult getAdHocQueryResultById(String id) {
+        QueryCache queryCache = getAdhocQueryCacheAndRefresh(id);
+        if (queryCache == null) {
+            return BaseResultGenerator.success(Lists.newArrayList());
+        }
+        // Some job types post-process their raw cached payload before it is returned.
+        if (JobConfig.FIELD_DISCOVERY.equals(queryCache.getType())) {
+            queryCache = rebuildFieldDiscoveryQueryCache(queryCache);
+        } else if (JobConfig.TRAFFIC_SPECTRUM_CLIENT_IP_CONNECT_APPLICATION_USAGE.equals(queryCache.getType())) {
+            queryCache = rebuildTrafficSpectrumCIPConnectAppUsageQueryCache(queryCache);
+        }
+        BaseResult<Object> baseResult = queryCache.getBaseResult();
+        return BaseResultGenerator.success(baseResult.getStatistics(), baseResult.getJob(), baseResult.getOutputMode(), baseResult.getMeta(), baseResult.getData());
+    }
+
+    /** Returns the stored result of a saved (persistent) query job. */
+    @Override
+    public BaseResult getSavedQueryResultById(String id) {
+        return jobService.getSavedQueryResult(id);
+    }
+
+    /**
+     * Returns only the job-status section of a cached ad-hoc query, refreshing its
+     * last-access time; empty success result when the id is unknown.
+     */
+    @Override
+    public BaseResult getAdHocQueryStatusById(String id) {
+        QueryCache queryCache = getAdhocQueryCacheAndRefresh(id);
+        if (queryCache == null) {
+            return BaseResultGenerator.success(Lists.newArrayList());
+        }
+        return BaseResultGenerator.success(null, queryCache.getBaseResult().getJob(), null, null, null);
+    }
+
+    /** Returns the status of a saved (persistent) query job. */
+    @Override
+    public BaseResult getSavedQueryStatusById(String id) {
+        return jobService.getSavedQueryStatus(id);
+    }
+
+    /** Batch variant: results of several saved query jobs. */
+    @Override
+    public BaseResult getSavedQueryResult(List<String> ids) {
+        List<Map<String, Object>> result = jobService.batchSavedQueryResult(ids);
+        return BaseResultGenerator.success(result);
+    }
+
+    /**
+     * Batch variant: collects result payloads for the given ad-hoc query ids,
+     * silently skipping ids that are not in the cache. Field-discovery payloads are
+     * post-processed the same way as in the single-id lookup.
+     */
+    @Override
+    public BaseResult getAdHocQueryResult(List<String> ids) {
+        List<Object> results = Lists.newArrayList();
+        for (String id : ids) {
+            BaseResult<Object> baseResult;
+            QueryCache queryCache = getAdhocQueryCacheAndRefresh(id);
+            if (queryCache == null) {
+                continue;  // unknown/expired id — omit from the batch result
+            }
+            if (JobConfig.FIELD_DISCOVERY.equals(queryCache.getType())) {
+                queryCache = rebuildFieldDiscoveryQueryCache(queryCache);
+            }
+            baseResult = queryCache.getBaseResult();
+            // LinkedHashMap keeps the statistics/job/meta/data key order stable.
+            Map<String, Object> resultItem = Maps.newLinkedHashMap();
+            resultItem.put(JobConfig.STATISTICS, baseResult.getStatistics());
+            resultItem.put(JobConfig.JOB, baseResult.getJob());
+            resultItem.put(JobConfig.META, baseResult.getMeta());
+            resultItem.put(JobConfig.DATA, baseResult.getData());
+            results.add(resultItem);
+        }
+        return BaseResultGenerator.success(results);
+    }
+
+    /** Batch variant: statuses of several saved query jobs. */
+    @Override
+    public BaseResult getSavedQueryStatus(List<String> ids) {
+        List<Map<String, Object>> result = jobService.batchSavedQueryStatus(ids);
+        return BaseResultGenerator.success(result);
+    }
+
+    /**
+     * Batch variant: job-status sections of several cached ad-hoc queries; ids that
+     * are not in the cache are silently skipped.
+     */
+    @Override
+    public BaseResult getAdHocQueryStatus(List<String> ids) {
+        List<Object> results = Lists.newArrayList();
+        for (String id : ids) {
+            QueryCache queryCache = getAdhocQueryCacheAndRefresh(id);
+            if (queryCache == null) {
+                continue;
+            }
+            results.add(queryCache.getBaseResult().getJob());
+        }
+        return BaseResultGenerator.success(results);
+    }
+
+    /** Cancels (deletes) a saved query job by id. */
+    @Override
+    public BaseResult deleteSavedQueryById(String id) {
+        return jobService.cancelSavedQuery(id);
+    }
+
+    /**
+     * Validates a field-discovery request: a filter is mandatory; when a metric is
+     * supplied, the metric and its aggregate function must both be known, the target
+     * field list must be non-empty, and duplicate fields are removed in place.
+     *
+     * @throws QGWBusinessException when a required parameter is missing or invalid
+     */
+    private void validFieldDiscovery(DSLQueryRequestParam request) {
+        Object filter = request.getFilter();
+        if (StrUtil.isEmptyIfStr(filter)) {
+            throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+                    String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.QUERY_CUSTOM_FIELD_DISCOVERY_ERROR));
+        }
+
+        Object metric = request.getCustomRequestParam().get(JobConfig.KEY_CUSTOM_FIELD_DISCOVERY_METRIC);
+        if (StringUtil.isEmpty(metric)) {
+            // NOTE(review): when no metric is given, the fields parameter is not
+            // validated or de-duplicated at all — confirm this is intentional.
+            return;
+        }
+        boolean validMetric = MetricType.isValid(String.valueOf(metric));
+        boolean validFn = MetricFunction.isValid(String.valueOf(request.getCustomRequestParam().get(JobConfig.KEY_CUSTOM_FIELD_DISCOVERY_METRIC_FN)));
+        if (!validMetric || !validFn) {
+            throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+                    String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.QUERY_CUSTOM_FIELD_DISCOVERY_ERROR));
+        }
+        Object fields = request.getCustomRequestParam().get(JobConfig.KEY_CUSTOM_FIELD_DISCOVERY_FIELDS);
+        if (StrUtil.isEmptyIfStr(fields)) {
+            throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+                    String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.QUERY_CUSTOM_FIELD_DISCOVERY_ERROR));
+        }
+        // Unchecked cast: assumes callers supply a List<String> — TODO confirm upstream.
+        List<String> fieldList = (List<String>) request.getCustomRequestParam().get(JobConfig.KEY_CUSTOM_FIELD_DISCOVERY_FIELDS);
+        request.getCustomRequestParam().put(JobConfig.KEY_CUSTOM_FIELD_DISCOVERY_FIELDS, fieldList.stream().distinct().collect(Collectors.toList()));
+    }
+
+    /**
+     * Runs the statement through the engine in syntax-parse mode and rejects the
+     * request (400) when parsing fails.
+     *
+     * @throws QGWBusinessException when the SQL does not parse
+     */
+    private void syntaxParseSql(String sql) {
+        BaseResult baseResult = sqlSyncQueryService.executeQuery(SQLQueryContext.builder().originalSQL(sql).option(QueryOption.SYNTAX_PARSE.getValue()).build());
+        if (!baseResult.isSuccess()) {
+            throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+                    String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), baseResult.getMessage()));
+        }
+    }
+
+    /** Generates a fresh random job id (UUID). */
+    private static String generateJobId() {
+        return RandomUtil.getUUID();
+    }
+
+    /** Wraps a job id in the single-entry envelope returned by create endpoints. */
+    private static Map<String, Object> buildJobInfoOfCreated(String id) {
+        Map<String, Object> result = Maps.newHashMap();
+        result.put(JobConfig.JOB_ID, id);
+        return result;
+    }
+
+    /**
+     * Looks up an ad-hoc query's cache entry under the per-key map lock, bumping its
+     * last-access timestamp (touch) so eviction sees it as recently used.
+     *
+     * @return the refreshed cache entry, or {@code null} when the id is unknown
+     */
+    private static QueryCache getAdhocQueryCacheAndRefresh(String id) {
+        // BUGFIX: acquire the lock BEFORE entering try. In the original, lock() sat
+        // inside the try, so a failed acquisition still ran the finally and called
+        // unlock() on a lock never held, masking the real error with an
+        // IllegalMonitorStateException.
+        HazelcastInstanceMapUtil.retrieveMap().lock(id);
+        try {
+            QueryCache queryCache = HazelcastInstanceMapUtil.get(id);
+            if (queryCache == null) {
+                return null;
+            }
+            queryCache.setLatestQueryTimeMs(System.currentTimeMillis());
+            // Re-put so the updated timestamp is propagated to the distributed map.
+            HazelcastInstanceMapUtil.put(id, queryCache);
+            return queryCache;
+        } finally {
+            HazelcastInstanceMapUtil.retrieveMap().unlock(id);
+        }
+    }
+
+    /**
+     * Trims each discovered field's "topk" list in the cached payload down to at
+     * most 10 entries (copied out of the sublist view). No-op when there is no data.
+     */
+    private static QueryCache rebuildFieldDiscoveryQueryCache(QueryCache queryCache) {
+        Object dataObj = queryCache.getBaseResult().getData();
+        if (StrUtil.isEmptyIfStr(dataObj)) {
+            return queryCache;
+        }
+        for (Map row : (List<Map>) dataObj) {
+            for (Object fieldEntry : row.values()) {
+                Map<String, Object> fieldMap = (Map) fieldEntry;
+                Object topkObj = fieldMap.get("topk");
+                if (StrUtil.isEmptyIfStr(topkObj)) {
+                    continue;
+                }
+                List<Map<String, Object>> topkList = (List<Map<String, Object>>) topkObj;
+                int limit = Math.min(10, topkList.size());
+                fieldMap.put("topk", Lists.newArrayList(topkList.subList(0, limit)));
+            }
+        }
+        return queryCache;
+    }
+
+    /**
+     * Rewrites the cached client-IP/application usage payload into the
+     * internal/external bipartite-graph shape, capped by the engine-configured
+     * result limit. No-op when the cache holds no data.
+     */
+    private QueryCache rebuildTrafficSpectrumCIPConnectAppUsageQueryCache(QueryCache queryCache) {
+        BaseResult<Object> baseResult = queryCache.getBaseResult();
+        Object dataObj = baseResult.getData();
+        if (StringUtil.isNotEmpty(baseResult.getData())) {
+            List<Map> dataList = (List<Map>) dataObj;
+            // Only the first row is consulted — presumably the DSL returns a single
+            // aggregated row; TODO confirm against the query that fills this cache.
+            Map internalExternalBipartiteGraph = trafficSpectrumDslService.getInternalExternalBipartiteGraph(dataList.get(0), engineConfigSource.getTrafficSpectrumClientIPAppResultNum());
+            queryCache.getBaseResult().setData(Lists.newArrayList(internalExternalBipartiteGraph));
+            return queryCache;
+        }
+        return queryCache;
+    }
+
+    /**
+     * Ensures the request carries a job id: generates one when blank, and rejects
+     * (403) ids that already map to a live, non-failed job in the cache.
+     *
+     * <p>NOTE(review): the exists-check here and the later cache put are not atomic,
+     * so two concurrent requests with the same id could both pass — confirm whether
+     * that race is acceptable or needs the per-key map lock.
+     *
+     * @throws QGWBusinessException when the id belongs to an existing non-failed job
+     */
+    private static void initAndValidationID(CommonRequestParam request) {
+        String id = StrUtil.isBlank(request.getId()) ? generateJobId() : request.getId();
+        QueryCache queryCache = HazelcastInstanceMapUtil.get(id);
+        if (queryCache != null) {
+            Map<String, Object> job = queryCache.getBaseResult().getJob();
+            // A previously failed job may have its id reused; anything else may not.
+            boolean isFailed = BooleanUtil.toBoolean(String.valueOf(job.get(JobConfig.IS_FAILED)));
+            if (!isFailed) {
+                throw new QGWBusinessException(HttpStatusCodeEnum.REQ_FORBIDDEN.getCode(), CommonErrorCode.BAD_REQUEST_PARAM_SYNTAX_EXCEPTION.getCode(),
+                        String.format(CommonErrorCode.BAD_REQUEST_PARAM_SYNTAX_EXCEPTION.getMessage(), "Job id already exists"));
+            }
+        }
+        request.setId(id);
+    }
+
+    /** EnvironmentAware callback: stores the environment for SQL-template lookups. */
+    @Override
+    public void setEnvironment(Environment environment) {
+        this.env = environment;
+    }
+
+    // --- Spring setter-based dependency injection ---
+
+    @Autowired
+    public void setJobService(JobService jobService) {
+        this.jobService = jobService;
+    }
+
+    @Autowired
+    public void setJobExecuteService(JobExecuteService jobExecuteService) {
+        this.jobExecuteService = jobExecuteService;
+    }
+
+    @Autowired
+    public void setSqlSyncQueryService(SQLSyncQueryService sqlSyncQueryService) {
+        this.sqlSyncQueryService = sqlSyncQueryService;
+    }
+
+    @Autowired
+    public void setDslService(DSLService dslService) {
+        this.dslService = dslService;
+    }
+
+    @Autowired
+    public void setTrafficSpectrumDslService(TrafficSpectrumDslService trafficSpectrumDslService) {
+        this.trafficSpectrumDslService = trafficSpectrumDslService;
+    }
+
+    @Autowired
+    public void setEngineConfigSource(EngineConfigSource engineConfigSource) {
+        this.engineConfigSource = engineConfigSource;
+    }
+
+    @Autowired
+    public void setJobConfig(JobConfig jobConfig) {
+        this.jobConfig = jobConfig;
+    }
+} \ No newline at end of file
diff --git a/src/main/java/com/mesalab/qgw/service/impl/QueryServiceImpl.java b/src/main/java/com/mesalab/qgw/service/impl/QueryServiceImpl.java
deleted file mode 100644
index 6c11cac9..00000000
--- a/src/main/java/com/mesalab/qgw/service/impl/QueryServiceImpl.java
+++ /dev/null
@@ -1,618 +0,0 @@
-package com.mesalab.qgw.service.impl;
-
-import cn.hutool.core.exceptions.ExceptionUtil;
-import cn.hutool.core.text.StrFormatter;
-import cn.hutool.core.util.BooleanUtil;
-import cn.hutool.core.util.StrUtil;
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import com.google.common.base.CaseFormat;
-import com.google.common.base.Joiner;
-import com.google.common.collect.Maps;
-import com.mesalab.common.entity.BaseResult;
-import com.mesalab.common.entity.BaseResultGenerator;
-import com.mesalab.common.entity.SchemaBase;
-import com.mesalab.qgw.constant.QGWMessageConst;
-import com.mesalab.common.enums.*;
-import com.mesalab.common.utils.sqlparser.SQLFunctionUtil;
-import com.mesalab.common.utils.sqlparser.SQLHelper;
-import com.mesalab.common.utils.sqlparser.SQLSyntaxParserUtil;
-import com.mesalab.qgw.dialect.Dialect;
-import com.mesalab.qgw.dialect.FederationDialect;
-import com.mesalab.qgw.exception.QGWBusinessException;
-import com.mesalab.qgw.exception.QGWErrorCode;
-import com.mesalab.qgw.model.basic.*;
-import com.mesalab.qgw.model.basic.udf.UDF;
-import com.mesalab.qgw.model.basic.udf.UDFElements;
-import com.mesalab.qgw.service.QueryService;
-import com.mesalab.qgw.service.MetadataService;
-import com.mesalab.qgw.service.SystemService;
-import com.geedgenetworks.utils.StringUtil;
-import net.sf.jsqlparser.JSQLParserException;
-import net.sf.jsqlparser.expression.BinaryExpression;
-import net.sf.jsqlparser.expression.Expression;
-import net.sf.jsqlparser.expression.Function;
-import net.sf.jsqlparser.parser.CCJSqlParserUtil;
-import net.sf.jsqlparser.statement.DescribeStatement;
-import net.sf.jsqlparser.statement.ExplainStatement;
-import net.sf.jsqlparser.statement.ShowStatement;
-import net.sf.jsqlparser.statement.Statement;
-import net.sf.jsqlparser.statement.select.*;
-import net.sf.jsqlparser.util.TablesNamesFinder;
-import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Service;
-
-import javax.annotation.PostConstruct;
-import java.lang.reflect.Constructor;
-import java.util.*;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-@Service("queryService")
-public class QueryServiceImpl implements QueryService {
- private static final Log log = LogFactory.get();
- @Autowired
- private MetadataService metadataServiceTemp;
- @Autowired
- private SystemService systemServiceTemp;
-
- private static MetadataService metadataService;
- private static SystemService systemService;
-
- @PostConstruct
- private void initMetadataService() {
- metadataService = this.metadataServiceTemp;
- systemService = this.systemServiceTemp;
- }
-
- public static final int MAX_PARSER_LEVEL = 5;
- public static final String PARSER_EXCEPTION_CLASS_NAME = "net.sf.jsqlparser.parser.ParseException:";
- public static final String PARSER_EXCEPTION_IDENTIFICATION = "Was expecting one of:";
- public static final Pattern PATTERN_ANGLE_BRACKET = Pattern.compile("<(.*?)>", Pattern.CASE_INSENSITIVE);
-
-
- @Override
- public BaseResult executeQuery(QueryProfile queryProfile) {
- validateAndSetDefaultValue(queryProfile);
- init(queryProfile);
- Statement statement;
- try {
- statement = CCJSqlParserUtil.parse(queryProfile.getQuery());
- } catch (JSQLParserException e) {
- log.error("SQL Syntax Error: {}", e);
- String message = getJSQLParserSimpleErrorMessage(e);
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
- String.format(QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getMessage(),message));
- }
- BaseResult baseResult;
- log.debug("Request option :{}, SQL :{}", queryProfile.getOption(), queryProfile.getQuery());
- if (statement instanceof Select) {
- baseResult = executeSelectStatement(queryProfile);
- } else if (statement instanceof DescribeStatement) {
- DescribeStatement describeStatement = (DescribeStatement) statement;
- baseResult = executeDescStatement(queryProfile, describeStatement);
- } else if (statement instanceof ShowStatement) {
- baseResult = executeShowStatement(queryProfile);
- } else if (statement instanceof ExplainStatement) {
- baseResult = executeExplainStatement(queryProfile);
- } else {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
- String.format(QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getMessage(),QGWMessageConst.ONLY_SUPPORT_STATEMENT_OPERATIONS_FOR_SELECT));
- }
- return baseResult;
- }
-
- private void init(QueryProfile param) {
- if(SQLHelper.getTableName(param.getQuery()).isEmpty()){
- return;
- }
- String tableName = SQLHelper.getTableName(param.getQuery()).get(0);
- param.setDbType(metadataService.getDBTypeByTableName(tableName));
- param.setDialectDBType(SQLHelper.getDialectDBType(param.getDbType()));
- }
-
- private void validateAndSetDefaultValue(QueryProfile param) {
- if (StringUtil.isBlank(param.getOption())) {
- param.setOption(QueryOptionEnum.REAL_TIME.getValue());
- }
- if (StringUtil.isBlank(param.getFormat())) {
- param.setFormat(QueryFormatEnum.JSON.getValue());
- }
- if (StringUtil.isBlank(param.getQuery())) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),QGWMessageConst.SQL_MUST_BE_EXISTS));
- }
- if (Arrays.stream(QueryOptionEnum.values()).noneMatch(o -> o.getValue().equalsIgnoreCase(param.getOption()))) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),QGWMessageConst.OPTION_TYPE_ERROR));
- }
- if (Arrays.stream(QueryFormatEnum.values()).noneMatch(o -> o.getValue().equalsIgnoreCase(param.getFormat()))) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),QGWMessageConst.FORMAT_TYPE_ERROR));
- }
- }
-
-
- /**
- * Desc: 简化JSQLParser语法异常信息
- *
- * @param e
- * @return {@link String}
- * @created by wWei
- * @date 2021/4/26 9:49 上午
- */
- private String getJSQLParserSimpleErrorMessage(Throwable e) {
- String message = Joiner.on(" ").skipNulls().join(ExceptionUtil.getSimpleMessage(e), ExceptionUtil.getRootCauseMessage(e));
- message = message.replace(PARSER_EXCEPTION_CLASS_NAME, StringUtil.EMPTY);
- int end = message.indexOf(PARSER_EXCEPTION_IDENTIFICATION);
- message = message.substring(0, end < 0 ? message.length() : end);
- message = StrUtil.removeAllLineBreaks(message);
- Matcher matcher = PATTERN_ANGLE_BRACKET.matcher(message);
- if (matcher.find()) {
- message = matcher.replaceAll(StringUtil.EMPTY);
- }
- String[] split = message.split(" ");
- StringBuilder stringBuilder = new StringBuilder(message.length());
- for (String c : split) {
- if (StringUtil.isBlank(c)) {
- continue;
- }
- stringBuilder.append(" ").append(c);
- }
- return stringBuilder.toString().trim();
- }
-
- /**
- * 1.设置默认queryID
- * 2.为Select操作选择不同的执行动作,包含SQL解析(Syntax-parse)、SQL验证(Syntax-validation)、实时查询(Real-time)、长查询(Long-term)
- * @param param 请求参数
- * @return BaseResult 执行结果
- */
- private BaseResult executeSelectStatement(QueryProfile param) {
- if (QueryOptionEnum.SYNTAX_PARSE.getValue().equalsIgnoreCase(param.getOption())) {
- return executeSyntaxParse(param);
- } else if (QueryOptionEnum.SYNTAX_VALIDATION.getValue().equalsIgnoreCase(param.getOption())) {
- return executeSyntaxValidation(param);
- } else if (QueryOptionEnum.LONG_TERM.getValue().equalsIgnoreCase(param.getOption())) {
- param.setQueryId(systemService.getCustomQueryId(param.getResultId(), param.getQuery()));
- return executeLongTermQuery(param);
- } else {
- return executeRealTimeQuery(param);
- }
-
- }
-
- private BaseResult executeSyntaxParse(QueryProfile param) {
- return BaseResultGenerator.success(SQLSyntaxParserUtil.syntaxParse(param.getQuery()));
- }
-
- private BaseResult executeSyntaxValidation(QueryProfile param) {
- return new FederationDialect(param, getDBDialect(param).executeSyntaxCheck()).executeSyntaxCheck();
- }
-
- private BaseResult executeLongTermQuery(QueryProfile param) {
- return new FederationDialect(param, getDBDialect(param).executeQuery()).executeQuery();
- }
-
-
- private BaseResult executeRealTimeQuery(QueryProfile param) {
- if (BooleanUtil.isTrue(param.getSampled())) {
- return new FederationDialect(param, getDBDialect(param).executeSampleQuery()).executeSampleQuery();
- } else {
- return new FederationDialect(param, getDBDialect(param).executeQuery()).executeQuery();
- }
- }
-
-
- private Dialect getDBDialect(QueryProfile param) {
- convertQueryRecursive(param, parserSQLByAst(param.getQuery()));
- return getReflectiveDialectObject(param);
- }
-
-
- private Dialect getReflectiveDialectObject(QueryProfile param) {
- Dialect dialect;
- try {
- Class dialectClazz = Class.forName("com.mesalab.qgw.dialect." +
- CaseFormat.LOWER_CAMEL.to(CaseFormat.UPPER_CAMEL, param.getDbType()) + "Dialect");
- Constructor constructor = dialectClazz.getConstructor(QueryProfile.class);
- dialect = (Dialect) constructor.newInstance(param);
- } catch (ReflectiveOperationException | RuntimeException e) {
- log.error("Dialect conversion instance exception:{}",e);
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
- String.format(QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getMessage(),e.getMessage()));
- }
- return dialect;
- }
-
-
-
-
-
- /**
- * 递归解析SQLQuerySource ,拆分为数据库执行SQL与引擎执行的SQL source
- *
- * @param sqlQuerySource SQL解析对象
- */
- public static void convertQueryRecursive(QueryProfile param, SQLQuerySource sqlQuerySource) {
- SQLQuerySource engineQuerySource = null;
- SQLQuerySource dbQuerySource = null;
-
- SQLQuerySource indexUpSqlQuerySource = null;
- SQLQuerySource indexSqlQuerySource = sqlQuerySource;
- for (int i = 0; i < MAX_PARSER_LEVEL; i++) {
- if (indexSqlQuerySource.getUdfSet().size() > 0) {
- dbQuerySource = indexSqlQuerySource;
- break;
- }
- if (StringUtil.isEmpty(indexSqlQuerySource.getSubSelect())) {
- dbQuerySource = sqlQuerySource;
- indexUpSqlQuerySource = null;
- break;
- } else {
- indexUpSqlQuerySource = indexSqlQuerySource;
- indexSqlQuerySource = indexSqlQuerySource.getSubSqlQuerySources().get(0);
- if (indexSqlQuerySource.getUdfSet().size() > 0) {
- dbQuerySource = indexSqlQuerySource;
- break;
- }
- }
- }
-
- if (StringUtil.isNotEmpty(indexUpSqlQuerySource)) {
- engineQuerySource = new SQLQuerySource();
- String tableName = StringUtil.isEmpty(indexUpSqlQuerySource.getSubSelect().getAlias()) ? indexUpSqlQuerySource.getTableNames().get(0) : indexUpSqlQuerySource.getSubSelect().getAlias().getName();
- String replace = sqlQuerySource.getSqlBody().replace(indexUpSqlQuerySource.getSubSelect().toString(), tableName);
- engineQuerySource.setSqlBody(replace);
- engineQuerySource.getTableNames().add(0,tableName);
- engineQuerySource.setLimit(sqlQuerySource.getLimit());
- }
-
- param.setDbQuerySource(dbQuerySource);
- param.setEngineQuerySource(engineQuerySource);
- }
-
-
- /**
- * 解析SQL
- *
- * @param sql
- * @return
- */
- public static SQLQuerySource parserSQLByAst(String sql) {
-
-
- SQLQuerySource sqlQuerySource = new SQLQuerySource();
- try {
-
- Statement statement = CCJSqlParserUtil.parse(sql);
- Select selectStatement = null;
- if (statement instanceof Select) {
- selectStatement = (Select) statement;
- } else if (statement instanceof ExplainStatement) {
- selectStatement = ((ExplainStatement) statement).getStatement();
- } else {
- log.error("Not support DML Parser");
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
- String.format(QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getMessage(), QGWMessageConst.SQL_PARSE_ONLY_SUPPORT_SELECT));
- }
-
- sqlQuerySource.setSqlBody(String.valueOf(selectStatement.getSelectBody()));
- TablesNamesFinder tablesNamesFinder = new TablesNamesFinder();
- List<String> tableList = tablesNamesFinder.getTableList(selectStatement);
- tableList.forEach(o -> {
- int i = o.lastIndexOf(".");
- sqlQuerySource.getTableNames().add(i < 0 ? o : o.substring(i + 1));
- });
- sqlQuerySource.setPartitionKey(metadataService.getPartitionKey(sqlQuerySource.getTableNames().get(0)));
- SelectBody body = selectStatement.getSelectBody();
- if (body instanceof PlainSelect) { //单条查询
-
- PlainSelect select = (PlainSelect) body;
-
- FromItem fromItem = select.getFromItem();
- sqlQuerySource.setFromItem(fromItem);
- sqlQuerySource.setJoins(select.getJoins());
- Expression where = select.getWhere();
-
- if (where != null) {
- sqlQuerySource.setWhereExpression(where);
- }
-
- List<SelectItem> selectItemsList = select.getSelectItems();
- sqlQuerySource.setSelectItems(selectItemsList);
-
- for (SelectItem item : selectItemsList) {
- if (item instanceof SelectExpressionItem) {
- SelectExpressionItem expressionItem = ((SelectExpressionItem) item);
- if (StringUtil.isNotEmpty(expressionItem.getAlias())) {
- String aliasName = expressionItem.getAlias().getName();
- if ((aliasName.startsWith("\"") && aliasName.endsWith("\""))
- || ((aliasName.startsWith("`") && aliasName.endsWith("`")))) {
- aliasName = aliasName.substring(1, aliasName.length() - 1);
- }
- sqlQuerySource.getAliasFields().put(aliasName, expressionItem.getExpression().toString());
- } else {
- sqlQuerySource.getAliasFields().put(expressionItem.getExpression().toString(), expressionItem.getExpression().toString());
- }
- addUDFSet(sqlQuerySource.getUdfSet(), expressionItem.getExpression());//伪代码:后期需要在SQLQuerySource where对象中提取
-
- }
- }
-
- GroupByElement groupBy;
- if (StringUtil.isNotEmpty(groupBy = select.getGroupBy())) {
- sqlQuerySource.setGroupByElement(groupBy);
- }
- sqlQuerySource.setOrderByElements(select.getOrderByElements());
- FromItem subItem = select.getFromItem();
-
- if (subItem instanceof SubSelect) {
- SubSelect subSelect = (SubSelect) subItem;
- log.debug("存在子查询为: {}", subSelect);
- sqlQuerySource.setSubSelect(subSelect);
- sqlQuerySource.getSubSqlQuerySources().add(0, parserSQLByAst(String.valueOf(subSelect.getSelectBody())));
- }
-
-
- Limit limit = select.getLimit();
-
-
- if (limit != null) {
-
- if (StringUtil.isNotEmpty(limit.getOffset())) {
- sqlQuerySource.setLimit(StringUtil.setDefaultIfEmpty(limit.getOffset(), 0) + "," + limit.getRowCount());
- } else {
- sqlQuerySource.setLimit(String.valueOf(limit.getRowCount()));
- }
- }
-
-
- } else if (body instanceof SetOperationList) { // 连接查询
- SetOperationList setOperationList = (SetOperationList) body;
- List<SelectBody> selects = setOperationList.getSelects();
- //暂时只解析第一个结构,不接受不相同的where
- if (StringUtil.isNotEmpty(selects)) {
- SQLQuerySource parseSql = parserSQLByAst(selects.get(0).toString());
- sqlQuerySource.setUdfSet(parseSql.getUdfSet());
- sqlQuerySource.setWhereExpression(parseSql.getWhereExpression());
- sqlQuerySource.setGroupByElement(parseSql.getGroupByElement());
- sqlQuerySource.setAliasFields(parseSql.getAliasFields());
- }
- sqlQuerySource.setLimit(SQLHelper.INVALID_LIMIT_DESC);
- } else { //其它暂不支持
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
- String.format(QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getMessage(), QGWMessageConst.SQL_PARSE_ONLY_SUPPORT_SELECT));
- }
-
-
- } catch (JSQLParserException | RuntimeException e) {
- log.error("sqlParser error: {}", e);
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
- String.format(QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getMessage(),e.getMessage()));
-
- }
-
- return sqlQuerySource;
- }
-
-
- /**
- * 自定义函数封装
- *
- * @param udfSet
- * @param expr
- */
- private static void addUDFSet(Set<UDF> udfSet, Expression expr) {
- parserExpressionForFun(udfSet, expr);
- }
-
- private static void parserExpressionForFun(Set<UDF> udfSet, Expression expr) {
- if (expr instanceof Function) {
- Function fun = (Function) expr;
- if (SQLFunctionUtil.functions.keySet().contains(fun.getName().toUpperCase())) {
- List<Expression> expressions = fun.getParameters().getExpressions();
- UDF udf = getUDF(fun, expressions);
- udfSet.add(udf);
- }
- if (fun.getParameters() == null) {
- return;
- }
- for (Expression expression : fun.getParameters().getExpressions()) {
- parserExpressionForFun(udfSet, expression);
- }
- } else if (expr instanceof BinaryExpression) {
- BinaryExpression binary = (BinaryExpression) expr;
- Expression leftExpression = binary.getLeftExpression();
- parserExpressionForFun(udfSet, leftExpression);
- Expression rightExpression = binary.getRightExpression();
- parserExpressionForFun(udfSet, rightExpression);
- }
- }
-
- /**
- * 获取自定义函数类
- *
- * @param fun
- * @param expressions
- * @return
- */
- private static UDF getUDF(Function fun, List<Expression> expressions) {
- UDF udf;
- String funName = CaseFormat.UPPER_UNDERSCORE.to(CaseFormat.UPPER_UNDERSCORE, fun.getName());
- try {
- Class dialectClazz = Class.forName("com.mesalab.qgw.model.basic.udf." + CaseFormat.UPPER_UNDERSCORE.to(CaseFormat.UPPER_UNDERSCORE, fun.getName().toUpperCase()));
- Constructor constructor = dialectClazz.getConstructor(UDFElements.class);
- UDFElements udfElements = new UDFElements(funName, expressions);
- udf = (UDF) constructor.newInstance(udfElements);
- } catch (ReflectiveOperationException | RuntimeException e) {
- log.error("Custom function conversion instance exception:{}",e);
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
- String.format(QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getMessage(),QGWMessageConst.CUSTOM_FUNCTION_CONVERSION_INSTANCE_EXCEPTION));
- }
- return udf;
- }
-
-
-
- /**
- * 执行describe语句
- *
- * @param param
- * @param statement
- * @return
- */
- private BaseResult executeDescStatement(QueryProfile param, DescribeStatement statement) {
- String tableName = statement.getTable().getName();
- String dbType = param.getDbType();
- if (DBTypeEnum.CLICKHOUSE.getValue().equalsIgnoreCase(dbType)) {
- param.setDbType(DBTypeEnum.CLICKHOUSE.getValue());
- param.setDialectDBType(DBTypeEnum.CLICKHOUSE.getValue());
- } else if (DBTypeEnum.DRUID.getValue().equalsIgnoreCase(dbType)) {
- param.setDbType(DBTypeEnum.DRUID.getValue());
- param.setDialectDBType(DBTypeEnum.DRUID.getValue());
- param.setQuery("SELECT COLUMN_NAME as name, DATA_TYPE as type FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME = '" + tableName + "'");
- } else if (DBTypeEnum.HBASE.getValue().equalsIgnoreCase(dbType)) {
- param.setDbType(DBTypeEnum.HBASE.getValue());
- param.setDialectDBType(DBTypeEnum.HBASE.getValue());
- param.setQuery(StrFormatter.format("SELECT COLUMN_NAME as \"name\", DATA_TYPE as \"type\" FROM SYSTEM.CATALOG WHERE TABLE_NAME= '{}' AND COLUMN_NAME IS NOT NULL", tableName));
- } else {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
- String.format(QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getMessage(),
- tableName + ":" + QGWMessageConst.TABLE_NOT_EXIST));
- }
-
- param.setFormat(QueryFormatEnum.JSON.getValue());
- SQLQuerySource dbQuerySource = new SQLQuerySource();
- dbQuerySource.setSqlBody(param.getQuery());
- param.setDbQuerySource(dbQuerySource);
- Dialect dialect = getReflectiveDialectObject(param);
- BaseResult baseResult = dataEncapsulationOfSchema(dialect.executeAdministrativeQuery(), param);
-
- if (!baseResult.isSuccess()) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(),ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),QGWMessageConst.CHECK_TABLE));
- }
- return baseResult;
- }
-
- private BaseResult executeExplainStatement(QueryProfile param) {
- return getDBDialect(param).executeExplainPlan();
- }
-
- /**
- * 执行show语句
- *
- * @param param
- * @return
- */
- private BaseResult executeShowStatement(QueryProfile param) {
- return getAllDatabaseTables(param);
- }
-
-
- /**
- * 获取所有表信息
- *
- * @return
- */
- private BaseResult getAllDatabaseTables(QueryProfile queryProfile) {
- Map<String, Object> dataMap = new LinkedHashMap<>();
- dataMap.put("type", "enum");
- Arrays.stream(DBTypeEnum.values()).forEach(dbTypeEnum -> {
- QueryProfile dbParam = new QueryProfile();
- if (dbTypeEnum.getValue().equalsIgnoreCase(DBTypeEnum.CLICKHOUSE.getValue())) {
- dbParam.setQuery("show tables");
- dbParam.setDbType(DBTypeEnum.CLICKHOUSE.getValue());
- dbParam.setDialectDBType(SQLHelper.getDialectDBType(DBTypeEnum.CLICKHOUSE.getValue()));
- } else if (dbTypeEnum.getValue().equalsIgnoreCase(DBTypeEnum.DRUID.getValue())) {
- dbParam.setQuery("SELECT TABLE_NAME AS name FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_TYPE = 'TABLE'");
- dbParam.setDbType(DBTypeEnum.DRUID.getValue());
- dbParam.setDialectDBType(SQLHelper.getDialectDBType(DBTypeEnum.DRUID.getValue()));
- } else {
- return;
- }
- dbParam.setFormat(QueryFormatEnum.JSON.getValue());
- Dialect dialect = getReflectiveDialectObject(dbParam);
- BaseResult queryResult = dialect.executeAdministrativeQuery();
- if (queryResult.getStatus() == null || queryResult.getStatus() != ResultStatusEnum.SUCCESS.getCode()) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(),ResultCodeEnum.UNKNOWN_EXCEPTION.getCode() , queryResult.getMessage());
- }
- List tables = (ArrayList) queryResult.getData();
- List<String> list = new ArrayList<>();
- Iterator iterator = tables.iterator();
- while (iterator.hasNext()) {
- Map<String, String> next = (Map) iterator.next();
- list.add(next.get("name"));
- }
- BaseResult result = encapsulationShowTables(list, dbParam.getDbType());
-
- if (result.getStatus() != null && result.getStatus() == ResultStatusEnum.SUCCESS.getCode()) {
- Map dbResultMap = (Map) result.getData();
-
- if (StringUtil.isEmpty(dataMap.get("name"))) {
- dataMap.put("name", dbResultMap.get("name"));
- } else {
- dataMap.put("name", Joiner.on(",").join(dataMap.get("name"),dbResultMap.get("name")));
- }
-
- if (StringUtil.isEmpty(dataMap.get("symbols"))) {
- dataMap.put("symbols", dbResultMap.get("symbols"));
- } else {
- dataMap.put("symbols", Lists.newArrayList(dataMap.get("symbols"), dbResultMap.get("symbols")));
- }
- }
-
-
- });
-
- return BaseResultGenerator.success("ok", dataMap);
- }
-
-
- /**
- * 表描述信息结果封装
- *
- * @param result
- * @param param
- * @return
- */
- public BaseResult dataEncapsulationOfSchema(BaseResult result, QueryProfile param) {
- SchemaBase schema = new SchemaBase();
- schema.setName(SQLHelper.getTableName(param.getQuery()).get(0));
- List<Map> fields = new ArrayList<>();
- List<Map> list = (List<Map>) result.getData();
- for (Map resultMap : list) {
- Map schemaMap = Maps.newHashMap();
- schemaMap.put("name", resultMap.get("name"));
- schemaMap.put("type", resultMap.get("type"));
- fields.add(schemaMap);
- }
- schema.setFields(fields);
- result.setData(schema);
- return result;
- }
-
- /**
- * 库中包含表表名查询结果处理
- *
- * @param tables
- * @return
- */
- private BaseResult encapsulationShowTables(List tables, String database) {
- List<String> list = new ArrayList<>();
- Map<String, Object> date = new LinkedHashMap<>();
- date.put("type", "enum");
- date.put("name", database);
- Iterator iterator = tables.iterator();
- while (iterator.hasNext()) {
- list.add(String.valueOf(iterator.next()));
- }
- date.put("symbols", list);
- BaseResult<Map> baseResult = BaseResultGenerator.success("ok", null);
- baseResult.setData(date);
- return baseResult;
- }
-
-}
diff --git a/src/main/java/com/mesalab/qgw/service/impl/SQLSyncQueryServiceImpl.java b/src/main/java/com/mesalab/qgw/service/impl/SQLSyncQueryServiceImpl.java
new file mode 100644
index 00000000..0ba15064
--- /dev/null
+++ b/src/main/java/com/mesalab/qgw/service/impl/SQLSyncQueryServiceImpl.java
@@ -0,0 +1,666 @@
+package com.mesalab.qgw.service.impl;
+
+import cn.hutool.core.exceptions.ExceptionUtil;
+import cn.hutool.core.text.StrFormatter;
+import cn.hutool.core.util.BooleanUtil;
+import cn.hutool.core.util.StrUtil;
+import com.google.common.base.CaseFormat;
+import com.google.common.base.Joiner;
+import com.google.common.collect.Maps;
+import com.mesalab.common.entity.BaseResult;
+import com.mesalab.common.entity.BaseResultGenerator;
+import com.mesalab.common.entity.SchemaBase;
+import com.mesalab.common.utils.sqlparser.AutoPeriodHelper;
+import com.mesalab.qgw.constant.QGWMessageConst;
+import com.mesalab.common.enums.*;
+import com.mesalab.common.utils.sqlparser.SQLFunctionUtil;
+import com.mesalab.common.utils.sqlparser.SQLHelper;
+import com.mesalab.common.utils.sqlparser.SQLSyntaxParserUtil;
+import com.mesalab.qgw.dialect.Dialect;
+import com.mesalab.qgw.dialect.FederationDialect;
+import com.mesalab.qgw.exception.QGWBusinessException;
+import com.mesalab.common.exception.CommonErrorCode;
+import com.mesalab.qgw.model.basic.*;
+import com.mesalab.qgw.model.basic.udf.ROLLUP;
+import com.mesalab.qgw.model.basic.udf.TIME_FLOOR_WITH_FILL;
+import com.mesalab.qgw.model.basic.udf.UDF;
+import com.mesalab.qgw.model.basic.udf.UDFElements;
+import com.mesalab.qgw.service.SQLSyncQueryService;
+import com.mesalab.qgw.service.DatabaseService;
+import com.geedgenetworks.utils.StringUtil;
+import lombok.extern.slf4j.Slf4j;
+import net.sf.jsqlparser.JSQLParserException;
+import net.sf.jsqlparser.expression.BinaryExpression;
+import net.sf.jsqlparser.expression.Expression;
+import net.sf.jsqlparser.expression.Function;
+import net.sf.jsqlparser.parser.CCJSqlParserUtil;
+import net.sf.jsqlparser.statement.DescribeStatement;
+import net.sf.jsqlparser.statement.ExplainStatement;
+import net.sf.jsqlparser.statement.ShowStatement;
+import net.sf.jsqlparser.statement.Statement;
+import net.sf.jsqlparser.statement.select.*;
+import net.sf.jsqlparser.util.TablesNamesFinder;
+import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+
+import java.lang.reflect.Constructor;
+import java.util.*;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+@Service("sqlSyncQueryService")
+@Slf4j
+public class SQLSyncQueryServiceImpl implements SQLSyncQueryService {
+ private DatabaseService databaseService;
+
+ private final int MAX_PARSER_LEVEL = 5;
+ private final String PARSER_EXCEPTION_CLASS_NAME = "net.sf.jsqlparser.parser.ParseException:";
+ private final String PARSER_EXCEPTION_IDENTIFICATION = "Was expecting one of:";
+ private final Pattern PATTERN_ANGLE_BRACKET = Pattern.compile("<(.*?)>", Pattern.CASE_INSENSITIVE);
+
+
+ @Override
+ public BaseResult executeQuery(SQLQueryContext queryContext) {
+ checkQueryContextParam(queryContext);
+ parseAndSetConnectorType(queryContext);
+ Statement statement;
+ try {
+ statement = CCJSqlParserUtil.parse(queryContext.getOriginalSQL());
+ } catch (JSQLParserException e) {
+ log.error("Error Parsing SQL: {}", ExceptionUtil.getRootCauseMessage(e));
+ String message = getJSQLParserSimpleErrorMessage(e);
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getMessage(), message));
+ }
+ BaseResult baseResult;
+ log.debug("Request SQL Statement: {}", queryContext.getOriginalSQL());
+ queryContext.setOriginalSQL(AutoPeriodHelper.buildSqlGranularity(statement).toString());
+ if (statement instanceof Select) {
+ baseResult = executeSelectStatement(queryContext);
+ } else if (statement instanceof DescribeStatement) {
+ DescribeStatement describeStatement = (DescribeStatement) statement;
+ baseResult = executeDescStatement(queryContext, describeStatement);
+ } else if (statement instanceof ShowStatement) {
+ baseResult = executeShowStatement(queryContext);
+ } else if (statement instanceof ExplainStatement) {
+ baseResult = executeExplainStatement(queryContext);
+ } else {
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getMessage(), QGWMessageConst.ONLY_SUPPORT_STATEMENT_OPERATIONS_FOR_SELECT));
+ }
+ return baseResult;
+ }
+
+ private void parseAndSetConnectorType(SQLQueryContext queryContext) {
+ if (SQLHelper.getTableName(queryContext.getOriginalSQL()).isEmpty()) {
+ return;
+ }
+ String tableName = SQLHelper.getTableName(queryContext.getOriginalSQL()).get(0);
+ queryContext.setDbEngine(databaseService.getDBEngineByTableName(tableName));
+ queryContext.setSqlDialect(SQLHelper.getDialectType(queryContext.getDbEngine()));
+ }
+
+ private void checkQueryContextParam(SQLQueryContext queryContext) {
+ if (StringUtil.isBlank(queryContext.getOption())) {
+ queryContext.setOption(QueryOption.REAL_TIME.getValue());
+ }
+ if (StringUtil.isBlank(queryContext.getFormat())) {
+ queryContext.setFormat(OutputMode.JSON.getValue());
+ }
+ if (StringUtil.isBlank(queryContext.getOriginalSQL())) {
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.SQL_MUST_BE_EXISTS));
+ }
+ if (Arrays.stream(QueryOption.values()).noneMatch(o -> o.getValue().equalsIgnoreCase(queryContext.getOption()))) {
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.OPTION_TYPE_ERROR));
+ }
+ if (Arrays.stream(OutputMode.values()).noneMatch(o -> o.getValue().equalsIgnoreCase(queryContext.getFormat()))) {
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.FORMAT_TYPE_ERROR));
+ }
+ }
+
+
+ /**
+ * Desc: 简化JSQLParser语法异常信息
+ *
+ * @param e
+ * @return {@link String}
+ * @created by wWei
+ * @date 2021/4/26 9:49 上午
+ */
+ private String getJSQLParserSimpleErrorMessage(Throwable e) {
+ String message = Joiner.on(" ").skipNulls().join(ExceptionUtil.getSimpleMessage(e), ExceptionUtil.getRootCauseMessage(e));
+ message = message.replace(PARSER_EXCEPTION_CLASS_NAME, StringUtil.EMPTY);
+ int end = message.indexOf(PARSER_EXCEPTION_IDENTIFICATION);
+ message = message.substring(0, end < 0 ? message.length() : end);
+ message = StrUtil.removeAllLineBreaks(message);
+ Matcher matcher = PATTERN_ANGLE_BRACKET.matcher(message);
+ if (matcher.find()) {
+ message = matcher.replaceAll(StringUtil.EMPTY);
+ }
+ String[] split = message.split(" ");
+ StringBuilder stringBuilder = new StringBuilder(message.length());
+ for (String c : split) {
+ if (StringUtil.isBlank(c)) {
+ continue;
+ }
+ stringBuilder.append(" ").append(c);
+ }
+ return stringBuilder.toString().trim();
+ }
+
+ /**
+ * 1.设置默认queryID
+ * 2.为Select操作选择不同的执行动作,包含SQL解析(Syntax-parse)、SQL验证(Syntax-validation)、实时查询(Real-time)、长查询(Long-term)
+ *
+ * @param queryContext 请求参数
+ * @return BaseResult 执行结果
+ */
+ private BaseResult executeSelectStatement(SQLQueryContext queryContext) {
+ if (QueryOption.SYNTAX_PARSE.getValue().equalsIgnoreCase(queryContext.getOption())) {
+ return executeSyntaxParse(queryContext);
+ } else if (QueryOption.SYNTAX_VALIDATION.getValue().equalsIgnoreCase(queryContext.getOption())) {
+ return executeSyntaxValidation(queryContext);
+ } else if (QueryOption.LONG_TERM.getValue().equalsIgnoreCase(queryContext.getOption())) {
+ queryContext.setQueryId(databaseService.getCustomQueryId(queryContext.getResultId(), queryContext.getOriginalSQL()));
+ return executeLongTermQuery(queryContext);
+ } else {
+ return executeRealTimeQuery(queryContext);
+ }
+
+ }
+
+ private BaseResult executeSyntaxParse(SQLQueryContext param) {
+ return BaseResultGenerator.success(SQLSyntaxParserUtil.syntaxParse(param.getOriginalSQL()));
+ }
+
+ private BaseResult executeSyntaxValidation(SQLQueryContext queryContext) {
+ return new FederationDialect(queryContext, getDialect(queryContext).executeSyntaxValidation()).executeSyntaxValidation();
+ }
+
+ private BaseResult executeLongTermQuery(SQLQueryContext param) {
+ return new FederationDialect(param, getDialect(param).executeQuery()).executeQuery();
+ }
+
+
+ private BaseResult executeRealTimeQuery(SQLQueryContext queryContext) {
+ if (BooleanUtil.isTrue(queryContext.getSampled())) {
+ return new FederationDialect(queryContext, getDialect(queryContext).executeSampleQuery()).executeSampleQuery();
+ } else {
+ return new FederationDialect(queryContext, getDialect(queryContext).executeQuery()).executeQuery();
+ }
+ }
+
+
+ private Dialect getDialect(SQLQueryContext queryContext) {
+ buildSelectStatement(queryContext, parserSQLByAst(queryContext.getOriginalSQL()));
+ return getReflectiveDialectObject(queryContext);
+ }
+
+
+ private Dialect getReflectiveDialectObject(SQLQueryContext queryContext) {
+ Dialect dialect;
+ try {
+ Class dialectClazz = Class.forName("com.mesalab.qgw.dialect." +
+ CaseFormat.LOWER_CAMEL.to(CaseFormat.UPPER_CAMEL, queryContext.getDbEngine()) + "Dialect");
+ Constructor constructor = dialectClazz.getConstructor(SQLQueryContext.class);
+ dialect = (Dialect) constructor.newInstance(queryContext);
+ } catch (ReflectiveOperationException | RuntimeException e) {
+ log.error("Dialect conversion instance exception:{}", e);
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getMessage(), e.getMessage()));
+ }
+ return dialect;
+ }
+
+ /**
+ * Parse a nested SQL statement into dbSelectStatement and FederationStatement object
+ *
+ * @param sqlQueryContext query context
+ * @param originalSelectStatement original SQL statement
+ */
+ public void buildSelectStatement(SQLQueryContext sqlQueryContext, SelectStatement originalSelectStatement) {
+ SelectStatement federationSelectStatement = null;
+ SelectStatement dbSelectStatement = null;
+ SelectStatement parentSelectStatement = null;
+ SelectStatement cursorSelectStatement = originalSelectStatement;
+ for (int i = 0; i < MAX_PARSER_LEVEL; i++) {
+ if (hasUDF(cursorSelectStatement)) {
+ dbSelectStatement = cursorSelectStatement;
+ break;
+ }
+
+ if (isEmptySubSelect(cursorSelectStatement)) {
+ dbSelectStatement = originalSelectStatement;
+ parentSelectStatement = null;
+ break;
+ } else {
+ parentSelectStatement = cursorSelectStatement;
+ cursorSelectStatement = cursorSelectStatement.getSubSqlQuerySources().get(0);
+ if (hasUDF(cursorSelectStatement)) {
+ dbSelectStatement = cursorSelectStatement;
+ break;
+ }
+ }
+ }
+
+ if (StringUtil.isNotEmpty(parentSelectStatement)) {
+ federationSelectStatement = new SelectStatement();
+ String tableName = StringUtil.isEmpty(parentSelectStatement.getSubSelect().getAlias()) ? parentSelectStatement.getTableNames().get(0) : parentSelectStatement.getSubSelect().getAlias().getName();
+ String replace = originalSelectStatement.getSqlBody().replace(parentSelectStatement.getSubSelect().toString(), tableName);
+ federationSelectStatement.setSqlBody(replace);
+ federationSelectStatement.getTableNames().add(0, tableName);
+ federationSelectStatement.setLimit(originalSelectStatement.getLimit());
+ }
+
+ sqlQueryContext.setDbSelectStatement(dbSelectStatement);
+ sqlQueryContext.setFederationSelectStatement(federationSelectStatement);
+ }
+
+ private boolean hasUDF(SelectStatement selectStatement) {
+ return selectStatement != null && selectStatement.getUdfSet() != null && !selectStatement.getUdfSet().isEmpty();
+ }
+
+ private boolean isEmptySubSelect(SelectStatement selectStatement) {
+ return selectStatement == null || StringUtil.isEmpty(selectStatement.getSubSelect());
+ }
+
+
+ /**
+ * 解析SQL
+ *
+ * @param sql
+ * @return
+ */
+ private SelectStatement parserSQLByAst(String sql) {
+ SelectStatement sqlQuerySource = new SelectStatement();
+ try {
+
+ Statement statement = CCJSqlParserUtil.parse(sql);
+ Select selectStatement = null;
+ if (statement instanceof Select) {
+ selectStatement = (Select) statement;
+ } else if (statement instanceof ExplainStatement) {
+ selectStatement = ((ExplainStatement) statement).getStatement();
+ } else {
+ log.error("Not support DML Parser");
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getMessage(), QGWMessageConst.SQL_PARSE_ONLY_SUPPORT_SELECT));
+ }
+
+ sqlQuerySource.setSqlBody(String.valueOf(selectStatement.getSelectBody()));
+ List<String> tableList = SQLHelper.getTableName(selectStatement);
+ tableList.forEach(o -> {
+ sqlQuerySource.getTableNames().add(o);
+ });
+ sqlQuerySource.setPartitionKey(databaseService.getPartitionKey(sqlQuerySource.getTableNames().get(0)));
+ SelectBody body = selectStatement.getSelectBody();
+ if (body instanceof PlainSelect) { //单条查询
+
+ PlainSelect select = (PlainSelect) body;
+
+ FromItem fromItem = select.getFromItem();
+ sqlQuerySource.setFromItem(fromItem);
+ sqlQuerySource.setJoins(select.getJoins());
+ Expression where = select.getWhere();
+
+ if (where != null) {
+ sqlQuerySource.setWhereExpression(where);
+ }
+
+ List<SelectItem> selectItemsList = select.getSelectItems();
+ sqlQuerySource.setSelectItems(selectItemsList);
+
+ for (SelectItem item : selectItemsList) {
+ if (item instanceof SelectExpressionItem) {
+ SelectExpressionItem expressionItem = ((SelectExpressionItem) item);
+ if (StringUtil.isNotEmpty(expressionItem.getAlias())) {
+ String aliasName = expressionItem.getAlias().getName();
+ aliasName = SQLHelper.removeQuotesAndBackticks(aliasName);
+ sqlQuerySource.getAliasFields().put(aliasName, expressionItem.getExpression().toString());
+ } else {
+ sqlQuerySource.getAliasFields().put(expressionItem.getExpression().toString(), expressionItem.getExpression().toString());
+ }
+ addUDFSet(sqlQuerySource.getUdfSet(), expressionItem.getExpression());//伪代码:后期需要在SQLQuerySource where对象中提取
+
+ }
+ }
+
+ GroupByElement groupBy;
+ if (StringUtil.isNotEmpty(groupBy = select.getGroupBy())) {
+ sqlQuerySource.setGroupByElement(groupBy);
+ if (isDrilldown(select, sqlQuerySource.getAliasFields())) {
+ addRollUpToUDF(sqlQuerySource.getUdfSet(), (Function) groupBy.getGroupByExpressionList().getExpressions().get(0));
+ sqlQuerySource.getUdfSet().removeIf(udf -> udf instanceof TIME_FLOOR_WITH_FILL);
+ }
+ }
+ sqlQuerySource.setOrderByElements(select.getOrderByElements());
+ FromItem subItem = select.getFromItem();
+
+ if (subItem instanceof SubSelect) {
+ SubSelect subSelect = (SubSelect) subItem;
+ sqlQuerySource.setSubSelect(subSelect);
+ sqlQuerySource.getSubSqlQuerySources().add(0, parserSQLByAst(String.valueOf(subSelect.getSelectBody())));
+ }
+ Limit limit = select.getLimit();
+ if (limit != null) {
+
+ if (StringUtil.isNotEmpty(limit.getOffset())) {
+ sqlQuerySource.setLimit(StringUtil.setDefaultIfEmpty(limit.getOffset(), 0) + "," + limit.getRowCount());
+ } else {
+ sqlQuerySource.setLimit(String.valueOf(limit.getRowCount()));
+ }
+ }
+
+
+ } else if (body instanceof SetOperationList) { // 连接查询
+ SetOperationList setOperationList = (SetOperationList) body;
+ List<SelectBody> selects = setOperationList.getSelects();
+ //暂时只解析第一个结构,不接受不相同的where
+ if (StringUtil.isNotEmpty(selects)) {
+ SelectStatement parseSql = parserSQLByAst(selects.get(0).toString());
+ sqlQuerySource.setUdfSet(parseSql.getUdfSet());
+ sqlQuerySource.setWhereExpression(parseSql.getWhereExpression());
+ sqlQuerySource.setGroupByElement(parseSql.getGroupByElement());
+ sqlQuerySource.setAliasFields(parseSql.getAliasFields());
+ }
+ sqlQuerySource.setLimit(SQLHelper.INVALID_LIMIT_DESC);
+ } else { //其它暂不支持
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getMessage(), QGWMessageConst.SQL_PARSE_ONLY_SUPPORT_SELECT));
+ }
+
+
+ } catch (JSQLParserException | RuntimeException e) {
+ log.error("sqlParser error: {}", e);
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getMessage(), e.getMessage()));
+
+ }
+
+ return sqlQuerySource;
+ }
+
+ private static boolean isDrilldown(PlainSelect select, Map<String, String> aliasFields) {
+ if (select.getGroupBy() == null) {
+ return false;
+ }
+ List<Expression> groupByExpressionsList = select.getGroupBy().getGroupByExpressionList().getExpressions();
+ if (groupByExpressionsList.size() != 1) {
+ return false;
+ }
+ Expression groupByExpression = groupByExpressionsList.get(0);
+ if (!(groupByExpression instanceof Function)) {
+ return false;
+ }
+ Function groupByFunction = (Function) groupByExpression;
+ if (!groupByFunction.getName().equalsIgnoreCase(SQLFunctionUtil.ROLLUP) || groupByFunction.getParameters() == null || groupByFunction.getParameters().getExpressions().size() != 2) {
+ return false;
+ }
+ if (select.getOrderByElements() == null || select.getOrderByElements().size() != 1 || select.getOrderByElements().get(0).isAsc()) {
+ return false;
+ }
+ Expression orderByExpr = select.getOrderByElements().get(0).getExpression();
+ if (orderByExpr instanceof Function) {
+ Function orderByFunction = (Function) orderByExpr;
+ if (orderByFunction.isDistinct()) {
+ return false;
+ }
+ if (!"count".equalsIgnoreCase(orderByFunction.getName()) && !"sum".equalsIgnoreCase(orderByFunction.getName())) {
+ return false;
+ }
+ } else {
+ String orderByKey = SQLHelper.removeQuotesAndBackticks(orderByExpr.toString());
+ if (!aliasFields.containsKey(orderByKey)) {
+ return false;
+ }
+ String orderByExprTmp = String.valueOf(aliasFields.get(orderByKey)).toLowerCase();
+ if (orderByExprTmp.startsWith("count(distinct")) {
+ return false;
+ }
+ if (!orderByExprTmp.startsWith("count(") && !orderByExprTmp.startsWith("sum(")) {
+ return false;
+ }
+ }
+ List<Expression> expressions = groupByFunction.getParameters().getExpressions();
+ for (Expression expression : expressions) {
+ String rollupArg = SQLHelper.removeQuotesAndBackticks(expression.toString());
+ if (!aliasFields.containsKey(rollupArg) && !aliasFields.containsValue(rollupArg)) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ private static void addRollUpToUDF(Set<UDF> udfSet, Function fun) {
+ List<Expression> expressions = fun.getParameters().getExpressions();
+ UDFElements udfElements = new UDFElements(fun.getName(), expressions);
+ ROLLUP rollup = new ROLLUP(udfElements);
+ udfSet.add(rollup);
+ }
+
    /**
     * Entry point for custom-function discovery: registers every federation
     * UDF found in the given expression tree into the UDF set by delegating
     * to the recursive walker.
     *
     * @param udfSet accumulator for discovered UDF instances
     * @param expr   expression to scan recursively
     */
    private static void addUDFSet(Set<UDF> udfSet, Expression expr) {
        parserExpressionForFun(udfSet, expr);
    }
+
+ private static void parserExpressionForFun(Set<UDF> udfSet, Expression expr) {
+ if (expr instanceof Function) {
+ Function fun = (Function) expr;
+ if (SQLFunctionUtil.federationUDFFunctions.containsKey(fun.getName().toUpperCase())) {
+ List<Expression> expressions = fun.getParameters().getExpressions();
+ UDF udf = getUDF(fun, expressions);
+ udfSet.add(udf);
+ }
+ if (fun.getParameters() == null) {
+ return;
+ }
+ for (Expression expression : fun.getParameters().getExpressions()) {
+ parserExpressionForFun(udfSet, expression);
+ }
+ } else if (expr instanceof BinaryExpression) {
+ BinaryExpression binary = (BinaryExpression) expr;
+ Expression leftExpression = binary.getLeftExpression();
+ parserExpressionForFun(udfSet, leftExpression);
+ Expression rightExpression = binary.getRightExpression();
+ parserExpressionForFun(udfSet, rightExpression);
+ }
+ }
+
+ /**
+ * 获取自定义函数类
+ *
+ * @param fun
+ * @param expressions
+ * @return
+ */
+ private static UDF getUDF(Function fun, List<Expression> expressions) {
+ UDF udf;
+ String funName = CaseFormat.UPPER_UNDERSCORE.to(CaseFormat.UPPER_UNDERSCORE, fun.getName());
+ try {
+ Class dialectClazz = Class.forName("com.mesalab.qgw.model.basic.udf." + CaseFormat.UPPER_UNDERSCORE.to(CaseFormat.UPPER_UNDERSCORE, fun.getName().toUpperCase()));
+ Constructor constructor = dialectClazz.getConstructor(UDFElements.class);
+ UDFElements udfElements = new UDFElements(funName, expressions);
+ udf = (UDF) constructor.newInstance(udfElements);
+ } catch (ReflectiveOperationException | RuntimeException e) {
+ log.error("Custom function conversion instance exception:{}", e);
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getMessage(), QGWMessageConst.CUSTOM_FUNCTION_CONVERSION_INSTANCE_EXCEPTION));
+ }
+ return udf;
+ }
+
+
    /**
     * Executes a DESCRIBE statement by rewriting it into the engine-specific
     * column-metadata query, running that as an administrative query, and
     * reshaping the rows into a schema payload via
     * {@link #dataEncapsulationOfSchema}.
     *
     * @param queryContext query context carrying the target engine and original SQL
     * @param statement    parsed DESCRIBE statement naming the table
     * @return schema description ({name, type} per column) of the table
     * @throws QGWBusinessException when the engine is unsupported or the lookup fails
     */
    private BaseResult executeDescStatement(SQLQueryContext queryContext, DescribeStatement statement) {
        String tableName = statement.getTable().getName();
        String dbEngineType = queryContext.getDbEngine();
        if (DBEngineType.CLICKHOUSE.getValue().equalsIgnoreCase(dbEngineType)) {
            // ClickHouse: the original SQL is kept as-is; only engine/dialect
            // fields are normalized.
            queryContext.setDbEngine(DBEngineType.CLICKHOUSE.getValue());
            queryContext.setSqlDialect(DBEngineType.CLICKHOUSE.getValue());
        } else if (DBEngineType.DRUID.getValue().equalsIgnoreCase(dbEngineType)) {
            // Druid: DESCRIBE is emulated via INFORMATION_SCHEMA.COLUMNS.
            // NOTE(review): tableName is concatenated into the SQL; it comes
            // from the SQL parser rather than raw request text, but
            // parameterization would be safer — confirm upstream validation.
            queryContext.setDbEngine(DBEngineType.DRUID.getValue());
            queryContext.setSqlDialect(DBEngineType.DRUID.getValue());
            queryContext.setOriginalSQL("SELECT COLUMN_NAME as name, DATA_TYPE as type FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME = '" + tableName + "'");
        } else if (DBEngineType.HBASE.getValue().equalsIgnoreCase(dbEngineType)) {
            // HBase: column metadata is read from SYSTEM.CATALOG (quoted
            // aliases keep the lower-case name/type keys).
            queryContext.setDbEngine(DBEngineType.HBASE.getValue());
            queryContext.setSqlDialect(DBEngineType.HBASE.getValue());
            queryContext.setOriginalSQL(StrFormatter.format("SELECT COLUMN_NAME as \"name\", DATA_TYPE as \"type\" FROM SYSTEM.CATALOG WHERE TABLE_NAME= '{}' AND COLUMN_NAME IS NOT NULL", tableName));
        } else {
            // Unsupported engine is reported to the caller as "table not exist".
            throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
                    String.format(CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getMessage(),
                            tableName + ":" + QGWMessageConst.TABLE_NOT_EXIST));
        }

        queryContext.setFormat(OutputMode.JSON.getValue());
        // Run the (possibly rewritten) SQL through the reflectively-resolved
        // dialect as an administrative query.
        SelectStatement dbSelectStatement = new SelectStatement();
        dbSelectStatement.setSqlBody(queryContext.getOriginalSQL());
        queryContext.setDbSelectStatement(dbSelectStatement);
        Dialect dialect = getReflectiveDialectObject(queryContext);
        BaseResult baseResult = dataEncapsulationOfSchema(dialect.executeAdministrativeQuery(), queryContext);

        if (!baseResult.isSuccess()) {
            throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
                    String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.CHECK_TABLE));
        }
        return baseResult;
    }
+
+ private BaseResult executeExplainStatement(SQLQueryContext param) {
+ return getDialect(param).executeExplainPlan();
+ }
+
    /**
     * Executes a SHOW statement by delegating to the cross-engine table
     * listing.
     *
     * @param param query context (forwarded to {@link #getAllDatabaseTables})
     * @return merged table listing across the supported engines
     */
    private BaseResult executeShowStatement(SQLQueryContext param) {
        return getAllDatabaseTables(param);
    }
+
+
    /**
     * Collects the table names of every supported engine (currently ClickHouse
     * and Druid) and merges them into one enum-style map:
     * {@code {type: "enum", name: "<db1>,<db2>", symbols: [...]}}.
     *
     * NOTE(review): the {@code queryContext} parameter is never read — a fresh
     * context is built per engine; confirm whether caller settings should
     * propagate.
     *
     * @param queryContext incoming query context (currently unused)
     * @return success result whose data is the merged table map
     * @throws QGWBusinessException when any engine's listing query fails
     */
    private BaseResult getAllDatabaseTables(SQLQueryContext queryContext) {
        Map<String, Object> dataMap = new LinkedHashMap<>();
        dataMap.put("type", "enum");
        Arrays.stream(DBEngineType.values()).forEach(dbTypeEnum -> {
            SQLQueryContext dbParam = new SQLQueryContext();
            if (dbTypeEnum.getValue().equalsIgnoreCase(DBEngineType.CLICKHOUSE.getValue())) {
                dbParam.setOriginalSQL("show tables");
                dbParam.setDbEngine(DBEngineType.CLICKHOUSE.getValue());
                dbParam.setSqlDialect(SQLHelper.getDialectType(DBEngineType.CLICKHOUSE.getValue()));
            } else if (dbTypeEnum.getValue().equalsIgnoreCase(DBEngineType.DRUID.getValue())) {
                dbParam.setOriginalSQL("SELECT TABLE_NAME AS name FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_TYPE = 'TABLE'");
                dbParam.setDbEngine(DBEngineType.DRUID.getValue());
                dbParam.setSqlDialect(SQLHelper.getDialectType(DBEngineType.DRUID.getValue()));
            } else {
                // Engines other than ClickHouse/Druid are skipped.
                return;
            }
            dbParam.setFormat(OutputMode.JSON.getValue());
            Dialect dialect = getReflectiveDialectObject(dbParam);
            BaseResult queryResult = dialect.executeAdministrativeQuery();
            if (queryResult.getStatus() == null || queryResult.getStatus() != HttpStatusCodeEnum.SUCCESS.getCode()) {
                throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), queryResult.getMessage());
            }
            // Each row is a {"name": <table>} map; extract the names.
            List tables = (ArrayList) queryResult.getData();
            List<String> list = new ArrayList<>();
            Iterator iterator = tables.iterator();
            while (iterator.hasNext()) {
                Map<String, String> next = (Map) iterator.next();
                list.add(next.get("name"));
            }
            BaseResult result = encapsulationShowTables(list, dbParam.getDbEngine());

            if (result.getStatus() != null && result.getStatus() == HttpStatusCodeEnum.SUCCESS.getCode()) {
                Map dbResultMap = (Map) result.getData();

                // Engine names are joined with commas.
                if (StringUtil.isEmpty(dataMap.get("name"))) {
                    dataMap.put("name", dbResultMap.get("name"));
                } else {
                    dataMap.put("name", Joiner.on(",").join(dataMap.get("name"), dbResultMap.get("name")));
                }

                // NOTE(review): merging a second engine wraps the previous
                // symbols list, producing a nested list ([[...], [...]]) rather
                // than a flat union — confirm consumers expect this shape.
                if (StringUtil.isEmpty(dataMap.get("symbols"))) {
                    dataMap.put("symbols", dbResultMap.get("symbols"));
                } else {
                    dataMap.put("symbols", Lists.newArrayList(dataMap.get("symbols"), dbResultMap.get("symbols")));
                }
            }


        });

        return BaseResultGenerator.success("ok", dataMap);
    }
+
+
+ /**
+ * 表描述信息结果封装
+ *
+ * @param result
+ * @param param
+ * @return
+ */
+ public BaseResult dataEncapsulationOfSchema(BaseResult result, SQLQueryContext param) {
+ SchemaBase schema = new SchemaBase();
+ schema.setName(SQLHelper.getTableName(param.getOriginalSQL()).get(0));
+ List<Map> fields = new ArrayList<>();
+ List<Map> list = (List<Map>) result.getData();
+ for (Map resultMap : list) {
+ Map schemaMap = Maps.newHashMap();
+ schemaMap.put("name", resultMap.get("name"));
+ schemaMap.put("type", resultMap.get("type"));
+ fields.add(schemaMap);
+ }
+ schema.setFields(fields);
+ result.setData(schema);
+ return result;
+ }
+
+ /**
+ * 库中包含表表名查询结果处理
+ *
+ * @param tables
+ * @return
+ */
+ private BaseResult encapsulationShowTables(List tables, String database) {
+ List<String> list = new ArrayList<>();
+ Map<String, Object> date = new LinkedHashMap<>();
+ date.put("type", "enum");
+ date.put("name", database);
+ Iterator iterator = tables.iterator();
+ while (iterator.hasNext()) {
+ list.add(String.valueOf(iterator.next()));
+ }
+ date.put("symbols", list);
+ BaseResult<Map> baseResult = BaseResultGenerator.success("ok", null);
+ baseResult.setData(date);
+ return baseResult;
+ }
+
    /**
     * Setter-based injection of the {@code DatabaseService} collaborator.
     *
     * @param databaseService injected service instance
     */
    @Autowired
    public void setDatabaseService(DatabaseService databaseService) {
        this.databaseService = databaseService;
    }
+
+}
diff --git a/src/main/java/com/mesalab/qgw/service/impl/SystemServiceImpl.java b/src/main/java/com/mesalab/qgw/service/impl/SystemServiceImpl.java
deleted file mode 100644
index 308ad06d..00000000
--- a/src/main/java/com/mesalab/qgw/service/impl/SystemServiceImpl.java
+++ /dev/null
@@ -1,650 +0,0 @@
-package com.mesalab.qgw.service.impl;
-
-import cn.hutool.core.collection.CollectionUtil;
-import cn.hutool.core.util.StrUtil;
-import cn.hutool.core.util.URLUtil;
-import cn.hutool.crypto.digest.DigestUtil;
-import cn.hutool.json.JSONUtil;
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import com.alibaba.fastjson2.JSON;
-import com.google.common.base.CaseFormat;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import com.jayway.jsonpath.JsonPath;
-import com.mesalab.common.entity.BaseResult;
-import com.mesalab.common.entity.BaseResultGenerator;
-import com.mesalab.common.enums.*;
-import com.mesalab.common.utils.sqlparser.SQLHelper;
-import com.mesalab.qgw.constant.QGWMessageConst;
-import com.mesalab.qgw.exception.QGWBusinessException;
-import com.mesalab.qgw.exception.QGWErrorCode;
-import com.mesalab.qgw.model.basic.ClickHouseHttpSource;
-import com.mesalab.qgw.model.basic.HttpConfig;
-import com.mesalab.qgw.dialect.Dialect;
-import com.mesalab.qgw.model.basic.QueryProfile;
-import com.mesalab.qgw.model.basic.JobAdminHttpSource;
-import com.mesalab.qgw.model.job.EncryptionInfo;
-import com.mesalab.qgw.model.job.ExecutorParam;
-import com.mesalab.qgw.model.job.StorageDeletionInfo;
-import com.mesalab.qgw.model.job.XxlJobInfo;
-import com.mesalab.qgw.service.QueryService;
-import com.mesalab.qgw.service.MetadataService;
-import com.mesalab.qgw.service.SystemService;
-import com.mesalab.services.common.property.SqlPropertySourceFactory;
-import com.geedgenetworks.utils.DateUtils;
-import com.geedgenetworks.utils.Encodes;
-import com.geedgenetworks.utils.StringUtil;
-import lombok.AllArgsConstructor;
-import lombok.Getter;
-import org.apache.http.NameValuePair;
-import org.apache.http.client.utils.URLEncodedUtils;
-import org.jasypt.util.text.BasicTextEncryptor;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.cglib.beans.BeanMap;
-import org.springframework.context.EnvironmentAware;
-import org.springframework.context.annotation.PropertySource;
-import org.springframework.core.env.Environment;
-import org.springframework.stereotype.Service;
-
-import javax.annotation.Resource;
-import java.lang.reflect.AccessibleObject;
-import java.lang.reflect.Constructor;
-import java.lang.reflect.Field;
-import java.nio.charset.StandardCharsets;
-import java.util.*;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-@Service("systemService")
-@PropertySource(value = "classpath:http-sql-template.sql", factory = SqlPropertySourceFactory.class)
-public class SystemServiceImpl implements SystemService, EnvironmentAware {
- private static final Log log = LogFactory.get();
- private static Pattern pTTL = Pattern.compile(".*toIntervalSecond\\((\\d+)\\)", Pattern.CASE_INSENSITIVE);
- private static final String trafficPort = "8123";
- private Environment env;
- @Autowired
- private QueryService queryService;
- @Autowired
- private JobAdminHttpSource jobAdminHttpSource;
- @Autowired
- private HttpClientService httpClientService;
- @Resource(name = "metadataService")
- private MetadataService metadataService;
- @Autowired
- HttpConfig httpConfig;
- @Autowired
- ClickHouseHttpSource clickHouseHttpSource;
-
- public final static String queryIdSeparator = ":";
-
- private Map<String, String> headers = Maps.newHashMap();
-
- @Override
- public BaseResult getStorageQuota() {
-
- //封装sql进行查询:Analytic Logs、Files、Traffic Logs
- QueryProfile param = new QueryProfile();
- param.setQuery(env.getProperty("SYSTEM_STORAGE_QUOTA"));
- BaseResult result = queryService.executeQuery(param);
- return resetBaseResult(result, "type");
- }
-
- @Override
- public BaseResult dailyTrendOfStorage(String searchStartTime, String searchEndTime) {
-
- Date currentDate = DateUtils.convertStringToDate(DateUtils.getCurrentDate(), DateUtils.YYYY_MM_DD);
-
- if (StringUtil.isBlank(searchStartTime) && StringUtil.isBlank(searchEndTime)) {
- searchStartTime = DateUtils.getFormatDate(DateUtils.getSomeDate(currentDate, -7), DateUtils.YYYY_MM_DD_HH24_MM_SS);
- searchEndTime = DateUtils.getFormatDate(currentDate, DateUtils.YYYY_MM_DD_HH24_MM_SS);
- }
-
- if (StringUtil.isNotBlank(searchStartTime) && StringUtil.isNotBlank(searchEndTime)) {
- QueryProfile param = new QueryProfile();
- String sql = String.format(env.getProperty("SYSTEM_DAILY_TREND_OF_STORAGE"), searchStartTime, searchEndTime);
- param.setQuery(sql);
- BaseResult result = queryService.executeQuery(param);
- List<Map> data = (List<Map>) result.getData();
- data.forEach(o -> o.put("type", LogType.getLabelByValue(String.valueOf(o.get("type")))));
- return result;
- } else {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.START_TIME_AND_END_TIME_NOT_NULL));
- }
- }
-
- private BaseResult resetBaseResult(BaseResult result, String alias) {
- if (!result.isSuccess()) {
- return result;
- }
- List<Map> data = (List<Map>) result.getData();
- for (Map map : data){
- String logType = LogType.getLabelByValue(String.valueOf(map.get(alias)));
- BaseResult deleteStorageStatus = getDeleteStorageStatus(String.valueOf(map.get(alias)));
- List<StorageDeletionInfo> data1 = (List<StorageDeletionInfo>) deleteStorageStatus.getData();
- map.put(alias, logType);
- map.put("max_days",data1.get(0).getMaxDays());
- map.put("default_max_days",data1.get(0).getDefaultMaxDays());
- }
- return result;
- }
-
- @Override
- public BaseResult deleteStorage(List<StorageDeletionInfo> list) {
-
- if (!logTypeValid(list)) {
- return BaseResultGenerator.failure(ResultStatusEnum.SERVER_ERROR.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),"Match failed, please check log type!"));
- }
-
- preProcessOfLogType(list);
-
- if (jobIsBusy(list)) {
- return BaseResultGenerator.failure(ResultStatusEnum.LOCKED.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),"The task is busy, please try later!"));
- }
-
- BaseResult baseResult = null;
- for (StorageDeletionInfo info : list) {
- baseResult = executeDeleteStorageJob(info);
- }
- return baseResult;
- }
-
- @Override
- public BaseResult getDeleteStorageStatus(String logType) {
- setCookie();
- List<StorageDeletionInfo> list = new ArrayList<>();
- if (StringUtil.isBlank(logType)) {
-
- StorageDeletionInfo trafficInfo = getStorageDeletionInfoByHandler(LogType.TRAFFIC_LOGS.getValue(), JobHandlerEnum.DELETE_TRAFFIC_DATA_JOB_HANDLER.getValue());
- list.add(trafficInfo);
-
- StorageDeletionInfo reportInfo = getStorageDeletionInfoByHandler(LogType.REPORT_AND_METRICS.getValue(), JobHandlerEnum.DELETE_REPORT_AND_METRICS_DATA_JOB_HANDLER.getValue());
- list.add(reportInfo);
-
- StorageDeletionInfo fileInfo = getStorageDeletionInfoByHandler(LogType.FILES.getValue(), JobHandlerEnum.DELETE_FILES_JOB_HANDLER.getValue());
- list.add(fileInfo);
- } else if (LogType.TRAFFIC_LOGS.getValue().equalsIgnoreCase(logType)) {
- StorageDeletionInfo trafficInfo = getStorageDeletionInfoByHandler(logType, JobHandlerEnum.DELETE_TRAFFIC_DATA_JOB_HANDLER.getValue());
- list.add(trafficInfo);
- } else if (LogType.REPORT_AND_METRICS.getValue().equalsIgnoreCase(logType)) {
- StorageDeletionInfo reportInfo = getStorageDeletionInfoByHandler(logType, JobHandlerEnum.DELETE_REPORT_AND_METRICS_DATA_JOB_HANDLER.getValue());
- list.add(reportInfo);
- } else if (LogType.FILES.getValue().equalsIgnoreCase(logType)) {
- StorageDeletionInfo reportInfo = getStorageDeletionInfoByHandler(logType, JobHandlerEnum.DELETE_FILES_JOB_HANDLER.getValue());
- list.add(reportInfo);
- } else {
- return BaseResultGenerator.failure(ResultStatusEnum.SERVER_ERROR.getCode(), "No matching log type: " + logType);
- }
- list.forEach(o -> o.setType(LogType.getLabelByValue(o.getType())));
- return BaseResultGenerator.success("ok", list);
- }
-
-
- @Override
- public String getCustomQueryId(String resultId, String query) {
- String tableName = SQLHelper.getTableName(query).get(0);
- String dbType = metadataService.getDBTypeByTableName(tableName);
- return DigestUtil.md5Hex(dbType) + queryIdSeparator + DigestUtil.md5Hex(resultId + query.trim());
- }
-
-
- @Override
- public BaseResult getCiphertext(EncryptionInfo param) {
- BaseResult baseResult;
- BasicTextEncryptor textEncryptor = new BasicTextEncryptor();
- textEncryptor.setPassword(param.getSalt());
- String encrypt = textEncryptor.encrypt(param.getPassword());
- Map dataMap = new HashMap();
- dataMap.put("password", param.getPassword());
- dataMap.put("encrypted_password", encrypt);
- baseResult = BaseResultGenerator.success("ok", Lists.newArrayList(dataMap));
- return baseResult;
- }
-
-
-
-
- private boolean logTypeValid(List<StorageDeletionInfo> list) {
- for (StorageDeletionInfo info : list) {
- String value = LogType.getValueByLabel(info.getType());
- if (StringUtil.isEmpty(value)) {
- return false;
- }
- }
- return true;
- }
-
- /**
- * @Description 涉及对ALL、LogType(value,label)处理
- * @Param list:
- * @return: java.util.List<com.mesalab.qgw.model.job.StorageDeletionInfo>
- * @Date: 2021/1/4 11:36 上午
- * @Created by wWei
- */
- private List<StorageDeletionInfo> preProcessOfLogType(List<StorageDeletionInfo> list) {
- if (list.size() == 1){
- StorageDeletionInfo deletionInfo = list.get(0);
- Integer maxDays = deletionInfo.getMaxDays();
- Integer defaultMaxDays = deletionInfo.getDefaultMaxDays();
- if (LogType.ALL.getValue().equalsIgnoreCase(list.get(0).getType())){
- list.clear();
- list.add(new StorageDeletionInfo(LogType.TRAFFIC_LOGS.getValue(), maxDays, defaultMaxDays));
- list.add(new StorageDeletionInfo(LogType.REPORT_AND_METRICS.getValue(), maxDays, defaultMaxDays));
- list.add(new StorageDeletionInfo(LogType.FILES.getValue(), maxDays, defaultMaxDays));
- return list;
- }else if (LogType.TRAFFIC_LOGS.getValue().equalsIgnoreCase(list.get(0).getType()) |
- LogType.FILES.getValue().equalsIgnoreCase(list.get(0).getType()) |
- LogType.REPORT_AND_METRICS.getLabel().equalsIgnoreCase(list.get(0).getType())) {
- list.clear();
- list.add(new StorageDeletionInfo(LogType.getValueByLabel(deletionInfo.getType()), maxDays, defaultMaxDays));
- return list;
- }
- }
- list.forEach(o -> o.setType(LogType.getValueByLabel(o.getType())));
- return list;
- }
-
- private boolean jobIsBusy(List<StorageDeletionInfo> list) {
- setCookie();
- for (StorageDeletionInfo info : list) {
-
- String handler = getDeletePartHandlerByLogType(info.getType());
- if (jobIsBusyByHandler(handler)) {
- return true;
- }
- handler = getDeleteAllHandlerByLogType(info.getType());
- if (jobIsBusyByHandler(handler)) {
- return true;
- }
- }
- return false;
- }
-
- private boolean jobIsBusyByHandler(String handler) {
- Map dataByHandler = getDataByHandler(handler);
- String id = String.valueOf(dataByHandler.get("id"));
- BaseResult result = queryJobStatusByJobId(Integer.parseInt(id));
- if (result.getStatus().equals(ResultStatusEnum.LOCKED.getCode())) {
- return true;
- } else if (!result.getStatus().equals(ResultStatusEnum.SUCCESS.getCode())) {
- throw new QGWBusinessException(ResultStatusEnum.SERVER_ERROR.getCode(),ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(),
- String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(),result.getMessage()));
- }
- return false;
- }
-
-
- /**
- * 执行日志删除任务: 调用调度任务任务
- *
- * @param info
- * @return
- */
- private BaseResult executeDeleteStorageJob(StorageDeletionInfo info) {
-
- BaseResult baseResult;
- String jobHandler;
- String logType = info.getType();
- ExecutorParam executorParam = new ExecutorParam();
- if (0 == info.getMaxDays()) {
- jobHandler = getDeleteAllHandlerByLogType(logType);
- Map dataByHandler = getDataByHandler(jobHandler);
-
- StorageDeletionInfo storageDeletionInfoByHandler = getStorageDeletionInfoByHandler(logType, getDeletePartHandlerByLogType(logType));
- executorParam.setMaxDays(storageDeletionInfoByHandler.getMaxDays());
- XxlJobInfo xxlJobInfo = setXxlJobInfoParam(dataByHandler, executorParam);
- baseResult = executeTriggerAndUpdate(xxlJobInfo);
- } else {
- jobHandler = getDeletePartHandlerByLogType(info.getType());
- executorParam.setMaxDays(info.getMaxDays());
- Map oldData = getDataByHandler(jobHandler);
- XxlJobInfo newData = setXxlJobInfoParam(oldData, executorParam);
- baseResult = executeManageJob("update", newData);
- if (baseResult.isSuccess() && LogType.TRAFFIC_LOGS.getValue().equalsIgnoreCase(info.getType())) {
- updateSchema(info, oldData);
- }
- if (baseResult.isSuccess()) {
- baseResult = executeManageJob("trigger", newData);
- }
- }
- if (!baseResult.getStatus().equals(ResultStatusEnum.SUCCESS.getCode())) {
- throw new QGWBusinessException(ResultStatusEnum.SERVER_ERROR.getCode(),ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(),
- String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(),QGWMessageConst.SCHEDULED_TASK_ERROR));
- }
- return baseResult;
- }
-
- private void updateSchema(StorageDeletionInfo info, Map dataByHandler) {
- Map<String, Map> previous = Maps.newHashMap();
- try {
- Map schemaInfo = metadataService.getSchemaInfo(MetadataTypeEnum.TABLES.getValue(), clickHouseHttpSource.getDbName(), false);
- Object tables = schemaInfo.get("symbols");
- if (StringUtil.isEmpty(tables)) {
- return;
- }
- long ttl = 24 * 60 * 60 * info.getMaxDays();
- List<String> list = (List) tables;
- for (String tableName : list) {
- Map schemaMap = metadataService.getSchemaInfo(MetadataTypeEnum.FIELDS.getValue(), tableName, false);
- List<Object> schemaDocTTL = JsonPath.read(schemaMap, "$.[?(@.doc.ttl != null)].doc.ttl");
- List<Object> fieldDocTTL = JsonPath.read(schemaMap, "$.fields[?(@.doc.ttl != null)].doc.ttl");
- if (schemaDocTTL.isEmpty() && fieldDocTTL.isEmpty()) {
- continue;
- }
- previous.put(tableName, metadataService.getSchemaInfo(MetadataTypeEnum.FIELDS.getValue(), tableName, false));
- Object schemaDoc = schemaMap.get("doc");
- Map<String, Object> map = Maps.newHashMap();
- if (StringUtil.isNotEmpty(schemaDoc)) {
- map = (Map<String, Object>) schemaDoc;
- }
- if (StringUtil.isNotEmpty(map.get("ttl")) && ttl < Long.parseLong(map.get("ttl").toString())) {
- map.put("ttl", ttl);
- schemaMap.put("doc", map);
- }
- List<Map<String, Object>> fields = JsonPath.read(schemaMap, "$.fields");
- List<String> indexKey = metadataService.getIndexKey(tableName);
- for (Map<String, Object> field : fields) {
- Object doc = field.get("doc");
- if (StringUtil.isEmpty(doc)) {
- doc = Maps.newHashMap();
- }
- Map<String, Object> fieldDoc = (Map<String, Object>) doc;
- if (indexKey.contains(field.get("name").toString())) {
- fieldDoc.put("ttl", null);
- field.put("doc", fieldDoc);
- continue;
- }
- if (StringUtil.isNotEmpty(fieldDoc.get("ttl")) && ttl < Long.parseLong(fieldDoc.get("ttl").toString())) {
- fieldDoc.put("ttl", ttl);
- field.put("doc", fieldDoc);
- }
- }
- metadataService.updateSchema(tableName, schemaMap);
- }
- } catch (RuntimeException ex) {
- for (String tableName : previous.keySet()) {
- metadataService.updateSchema(tableName, previous.get(tableName));
- }
- executeManageJob("update", mapToBean(dataByHandler, XxlJobInfo.class));
- throw new QGWBusinessException(ResultStatusEnum.SERVER_ERROR.getCode(),
- ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(),
- String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(),"up schema error in storage set ", ex.getMessage()));
- }
- }
-
- private String getDeleteAllHandlerByLogType(String logType) {
- String jobHandler = StringUtil.EMPTY;
- if (LogType.TRAFFIC_LOGS.getValue().equalsIgnoreCase(logType)) {
- jobHandler = JobHandlerEnum.DELETE_ALL_TRAFFIC_DATA_JOB_HANDLER.getValue();
- } else if (LogType.REPORT_AND_METRICS.getValue().equalsIgnoreCase(logType)) {
- jobHandler = JobHandlerEnum.DELETE_ALL_REPORT_AND_METRICS_DATA_JOB_HANDLER.getValue();
- } else if (LogType.FILES.getValue().equalsIgnoreCase(logType)) {
- jobHandler = JobHandlerEnum.DELETE_ALL_FILES_JOB_HANDLER.getValue();
- }
- return jobHandler;
- }
-
- private String getDeletePartHandlerByLogType(String logType) {
- String jobHandler = StringUtil.EMPTY;
- if (LogType.TRAFFIC_LOGS.getValue().equalsIgnoreCase(logType)) {
- jobHandler = JobHandlerEnum.DELETE_TRAFFIC_DATA_JOB_HANDLER.getValue();
- } else if (LogType.REPORT_AND_METRICS.getValue().equalsIgnoreCase(logType)) {
- jobHandler = JobHandlerEnum.DELETE_REPORT_AND_METRICS_DATA_JOB_HANDLER.getValue();
- } else if (LogType.FILES.getValue().equalsIgnoreCase(logType)) {
- jobHandler = JobHandlerEnum.DELETE_FILES_JOB_HANDLER.getValue();
- }
- return jobHandler;
- }
-
- /**
- * 通过handler获取数据配额设置状态
- *
- * @param logType
- * @param jobHandlerValue
- * @return
- */
- private StorageDeletionInfo getStorageDeletionInfoByHandler(String logType, String jobHandlerValue) {
- Map trafficDate = getDataByHandler(jobHandlerValue);
- XxlJobInfo xxlJobInfo = mapToBean(trafficDate, XxlJobInfo.class);
- StorageDeletionInfo executorParam = JSON.parseObject(StrUtil.toUnderlineCase(xxlJobInfo.getExecutorParam()), StorageDeletionInfo.class);
- executorParam.setType(logType);
- return executorParam;
- }
-
- /**
- * 触发执行器并更新任务
- *
- * @param xxlJobInfo
- * @return
- */
- private BaseResult executeTriggerAndUpdate(XxlJobInfo xxlJobInfo) {
- BaseResult baseResult;
- BaseResult resultExecute = executeManageJob("trigger", xxlJobInfo);
- if (resultExecute.getStatus().equals(ResultStatusEnum.SUCCESS.getCode())) {
- BaseResult resultUpdate = executeManageJob("update", xxlJobInfo);
- if (resultUpdate.getStatus().equals(ResultStatusEnum.SUCCESS.getCode())) {
- baseResult = BaseResultGenerator.success("ok", null);
- } else {
- baseResult = BaseResultGenerator.failure(ResultStatusEnum.SERVER_ERROR.getCode(), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(),
- String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(),"Update task failed:" + xxlJobInfo.getExecutorHandler()));
- }
- } else {
- baseResult = BaseResultGenerator.failure(ResultStatusEnum.SERVER_ERROR.getCode(), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(),
- String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(),"Failed to execute task:" + xxlJobInfo.getExecutorHandler()));
- }
- return baseResult;
- }
-
- /**
- * 查询任务状态
- *
- * @param jobId
- * @return
- */
- private BaseResult queryJobStatusByJobId(int jobId) {
- BaseResult baseResult = null;
- StringBuilder url = new StringBuilder(jobAdminHttpSource.getUrl()).append("/jobinfo/jobBeat?jobId=").append(jobId);
- Map<String, String> resultMap = httpClientService.httpGet(url.toString(), headers, httpConfig.getServerResponseTimeOut());
- baseResult = resultEncapsulationOfJob(resultMap);
- return baseResult;
- }
-
- /**
- * 执行调度任务结果封装
- *
- * @param resultMap
- * @return
- */
- private BaseResult resultEncapsulationOfJob(Map<String, String> resultMap) {
- BaseResult baseResult;
- if (StringUtil.isEmpty(resultMap)) {
- baseResult = BaseResultGenerator.failure(ResultStatusEnum.SERVER_ERROR.getCode(), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(),
- String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(),"The service is busy, please contact the scheduling platform!"));
- } else {
- if (resultMap.get("status").equals(String.valueOf(ResultStatusEnum.SUCCESS.getCode()))) {
- Map result = JSON.parseObject(resultMap.get("result"), Map.class);
- if (result.get("code").equals(ResultStatusEnum.SUCCESS.getCode())) {
- baseResult = BaseResultGenerator.success("ok", null);
- } else if (result.get("code").equals(ResultStatusEnum.LOCKED.getCode())) {
- baseResult = BaseResultGenerator.failure(ResultStatusEnum.LOCKED.getCode(), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(),
- String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(), "Clear Task is Running."));
- } else {
- baseResult = BaseResultGenerator.failure(ResultStatusEnum.SERVER_ERROR.getCode(), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(),
- String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(), result.get("msg")));
- }
- } else {
- baseResult = BaseResultGenerator.failure(ResultStatusEnum.SERVER_ERROR.getCode(), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(),
- String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(), "Clear Task Failed."));
- }
- }
- return baseResult;
- }
-
-
- /**
- * 执行调度平台任务更新
- *
- * @param executeType
- * @param xxlJobInfo
- * @return
- */
- private BaseResult executeManageJob(String executeType, XxlJobInfo xxlJobInfo) {
- BaseResult baseResult = null;
- String params = getUrlParamsByMap(getObjectToMap(xxlJobInfo));
- String url = String.format("%s/jobinfo/%s/?%s", jobAdminHttpSource.getUrl(), executeType, params);
- Map<String, String> resultMap = httpClientService.httpGet(url, headers, httpConfig.getServerResponseTimeOut());
- log.warn("请求调度任务" + executeType + "接口" + url);
- baseResult = resultEncapsulationOfJob(resultMap);
- return baseResult;
- }
-
-
- /**
- * 获取调度平台Cookie
- *
- * @return
- */
- public void setCookie() {
- StringBuilder url = new StringBuilder(jobAdminHttpSource.getUrl());
- headers.put("Content-Type", "application/json");
- String urlParamsByMap = getUrlParamsByMap(getObjectToMap(jobAdminHttpSource));
- int socketTimeOut = httpConfig.getServerResponseTimeOut();
- Map httpPostResponseHeads = httpClientService.getHttpPostResponseHeads(url + "/login?" + urlParamsByMap, headers, socketTimeOut);
- String cookie = String.valueOf(httpPostResponseHeads.get("SET-COOKIE"));
- headers.put("Cookie", cookie);
- }
-
- /**
- * 通过handler Value获取jobInfo数据
- *
- * @param handlerValue
- * @return
- */
- private Map getDataByHandler(String handlerValue) {
- StringBuilder url = new StringBuilder(jobAdminHttpSource.getUrl()).
- append("/jobinfo/pageList?jobGroup=-1&triggerStatus=-1&executorHandler=").
- append(handlerValue);
- Map<String, String> resultPageList = httpClientService.httpGet(url.toString(), headers, httpConfig.getServerResponseTimeOut());
- if (StringUtil.isNotEmpty(resultPageList) && resultPageList.get("status").equals(String.valueOf(ResultStatusEnum.SUCCESS.getCode()))) {
- Map<String, Object> maps = JSON.parseObject(resultPageList.get("result"), Map.class);
- if (StringUtil.isEmpty(maps)) {
- throw new QGWBusinessException(ResultStatusEnum.SERVER_ERROR.getCode(),ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(),
- String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(),"Get or package result exception: " + JSON.toJSONString(resultPageList)));
- }
- List<Map> data = (List) maps.get("data");
- if (data.size() >= 1) {
- return data.get(0);
- } else {
- throw new QGWBusinessException(ResultStatusEnum.SERVER_ERROR.getCode(),ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(),
- String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(),"The scheduled task has no task information matching executorHandler (" + handlerValue + ")"));
- }
- }
- throw new QGWBusinessException(ResultStatusEnum.SERVER_ERROR.getCode(),ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(),
- String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(),"Get scheduled task exception: " + JSON.toJSONString(resultPageList)));
- }
-
- /**
- * 更新jobInfo
- *
- * @param data
- * @param executorParam
- * @return
- */
- private XxlJobInfo setXxlJobInfoParam(Map data, ExecutorParam executorParam) {
- XxlJobInfo xxlJobInfo = mapToBean(data, XxlJobInfo.class);
- ExecutorParam executor = JSON.parseObject(xxlJobInfo.getExecutorParam(), ExecutorParam.class);
- executorParam.setDefaultMaxDays(executor.getDefaultMaxDays());
- xxlJobInfo.setExecutorParam(JSON.toJSONString(executorParam));
- return xxlJobInfo;
- }
-
- public static String getUrlParamsByMap(Map<String, Object> map) {
- if (map == null) {
- return "";
- }
- StringBuffer sb = new StringBuffer();
- for (Map.Entry<String, Object> entry : map.entrySet()) {
- sb.append(entry.getKey() + "=" + Encodes.urlEncode(String.valueOf(entry.getValue())));
- sb.append("&");
- }
- String s = sb.toString();
- if (s.endsWith("&")) {
- s = StringUtil.substringBeforeLast(s, "&");
- }
- return s;
- }
-
- public static <T> T mapToBean(Map<String, Object> map, Class<T> clazz) {
- T bean = null;
- try {
- bean = clazz.newInstance();
- } catch (InstantiationException e) {
- log.error("Instantiation Exception: ", e);
- } catch (IllegalAccessException e) {
- log.error("Illegal Access Exception: ", e);
- }
- BeanMap beanMap = BeanMap.create(bean);
- beanMap.putAll(map);
- return bean;
- }
-
- public static Map<String, Object> getObjectToMap(Object obj) {
- Map<String, Object> map = new LinkedHashMap<String, Object>();
- Class<?> clazz = obj.getClass();
- Field[] declaredFields = clazz.getDeclaredFields();
- AccessibleObject.setAccessible(declaredFields, true);
- for (Field field : declaredFields) {
- String fieldName = field.getName();
- Object value = null;
- try {
- value = field.get(obj);
- } catch (IllegalAccessException e) {
- log.error("Illegal Access Exception: ", e);
- }
- if (value == null) {
- value = "";
- }
- map.put(fieldName, value);
- }
- return map;
- }
-
- @Override
- public void setEnvironment(Environment environment) {
- this.env = environment;
- }
-
-
- @Getter
- @AllArgsConstructor
- enum LogType {
- TRAFFIC_LOGS("Traffic Logs", "Traffic Logs"),
- REPORT_AND_METRICS("Report and Metrics", "Metrics"),
- FILES("Files", "Files"),
- ALL("ALL", "ALL");
- private final String value;
- private final String label;
-
-
- public static String getValueByLabel(String label) {
- for (LogType enums : LogType.values()) {
- if (enums.getLabel().equals(label)) {
- return enums.getValue();
- }
- }
- return "";
- }
-
- public static String getLabelByValue(String value) {
- for (LogType enums : LogType.values()) {
- if (enums.getValue().equals(value)) {
- return enums.getLabel();
- }
- }
- return "";
- }
- }
-}
diff --git a/src/main/java/com/mesalab/qgw/service/impl/TrafficSpectrumDslServiceImpl.java b/src/main/java/com/mesalab/qgw/service/impl/TrafficSpectrumDslServiceImpl.java
new file mode 100644
index 00000000..a615a2a4
--- /dev/null
+++ b/src/main/java/com/mesalab/qgw/service/impl/TrafficSpectrumDslServiceImpl.java
@@ -0,0 +1,588 @@
+package com.mesalab.qgw.service.impl;
+
+import cn.hutool.core.bean.BeanUtil;
+import cn.hutool.core.lang.UUID;
+import cn.hutool.core.util.NumberUtil;
+import cn.hutool.core.util.StrUtil;
+import cn.hutool.crypto.digest.DigestUtil;
+import cn.hutool.log.Log;
+import cn.hutool.log.LogFactory;
+import com.geedgenetworks.utils.IPUtil;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
+import com.mesalab.common.entity.BaseResult;
+import com.mesalab.common.entity.BaseResultGenerator;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
+import com.mesalab.common.enums.QueryOption;
+import com.mesalab.common.exception.CommonErrorCode;
+import com.mesalab.qgw.constant.dsl.TrafficSpectrumConstants;
+import com.mesalab.qgw.exception.QGWBusinessException;
+import com.mesalab.qgw.model.basic.DSLQueryContext;
+import com.mesalab.qgw.model.basic.DSLQueryRequestParam;
+import com.mesalab.qgw.model.basic.EngineConfigSource;
+import com.mesalab.qgw.model.basic.SQLQueryContext;
+import com.mesalab.qgw.service.DatabaseService;
+import com.mesalab.qgw.service.SQLSyncQueryService;
+import com.mesalab.qgw.service.TrafficSpectrumDslService;
+import com.mesalab.services.common.property.SqlPropertySourceFactory;
+import com.mesalab.services.configuration.JobConfig;
+import org.checkerframework.checker.nullness.qual.Nullable;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.annotation.PropertySource;
+import org.springframework.core.env.Environment;
+import org.springframework.stereotype.Service;
+
+import java.math.BigDecimal;
+import java.util.*;
+import java.util.stream.Collectors;
+
+/**
+ * DSL query service backing the Traffic Spectrum dashboard: traffic summary
+ * metrics, unique client/server IP counts, application distribution (top
+ * server IPs/domains), client-IP-to-application usage graph, and network
+ * throughput trend.
+ *
+ * @Classname TrafficSpectrumDslServiceImpl
+ * @Date 2024/5/10 15:01
+ * @Author wWei
+ */
+@Service
+@PropertySource(value = "classpath:dsl-sql-template.sql", factory = SqlPropertySourceFactory.class)
+public class TrafficSpectrumDslServiceImpl implements TrafficSpectrumDslService {
+
+ private static final Log log = LogFactory.get();
+ private Environment environment;
+ private DatabaseService databaseService;
+ private SQLSyncQueryService sqlSyncQueryService;
+ private EngineConfigSource engineConfigSource;
+ private static final String TRAFFIC_SPECTRUM_SUMMARY = "TRAFFIC_SPECTRUM_SUMMARY";
+ private static final String TRAFFIC_SPECTRUM_UNIQ_IP = "TRAFFIC_SPECTRUM_UNIQ_IP";
+ private static final String TRAFFIC_SPECTRUM_APP_DISTRIBUTION_SERVER_IP = "TRAFFIC_SPECTRUM_APP_DISTRIBUTION_SERVER_IP";
+ private static final String TRAFFIC_SPECTRUM_APP_DISTRIBUTION_SERVER_DOMAIN = "TRAFFIC_SPECTRUM_APP_DISTRIBUTION_SERVER_DOMAIN";
+ private static final String TRAFFIC_SPECTRUM_CLIENT_IP_CONNECT_APPLICATION_USAGE = "TRAFFIC_SPECTRUM_CLIENT_IP_CONNECT_APPLICATION_USAGE";
+ private static final String TRAFFIC_SPECTRUM_NETWORK_THROUGHPUT_TREND = "TRAFFIC_SPECTRUM_NETWORK_THROUGHPUT_TREND";
+ private static final String TABLE_TRAFFIC_SKETCH_METRIC = "traffic_sketch_metric";
+ private static final String TIME_FIELD_TYPE = "unix_timestamp";
+ private static final String GRANULARITY_DEFAULT = "PT60S";
+
+ @Override
+ public BaseResult run(DSLQueryRequestParam dslQueryRequestParam) {
+ if (JobConfig.TRAFFIC_SPECTRUM_SUMMARY.equals(dslQueryRequestParam.getName())) {
+ return BaseResultGenerator.success(summary(dslQueryRequestParam));
+ } else if (JobConfig.TRAFFIC_SPECTRUM_UNIQUE_CLIENT_AND_SERVER_IPS.equals(dslQueryRequestParam.getName())) {
+ return BaseResultGenerator.success(uniqIpsStat(dslQueryRequestParam));
+ } else if (JobConfig.TRAFFIC_SPECTRUM_APP_DISTRIBUTION.equals(dslQueryRequestParam.getName())) {
+ return BaseResultGenerator.success(topServerIpAndServerDomain(dslQueryRequestParam));
+ } else if (JobConfig.TRAFFIC_SPECTRUM_CLIENT_IP_CONNECT_APPLICATION_USAGE.equals(dslQueryRequestParam.getName())) {
+ return BaseResultGenerator.success(internalExternalDimensionMetricsStat(dslQueryRequestParam));
+ } else if (JobConfig.TRAFFIC_SPECTRUM_NETWORK_THROUGHPUT_TREND.equals(dslQueryRequestParam.getName())) {
+ return BaseResultGenerator.success(networkThroughputTrend(dslQueryRequestParam));
+ }
+ throw new QGWBusinessException(HttpStatusCodeEnum.NOT_FOUND.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), dslQueryRequestParam.getName() + " was not found."));
+ }
+
+ @Override
+ public List<Map<String, Object>> summary(DSLQueryRequestParam dslQueryRequestParam) {
+ DSLQueryContext dslQueryContext = BeanUtil.copyProperties(dslQueryRequestParam, DSLQueryContext.class);
+ String sql = dslQueryContext.toSql(
+ environment.getProperty(TRAFFIC_SPECTRUM_SUMMARY)
+ , TABLE_TRAFFIC_SKETCH_METRIC
+ , databaseService.getPartitionKey(TABLE_TRAFFIC_SKETCH_METRIC)
+ , TIME_FIELD_TYPE);
+ BaseResult<List<Map<String, Object>>> baseResult = sqlSyncQueryService.executeQuery(
+ SQLQueryContext.builder()
+ .originalSQL(sql)
+ .option(QueryOption.REAL_TIME.getValue())
+ .build());
+ if (baseResult.isSuccess()) {
+ baseResult.getData().forEach(map -> {
+ long bytes = Long.parseLong(String.valueOf(map.get("total_bytes") == null ? 0 : map.get("total_bytes")));
+ long packets = Long.parseLong(String.valueOf(map.get("total_packets") == null ? 0 : map.get("total_packets")));
+ long sessions = Long.parseLong(String.valueOf(map.get("total_sessions") == null ? 0 : map.get("total_sessions")));
+ long unknownAppBytes = Long.parseLong(String.valueOf(map.get("unknown_app_bytes") == null ? 0 : map.get("unknown_app_bytes")));
+ long asymmetricFlows = Long.parseLong(String.valueOf(map.get("asymmetric_flows") == null ? 0 : map.get("asymmetric_flows")));
+ long fragmentationPackets = Long.parseLong(String.valueOf(map.get("fragmentation_packets") == null ? 0 : map.get("fragmentation_packets")));
+ HashMap<String, Object> item = Maps.newLinkedHashMap();
+ item.put(TrafficSpectrumConstants.DIRECTION, map.get("direction"));
+ item.put(TrafficSpectrumConstants.BYTES, bytes);
+ item.put(TrafficSpectrumConstants.PACKETS, packets);
+ item.put(TrafficSpectrumConstants.SESSIONS, sessions);
+ item.put(TrafficSpectrumConstants.UNKNOWN_APP_BYTES, unknownAppBytes);
+ item.put(TrafficSpectrumConstants.UNCATEGORIZED_PERCENT, bytes == 0 ? 0.0 : NumberUtil.div(BigDecimal.valueOf(unknownAppBytes), BigDecimal.valueOf(bytes), 4).doubleValue());
+ item.put(TrafficSpectrumConstants.ASYMMETRIC_FLOWS, asymmetricFlows);
+ item.put(TrafficSpectrumConstants.ASYMMETRIC_FLOWS_PERCENT, sessions == 0 ? 0.0 : NumberUtil.div(BigDecimal.valueOf(asymmetricFlows), BigDecimal.valueOf(sessions), 4).doubleValue());
+ item.put(TrafficSpectrumConstants.FRAGMENTATION_PACKETS, fragmentationPackets);
+ item.put(TrafficSpectrumConstants.FRAGMENTATION_PERCENT, packets == 0 ? 0.0 : NumberUtil.div(BigDecimal.valueOf(fragmentationPackets), BigDecimal.valueOf(packets), 4).doubleValue());
+ map.clear();
+ map.putAll(item);
+ });
+ return baseResult.getData();
+ }
+ throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), baseResult.getMessage()));
+ }
+
+ @Override
+ public List<Map<String, Object>> uniqIpsStat(DSLQueryRequestParam dslQueryRequestParam) {
+ DSLQueryContext dslQueryContext = BeanUtil.copyProperties(dslQueryRequestParam, DSLQueryContext.class);
+ String sql = dslQueryContext.toSql(
+ environment.getProperty(TRAFFIC_SPECTRUM_UNIQ_IP)
+ , TABLE_TRAFFIC_SKETCH_METRIC
+ , databaseService.getPartitionKey(TABLE_TRAFFIC_SKETCH_METRIC)
+ , TIME_FIELD_TYPE);
+ BaseResult<List<Map<String, Object>>> baseResult = sqlSyncQueryService.executeQuery(
+ SQLQueryContext.builder()
+ .originalSQL(sql)
+ .option(QueryOption.REAL_TIME.getValue())
+ .build());
+ if (baseResult.isSuccess()) {
+ baseResult.getData().forEach(map -> {
+ long internalUniqClientIp = Long.parseLong(String.valueOf(map.get("internal_uniq_client_ip") == null ? 0 : map.get("internal_uniq_client_ip")));
+ long internalUniqServerIp = Long.parseLong(String.valueOf(map.get("internal_uniq_server_ip") == null ? 0 : map.get("internal_uniq_server_ip")));
+ long externalUniqClientIp = Long.parseLong(String.valueOf(map.get("external_uniq_client_ip") == null ? 0 : map.get("external_uniq_client_ip")));
+ long externalUniqServerIp = Long.parseLong(String.valueOf(map.get("external_uniq_server_ip") == null ? 0 : map.get("external_uniq_server_ip")));
+ HashMap<String, Object> item = Maps.newLinkedHashMap();
+ item.put(TrafficSpectrumConstants.INTERNAL_UNIQ_CLIENT_IP, internalUniqClientIp);
+ item.put(TrafficSpectrumConstants.INTERNAL_UNIQ_SERVER_IP, internalUniqServerIp);
+ item.put(TrafficSpectrumConstants.EXTERNAL_UNIQ_CLIENT_IP, externalUniqClientIp);
+ item.put(TrafficSpectrumConstants.EXTERNAL_UNIQ_SERVER_IP, externalUniqServerIp);
+ map.clear();
+ map.putAll(item);
+ });
+ return baseResult.getData();
+ }
+ throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), baseResult.getMessage()));
+ }
+
+ @Override
+ public List<Map<String, Object>> topServerIpAndServerDomain(DSLQueryRequestParam dslQueryRequestParam) {
+ DSLQueryContext dslQueryContext = BeanUtil.copyProperties(dslQueryRequestParam, DSLQueryContext.class);
+ String limit = dslQueryContext.getLimit();
+ if (StrUtil.isBlank(limit)) {
+ dslQueryContext.setLimit("10");
+ }
+ String sqlIp = dslQueryContext.toSql(
+ environment.getProperty(TRAFFIC_SPECTRUM_APP_DISTRIBUTION_SERVER_IP)
+ , TABLE_TRAFFIC_SKETCH_METRIC
+ , databaseService.getPartitionKey(TABLE_TRAFFIC_SKETCH_METRIC)
+ , TIME_FIELD_TYPE);
+ BaseResult<List<Map<String, Object>>> baseResultIp = sqlSyncQueryService.executeQuery(
+ SQLQueryContext.builder()
+ .originalSQL(sqlIp)
+ .option(QueryOption.REAL_TIME.getValue())
+ .build());
+ String sqlDomain = dslQueryContext.toSql(
+ environment.getProperty(TRAFFIC_SPECTRUM_APP_DISTRIBUTION_SERVER_DOMAIN)
+ , TABLE_TRAFFIC_SKETCH_METRIC
+ , databaseService.getPartitionKey(TABLE_TRAFFIC_SKETCH_METRIC)
+ , TIME_FIELD_TYPE);
+ BaseResult<List<Map<String, Object>>> baseResultDomain = sqlSyncQueryService.executeQuery(
+ SQLQueryContext.builder()
+ .originalSQL(sqlDomain)
+ .option(QueryOption.REAL_TIME.getValue())
+ .build());
+ if (baseResultIp.isSuccess() && baseResultDomain.isSuccess()) {
+ if ((baseResultIp.getData() == null || baseResultIp.getData().isEmpty()) && (baseResultDomain.getData() == null || baseResultDomain.getData().isEmpty())) {
+ return Lists.newArrayList();
+ }
+ Map<String, Object> map = Maps.newHashMap();
+ map.put(TrafficSpectrumConstants.TOP_SERVER_IPS, baseResultIp.getData());
+ map.put(TrafficSpectrumConstants.TOP_SERVER_DOMAINS, baseResultDomain.getData());
+ return Lists.newArrayList(map);
+ }
+ throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), baseResultIp.getMessage() + "; " + baseResultDomain.getMessage()));
+ }
+
    /**
     * Builds the client-IP-to-application usage graph: a bipartite node set
     * (internal side vs. external side) plus the weighted links between them.
     *
     * <p>For OUTBOUND rows the internal node is the client IP and the external
     * node is the application; for INBOUND rows the roles are reversed. Rows
     * with any other direction value are logged and skipped. Nodes are
     * deduplicated by a name-based UUID derived from (side, name, category,
     * object type), so identical nodes from different rows collapse into one.
     *
     * @param dslQueryRequestParam the DSL request carrying time range/filters
     * @return a single-element list holding {NODES, LINKS}, or an empty list
     *         when the query returned no rows
     * @throws QGWBusinessException when the engine query fails
     */
    @Override
    public List<Map<String, Object>> internalExternalDimensionMetricsStat(DSLQueryRequestParam dslQueryRequestParam) {
        DSLQueryContext dslQueryContext = BeanUtil.copyProperties(dslQueryRequestParam, DSLQueryContext.class);
        // Row cap comes from engine configuration, not from the caller.
        dslQueryContext.setLimit(String.valueOf(engineConfigSource.getTrafficSpectrumClientIPAppResultNum()));
        if (StrUtil.isBlank(dslQueryContext.getGranularity())) {
            dslQueryContext.setGranularity(GRANULARITY_DEFAULT);
        }
        String sql = dslQueryContext.toSql(
                environment.getProperty(TRAFFIC_SPECTRUM_CLIENT_IP_CONNECT_APPLICATION_USAGE)
                , TABLE_TRAFFIC_SKETCH_METRIC
                , databaseService.getPartitionKey(TABLE_TRAFFIC_SKETCH_METRIC)
                , TIME_FIELD_TYPE);
        BaseResult<List<Map<String, Object>>> baseResult = sqlSyncQueryService.executeQuery(
                SQLQueryContext.builder()
                        .originalSQL(sql)
                        .option(QueryOption.REAL_TIME.getValue())
                        .build());
        if (baseResult.isSuccess()) {
            if (baseResult.getData() == null || baseResult.getData().isEmpty()) {
                return Lists.newArrayList();
            }
            List<Map<String, Object>> data = baseResult.getData();
            Map<String, Object> nodes = Maps.newHashMap();
            // Keyed by node UUID so repeated nodes are stored only once.
            Map<String, Map<String, Object>> internalNodes = Maps.newHashMap();
            Map<String, Map<String, Object>> externalNodes = Maps.newHashMap();
            List<Map<String, Object>> links = Lists.newArrayList();

            data.forEach(x -> {
                String direction = String.valueOf(x.get("direction"));
                // Null metric cells count as 0.
                long sessions = Long.parseLong(String.valueOf(x.get("sessions") == null ? 0 : x.get("sessions")));
                long bytes = Long.parseLong(String.valueOf(x.get("bytes") == null ? 0 : x.get("bytes")));
                long packets = Long.parseLong(String.valueOf(x.get("packets") == null ? 0 : x.get("packets")));
                long maxRate = Long.parseLong(String.valueOf(x.get("max_rate") == null ? 0 : x.get("max_rate")));
                long avgRate = Long.parseLong(String.valueOf(x.get("avg_rate") == null ? 0 : x.get("avg_rate")));
                String clientIp = String.valueOf(x.get("client_ip"));
                String app = String.valueOf(x.get("app"));
                String clientCountry = String.valueOf(x.get("client_country"));
                String appCategory = String.valueOf(x.get("app_category"));

                Map<String, Object> internalNode = Maps.newHashMap();
                Map<String, Object> externalNode = Maps.newHashMap();
                String uuidInternal;
                String uuidExternal;
                if (TrafficSpectrumConstants.OUTBOUND.equalsIgnoreCase(direction)) {
                    // Outbound: internal node = client IP, external node = application.
                    uuidInternal = UUID.nameUUIDFromBytes(StrUtil.concat(false, TrafficSpectrumConstants.INTERNAL_NODE, String.valueOf(clientIp), String.valueOf(clientCountry), TrafficSpectrumConstants.OBJECT_TYPE_IP).getBytes()).toString();
                    internalNode.put(TrafficSpectrumConstants.ID, uuidInternal);
                    internalNode.put(TrafficSpectrumConstants.OBJECT_TYPE, TrafficSpectrumConstants.OBJECT_TYPE_IP);
                    internalNode.put(TrafficSpectrumConstants.NAME, clientIp);
                    // NOTE(review): getIpCategory/getApplicationCategory are defined
                    // elsewhere in this class (outside this view) — presumably they map
                    // the raw values to display categories; verify there.
                    internalNode.put(TrafficSpectrumConstants.CATEGORY, getIpCategory(clientIp, clientCountry));
                    internalNodes.put(uuidInternal, internalNode);

                    uuidExternal = UUID.nameUUIDFromBytes(StrUtil.concat(false, TrafficSpectrumConstants.EXTERNAL_NODE, String.valueOf(app), String.valueOf(appCategory), TrafficSpectrumConstants.OBJECT_TYPE_APPLICATION).getBytes()).toString();
                    externalNode.put(TrafficSpectrumConstants.ID, uuidExternal);
                    externalNode.put(TrafficSpectrumConstants.OBJECT_TYPE, TrafficSpectrumConstants.OBJECT_TYPE_APPLICATION);
                    externalNode.put(TrafficSpectrumConstants.NAME, app);
                    externalNode.put(TrafficSpectrumConstants.CATEGORY, getApplicationCategory(appCategory));
                    externalNodes.put(uuidExternal, externalNode);

                } else if (TrafficSpectrumConstants.INBOUND.equalsIgnoreCase(direction)) {
                    // Inbound: roles reversed — external node = client IP, internal node = application.
                    uuidExternal = UUID.nameUUIDFromBytes(StrUtil.concat(false, TrafficSpectrumConstants.EXTERNAL_NODE, String.valueOf(clientIp), String.valueOf(clientCountry), TrafficSpectrumConstants.OBJECT_TYPE_IP).getBytes()).toString();
                    externalNode.put(TrafficSpectrumConstants.ID, uuidExternal);
                    externalNode.put(TrafficSpectrumConstants.OBJECT_TYPE, TrafficSpectrumConstants.OBJECT_TYPE_IP);
                    externalNode.put(TrafficSpectrumConstants.NAME, clientIp);
                    externalNode.put(TrafficSpectrumConstants.CATEGORY, getIpCategory(clientIp, clientCountry));
                    externalNodes.put(uuidExternal, externalNode);

                    uuidInternal = UUID.nameUUIDFromBytes(StrUtil.concat(false, TrafficSpectrumConstants.INTERNAL_NODE, String.valueOf(app), String.valueOf(appCategory), TrafficSpectrumConstants.OBJECT_TYPE_APPLICATION).getBytes()).toString();
                    internalNode.put(TrafficSpectrumConstants.ID, uuidInternal);
                    internalNode.put(TrafficSpectrumConstants.OBJECT_TYPE, TrafficSpectrumConstants.OBJECT_TYPE_APPLICATION);
                    internalNode.put(TrafficSpectrumConstants.NAME, app);
                    internalNode.put(TrafficSpectrumConstants.CATEGORY, getApplicationCategory(appCategory));
                    internalNodes.put(uuidInternal, internalNode);
                } else {
                    // Unknown direction: skip the row, keep processing the rest.
                    log.warn("Unknown direction: {}", direction);
                    return;
                }
                // One link per result row; links are NOT deduplicated here.
                Map<String, Object> link = Maps.newHashMap();
                link.put(TrafficSpectrumConstants.SOURCE, uuidInternal);
                link.put(TrafficSpectrumConstants.TARGET, uuidExternal);
                link.put(TrafficSpectrumConstants.DIRECTION, direction);
                link.put(TrafficSpectrumConstants.SESSIONS, sessions);
                link.put(TrafficSpectrumConstants.BYTES, bytes);
                link.put(TrafficSpectrumConstants.PACKETS, packets);
                link.put(TrafficSpectrumConstants.MAX_RATE, maxRate);
                link.put(TrafficSpectrumConstants.AVG_RATE, avgRate);
                links.add(link);

            });
            nodes.put(TrafficSpectrumConstants.INTERNAL_NODE, new ArrayList<>(internalNodes.values()));
            nodes.put(TrafficSpectrumConstants.EXTERNAL_NODE, new ArrayList<>(externalNodes.values()));
            Map<String, Object> result = Maps.newHashMap();
            result.put(TrafficSpectrumConstants.NODES, nodes);
            result.put(TrafficSpectrumConstants.LINKS, links);
            return Lists.newArrayList(result);
        }
        throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
                String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), baseResult.getMessage()));
    }
+
+ @Override
+ public List<Map<String, Object>> networkThroughputTrend(DSLQueryRequestParam dslQueryRequestParam) {
+ DSLQueryContext dslQueryContext = BeanUtil.copyProperties(dslQueryRequestParam, DSLQueryContext.class);
+ if (StrUtil.isBlank(dslQueryContext.getGranularity())) {
+ dslQueryContext.setGranularity(GRANULARITY_DEFAULT);
+ }
+ String sql = dslQueryContext.toSql(
+ environment.getProperty(TRAFFIC_SPECTRUM_NETWORK_THROUGHPUT_TREND)
+ , TABLE_TRAFFIC_SKETCH_METRIC
+ , databaseService.getPartitionKey(TABLE_TRAFFIC_SKETCH_METRIC)
+ , TIME_FIELD_TYPE);
+ BaseResult baseResult = sqlSyncQueryService.executeQuery(
+ SQLQueryContext.builder()
+ .originalSQL(sql)
+ .option(QueryOption.REAL_TIME.getValue())
+ .build());
+ if (baseResult.isSuccess()) {
+ return (List<Map<String, Object>>) baseResult.getData();
+ }
+ throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), baseResult.getMessage()));
+ }
+
    /**
     * Trims an internal/external bipartite graph down to at most
     * {@code clientIPAppResultNum} distinct link sources and targets, folding
     * every trimmed node into a synthetic "Other" node.
     *
     * <p>The input is the {NODES, LINKS} payload produced by
     * {@link #internalExternalDimensionMetricsStat}. Links are ranked by byte
     * volume (descending) so the heaviest links keep their real nodes. When
     * {@code clientIPAppResultNum} is null or no trimming is needed, the input
     * map is returned unchanged.
     *
     * <p>NOTE(review): this method sorts {@code data}'s LINKS list in place,
     * i.e. it mutates the caller's payload even on the early-return paths.
     *
     * @param data                 bipartite graph payload holding NODES and LINKS
     * @param clientIPAppResultNum maximum node count per side; null disables trimming
     * @return the trimmed graph, or {@code data} itself when under the limit
     */
    @Override
    public Map<String, Object> getInternalExternalBipartiteGraph(Map<String, Object> data, Integer clientIPAppResultNum) {
        Map<String, List<Map<String, Object>>> originalNodes = (Map<String, List<Map<String, Object>>>) data.get(TrafficSpectrumConstants.NODES);
        List<Map<String, Object>> originalLinks = (List<Map<String, Object>>) data.get(TrafficSpectrumConstants.LINKS);
        List<Map<String, Object>> originalExternalNodeList = originalNodes.get(TrafficSpectrumConstants.EXTERNAL_NODE);
        List<Map<String, Object>> originalInternalNodeList = originalNodes.get(TrafficSpectrumConstants.INTERNAL_NODE);
        // Sort links by byte volume, descending (in place).
        originalLinks.sort((o1, o2) -> {
            long bytes1 = Long.parseLong(String.valueOf(o1.get(TrafficSpectrumConstants.BYTES)));
            long bytes2 = Long.parseLong(String.valueOf(o2.get(TrafficSpectrumConstants.BYTES)));
            return Long.compare(bytes2, bytes1);
        });
        // Step 1: unpack nodes/links from the original payload.
        // Split internal and external node lists into "Other"-named nodes and
        // everything else, keyed by node id for O(1) membership checks below.
        Map<String, Map<String, Object>> originalInternalOtherMap = Maps.newHashMap();
        Map<String, Map<String, Object>> originalInternalNoOtherMap = Maps.newHashMap();
        for (Map<String, Object> value : originalInternalNodeList) {
            if (StrUtil.equals(String.valueOf(value.get(TrafficSpectrumConstants.NAME)), TrafficSpectrumConstants.OTHER)) {
                originalInternalOtherMap.put(String.valueOf(value.get(TrafficSpectrumConstants.ID)), value);
            } else {
                originalInternalNoOtherMap.put(String.valueOf(value.get(TrafficSpectrumConstants.ID)), value);
            }
        }

        Map<String, Map<String, Object>> originalExternalOtherMap = Maps.newHashMap();
        Map<String, Map<String, Object>> originalExternalNoOtherMap = Maps.newHashMap();
        for (Map<String, Object> value : originalExternalNodeList) {
            if (StrUtil.equals(String.valueOf(value.get(TrafficSpectrumConstants.NAME)), TrafficSpectrumConstants.OTHER)) {
                originalExternalOtherMap.put(String.valueOf(value.get(TrafficSpectrumConstants.ID)), value);
            } else {
                originalExternalNoOtherMap.put(String.valueOf(value.get(TrafficSpectrumConstants.ID)), value);
            }
        }

        // A link touching any pre-existing "Other" node on either endpoint is
        // excluded from the trimming budget.
        List<Map<String, Object>> originalOtherLinks = new ArrayList<>();
        List<Map<String, Object>> originalNoOtherLinks = new ArrayList<>();
        for (Map<String, Object> value : originalLinks) {
            String source = String.valueOf(value.get(TrafficSpectrumConstants.SOURCE));
            String target = String.valueOf(value.get(TrafficSpectrumConstants.TARGET));
            if (originalExternalOtherMap.containsKey(source)
                    || originalExternalOtherMap.containsKey(target)
                    || originalInternalOtherMap.containsKey(source)
                    || originalInternalOtherMap.containsKey(target)) {
                originalOtherLinks.add(value);
            } else {
                originalNoOtherLinks.add(value);
            }
        }

        // Under the limit (or limit disabled): nothing to fold.
        if (clientIPAppResultNum == null || clientIPAppResultNum >= originalNoOtherLinks.size()) {
            return data;
        }

        // Step 2: assemble the trimmed result.
        // Keep the real nodes reached by the heaviest links until either side
        // exceeds the budget; later links past the budget are skipped.
        Map<String, Map<String, Object>> resultNoOtherInternalNodeMap = Maps.newHashMap();
        Map<String, Map<String, Object>> resultNoOtherExternalNodeMap = Maps.newHashMap();
        Set<String> sourceSets = Sets.newHashSet();
        Set<String> targetSets = Sets.newHashSet();
        for (Map<String, Object> map : originalNoOtherLinks) {
            String source = String.valueOf(map.get(TrafficSpectrumConstants.SOURCE));
            sourceSets.add(source);
            String target = String.valueOf(map.get(TrafficSpectrumConstants.TARGET));
            targetSets.add(target);
            if (sourceSets.size() > clientIPAppResultNum || targetSets.size() > clientIPAppResultNum) {
                continue;
            }
            resultNoOtherInternalNodeMap.put(source, originalInternalNoOtherMap.get(source));
            resultNoOtherExternalNodeMap.put(target, originalExternalNoOtherMap.get(target));
        }
        // If neither side's distinct-node count ever exceeded the budget,
        // the graph already fits — return it untouched.
        if (sourceSets.size() <= clientIPAppResultNum || targetSets.size() <= clientIPAppResultNum) {
            return data;
        }

        // Fold every internal node that did NOT make the cut into an "Other"
        // node; the category is kept only when a kept node of the same object
        // type already uses it, otherwise the category also becomes "Other".
        Map<String, Map<String, Object>> resultOtherInternalNodeMap = Maps.newHashMap();
        Map<String, Map<String, Object>> resultOtherExternalNodeMap = Maps.newHashMap();
        for (Map<String, Object> originalItem : originalInternalNodeList) {
            String originalId = String.valueOf(originalItem.get(TrafficSpectrumConstants.ID));
            if (resultNoOtherInternalNodeMap.containsKey(originalId)) {
                continue;
            }
            Map<String, Object> otherInternalNodeMapItem = Maps.newHashMap();
            String category = String.valueOf(originalItem.get(TrafficSpectrumConstants.CATEGORY));
            String objectType = String.valueOf(originalItem.get(TrafficSpectrumConstants.OBJECT_TYPE));
            otherInternalNodeMapItem.put(TrafficSpectrumConstants.OBJECT_TYPE, objectType);
            boolean presentCategory = resultNoOtherInternalNodeMap.values().stream().anyMatch(o -> objectType.equals(o.get(TrafficSpectrumConstants.OBJECT_TYPE)) && category.equals(o.get(TrafficSpectrumConstants.CATEGORY)));
            if (presentCategory) {
                otherInternalNodeMapItem.put(TrafficSpectrumConstants.CATEGORY, category);
            } else {
                otherInternalNodeMapItem.put(TrafficSpectrumConstants.CATEGORY, TrafficSpectrumConstants.OTHER);
            }
            otherInternalNodeMapItem.put(TrafficSpectrumConstants.NAME, TrafficSpectrumConstants.OTHER);
            // Same UUID scheme as node construction, so identical "Other" nodes collapse.
            String unoriginalId = UUID.nameUUIDFromBytes(StrUtil.concat(false, TrafficSpectrumConstants.INTERNAL_NODE, String.valueOf(otherInternalNodeMapItem.get(TrafficSpectrumConstants.NAME)), String.valueOf(otherInternalNodeMapItem.get(TrafficSpectrumConstants.CATEGORY)), String.valueOf(otherInternalNodeMapItem.get(TrafficSpectrumConstants.OBJECT_TYPE))).getBytes()).toString();
            otherInternalNodeMapItem.put(TrafficSpectrumConstants.ID, unoriginalId);
            resultOtherInternalNodeMap.put(originalId, otherInternalNodeMapItem);
        }

        // Mirror of the loop above for the external side.
        for (Map<String, Object> originalItem : originalExternalNodeList) {
            String originalId = String.valueOf(originalItem.get(TrafficSpectrumConstants.ID));
            if (resultNoOtherExternalNodeMap.containsKey(originalId)) {
                continue;
            }
            Map<String, Object> otherExternalNodeMapItem = Maps.newHashMap();
            String category = String.valueOf(originalItem.get(TrafficSpectrumConstants.CATEGORY));
            String objectType = String.valueOf(originalItem.get(TrafficSpectrumConstants.OBJECT_TYPE));
            otherExternalNodeMapItem.put(TrafficSpectrumConstants.OBJECT_TYPE, objectType);
            boolean presentCategory = resultNoOtherExternalNodeMap.values().stream().anyMatch(o -> objectType.equals(o.get(TrafficSpectrumConstants.OBJECT_TYPE)) && category.equals(o.get(TrafficSpectrumConstants.CATEGORY)));
            if (!presentCategory) {
                otherExternalNodeMapItem.put(TrafficSpectrumConstants.CATEGORY, TrafficSpectrumConstants.OTHER);
            } else {
                otherExternalNodeMapItem.put(TrafficSpectrumConstants.CATEGORY, category);
            }
            otherExternalNodeMapItem.put(TrafficSpectrumConstants.NAME, TrafficSpectrumConstants.OTHER);
            String unoriginalId = UUID.nameUUIDFromBytes(StrUtil.concat(false, TrafficSpectrumConstants.EXTERNAL_NODE, String.valueOf(otherExternalNodeMapItem.get(TrafficSpectrumConstants.NAME)), String.valueOf(otherExternalNodeMapItem.get(TrafficSpectrumConstants.CATEGORY)), String.valueOf(otherExternalNodeMapItem.get(TrafficSpectrumConstants.OBJECT_TYPE))).getBytes()).toString();
            otherExternalNodeMapItem.put(TrafficSpectrumConstants.ID, unoriginalId);
            resultOtherExternalNodeMap.put(originalId, otherExternalNodeMapItem);
        }

        // Rewrite link endpoints so folded nodes point at their "Other" id.
        List<Map<String, Object>> resultLinks = Lists.newArrayList(originalLinks);
        for (Map<String, Object> resultLink : resultLinks) {
            String source = String.valueOf(resultLink.get(TrafficSpectrumConstants.SOURCE));
            String target = String.valueOf(resultLink.get(TrafficSpectrumConstants.TARGET));
            if (resultOtherInternalNodeMap.containsKey(source)) {
                resultLink.put(TrafficSpectrumConstants.SOURCE, resultOtherInternalNodeMap.get(source).get(TrafficSpectrumConstants.ID));
            }
            if (resultOtherExternalNodeMap.containsKey(target)) {
                resultLink.put(TrafficSpectrumConstants.TARGET, resultOtherExternalNodeMap.get(target).get(TrafficSpectrumConstants.ID));
            }
        }

        // NOTE(review): mergeLinks/distinctNodes are defined elsewhere in this
        // class (outside this view); presumably they merge duplicate links and
        // deduplicate node lists — verify there.
        Map<String, Object> result = Maps.newHashMap();
        List<Map<String, Object>> links = mergeLinks(resultLinks);
        Map<String, List<Map<String, Object>>> nodes = Maps.newHashMap();
        List<Map<String, Object>> internalNode = Lists.newArrayList();
        List<Map<String, Object>> externalNode = Lists.newArrayList();
        internalNode.addAll(resultOtherInternalNodeMap.values());
        internalNode.addAll(resultNoOtherInternalNodeMap.values());
        externalNode.addAll(resultOtherExternalNodeMap.values());
        externalNode.addAll(resultNoOtherExternalNodeMap.values());
        nodes.put(TrafficSpectrumConstants.INTERNAL_NODE, internalNode);
        nodes.put(TrafficSpectrumConstants.EXTERNAL_NODE, externalNode);
        Map<String, List<Map<String, Object>>> distinctNodes = distinctNodes(nodes);
        result.put(TrafficSpectrumConstants.NODES, distinctNodes);
        result.put(TrafficSpectrumConstants.LINKS, links);
        return result;
    }
+
+ @Override
+ public Map<String, Object> mergeInternalExternalBipartiteGraph(Map<String, Object> data1, Map<String, Object> data2) {
+ Map<String, List<Map<String, Object>>> nodes1 = (Map<String, List<Map<String, Object>>>) data1.get(TrafficSpectrumConstants.NODES);
+ List<Map<String, Object>> links1 = (List<Map<String, Object>>) data1.get(TrafficSpectrumConstants.LINKS);
+ List<Map<String, Object>> externalNodeList1 = nodes1.get(TrafficSpectrumConstants.EXTERNAL_NODE);
+ List<Map<String, Object>> internalNodeList1 = nodes1.get(TrafficSpectrumConstants.INTERNAL_NODE);
+
+ Map<String, List<Map<String, Object>>> nodes2 = (Map<String, List<Map<String, Object>>>) data2.get(TrafficSpectrumConstants.NODES);
+ List<Map<String, Object>> links2 = (List<Map<String, Object>>) data2.get(TrafficSpectrumConstants.LINKS);
+ List<Map<String, Object>> externalNodeList2 = nodes2.get(TrafficSpectrumConstants.EXTERNAL_NODE);
+ List<Map<String, Object>> internalNodeList2 = nodes2.get(TrafficSpectrumConstants.INTERNAL_NODE);
+ // TODO 合并 data1、data2,并且去重
+ List<Map<String, Object>> lists = Lists.newArrayList();
+ lists.addAll(links1);
+ lists.addAll(links2);
+ List<Map<String, Object>> distinctLinks = mergeLinks(lists);
+
+ Map<String, List<Map<String, Object>>> nodes = Maps.newHashMap();
+ List<Map<String, Object>> externalNode = Lists.newArrayList();
+ externalNode.addAll(externalNodeList1);
+ externalNode.addAll(externalNodeList2);
+ List<Map<String, Object>> internalNode = Lists.newArrayList();
+ internalNode.addAll(internalNodeList1);
+ internalNode.addAll(internalNodeList2);
+ nodes.put(TrafficSpectrumConstants.INTERNAL_NODE, internalNode);
+ nodes.put(TrafficSpectrumConstants.EXTERNAL_NODE, externalNode);
+ Map<String, List<Map<String, Object>>> distinctNodes = distinctNodes(nodes);
+ Map<String, Object> data = Maps.newHashMap();
+ data.put(TrafficSpectrumConstants.NODES, distinctNodes);
+ data.put(TrafficSpectrumConstants.LINKS, distinctLinks);
+ int sqrt = (int) Math.sqrt(engineConfigSource.getMaxCacheNum());
+ return getInternalExternalBipartiteGraph(data, Math.max(sqrt, engineConfigSource.getTrafficSpectrumClientIPAppResultNum()));
+ }
+
+ @Override
+ public List<Map<String, Object>> mergeLinks(List<Map<String, Object>> links) {
+ Map<String, Map<String, Object>> resultLinksMap = Maps.newHashMap();
+ links.forEach(o -> {
+ String uniqueKey = DigestUtil.md5Hex(o.get(TrafficSpectrumConstants.SOURCE)
+ + TrafficSpectrumConstants.SPLIT_HYPHEN + o.get(TrafficSpectrumConstants.DIRECTION)
+ + TrafficSpectrumConstants.SPLIT_HYPHEN + o.get(TrafficSpectrumConstants.TARGET));
+ if (resultLinksMap.containsKey(uniqueKey)) {
+ Map<String, Object> map = resultLinksMap.get(uniqueKey);
+ map.put(TrafficSpectrumConstants.SESSIONS, Long.parseLong(String.valueOf(map.get(TrafficSpectrumConstants.SESSIONS))) + Long.parseLong(String.valueOf(o.get(TrafficSpectrumConstants.SESSIONS))));
+ Long bytes1 = Long.parseLong(String.valueOf(map.get(TrafficSpectrumConstants.BYTES)));
+ Long bytes2 = Long.parseLong(String.valueOf(o.get(TrafficSpectrumConstants.BYTES)));
+ map.put(TrafficSpectrumConstants.BYTES, bytes1 + bytes2);
+ map.put(TrafficSpectrumConstants.PACKETS, Long.parseLong(String.valueOf(map.get(TrafficSpectrumConstants.PACKETS))) + Long.parseLong(String.valueOf(o.get(TrafficSpectrumConstants.PACKETS))));
+ map.put(TrafficSpectrumConstants.MAX_RATE, Math.max(Long.parseLong(String.valueOf(map.get(TrafficSpectrumConstants.MAX_RATE))), Long.parseLong(String.valueOf(o.get(TrafficSpectrumConstants.MAX_RATE)))));
+
+ //TODO 计算 avg_rate 时,通过 bytes 权重,计算加权平均值
+ Long avgRate1 = Long.parseLong(String.valueOf(map.get(TrafficSpectrumConstants.AVG_RATE)));
+ Long avgRate2 = Long.parseLong(String.valueOf(o.get(TrafficSpectrumConstants.AVG_RATE)));
+ if (bytes1 + bytes2 == 0) {
+ map.put(TrafficSpectrumConstants.AVG_RATE, (avgRate1 + avgRate2) / 2);
+ } else {
+ long avg = (long) (avgRate1 * (bytes1 * 1.0 / (bytes1 + bytes2)) + avgRate2 * (bytes2 * 1.0 / (bytes1 + bytes2)));
+ map.put(TrafficSpectrumConstants.AVG_RATE, avg);
+ }
+ resultLinksMap.put(uniqueKey, map);
+ } else {
+ resultLinksMap.put(uniqueKey, o);
+ }
+ });
+ List<Map<String, Object>> result = new ArrayList<>(resultLinksMap.values());
+ result.sort((o1, o2) -> {
+ long bytes1 = Long.parseLong(String.valueOf(o1.get(TrafficSpectrumConstants.BYTES)));
+ long bytes2 = Long.parseLong(String.valueOf(o2.get(TrafficSpectrumConstants.BYTES)));
+ return Long.compare(bytes2, bytes1);
+ });
+ return result;
+ }
+
+ @Override
+ public Map<String, List<Map<String, Object>>> distinctNodes(Map<String, List<Map<String, Object>>> nodes) {
+ Map<String, List<Map<String, Object>>> resultNode = Maps.newHashMap();
+ List<Map<String, Object>> internalNodes = nodes.get(TrafficSpectrumConstants.INTERNAL_NODE);
+ List<Map<String, Object>> externalNodes = nodes.get(TrafficSpectrumConstants.EXTERNAL_NODE);
+ List<Map<String, Object>> resultInternalNodes = internalNodes.stream().collect(Collectors.collectingAndThen(Collectors.toCollection(() -> new TreeSet<>(Comparator.comparing(o -> o.get(TrafficSpectrumConstants.ID).toString()))), ArrayList::new));
+ List<Map<String, Object>> resultExternalNodes = externalNodes.stream().collect(Collectors.collectingAndThen(Collectors.toCollection(() -> new TreeSet<>(Comparator.comparing(o -> o.get(TrafficSpectrumConstants.ID).toString()))), ArrayList::new));
+ resultNode.put(TrafficSpectrumConstants.INTERNAL_NODE, resultInternalNodes);
+ resultNode.put(TrafficSpectrumConstants.EXTERNAL_NODE, resultExternalNodes);
+ return resultNode;
+ }
+
+ private String getApplicationCategory(String appCategory) {
+ if (StrUtil.isEmpty(appCategory)) {
+ return TrafficSpectrumConstants.OTHER;
+ }
+ return appCategory;
+ }
+
+ private String getIpCategory(String clientIp, String clientCountry) {
+ if (!StrUtil.isEmpty(clientCountry)) {
+ return clientCountry;
+ }
+ if (StrUtil.isEmpty(clientCountry) && IPUtil.isIPv4Address(clientIp) && IPUtil.internalIp(clientIp)) {
+ return TrafficSpectrumConstants.PRIVATE_IP;
+ }
+ return TrafficSpectrumConstants.OTHER;
+ }
+
    /** Setter injection for the Spring {@code Environment}. */
    @Autowired
    public void setEnvironment(Environment environment) {
        this.environment = environment;
    }
+
    /** Setter injection for the database/schema service. */
    @Autowired
    public void setDatabaseService(DatabaseService databaseService) {
        this.databaseService = databaseService;
    }
+
    /** Setter injection for the synchronous SQL query service. */
    @Autowired
    public void setSqlSyncQueryService(SQLSyncQueryService sqlSyncQueryService) {
        this.sqlSyncQueryService = sqlSyncQueryService;
    }
+
    /** Setter injection for the engine configuration source. */
    @Autowired
    public void setEngineConfigSource(EngineConfigSource engineConfigSource) {
        this.engineConfigSource = engineConfigSource;
    }
+}
diff --git a/src/main/java/com/mesalab/qgw/service/impl/TroubleshootingServiceImp.java b/src/main/java/com/mesalab/qgw/service/impl/TroubleshootingServiceImp.java
new file mode 100644
index 00000000..3b74a3af
--- /dev/null
+++ b/src/main/java/com/mesalab/qgw/service/impl/TroubleshootingServiceImp.java
@@ -0,0 +1,789 @@
+package com.mesalab.qgw.service.impl;
+
+import cn.hutool.core.collection.CollectionUtil;
+import cn.hutool.core.net.NetUtil;
+import cn.hutool.core.util.IdUtil;
+import cn.hutool.core.util.NumberUtil;
+import cn.hutool.core.util.StrUtil;
+import cn.hutool.core.util.URLUtil;
+import cn.hutool.json.JSONUtil;
+import cn.hutool.log.Log;
+import cn.hutool.log.LogFactory;
+import com.alibaba.fastjson2.JSON;
+import com.alibaba.fastjson2.JSONObject;
+import com.alibaba.fastjson2.JSONPath;
+import com.alibaba.nacos.api.config.ConfigService;
+import com.alibaba.nacos.api.exception.NacosException;
+import com.geedgenetworks.utils.CommonUtil;
+import com.geedgenetworks.utils.DateUtils;
+import com.geedgenetworks.utils.StringUtil;
+import com.google.common.base.Stopwatch;
+import com.google.common.collect.Lists;
+import com.google.common.collect.MapDifference;
+import com.google.common.collect.Maps;
+import com.jayway.jsonpath.JsonPath;
+import com.jfinal.plugin.activerecord.Db;
+import com.jfinal.plugin.activerecord.Record;
+import com.mesalab.common.entity.BaseResult;
+import com.mesalab.common.entity.BaseResultGenerator;
+import com.mesalab.common.entity.DataTypeMapping;
+import com.mesalab.common.enums.*;
+import com.mesalab.common.exception.BusinessException;
+import com.mesalab.common.nacos.NacosConfig;
+import com.mesalab.common.utils.JsonSchemaValidator;
+import com.mesalab.qgw.benchmark.DialectWriter;
+import com.mesalab.qgw.constant.QGWMessageConst;
+import com.mesalab.qgw.exception.QGWBusinessException;
+import com.mesalab.common.exception.CommonErrorCode;
+import com.mesalab.qgw.model.basic.*;
+import com.mesalab.qgw.service.*;
+import com.opencsv.CSVWriter;
+import org.apache.commons.math3.stat.StatUtils;
+import org.apache.http.NameValuePair;
+import org.apache.http.client.utils.URLEncodedUtils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.context.EnvironmentAware;
+import org.springframework.core.env.Environment;
+import org.springframework.stereotype.Service;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.*;
+import java.util.concurrent.TimeUnit;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+@Service("troubleshootingService")
+public class TroubleshootingServiceImp implements TroubleshootingService, EnvironmentAware {
+
    private static final Log log = LogFactory.get();
    // Dataset record column names.
    private static final String BACKEND_ENGINE = "backend_engine";
    private static final String IDENTIFIER_NAME = "identifier_name";
    private static final String TEMPLATE = "template";
    private static final String TYPE = "type";

    // NOTE(review): same literal as TYPE ("type"); kept as a separate constant,
    // presumably to distinguish the dataset-type column by intent — confirm.
    private static final String DATASET_TYPE = "type";

    @Autowired
    private DatabaseService databaseService;
    @Autowired
    private ClickHouseHttpSource clickHouseHttpSource;
    @Autowired
    private DruidIoHttpSource druidIoHttpSource;
    @Autowired
    private HBaseAPISource hBaseAPISource;

    @Autowired
    private SQLSyncQueryService sqlSyncQueryService;
    @Autowired
    private HttpClientService httpClientService;
    @Autowired
    private HttpClientServiceV2 httpClientServiceV2;
    @Autowired
    HttpConfig httpConfig;
    @Autowired
    private DatasetService datasetService;
    @Autowired
    private DialectWriter dialectWriter;
    @Autowired
    private ConfigService systemConfigService;
    @Autowired
    private NacosConfig nacosConfig;
    @Autowired
    private QueryJobService queryJobService;
    // Local server port and actuator paths used to build the health-check URL.
    @Value("${server.port}")
    private int serverPort;
    @Value("${management.endpoints.web.base-path}")
    private String basePath;
    @Value("${management.endpoints.web.path-mapping.health}")
    private String healthPath;

    private Environment env;
    // Extracts the TTL seconds from a ClickHouse "toIntervalSecond(N)" expression.
    private static Pattern pTTL = Pattern.compile(".*toIntervalSecond\\((\\d+)\\)", Pattern.CASE_INSENSITIVE);
    private static final String trafficPort = "8123";
    // Nacos config ids consulted by this service.
    private final static String DATA_ID = "version.json";
    private final static String PUBLIC_SCHEMA_INFO = "public_schema_info.json";
    private static final String FIELDS = "fields";
    private static final String NAMESPACE = "namespace";
    private static final String SCHEMA_SYNTAX_RULE = "schema-syntax-validation.json";

    private static String localHostAddress;
    private final static Map<String, String> metaMap = Maps.newHashMap();

    static {
        localHostAddress = NetUtil.getLocalhostStr();
    }

    // Native engine type -> internal data-type mapping used when comparing live
    // table structure against registered schema definitions.
    static {
        metaMap.put("Int8", DataTypeMapping.INT);
        metaMap.put("Int16", DataTypeMapping.INT);
        metaMap.put("Int32", DataTypeMapping.INT);
        metaMap.put("Int64", DataTypeMapping.LONG);
        metaMap.put("Int128", DataTypeMapping.LONG);
        metaMap.put("Int256", DataTypeMapping.LONG);
        metaMap.put("UInt8", DataTypeMapping.INT);
        metaMap.put("UInt16", DataTypeMapping.INT);
        metaMap.put("UInt32", DataTypeMapping.LONG);
        metaMap.put("UInt64", DataTypeMapping.LONG);
        metaMap.put("UInt256", DataTypeMapping.LONG);
        metaMap.put("Nullable(Int32)", DataTypeMapping.INT);
        metaMap.put("Nullable(Int64)", DataTypeMapping.LONG);
        metaMap.put("Nullable(Float64)", DataTypeMapping.DOUBLE);
        metaMap.put("BIGINT", DataTypeMapping.LONG);
        metaMap.put("Float32", DataTypeMapping.FLOAT);
        metaMap.put("Float64", DataTypeMapping.DOUBLE);
        metaMap.put("Date", DataTypeMapping.DATE);
        metaMap.put("DateTime", DataTypeMapping.TIMESTAMP);
        metaMap.put("DateTime64(3)", DataTypeMapping.STRING);
        metaMap.put("TIMESTAMP", DataTypeMapping.STRING);
        metaMap.put("Array(Int64)", DataTypeMapping.ARRAY.concat("(").concat(DataTypeMapping.LONG).concat(")"));
        metaMap.put("Array(Int32)", DataTypeMapping.ARRAY.concat("(").concat(DataTypeMapping.INT).concat(")"));
        metaMap.put("Array(String)", DataTypeMapping.ARRAY.concat("(").concat(DataTypeMapping.STRING).concat(")"));
        metaMap.put("VARCHAR", DataTypeMapping.STRING);
        metaMap.put("LowCardinality(String)", DataTypeMapping.STRING);
    }
+
+ @Override
+ public JSONObject getComponentStatus() {
+ Map<String, Object> jsonMap = Maps.newLinkedHashMap();
+ try {
+ String content = systemConfigService.getConfig(DATA_ID, nacosConfig.getGroup(), 3000);
+ Map componentInfo = JSON.parseObject(content, Map.class);
+ List<Map<String, Object>> componentList = (List<Map<String, Object>>) componentInfo.get("components");
+ Map<String, Object> componentHealthResult = getComponentHealth();
+ Set<String> componentNameSet = componentHealthResult.keySet();
+ boolean isStatusUp = true;
+ for (Map component : componentList) {
+ String componentName = String.valueOf(component.get("name"));
+ if (componentNameSet.contains(componentName)) {
+ Map<String, String> healthMap = buildHealth(componentName, componentHealthResult);
+ if (healthMap.get("status").equals("DOWN")) {
+ isStatusUp = false;
+ }
+ if (StringUtil.isNotEmpty(healthMap.get("version"))) {
+ component.put("version", healthMap.get("version"));
+ healthMap.remove("version");
+ }
+ component.put("health", healthMap);
+ }
+ }
+ jsonMap.put("product", componentInfo.get("product"));
+ jsonMap.put("status", isStatusUp ? "UP" : "DOWN");
+ jsonMap.put("components", componentList);
+ jsonMap.put("registered", componentInfo.get("registered"));
+ jsonMap.put("version", componentInfo.get("version"));
+ jsonMap.put("updated", componentInfo.get("updated"));
+ } catch (NacosException e) {
+ e.printStackTrace();
+ }
+ return new JSONObject(jsonMap);
+ }
+
    /**
     * Checks that the table TTL and field definitions in the schema registry are
     * consistent with the live ClickHouse cluster.
     *
     * <p>Scans cluster nodes table by table and stops at the first node for which
     * any diff is recorded — only that first divergent node/table set is reported
     * (NOTE(review): early-exit appears intentional; confirm).
     *
     * @return success with empty data when consistent; otherwise the diff of the
     *         first inconsistent node, with a message indicating setup failure
     */
    @Override
    public BaseResult consistencyCheck() {
        List<Map<String, Object>> dataList = Lists.newArrayList();
        Map<String, Object> logDiff = Maps.newHashMap();
        Map tables = databaseService.getSchemaInfo(MetadataType.TABLES.getValue(), clickHouseHttpSource.getDbName(), false);
        List<String> symbols = (List<String>) tables.get("symbols");
        logDiff.put("logType", DatabaseServiceImpl.LogType.TRAFFIC_LOGS.getValue());
        Map<String, Map<String, Object>> changeTTL = Maps.newHashMap();
        List<String> ipPorts = getClusterAddressOfCK();
        try {
            for (String ipPort : ipPorts) {
                // Stop scanning further nodes once a diff has been found.
                if (!changeTTL.isEmpty()) {
                    break;
                }
                // Rewrite loopback addresses to the configured ClickHouse host plus the traffic port.
                if (ipPort.startsWith("127.0.0.1")) {
                    String url = clickHouseHttpSource.getUrl();
                    ipPort = url.substring(url.indexOf("//") + 2, url.lastIndexOf(":") + 1).concat(trafficPort);
                }
                for (String tableName : symbols) {
                    Map schemaMap = databaseService.getSchemaInfo(FIELDS, tableName, false);
                    Object tableTTLInSchema = getTableTTLInSchema(schemaMap);
                    Map<String, Object> tableDiff = getTableDiff(ipPort, tableName, tableTTLInSchema);
                    List fieldDiff = getFieldDiff(ipPort, tableName, schemaMap);
                    if (CollectionUtil.isNotEmpty(fieldDiff)) {
                        tableDiff.put(FIELDS, fieldDiff);

                    }
                    if (CollectionUtil.isNotEmpty(tableDiff)) {
                        changeTTL.put(tableName, tableDiff);
                        logDiff.put("tables", Lists.newArrayList(changeTTL));
                        logDiff.put("address", Lists.newArrayList(ipPort));
                    }
                }
            }
        } catch (RuntimeException ex) {
            log.error(" schema TTL check error: {}", ex);
            throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
                    String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), ex.getMessage()));
        }
        if (changeTTL.isEmpty()) {
            return BaseResultGenerator.success("ok", dataList);
        }
        dataList.add(logDiff);
        return BaseResultGenerator.success("Log streaming setting task not executed or failed", dataList);
    }
+
    /**
     * Validates every registered schema: JSON-schema syntax rules for the
     * ClickHouse/Druid namespaces, ClickHouse index keys, field names/types
     * against live tables, and aggregation-function constraints.
     *
     * @return success result with per-database warnings and the elapsed time;
     *         hard inconsistencies surface as {@code QGWBusinessException}
     */
    @Override
    public BaseResult validateMetadata() {
        BaseResult result = null;
        try {
            Stopwatch watch = Stopwatch.createStarted();
            Map<String, Object> statistics = Maps.newLinkedHashMap();
            Map<String, Map<String, List<String>>> resultData = Maps.newHashMap();
            JsonSchemaValidator jsonSchemaValidator = JsonSchemaValidator.getInstance();
            for (String tableName : databaseService.getAllTable()) {
                Map schemaMap = databaseService.getSchemaInfo(FIELDS, tableName, false);
                // Only schemas in the ClickHouse or Druid namespace are validated.
                if (!clickHouseHttpSource.getDbName().equals(schemaMap.get(NAMESPACE)) && !druidIoHttpSource.getDbname().equals(schemaMap.get(NAMESPACE)))
                    continue;
                jsonSchemaValidator
                        .addRule(SCHEMA_SYNTAX_RULE, "schema")
                        .validateSchema(JSON.toJSONString(schemaMap));
            }
            checkCKIndexKey(clickHouseHttpSource.getDbName(), databaseService.getAllTable());
            checkMetadataFieldAndType(databaseService.getAllTable(), resultData);
            checkMetadataFunctionConstraint(databaseService.getAllTable());

            statistics.put("elapsed", watch.elapsed(TimeUnit.MILLISECONDS));
            result = BaseResultGenerator.success("Json Schema Validation Success.", resultData, statistics);
            result.setOutputMode(OutputMode.JSON.getValue());
        } catch (RuntimeException e) {
            log.error("Json Schema Validation Fail: {}", e.getMessage());
            throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(),
                    CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
                    String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), e.getMessage()));
        }
        return result;
    }
+
+
    /**
     * Dry-runs every SQL and DSL dataset template and collects the datasets whose
     * dry-run raised an error.
     *
     * <p>NOTE(review): "dateset" is a typo for "dataset" in the interface method
     * name; left unchanged to preserve the public interface.
     *
     * @return result listing failing dataset ids with their error messages
     */
    @Override
    public BaseResult datesetVerification() {
        List<Record> datasetList = Db.find(String.format(Objects.requireNonNull(env.getProperty("SQL_DATASETS")), ""));
        List<Map<String, String>> sqlDatasetList = Lists.newArrayList();
        List<Map<String, String>> dslDatasetList = Lists.newArrayList();
        List<Map<String, String>> resultData = Lists.newArrayList();
        List<LinkedHashMap> variables = datasetService.getVariable();
        // Partition datasets by template type, expanding variables into each template.
        datasetList.forEach(record -> {
            Map<String, String> map = Maps.newHashMap();
            map.put(IDENTIFIER_NAME, String.valueOf(record.getColumns().get(IDENTIFIER_NAME)));
            map.put(TEMPLATE, String.valueOf(datasetService.buildExecSQL(variables, String.valueOf(record.getColumns().get(TEMPLATE)))));
            if (record.getColumns().get(DATASET_TYPE).equals("sql")) {
                sqlDatasetList.add(map);
            } else if (record.getColumns().get(DATASET_TYPE).equals("dsl")) {
                dslDatasetList.add(map);
            }
        });

        // Dry-run SQL datasets; failures are collected rather than aborting the scan.
        SqlQueryRequestParam sqlQueryRequest;
        for (Map sqlDataset : sqlDatasetList) {
            Map<String, String> map = Maps.newHashMap();
            try {
                sqlQueryRequest = JSON.parseObject(String.valueOf(sqlDataset.get(TEMPLATE)), SqlQueryRequestParam.class);
                sqlQueryRequest.setExecutionMode(ExecutionMode.ONESHOT.getValue());
                sqlQueryRequest.setIsDryRun(1);
                sqlQueryRequest.setOutputMode(OutputMode.JSON.getValue());
                log.info("SQL Dataset Verification, ID is: {}", sqlDataset.get(IDENTIFIER_NAME));
                queryJobService.createSQLAdHocQuery(sqlQueryRequest);
            } catch (QGWBusinessException | BusinessException e) {
                map.put(String.valueOf(sqlDataset.get(IDENTIFIER_NAME)), e.getMessage());
                resultData.add(map);
                log.error("SQL Dataset execute error, ID is: {}, message is: {}", sqlDataset.get(IDENTIFIER_NAME), e.getMessage());
            }
        }

        // Dry-run DSL datasets the same way.
        DSLQueryRequestParam dslQueryRequest;
        for (Map dslDataset : dslDatasetList) {
            Map<String, String> map = Maps.newHashMap();
            try {
                dslQueryRequest = JSON.parseObject(String.valueOf(dslDataset.get(TEMPLATE)), DSLQueryRequestParam.class);
                dslQueryRequest.setExecutionMode(ExecutionMode.ONESHOT.getValue());
                dslQueryRequest.setIsDryRun(1);
                dslQueryRequest.setOutputMode(OutputMode.JSON.getValue());
                log.info("DSL Dataset Verification, ID is {}", dslDataset.get(IDENTIFIER_NAME));
                queryJobService.createDSLAdHocQuery(dslQueryRequest);
            } catch (RuntimeException e) {
                map.put(String.valueOf(dslDataset.get(IDENTIFIER_NAME)), e.getMessage());
                resultData.add(map);
                log.error("DSL Dataset execute error, ID is: {}, message is: {}", dslDataset.get(IDENTIFIER_NAME), e.getMessage());
            }
        }

        return buildResult(variables ,resultData, sqlDatasetList.size() + dslDatasetList.size());
    }
+
    /**
     * Runs every dataset of the given category against its backend engine
     * (ClickHouse, Druid, HBase, or the query gateway itself) and merges the
     * per-engine benchmark results.
     *
     * @param test    dataset category to benchmark
     * @param isSaved whether the per-engine results should be persisted
     * @return merged benchmark results keyed by the category, plus elapsed time
     */
    @Override
    public BaseResult benchmarkTest(String test, boolean isSaved) {

        BaseResult result = null;
        Map<String, Object> dataResult = Maps.newLinkedHashMap();
        Map<String, Object> statistics = Maps.newLinkedHashMap();
        Map<String, Map<String, String>> mergeResult;
        Map<String, Map<String, String>> engineResult;
        Map<String, Map<String, String>> clickhouseResult;
        Map<String, Map<String, String>> druidResult;
        Map<String, Map<String, String>> hbaseResult;
        try {
            List<Map<String, Object>> dataSetlist = datasetService.getDatasets(Lists.newArrayList(), test, null);
            List<Map> druidDatasetList = Lists.newLinkedList();
            List<Map> ckDatasetList = Lists.newLinkedList();
            List<Map> engineDatasetList = Lists.newLinkedList();
            List<Map> hbaseDatasetList = Lists.newLinkedList();
            List<LinkedHashMap> variables = datasetService.getVariable();

            Stopwatch watch = Stopwatch.createStarted();
            if (CollectionUtil.isNotEmpty(dataSetlist)) {
                // Partition the datasets by backend engine; unknown engines fall through
                // to the gateway's own engine list.
                for (int i = 0; i < dataSetlist.size(); i++) {
                    String backendEngine = String.valueOf(dataSetlist.get(i).get(BACKEND_ENGINE));
                    String identifierName = String.valueOf(dataSetlist.get(i).get(IDENTIFIER_NAME));
                    String template = String.valueOf(dataSetlist.get(i).get(TEMPLATE));
                    String type = String.valueOf(dataSetlist.get(i).get(TYPE));
                    Map<String, String> map = Maps.newHashMap();
                    map.put(IDENTIFIER_NAME, identifierName);
                    map.put(TYPE, type);
                    map.put(TEMPLATE, String.valueOf(datasetService.buildExecSQL(variables, template)));
                    if (DBEngineType.DRUID.getValue().equalsIgnoreCase(backendEngine)) {
                        druidDatasetList.add(map);
                    } else if (DBEngineType.CLICKHOUSE.getValue().equalsIgnoreCase(backendEngine)) {
                        ckDatasetList.add(map);
                    } else if (DBEngineType.HBASE.getValue().equalsIgnoreCase(backendEngine)) {
                        hbaseDatasetList.add(map);
                    } else {
                        engineDatasetList.add(map);
                    }
                }
            } else {
                return BaseResultGenerator.failure(HttpStatusCodeEnum.BAD_REQUEST.getCode(), "The dataset category does not exist.");
            }

            clickhouseResult = buildResult(test, DBEngineType.CLICKHOUSE.getValue(), ckDatasetList, isSaved);
            druidResult = buildResult(test, DBEngineType.DRUID.getValue(), druidDatasetList, isSaved);
            engineResult = buildResult(test, DBEngineType.QGW.getValue(), engineDatasetList, isSaved);
            hbaseResult = buildResult(test, DBEngineType.HBASE.getValue(), hbaseDatasetList, isSaved);

            // Fold the four per-engine result maps into one.
            mergeResult = Stream
                    .concat(clickhouseResult.entrySet().stream(), druidResult.entrySet().stream())
                    .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
            mergeResult = Stream.concat(mergeResult.entrySet().stream(), engineResult.entrySet().stream())
                    .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
            mergeResult = Stream.concat(mergeResult.entrySet().stream(), hbaseResult.entrySet().stream())
                    .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));

            dataResult.put(test, mergeResult);
            statistics.put("elapsed", watch.elapsed(TimeUnit.MILLISECONDS));
            result = BaseResultGenerator.success(StrUtil.format("SQL benchmark complete, time range is: {}", getTimeRangeByVariable(variables)), dataResult, statistics);
            result.setOutputMode(OutputMode.JSON.getValue());
        } catch (RuntimeException e) {
            log.error("Execute Poc SQL Fail:{}", e);
            throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(),
                    CommonErrorCode.SQL_EXECUTION_EXCEPTION.getCode(),
                    String.format(CommonErrorCode.SQL_EXECUTION_EXCEPTION.getMessage(), e.getMessage()));
        }
        return result;
    }
+
+ private Map<String, Object> getComponentHealth() {
+ String healthCheckUrl = URLUtil.normalize(localHostAddress + ":" + serverPort + basePath + healthPath);
+ HttpResponseResult result = httpClientServiceV2.get(healthCheckUrl, httpConfig.getServerRequestTimeOut());
+ Map<String, Object> componentsHealthResult = JSON.parseObject(result.getResponseBody(), Map.class);
+ Map<String, Object> componentMap = (Map<String, Object>) componentsHealthResult.get("components");
+ return componentMap;
+ }
+
+
+ private Map<String, String> buildHealth(String componentName, Map<String, Object> map) {
+ Map<String, Object> componentMap = (Map<String, Object>) map.get(componentName);
+ Map<String, String> healthMap = (Map<String, String>) componentMap.get("details");
+ healthMap.put("status", String.valueOf(componentMap.get("status")));
+ healthMap.remove("app");
+ return healthMap;
+ }
+
    /**
     * Verifies that the index key declared in each ClickHouse schema matches the
     * index key reported by the database; throws on query failure or mismatch.
     *
     * @param dbName ClickHouse database the check is scoped to
     * @param tables candidate table names (non-ClickHouse schemas are skipped)
     */
    private void checkCKIndexKey(String dbName, List<String> tables) {
        for (String tableName : tables) {
            LinkedHashMap<String, Object> schemaMap = databaseService.getSchemaByName(tableName);
            if (!dbName.equals(schemaMap.get("namespace"))) continue;
            BaseResult indexKeyResult = getBaseResultByEngine(SQLQueryContext.builder().option(QueryOption.REAL_TIME.getValue()).originalSQL(String.format(Objects.requireNonNull(env.getProperty("TABLE_INDEX_KEY")), tableName, clickHouseHttpSource.getDbName())).build());
            if (HttpStatusCodeEnum.SUCCESS.getCode() != indexKeyResult.getStatus()) {
                log.error("The Table[" + tableName + "] index key query failed" + indexKeyResult.getMessage());
                throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(),
                        CommonErrorCode.SQL_EXECUTION_EXCEPTION.getCode(),
                        String.format(CommonErrorCode.SQL_EXECUTION_EXCEPTION.getMessage(), " The Table[" + tableName + "] index key query failed"));
            }
            List<Map<String, Object>> dataList = (List<Map<String, Object>>) indexKeyResult.getData();
            // An empty first row means the table has no index key in the DB.
            List<String> dbIndexKey = StringUtil.isEmpty(dataList.get(0)) ? Lists.newArrayList() : (List<String>) dataList.get(0).get("index_key");
            List<String> schemaIndexKey = databaseService.getIndexKey(tableName);
            if (schemaIndexKey.equals(dbIndexKey)) {
                continue;
            }
            log.error("{} schema index key inconsistent with DB, schema: {}, db: {}", tableName, schemaIndexKey, dbIndexKey);
            throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(),
                    CommonErrorCode.SCHEMA_AND_TABLE_NOT_CONSISTENCY.getCode(),
                    String.format(CommonErrorCode.SCHEMA_AND_TABLE_NOT_CONSISTENCY.getMessage(),
                            String.format("%s schema index key is: %s, db index key is: %s", tableName, schemaIndexKey, dbIndexKey)));
        }
    }
+
    /** Delegates a SQL query context to the synchronous query service. */
    private BaseResult getBaseResultByEngine(SQLQueryContext queryProfile) {
        return sqlSyncQueryService.executeQuery(queryProfile);
    }
+
    /**
     * Compares schema field names and types against the live table structure
     * (via "describe &lt;table&gt;") for the ClickHouse/Druid/HBase namespaces.
     *
     * <p>Schema fields missing from the DB are fatal; DB fields missing from the
     * schema are collected as warnings into {@code resultData}; type differences
     * in ClickHouse/Druid are fatal.
     *
     * @param tables     table names to check (other namespaces are skipped)
     * @param resultData dbName -&gt; (tableName -&gt; messages) warning accumulator, mutated in place
     */
    private void checkMetadataFieldAndType(List<String> tables, Map<String, Map<String, List<String>>> resultData) {
        for (String tableName : tables) {
            Map schemaMap = databaseService.getSchemaInfo(FIELDS, tableName, false);
            String dbName = String.valueOf(schemaMap.get(NAMESPACE));
            if (!clickHouseHttpSource.getDbName().equals(dbName)
                    && !druidIoHttpSource.getDbname().equals(dbName)
                    && !hBaseAPISource.getDbName().equals(dbName)) continue;
            BaseResult baseResult = getBaseResultByEngine(SQLQueryContext.builder().option(QueryOption.REAL_TIME.getValue()).originalSQL("describe " + tableName).build());
            if (HttpStatusCodeEnum.SUCCESS.getCode() != baseResult.getStatus()) {
                log.error("The Table[" + tableName + "] Structure query failed" + baseResult.getMessage());
                throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(),
                        CommonErrorCode.SQL_EXECUTION_EXCEPTION.getCode(),
                        String.format(CommonErrorCode.SQL_EXECUTION_EXCEPTION.getMessage(), " The Table[" + tableName + "] Structure query failed"));
            }
            List<Map<String, Object>> dbFields = JsonPath.read(JSON.toJSONString(baseResult.getData()), "$.fields.*");
            Map<String, Object> dbFieldMap = buildDBFiedldMap(dbFields);
            List<Map<String, Object>> schemaFields = (List<Map<String, Object>>) schemaMap.get(FIELDS);
            Map<String, Object> schemaFieldMap = buildSchemaFiedldMap(schemaFields);

            MapDifference<String, Object> difference = Maps.difference(schemaFieldMap, dbFieldMap);
            // Fields declared in the schema but absent from the table: fatal.
            Map<String, Object> entriesOnlyOnLeft = difference.entriesOnlyOnLeft();
            if (entriesOnlyOnLeft.size() > 0) {
                log.error("Schema Fields greater than Table Fields,Schema " + tableName + " exist fields :" + entriesOnlyOnLeft);
                throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(),
                        CommonErrorCode.SQL_EXECUTION_EXCEPTION.getCode(),
                        String.format(CommonErrorCode.SQL_EXECUTION_EXCEPTION.getMessage(),
                                "Schema Fields greater than Table Fields,Schema " + tableName + " exist fields " + entriesOnlyOnLeft));
            }

            // Fields present in the table but not in the schema: warn only.
            Map<String, Object> entriesOnlyOnRight = difference.entriesOnlyOnRight();
            String fieldValidationMessage = "Schema Fields less than Table Fields:" + entriesOnlyOnRight;
            if (entriesOnlyOnRight.size() > 0) {
                log.warn(tableName + " " + fieldValidationMessage);
                buildData(resultData, tableName, dbName, fieldValidationMessage);
            }

            // Type mismatches are fatal for ClickHouse/Druid namespaces.
            Map<String, MapDifference.ValueDifference<Object>> fieldTypeDiff = difference.entriesDiffering();
            if (fieldTypeDiff.size() > 0 && (clickHouseHttpSource.getDbName().equals(dbName)
                    || druidIoHttpSource.getDbname().equals(dbName))) {
                StringBuilder sb = new StringBuilder();
                for (String field : fieldTypeDiff.keySet()) {
                    sb.append(field)
                            .append("[")
                            .append(fieldTypeDiff.get(field).leftValue())
                            .append(" modified to ")
                            .append(fieldTypeDiff.get(field).rightValue())
                            .append("]")
                            .append(", ");
                }
                // Trim the trailing ", " (safe: fieldTypeDiff is non-empty here).
                sb.setLength(sb.length()-2);
                throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(),
                        CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
                        String.format(QGWMessageConst.SCHEMA_FIELD_TYPE_NEED_CHECK, tableName, sb));
            }
        }
    }
+
+ private static Map<String, Object> buildDBFiedldMap(List<Map<String, Object>> dbFields) {
+ Map<String, Object> dbFieldMap = Maps.newHashMap();
+ dbFields.stream().forEach(objectMap -> {
+ String type = StrUtil.isBlank(metaMap.get(objectMap.get("type"))) ? "string" : metaMap.get(objectMap.get("type"));
+ dbFieldMap.put(String.valueOf(objectMap.get("name")), type);
+ });
+ return dbFieldMap;
+ }
+
+ private static Map<String, Object> buildSchemaFiedldMap(List<Map<String, Object>> schemaFields) {
+ Map<String, Object> schemaFieldMap = Maps.newHashMap();
+ schemaFields.stream().forEach(objectMap -> {
+ String type;
+ if (objectMap.get("type") instanceof String) {
+ type = String.valueOf(objectMap.get("type"));
+ } else {
+ Map map = (Map) objectMap.get("type");
+ type = String.valueOf(map.get("type")).equals("array") ? String.valueOf(map.get("type")).concat("(").concat(String.valueOf(map.get("items"))).concat(")") : String.valueOf(map.get("type"));
+ }
+ schemaFieldMap.put(String.valueOf(objectMap.get("name")), type);
+ });
+ return schemaFieldMap;
+ }
+
+ private static void buildData(Map<String, Map<String, List<String>>> resultData, String tableName, String dbName, String message) {
+ if (!resultData.isEmpty()) {
+ if (resultData.containsKey(dbName)) {
+ if (resultData.get(dbName).containsKey(tableName)) {
+ Map<String, List<String>> map = resultData.get(dbName);
+ map.get(tableName).add(message);
+ } else {
+ Map<String, List<String>> map = resultData.get(dbName);
+ List<String> list = Lists.newArrayList();
+ list.add(message);
+ map.put(tableName, list);
+ resultData.put(dbName, map);
+ }
+ } else {
+ putData(resultData, tableName, dbName, message);
+ }
+ } else {
+ putData(resultData, tableName, dbName, message);
+ }
+ }
+
+    /**
+     * Inserts a fresh {@code tableName -> [message]} entry under {@code dbName},
+     * replacing any existing entry for that db name.
+     */
+    private static void putData(Map<String, Map<String, List<String>>> resultData, String tableName, String dbName, String message) {
+        Map<String, List<String>> map = Maps.newHashMap();
+        List<String> list = Lists.newArrayList();
+        list.add(message);
+        map.put(tableName, list);
+        resultData.put(dbName, map);
+    }
+
+    /**
+     * Validates that every aggregation function listed in a field's
+     * {@code doc.constraints.aggregation_functions} schema attribute is one of the
+     * aggregation functions declared in the public schema info; throws a
+     * {@link QGWBusinessException} on the first unknown function.
+     * Only tables belonging to the ClickHouse or Druid data sources are checked.
+     */
+    private void checkMetadataFunctionConstraint(List<String> tables) {
+        Object cfg = databaseService.getCfg(PUBLIC_SCHEMA_INFO);
+        // Round-trip through JSON to get a plain Map for JsonPath evaluation.
+        Map map = JSON.parseObject(String.valueOf(JSON.toJSON(cfg)), Map.class);
+        List<String> aggregationFunctions = JsonPath.read(map, "$.functions.aggregation[*].name");
+        for (String tableName : tables) {
+            Map schemaMap = databaseService.getSchemaInfo(FIELDS, tableName, false);
+            String dbName = String.valueOf(schemaMap.get(NAMESPACE));
+            // Skip tables that are not in the CK or Druid namespaces.
+            if (!clickHouseHttpSource.getDbName().equals(dbName) && !druidIoHttpSource.getDbname().equals(dbName))
+                continue;
+            List<Map<String, Object>> schemaFields = (List<Map<String, Object>>) schemaMap.get(FIELDS);
+            schemaFields.stream().forEach(objectMap -> {
+                if(JSONPath.contains(objectMap, "$.doc.constraints.aggregation_functions")){
+                    // Constraint value is a comma-separated function list; whitespace is ignored.
+                    String str =JsonPath.read(objectMap, "$.doc.constraints.aggregation_functions");
+                    String funStr = str.replaceAll(" ", "");
+                    List<String> funList = new ArrayList<>(Arrays.asList(funStr.split(",")));
+                    for (String fun : funList) {
+                        if (!aggregationFunctions.contains(fun)) {
+                            throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(),
+                                    CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
+                                    String.format(QGWMessageConst.SCHEMA_FUNCTION_CONSTRAINT_ERROR, tableName, objectMap.get("name"), fun));
+                        }
+                    }
+                }
+            });
+        }
+    }
+
+    /**
+     * Extracts the table-level TTL declared at {@code doc.ttl} in the schema map.
+     *
+     * @return the TTL as a String, or {@code null} when the schema map is empty,
+     *         has no "doc" entry, or the "ttl" value itself is empty
+     */
+    private Object getTableTTLInSchema(Map schemaMap) {
+        Object tableTTLInSchema = null;
+        if (StringUtil.isNotEmpty(schemaMap)) {
+            if (schemaMap.containsKey("doc")) {
+                Map<String, Object> schemaHasDoc = (Map<String, Object>) schemaMap.get("doc");
+                if (schemaHasDoc.containsKey("ttl")) {
+                    tableTTLInSchema = StringUtil.isEmpty(schemaHasDoc.get("ttl")) ? null : schemaHasDoc.get("ttl").toString();
+                }
+            }
+        }
+        return tableTTLInSchema;
+    }
+
+    /**
+     * Compares the table TTL declared in the schema against the TTL actually configured
+     * in ClickHouse (queried over HTTP via the TABLE_TTL SQL template).
+     *
+     * @return an empty map when both agree; otherwise a map with "last_ttl" (schema value)
+     *         and "used_ttl" (DB value)
+     * @throws QGWBusinessException when the HTTP query to ClickHouse fails
+     */
+    private Map<String, Object> getTableDiff(String ipPort, String tableName, Object tableTTLInSchema) {
+        String sql = String.format(Objects.requireNonNull(env.getProperty("TABLE_TTL")), tableName, clickHouseHttpSource.getDbName());
+        Map<String, String> result = executeHttpGetOfCK(ipPort, sql, clickHouseHttpSource.getSystemDBName());
+        if (Integer.parseInt(result.get("status")) != HttpStatusCodeEnum.SUCCESS.getCode()) {
+            log.error(" query table TTL error: {}", result.toString());
+            throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.SQL_EXECUTION_EXCEPTION.getCode(),
+                    String.format(CommonErrorCode.SQL_EXECUTION_EXCEPTION.getMessage(), result.toString()));
+        }
+        Object tableTTLInDB = null;
+        Map<String, Object> tableDiff = Maps.newHashMap();
+        Map o = (Map) JSON.parseObject(result.get("result"), Map.class);
+        Map<String, Object> resultMap = (Map<String, Object>) o;
+        // NOTE(review): if the query returns multiple rows, only the last row's TTL is kept
+        // (each iteration overwrites tableTTLInDB) — presumably one row per table; confirm.
+        for (Map<String, Object> datum : (List<Map<String, Object>>) resultMap.get("data")) {
+            Matcher matcher = pTTL.matcher(String.valueOf(datum.get("table_ttl")));
+            tableTTLInDB = matcher.find() ? matcher.group(1) : null;
+        }
+        // String-compare handles nulls uniformly ("null" vs "null").
+        if (!String.valueOf(tableTTLInDB).equals(String.valueOf(tableTTLInSchema))) {
+            tableDiff.put("last_ttl", StringUtil.isEmpty(tableTTLInSchema) ? null : tableTTLInSchema.toString());
+            tableDiff.put("used_ttl", StringUtil.isEmpty(tableTTLInDB) ? null : tableTTLInDB.toString());
+            log.warn("table ttl: {} not consistency.", tableName);
+        }
+        return tableDiff;
+    }
+
+    /**
+     * Compares each schema field's declared TTL ({@code doc.ttl}) against the TTL
+     * expression reported by ClickHouse's {@code DESCRIBE <table>_local}, and collects
+     * a per-field diff entry ("last_ttl" = schema, "used_ttl" = DB) for mismatches.
+     *
+     * @throws QGWBusinessException when the DESCRIBE query fails
+     */
+    private List<Map<String, Object>> getFieldDiff(String ipPort, String
+            tableName, Map<String, Object> schemaMap) {
+        List<Map<String, Object>> fieldDiff = Lists.newArrayList();
+        Map<String, String> result = executeHttpGetOfCK(ipPort, "describe " + tableName + "_local", clickHouseHttpSource.getDbName());
+        if (Integer.parseInt(result.get("status")) != HttpStatusCodeEnum.SUCCESS.getCode()) {
+            log.error(" query table field TTL error: {}", result.toString());
+            throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.SQL_EXECUTION_EXCEPTION.getCode(),
+                    String.format(CommonErrorCode.SQL_EXECUTION_EXCEPTION.getMessage(), result.toString()));
+        }
+        List<Map<String, Object>> fields = (List<Map<String, Object>>) schemaMap.get(FIELDS);
+        for (Map<String, Object> field : fields) {
+            Object read = JsonPath.read(result, "$.result");
+            // Look up this field's ttl_expression in the DESCRIBE output by name.
+            List<Object> fieldList = JsonPath.read(JSONUtil.toJsonStr(JSONUtil.parseObj(read)), "$.data[?(@.name == \"" + field.get("name") + "\")].ttl_expression");
+            Object fieldTTLInSchema = null;
+            Object fieldTTLInDB;
+            if (StringUtil.isNotEmpty(field.get("doc"))) {
+                Map<String, Object> doc = (Map<String, Object>) field.get("doc");
+                fieldTTLInSchema = StringUtil.isEmpty(doc.get("ttl")) ? null : doc.get("ttl");
+            }
+            // NOTE(review): fieldList.get(0) throws IndexOutOfBoundsException when the schema
+            // field does not exist in the DB table — assumes schema and DB columns always
+            // align; TODO confirm.
+            Matcher matcher = pTTL.matcher(String.valueOf(fieldList.get(0)));
+            fieldTTLInDB = matcher.find() ? matcher.group(1) : null;
+            if (String.valueOf(fieldTTLInSchema).equals(String.valueOf(fieldTTLInDB))) {
+                continue;
+            }
+            log.warn("field ttl: address {} {}-{} not consistency.", ipPort, tableName, field.get("name"));
+            Map<String, Object> ttlDiff = Maps.newHashMap();
+            ttlDiff.put("last_ttl", StringUtil.isEmpty(fieldTTLInSchema) ? null : fieldTTLInSchema.toString());
+            ttlDiff.put("used_ttl", StringUtil.isEmpty(fieldTTLInDB) ? null : fieldTTLInDB.toString());
+            // Avoid duplicate diff entries for the same field name.
+            if (fieldDiff.stream().noneMatch(o -> o.containsKey(field.get("name").toString()))) {
+                Map<String, Object> item = Maps.newHashMap();
+                item.put(field.get("name").toString(), ttlDiff);
+                fieldDiff.add(item);
+            }
+        }
+        return fieldDiff;
+    }
+
+
+    /**
+     * Returns the "host:trafficPort" endpoints of all nodes in the 'ck_cluster'
+     * ClickHouse cluster, read from the system clusters table via the sync query service.
+     * Returns an empty list when the query does not succeed.
+     */
+    private List<String> getClusterAddressOfCK() {
+        List<String> endpointList = new ArrayList<>();
+        BaseResult baseResult = sqlSyncQueryService.executeQuery(SQLQueryContext.builder().originalSQL("SELECT DISTINCT concat(host_address,':','" + trafficPort + "') as endpoint FROM clusters where cluster = 'ck_cluster'").build());
+        if (baseResult.isSuccess()) {
+            List<Map<String, Object>> data = (List<Map<String, Object>>) baseResult.getData();
+            for (Map<String, Object> datum : data) {
+                endpointList.add(String.valueOf(datum.get("endpoint")));
+            }
+        }
+        return endpointList;
+    }
+
+    /**
+     * Executes a SQL statement against a ClickHouse node over its HTTP interface
+     * (GET with user/password/database/query parameters, "FORMAT JSON" appended)
+     * and returns the raw status/result map from the HTTP client.
+     */
+    // NOTE(review): credentials travel in the URL query string and may end up in access
+    // logs; ClickHouse also accepts X-ClickHouse-User/-Key headers — consider migrating.
+    private Map<String, String> executeHttpGetOfCK(String ipPort, String sql, String dbName) {
+        String queryURL = URLUtil.normalize("http://" + ipPort + "/?");
+        StringBuilder paramBuilder = new StringBuilder("user=")
+                .append(clickHouseHttpSource.getRealTimeAccountUserName()).append("&")
+                .append("password=").append(clickHouseHttpSource.getRealTimeAccountPin()).append("&")
+                .append("database=").append(dbName).append("&")
+                .append("query=").append(sql)
+                .append(" FORMAT JSON;");
+        // URL-encode the assembled parameter string before issuing the request.
+        List<NameValuePair> values = URLEncodedUtils.parse(paramBuilder.toString(), StandardCharsets.UTF_8);
+        int socketTimeOut = httpConfig.getCkRealTimeAccountSocketTimeOut();
+        return httpClientService.httpGet(queryURL + URLEncodedUtils.format(values, "utf-8"), socketTimeOut);
+    }
+
+    /**
+     * Wraps dataset-verification results into a BaseResult: success with a 100%
+     * pass message when no failures were collected, otherwise a failure carrying
+     * the pass percentage and the failing entries. The message includes the time
+     * range taken from the "start_time"/"end_time" variables.
+     */
+    private BaseResult<List<Map<String, String>>> buildResult(List<LinkedHashMap> variables,List<Map<String, String>> resultData, int totalSize) {
+        if (CollectionUtil.isEmpty(resultData)) {
+            return BaseResultGenerator.success(StrUtil.format("There are {} datasets in total, and 100% of them pass the verification, time range is: {}", totalSize, getTimeRangeByVariable(variables)), resultData);
+        } else {
+            return BaseResultGenerator.failure(HttpStatusCodeEnum.SERVER_ERROR.getCode(), StrUtil.format("There are {} datasets in total, and {} of them pass the verification, time range is: {}", totalSize, NumberUtil.decimalFormat("#.##%", ((totalSize - resultData.size()) * 1.00 / totalSize)), getTimeRangeByVariable(variables)), resultData);
+        }
+    }
+
+    /**
+     * Runs a list of benchmark query templates (SQL or DSL, decided per entry by its
+     * TYPE key) as one-shot ad-hoc queries, records each query's elapsed time, and
+     * returns {benchFileName -> timing metrics (Min/Mean/Median/P95/P99/MAX)}.
+     * When {@code isSaved} is true, per-query results are also written to a CSV file
+     * under ./benchmark/.
+     */
+    // NOTE(review): if a query throws mid-loop, csvWriter is never closed (no
+    // try/finally around the loop) — FileWriter leak; consider try-with-resources.
+    private Map<String, Map<String, String>> buildResult(String test, String executeEngine, List<Map> list, boolean isSaved) {
+        Map<String, Map<String, String>> resultMap = Maps.newLinkedHashMap();
+        Map<String, String> infoMap = Maps.newLinkedHashMap();
+        CSVWriter csvWriter = null;
+        String benchFileName = executeEngine + "_queries_" + DateUtils.getCurrentDate(DateUtils.YYYYMMDD);
+        if (isSaved) {
+            try {
+                // CSV output goes to <cwd>/benchmark/<engine>_queries_<date>.<uuid>.csv
+                String directoryPath = new File("").getCanonicalPath() + File.separator + "benchmark";
+                File directory = new File(directoryPath);
+                if (!directory.exists()) {
+                    directory.mkdir();
+                }
+                String filePath = directoryPath + File.separator + benchFileName + "." + IdUtil.simpleUUID() + ".csv";
+                csvWriter = new CSVWriter(new java.io.FileWriter(filePath));
+                String[] headers = {"ID", "Elapsed(ms)", "Results"};
+                csvWriter.writeNext(headers);
+            } catch (IOException e) {
+                throw new RuntimeException(e);
+            }
+        }
+        for (Map map : list) {
+            BaseResult queryResult;
+            // Stopwatch provides a fallback elapsed time when the engine returns no statistics.
+            Stopwatch watch = Stopwatch.createStarted();
+            if (String.valueOf(map.get(TYPE)).equals("sql")) {
+                SqlQueryRequestParam sqlQueryRequest = JSON.parseObject(String.valueOf(map.get(TEMPLATE)), SqlQueryRequestParam.class);
+                sqlQueryRequest.setExecutionMode(ExecutionMode.ONESHOT.getValue());
+                queryResult = queryJobService.createSQLAdHocQuery(sqlQueryRequest);
+            } else {
+                DSLQueryRequestParam dslQueryRequest = JSON.parseObject(String.valueOf(map.get(TEMPLATE)), DSLQueryRequestParam.class);
+                dslQueryRequest.setExecutionMode(ExecutionMode.ONESHOT.getValue());
+                queryResult = queryJobService.createDSLAdHocQuery(dslQueryRequest);
+            }
+            log.info("Benchmark Test, category is: {}, ID is: {}", test, map.get(IDENTIFIER_NAME));
+            if (StringUtil.isEmpty(queryResult.getStatistics())) {
+                Map<String, Object> statistics = Maps.newLinkedHashMap();
+                statistics.put("elapsed", watch.elapsed(TimeUnit.MILLISECONDS));
+                queryResult.setStatistics(statistics);
+            }
+            processQueryResult(benchFileName, infoMap, csvWriter, String.valueOf(map.get(IDENTIFIER_NAME)), queryResult);
+        }
+        if (csvWriter != null) {
+            try {
+                csvWriter.close();
+            } catch (IOException e) {
+                log.error("Failed to close CSVWriter:{}", e.getMessage());
+            }
+        }
+
+        resultMap.put(benchFileName, buildQueryTimeMetric(infoMap));
+        return resultMap;
+    }
+
+    /**
+     * Extracts the "default" values of the "start_time" and "end_time" variables and
+     * formats them as "['start'/'end']". Missing variables yield empty strings.
+     */
+    private static String getTimeRangeByVariable(List<LinkedHashMap> variables) {
+        String startTime = "";
+        String endTime = "";
+        for (LinkedHashMap variable : variables) {
+            if (variable.get("key").equals("start_time")) {
+                startTime = String.valueOf(variable.get("default"));
+            }
+            if (variable.get("key").equals("end_time")) {
+                endTime = String.valueOf(variable.get("default"));
+            }
+        }
+        return StrUtil.format("['{}'/'{}']", startTime, endTime);
+    }
+
+    /**
+     * Augments the per-query elapsed-time map with aggregate statistics
+     * (Min, Mean, Median, P95, P99, MAX), each rounded to 2 decimal places.
+     * Mutates and returns the same map instance.
+     */
+    // NOTE(review): assumes every value parses as a double — timeout/error entries
+    // written by processQueryResult ("status:...") would throw NumberFormatException
+    // here; TODO confirm upstream filtering. Also key casing is inconsistent
+    // ("MAX" vs "Min") — kept for downstream compatibility.
+    private Map<String, String> buildQueryTimeMetric(Map<String, String> infoMap) {
+        if (StringUtil.isNotEmpty(infoMap)) {
+            double[] values = infoMap.values().stream().mapToDouble(value -> Double.valueOf(value)).sorted().toArray();
+            infoMap.put("Min", String.valueOf(CommonUtil.round(StatUtils.min(values), 2)));
+            infoMap.put("Mean", String.valueOf(CommonUtil.round(StatUtils.mean(values), 2)));
+            infoMap.put("Median", String.valueOf(CommonUtil.round(StatUtils.percentile(values, 50), 2)));
+            infoMap.put("P95", String.valueOf(CommonUtil.round(StatUtils.percentile(values, 95), 2)));
+            infoMap.put("P99", String.valueOf(CommonUtil.round(StatUtils.percentile(values, 99), 2)));
+            infoMap.put("MAX", String.valueOf(CommonUtil.round(StatUtils.max(values), 2)));
+        }
+        return infoMap;
+    }
+
+    /**
+     * Records one benchmark query's outcome: on success, stores the elapsed time in
+     * {@code infoMap} (and writes a CSV row if a writer is present); on gateway
+     * timeout/bad gateway, stores a status+message string instead; any other failure
+     * aborts the whole run with a QGWBusinessException.
+     */
+    private void processQueryResult(String name, Map<String, String> infoMap, CSVWriter writer,
+                                    String id, BaseResult queryResult) {
+        int statusCode = Integer.parseInt(queryResult.getStatus().toString());
+        if (statusCode == HttpStatusCodeEnum.SUCCESS.getCode()) {
+            if (writer != null) {
+                String [] data = {id, String.valueOf(queryResult.getStatistics().get("elapsed")), String.valueOf(queryResult.getData())};
+                writer.writeNext(data);
+            }
+            infoMap.put(id, queryResult.getStatistics().get("elapsed") + "");
+        } else if (statusCode == HttpStatusCodeEnum.GATEWAY_TIMEOUT.getCode() || statusCode == HttpStatusCodeEnum.BAD_GATEWAY.getCode()) {
+            // Timeouts are tolerated: keep a descriptive entry rather than failing the run.
+            infoMap.put(id, "status:" + queryResult.getStatus() + ",message:" + queryResult.getMessage());
+        } else {
+            throw new QGWBusinessException(statusCode, CommonErrorCode.SQL_EXECUTION_EXCEPTION.getCode(),
+                    String.format(CommonErrorCode.SQL_EXECUTION_EXCEPTION.getMessage(), name
+                            + "," + id + "," + queryResult.getStatus() + "," + queryResult.getMessage()));
+        }
+    }
+
+    /** Spring {@code EnvironmentAware} callback: captures the Environment for property lookups (e.g. TABLE_TTL). */
+    @Override
+    public void setEnvironment(Environment environment) {
+        this.env = environment;
+    }
+}
diff --git a/src/main/java/com/mesalab/qgw/service/impl/UtilServiceImp.java b/src/main/java/com/mesalab/qgw/service/impl/UtilServiceImp.java
new file mode 100644
index 00000000..b60d9784
--- /dev/null
+++ b/src/main/java/com/mesalab/qgw/service/impl/UtilServiceImp.java
@@ -0,0 +1,36 @@
+package com.mesalab.qgw.service.impl;
+
+import com.google.common.collect.Lists;
+import com.mesalab.common.entity.BaseResult;
+import com.mesalab.common.entity.BaseResultGenerator;
+import com.mesalab.common.utils.sqlparser.SQLSyntaxParserUtil;
+import com.mesalab.qgw.model.job.EncryptionInfo;
+import com.mesalab.qgw.service.UtilService;
+import org.jasypt.util.text.BasicTextEncryptor;
+import org.springframework.stereotype.Service;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Utility endpoints: Jasypt-based text encryption and SQL syntax-tree parsing.
+ */
+@Service(value = "utilService")
+public class UtilServiceImp implements UtilService {
+
+    /**
+     * Encrypts {@code param.password} with Jasypt's BasicTextEncryptor, using
+     * {@code param.salt} as the encryptor password, and returns both the original
+     * and the encrypted value.
+     */
+    // NOTE(review): the response echoes the plaintext password back to the caller —
+    // confirm this endpoint is admin-only/non-logged. BasicTextEncryptor uses
+    // PBEWithMD5AndDES, which is weak by modern standards.
+    @Override
+    public BaseResult getCiphertext(EncryptionInfo param) {
+        BaseResult baseResult;
+        BasicTextEncryptor textEncryptor = new BasicTextEncryptor();
+        textEncryptor.setPassword(param.getSalt());
+        String encrypt = textEncryptor.encrypt(param.getPassword());
+        Map dataMap = new HashMap();
+        dataMap.put("password", param.getPassword());
+        dataMap.put("encrypted_password", encrypt);
+        baseResult = BaseResultGenerator.success("ok", Lists.newArrayList(dataMap));
+        return baseResult;
+    }
+
+    /** Parses the given SQL into a syntax-tree list via SQLSyntaxParserUtil. */
+    @Override
+    public BaseResult getSQLSyntaxTree(String sql) {
+        List list = SQLSyntaxParserUtil.syntaxParse(sql);
+        return BaseResultGenerator.success(list);
+    }
+}
diff --git a/src/main/java/com/mesalab/services/common/dsl/ComDSLParse.java b/src/main/java/com/mesalab/services/common/dsl/ComDSLParse.java
index 14d37b53..23471706 100644
--- a/src/main/java/com/mesalab/services/common/dsl/ComDSLParse.java
+++ b/src/main/java/com/mesalab/services/common/dsl/ComDSLParse.java
@@ -4,26 +4,18 @@ import cn.hutool.core.date.DateException;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.util.NumberUtil;
import cn.hutool.core.util.ObjectUtil;
-import cn.hutool.crypto.digest.DigestUtil;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.mesalab.cn.entity.pojo.DSLObject;
import com.mesalab.cn.entity.pojo.DSLParser;
import com.mesalab.cn.enums.RangeTypeEnum;
-import com.mesalab.common.enums.ResultCodeEnum;
-import com.mesalab.common.enums.ResultStatusEnum;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
import com.mesalab.common.exception.BusinessException;
-import com.mesalab.knowledge.enums.MatchEnum;
import com.mesalab.qgw.constant.QGWMessageConst;
-import com.geedgenetworks.utils.DateUtils;
import com.geedgenetworks.utils.StringUtil;
-import org.joda.time.format.ISODateTimeFormat;
+import com.mesalab.common.exception.CommonErrorCode;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.time.format.DateTimeFormatter;
import java.util.List;
-import java.util.regex.Pattern;
import static java.util.stream.Collectors.joining;
@@ -41,8 +33,8 @@ public class ComDSLParse extends DSLParser {
}
return Joiner.on(" ").join(" AND ", defField, ">=", intervals.get(0), " AND ", defField, "<", intervals.get(1));
} catch (DateException e) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.TIME_FORMAT_ERROR));
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.TIME_FORMAT_ERROR));
}
}
diff --git a/src/main/java/com/mesalab/services/common/dsl/ComDSLValidate.java b/src/main/java/com/mesalab/services/common/dsl/ComDSLValidate.java
index bf2f6f6e..ed0f68e2 100644
--- a/src/main/java/com/mesalab/services/common/dsl/ComDSLValidate.java
+++ b/src/main/java/com/mesalab/services/common/dsl/ComDSLValidate.java
@@ -2,14 +2,14 @@ package com.mesalab.services.common.dsl;
import cn.hutool.core.date.DateException;
import cn.hutool.core.date.DateUtil;
-import com.mesalab.common.enums.ResultCodeEnum;
-import com.mesalab.common.enums.ResultStatusEnum;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
import com.mesalab.knowledge.enums.MatchEnum;
import com.mesalab.knowledge.enums.RangeEnum;
import com.mesalab.knowledge.enums.SortEnum;
import com.mesalab.qgw.constant.QGWMessageConst;
import com.mesalab.qgw.exception.QGWBusinessException;
import com.geedgenetworks.utils.StringUtil;
+import com.mesalab.common.exception.CommonErrorCode;
import org.apache.commons.lang.Validate;
import org.apache.commons.lang3.EnumUtils;
import org.springframework.stereotype.Component;
@@ -30,46 +30,34 @@ public class ComDSLValidate {
public void validation(ComDSLObject dslObject) throws QGWBusinessException {
if (StringUtil.isEmpty(dslObject)) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.DSL_OBJECT_IS_INVALID));
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.DSL_OBJECT_IS_INVALID));
}
ComDSLObject.Query query;
if (StringUtil.isEmpty(query = dslObject.getQuery())) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.QUERY_IS_INVALID));
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.QUERY_IS_INVALID));
}
if (StringUtil.isEmpty(query.getDataEngine())) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.DATA_ENGINE_IS_INVALID));
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.DATA_ENGINE_IS_INVALID));
}
if (StringUtil.isEmpty(query.getDataSource())) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.DATASOURCE_IS_INVALID));
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.DATASOURCE_IS_INVALID));
}
ComDSLObject.Query.QueryBean parameters;
if (StringUtil.isEmpty(parameters = query.getParameters())) {
return;
}
if (!isValidGranularity(parameters.getGranularity())) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.GRANULARITY_VALUE_IS_INVALID));
- }
- if (!isValidMatch(parameters.getMatch())) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.MATCH_TYPE));
- }
- if (!isValidRange(parameters.getRange())) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.INTERVALS_PARAM_ERROR));
- }
- if (!isValidIntervals(parameters.getIntervals())) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.INTERVALS_PARAM_ERROR));
- }
- if (!isValidSort(parameters.getSort())) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.INTERVALS_PARAM_ERROR));
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.GRANULARITY_VALUE_IS_INVALID));
}
+ validMatch(parameters.getMatch());
+ validRange(parameters.getRange());
+ validIntervals(parameters.getIntervals());
+ validSort(parameters.getSort());
}
private boolean isValidGranularity(String granularity) {
@@ -82,84 +70,80 @@ public class ComDSLValidate {
return false;
}
- private boolean isValidSort(List<ComDSLObject.Query.SortBean> sortList) {
+ private void validSort(List<ComDSLObject.Query.SortBean> sortList) {
if (CollectionUtils.isEmpty(sortList)) {
- return true;
+ return;
}
for (ComDSLObject.Query.SortBean sortBean : sortList) {
Validate.isTrue(EnumUtils.isValidEnum(SortEnum.class, StringUtil.upperCase(sortBean.getType())), QGWMessageConst.SORT_TYPE_IS_INVALID);
String fieldKey = sortBean.getFieldKey();
if (StringUtil.isBlank(fieldKey)) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.SORT_FIELD_VALUES));
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.SORT_FIELD_VALUES));
}
}
- return true;
}
- private boolean isValidRange(List<ComDSLObject.Query.FilterBean> rangeList) {
+ private void validRange(List<ComDSLObject.Query.FilterBean> rangeList) {
if (CollectionUtils.isEmpty(rangeList)) {
- return true;
+ return;
}
for (ComDSLObject.Query.FilterBean rangeBean : rangeList) {
if (!EnumUtils.isValidEnum(RangeEnum.class, StringUtil.upperCase(rangeBean.getType()))) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.RANGE_TYPE_IS_INVALID));
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.RANGE_TYPE_IS_INVALID));
}
String fieldKey = rangeBean.getFieldKey();
if (StringUtil.isBlank(fieldKey)) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.RANGE_FIELD_VALUES));
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.RANGE_FIELD_VALUES));
}
}
- return true;
}
- private boolean isValidMatch(List<ComDSLObject.Query.FilterBean> matchList) {
+ private void validMatch(List<ComDSLObject.Query.FilterBean> matchList) {
if (CollectionUtils.isEmpty(matchList)) {
- return true;
+ return;
}
for (ComDSLObject.Query.FilterBean matchBean : matchList) {
if (!EnumUtils.isValidEnum(MatchEnum.class, StringUtil.upperCase(matchBean.getType()))) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.MATCH_TYPE));
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.MATCH_TYPE));
}
if (MatchEnum.REGEX.getType().equals(matchBean.getType())) {
matchBean.getFieldValues().forEach(mv -> {
if (String.valueOf(mv).startsWith("$") && String.valueOf(mv).endsWith("*")) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.MATCHING_SIGN));
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.MATCHING_SIGN));
}
});
}
}
- return true;
}
- private boolean isValidIntervals(List<String> intervalsBean) {
+ private void validIntervals(List<String> intervalsBean) {
try {
if (CollectionUtils.isEmpty(intervalsBean)) {
- return true;
+ return;
}
if (intervalsBean.size() != 1) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.INTERVALS_PARAM_ERROR));
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.INTERVALS_PARAM_ERROR));
}
String[] split = intervalsBean.get(0).split("/");
if (split.length != 2) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.INTERVALS_PARAM_ERROR));
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.INTERVALS_PARAM_ERROR));
}
for (String dateTimeStr : split) {
DateUtil.parse(dateTimeStr);
}
- return true;
} catch (DateException e) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.TIME_FORMAT_ERROR));
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.TIME_FORMAT_ERROR));
}
}
}
diff --git a/src/main/java/com/mesalab/services/common/entity/KnowledgeBaseRequest.java b/src/main/java/com/mesalab/services/common/entity/KnowledgeBaseRequest.java
new file mode 100644
index 00000000..cd371620
--- /dev/null
+++ b/src/main/java/com/mesalab/services/common/entity/KnowledgeBaseRequest.java
@@ -0,0 +1,37 @@
+package com.mesalab.services.common.entity;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+import lombok.AllArgsConstructor;
+import lombok.Builder;
+import lombok.Data;
+import lombok.NoArgsConstructor;
+
+import java.io.Serializable;
+
+/**
+ * Request/transfer object describing a knowledge-base artifact; field names are
+ * mapped to snake_case JSON properties via Jackson.
+ */
+@Data
+@Builder
+@AllArgsConstructor
+@NoArgsConstructor
+public class KnowledgeBaseRequest implements Serializable {
+    // Knowledge-base identifier.
+    @JsonProperty("kb_id")
+    private String kbId;
+    // Human-readable name.
+    @JsonProperty("name")
+    private String name;
+    // Payload format of the knowledge base.
+    @JsonProperty("format")
+    private String format;
+    // Category/type grouping.
+    @JsonProperty("category")
+    private String category;
+    // Validity flag — presumably 0/1; confirm against producer.
+    @JsonProperty("is_valid")
+    private Integer isValid;
+    // Version string of the artifact.
+    @JsonProperty("version")
+    private String version;
+    // SHA-256 digest of the artifact content.
+    @JsonProperty("sha256")
+    private String sha256;
+    // Source URL the artifact was fetched from.
+    @JsonProperty("origin_url")
+    private String originUrl;
+    // Timestamp the artifact was generated.
+    @JsonProperty("generated_time")
+    private String generatedTime;
+    // Timestamp of the last update.
+    @JsonProperty("last_update_time")
+    private String lastUpdateTime;
+
+}
diff --git a/src/main/java/com/mesalab/services/common/enums/EntityQueryType.java b/src/main/java/com/mesalab/services/common/enums/EntityQueryType.java
deleted file mode 100644
index 1788b6e3..00000000
--- a/src/main/java/com/mesalab/services/common/enums/EntityQueryType.java
+++ /dev/null
@@ -1,21 +0,0 @@
-package com.mesalab.services.common.enums;
-
-import lombok.AllArgsConstructor;
-import lombok.Getter;
-
-/**
- * @description: for datasource
- * @author: tiangaochao
- * @create: 2021-08-24
- */
-
-@Getter
-@AllArgsConstructor
-public enum EntityQueryType {
- ACTIVECLIENTIP("activeclientip"),
- TOPSERVERIP("topserverip"),
- TOPSNI("topsni"),
- SUBSCRIBERIDPOOL("subscriberidpool"),
- GTPC("gtpc");
- private final String type;
-}
diff --git a/src/main/java/com/mesalab/services/common/enums/UnstructuredDataType.java b/src/main/java/com/mesalab/services/common/enums/UnstructuredDataType.java
deleted file mode 100644
index d651e8a4..00000000
--- a/src/main/java/com/mesalab/services/common/enums/UnstructuredDataType.java
+++ /dev/null
@@ -1,14 +0,0 @@
-package com.mesalab.services.common.enums;
-
-import lombok.AllArgsConstructor;
-import lombok.Getter;
-
-@Getter
-@AllArgsConstructor
-public enum UnstructuredDataType {
- ALL("all"),
- MAIL("mail"),
- HTTP("http"),
- PCAP("pcap");
- private final String type;
-}
diff --git a/src/main/java/com/mesalab/services/configuration/JobConfig.java b/src/main/java/com/mesalab/services/configuration/JobConfig.java
index fe28d8d1..4262c92b 100644
--- a/src/main/java/com/mesalab/services/configuration/JobConfig.java
+++ b/src/main/java/com/mesalab/services/configuration/JobConfig.java
@@ -17,13 +17,21 @@ public class JobConfig {
public static final String STATISTICS = "statistics";
public static final String JOB = "job";
+ public static final String META = "meta";
+ public static final String DATA = "data";
public static final String JOB_RESOURCE_PATH = "resource_path";
public static final String JOB_ID = "job_id";
public static final String COUNT = "count";
public static final String IS_DONE = "is_done";
+ public static final String START_TIME = "start_time";
+ public static final String END_TIME = "end_time";
public static final String DONE_PROGRESS = "done_progress";
public static final String IS_CANCELED = "is_canceled";
public static final String IS_FAILED = "is_failed";
+ public static final String REASON = "reason";
+ public static final String LINKS = "links";
+ public static final String LINKS_STATUS = "status";
+ public static final String LINKS_RESULT = "result";
public static final String DURATION_TIME = "duration_time";
public static final String LAST_QUERY_TIME = "last_query_time";
public static final String LONG_TERM_RESULT = "result";
@@ -41,15 +49,23 @@ public class JobConfig {
public static final String DETAIL = "detail";
+ public static final String FILTER = "filter";
public static final String QUERY_DATA_SOURCE = "query.data_source";
public static final String SAVED_QUERY = "saved_query";
- public static final String FIELD_DISCOVERY= "field_discovery";
+ public static final String FIELD_DISCOVERY = "field_discovery";
+ public static final String DATAPATH_PACKET_COMBINE = "datapath_telemetry_packet_combine";
+ public static final String TRAFFIC_SPECTRUM_SUMMARY ="traffic-spectrum-summary";
+ public static final String TRAFFIC_SPECTRUM_UNIQUE_CLIENT_AND_SERVER_IPS ="traffic-spectrum-unique-client-and-server-ips";
+ public static final String TRAFFIC_SPECTRUM_APP_DISTRIBUTION ="traffic-spectrum-app-distribution";
+ public static final String TRAFFIC_SPECTRUM_CLIENT_IP_CONNECT_APPLICATION_USAGE ="traffic-spectrum-client-ip-connect-application-usage";
+ public static final String TRAFFIC_SPECTRUM_NETWORK_THROUGHPUT_TREND ="traffic-spectrum-network-throughput-trend";
public static final String LONG_TERM_COLUMN_FAMILY = "long_term";
public static final String DELIMIT = "-";
public static final String KEY_CUSTOM_FIELD_DISCOVERY_FIELDS = "custom.field_discovery.fields";
public static final String KEY_CUSTOM_FIELD_DISCOVERY_METRIC = "custom.field_discovery.metric";
+ public static final String FIELD_DISCOVERY_TOPK_METRIC_PREFIX = "metric_";
public static final String KEY_CUSTOM_FIELD_DISCOVERY_METRIC_FN = "custom.field_discovery.metric.fn";
public static final String KEY_CUSTOM_FIELD_DISCOVERY_FILTER = "custom.field_discovery.filter";
public static final String KEY_CUSTOM_STATISTICS_SQL = "custom.statistics.sql";
@@ -66,16 +82,23 @@ public class JobConfig {
private int responseTimeout;
@NacosValue(value = "${job.execution.timeout}", autoRefreshed = true)
private int executionTimeout;
- @NacosValue(value = "${job.pool.corePoolSize}", autoRefreshed = true)
- private int poolCorePoolSize;
- @NacosValue(value = "${job.pool.maxPoolSize}", autoRefreshed = true)
- private int poolMaxPoolSize;
- @NacosValue(value = "${job.pool.queueCapacity}", autoRefreshed = true)
- private int poolQueueCapacity;
- @NacosValue(value="${job.longPolling.enabled}", autoRefreshed = true)
+ @NacosValue(value = "${job.longPolling.enabled}", autoRefreshed = true)
private boolean longPollingEnabled;
- @NacosValue(value="${job.timeSlicing.enabled}", autoRefreshed = true)
+ @NacosValue(value = "${job.timeSlicing.enabled}", autoRefreshed = true)
private boolean timeSlicingEnabled;
- @NacosValue(value="${job.timeSlicing.interval}", autoRefreshed = true)
+ @NacosValue(value = "${job.timeSlicing.interval}", autoRefreshed = true)
private int timeSlicingInterval;
+
+ @NacosValue(value = "${job.threadPool.lightWeight.corePoolSize}", autoRefreshed = true)
+ private int lightWeightCorePoolSize;
+ @NacosValue(value = "${job.threadPool.lightWeight.maxPoolSize}", autoRefreshed = true)
+ private int lightWeightMaxPoolSize;
+ @NacosValue(value = "${job.threadPool.lightWeight.queueCapacity}", autoRefreshed = true)
+ private int lightWeightQueueCapacity;
+ @NacosValue(value = "${job.threadPool.heavyResource.corePoolSize}", autoRefreshed = true)
+ private int heavyResourceCorePoolSize;
+ @NacosValue(value = "${job.threadPool.heavyResource.maxPoolSize}", autoRefreshed = true)
+ private int heavyResourceMaxPoolSize;
+ @NacosValue(value = "${job.threadPool.heavyResource.queueCapacity}", autoRefreshed = true)
+ private int heavyResourceQueueCapacity;
}
diff --git a/src/main/java/com/mesalab/services/configuration/JobThreadPoolCfg.java b/src/main/java/com/mesalab/services/configuration/JobThreadPoolCfg.java
deleted file mode 100644
index 6080656c..00000000
--- a/src/main/java/com/mesalab/services/configuration/JobThreadPoolCfg.java
+++ /dev/null
@@ -1,41 +0,0 @@
-package com.mesalab.services.configuration;
-
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-import org.springframework.scheduling.annotation.EnableAsync;
-import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
-
-import java.util.concurrent.ThreadPoolExecutor;
-
-/**
- * TODO
- *
- * @Classname ThreadPoolTaskConfig
- * @Date 2021/12/1 9:20 上午
- * @Author wWei
- */
-@Configuration
-@EnableAsync
-public class JobThreadPoolCfg {
- @Autowired
- JobConfig jobConfig;
- private static final int keepAliveTime = 10;
- private static final String threadNamePrefix = "Async-Service-";
-
- @Bean("jobExecutor")
- public ThreadPoolTaskExecutor getAsyncExecutor() {
- ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
- executor.setCorePoolSize(jobConfig.getPoolCorePoolSize());
- executor.setMaxPoolSize(jobConfig.getPoolMaxPoolSize());
- executor.setQueueCapacity(jobConfig.getPoolQueueCapacity());
- executor.setKeepAliveSeconds(keepAliveTime);
- executor.setThreadNamePrefix(threadNamePrefix);
-
- // 线程池对拒绝任务的处理策略
- executor.setRejectedExecutionHandler(new ThreadPoolExecutor.AbortPolicy());
- // 初始化
- executor.initialize();
- return executor;
- }
-} \ No newline at end of file
diff --git a/src/main/java/com/mesalab/services/configuration/TaskConfig.java b/src/main/java/com/mesalab/services/configuration/TaskConfig.java
index 4882cb4c..211caac4 100644
--- a/src/main/java/com/mesalab/services/configuration/TaskConfig.java
+++ b/src/main/java/com/mesalab/services/configuration/TaskConfig.java
@@ -7,8 +7,8 @@ import org.springframework.stereotype.Component;
/**
* TODO
*
- * @Classname TaskCfg
- * @Date 2021/12/8 4:26 下午
+ * @Classname TaskConfig
+ * @Date 2024/6/26 10:55
* @Author wWei
*/
@Data
diff --git a/src/main/java/com/mesalab/services/configuration/ThreadPoolMonitor.java b/src/main/java/com/mesalab/services/configuration/ThreadPoolMonitor.java
index 1a3a509d..bb0e0132 100644
--- a/src/main/java/com/mesalab/services/configuration/ThreadPoolMonitor.java
+++ b/src/main/java/com/mesalab/services/configuration/ThreadPoolMonitor.java
@@ -24,26 +24,32 @@ public class ThreadPoolMonitor implements InitializingBean {
private static final Iterable<Tag> TAG = Collections.singletonList(Tag.of("Async", EXECUTOR_NAME));
private final ScheduledExecutorService scheduledExecutor = Executors.newSingleThreadScheduledExecutor();
- @Autowired
- ThreadPoolTaskExecutor jobExecutor;
- @Autowired
- ThreadPoolTaskExecutor taskExecutor;
+ ThreadPoolTaskExecutor lightWeightThreadPool;
+ ThreadPoolTaskExecutor heavyResourceThreadPool;
+ ThreadPoolTaskExecutor taskThreadPool;
private final Runnable monitor = () -> {
try {
- Metrics.gauge("job.thread.pool.core.size", TAG, jobExecutor, ThreadPoolTaskExecutor::getCorePoolSize);
- Metrics.gauge("job.thread.pool.largest.size", TAG, jobExecutor, e -> e.getThreadPoolExecutor().getLargestPoolSize());
- Metrics.gauge("job.thread.pool.max.size", TAG, jobExecutor, ThreadPoolTaskExecutor::getMaxPoolSize);
- Metrics.gauge("job.thread.pool.active.size", TAG, jobExecutor, ThreadPoolTaskExecutor::getActiveCount);
- Metrics.gauge("job.thread.pool.thread.count", TAG, jobExecutor, ThreadPoolTaskExecutor::getPoolSize);
- Metrics.gauge("job.thread.pool.queue.size", TAG, jobExecutor, e -> e.getThreadPoolExecutor().getQueue().size());
-
- Metrics.gauge("task.thread.pool.core.size", TAG, taskExecutor, ThreadPoolTaskExecutor::getCorePoolSize);
- Metrics.gauge("task.thread.pool.largest.size", TAG, taskExecutor, e -> e.getThreadPoolExecutor().getLargestPoolSize());
- Metrics.gauge("task.thread.pool.max.size", TAG, taskExecutor, ThreadPoolTaskExecutor::getMaxPoolSize);
- Metrics.gauge("task.thread.pool.active.size", TAG, taskExecutor, ThreadPoolTaskExecutor::getActiveCount);
- Metrics.gauge("task.thread.pool.thread.count", TAG, taskExecutor, ThreadPoolTaskExecutor::getPoolSize);
- Metrics.gauge("task.thread.pool.queue.size", TAG, taskExecutor, e -> e.getThreadPoolExecutor().getQueue().size());
+ Metrics.gauge("light.weight.thread.pool.core.size", TAG, lightWeightThreadPool, ThreadPoolTaskExecutor::getCorePoolSize);
+ Metrics.gauge("light.weight.thread.pool.largest.size", TAG, lightWeightThreadPool, e -> e.getThreadPoolExecutor().getLargestPoolSize());
+ Metrics.gauge("light.weight.thread.pool.max.size", TAG, lightWeightThreadPool, ThreadPoolTaskExecutor::getMaxPoolSize);
+ Metrics.gauge("light.weight.thread.pool.active.size", TAG, lightWeightThreadPool, ThreadPoolTaskExecutor::getActiveCount);
+ Metrics.gauge("light.weight.thread.pool.thread.count", TAG, lightWeightThreadPool, ThreadPoolTaskExecutor::getPoolSize);
+ Metrics.gauge("light.weight.thread.pool.queue.size", TAG, lightWeightThreadPool, e -> e.getThreadPoolExecutor().getQueue().size());
+
+ Metrics.gauge("heavy.resource.thread.pool.core.size", TAG, heavyResourceThreadPool, ThreadPoolTaskExecutor::getCorePoolSize);
+ Metrics.gauge("heavy.resource.thread.pool.largest.size", TAG, heavyResourceThreadPool, e -> e.getThreadPoolExecutor().getLargestPoolSize());
+ Metrics.gauge("heavy.resource.thread.pool.max.size", TAG, heavyResourceThreadPool, ThreadPoolTaskExecutor::getMaxPoolSize);
+ Metrics.gauge("heavy.resource.thread.pool.active.size", TAG, heavyResourceThreadPool, ThreadPoolTaskExecutor::getActiveCount);
+ Metrics.gauge("heavy.resource.thread.pool.thread.count", TAG, heavyResourceThreadPool, ThreadPoolTaskExecutor::getPoolSize);
+ Metrics.gauge("heavy.resource.thread.pool.queue.size", TAG, heavyResourceThreadPool, e -> e.getThreadPoolExecutor().getQueue().size());
+
+ Metrics.gauge("task.thread.pool.core.size", TAG, taskThreadPool, ThreadPoolTaskExecutor::getCorePoolSize);
+ Metrics.gauge("task.thread.pool.largest.size", TAG, taskThreadPool, e -> e.getThreadPoolExecutor().getLargestPoolSize());
+ Metrics.gauge("task.thread.pool.max.size", TAG, taskThreadPool, ThreadPoolTaskExecutor::getMaxPoolSize);
+ Metrics.gauge("task.thread.pool.active.size", TAG, taskThreadPool, ThreadPoolTaskExecutor::getActiveCount);
+ Metrics.gauge("task.thread.pool.thread.count", TAG, taskThreadPool, ThreadPoolTaskExecutor::getPoolSize);
+ Metrics.gauge("task.thread.pool.queue.size", TAG, taskThreadPool, e -> e.getThreadPoolExecutor().getQueue().size());
} catch (RuntimeException e) {
log.error("Collect thread pool status error:", e.getMessage());
}
@@ -55,7 +61,23 @@ public class ThreadPoolMonitor implements InitializingBean {
}
public void clearQueue() {
- jobExecutor.getThreadPoolExecutor().getQueue().clear();
- taskExecutor.getThreadPoolExecutor().getQueue().clear();
+ lightWeightThreadPool.getThreadPoolExecutor().getQueue().clear();
+ heavyResourceThreadPool.getThreadPoolExecutor().getQueue().clear();
+ taskThreadPool.getThreadPoolExecutor().getQueue().clear();
+ }
+
+ @Autowired
+ private void setLightWeightThreadPool(ThreadPoolTaskExecutor lightWeightThreadPool) {
+ this.lightWeightThreadPool = lightWeightThreadPool;
+ }
+
+ @Autowired
+ private void setHeavyResourceThreadPool(ThreadPoolTaskExecutor heavyResourceThreadPool) {
+ this.heavyResourceThreadPool = heavyResourceThreadPool;
+ }
+
+ @Autowired
+ private void setTaskThreadPool(ThreadPoolTaskExecutor taskThreadPool) {
+ this.taskThreadPool = taskThreadPool;
}
}
diff --git a/src/main/java/com/mesalab/services/controller/EntityController.java b/src/main/java/com/mesalab/services/controller/EntityController.java
deleted file mode 100644
index 70d9718c..00000000
--- a/src/main/java/com/mesalab/services/controller/EntityController.java
+++ /dev/null
@@ -1,57 +0,0 @@
-package com.mesalab.services.controller;
-
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import com.mesalab.common.entity.BaseResult;
-import com.mesalab.common.enums.ResultCodeEnum;
-import com.mesalab.common.enums.ResultStatusEnum;
-import com.mesalab.common.exception.BusinessException;
-import com.mesalab.qgw.exception.QGWBusinessException;
-import com.mesalab.qgw.exception.QGWErrorCode;
-import com.mesalab.services.common.dsl.ComDSLValidate;
-import com.mesalab.services.common.dsl.ComDSLObject;
-import com.mesalab.services.common.enums.EntityQueryType;
-import com.mesalab.services.service.EntityService;
-import com.geedgenetworks.utils.StringUtil;
-import org.apache.commons.lang3.EnumUtils;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.validation.annotation.Validated;
-import org.springframework.web.bind.annotation.PostMapping;
-import org.springframework.web.bind.annotation.RequestBody;
-import org.springframework.web.bind.annotation.RequestMapping;
-import org.springframework.web.bind.annotation.RestController;
-
-import javax.servlet.http.HttpServletRequest;
-import java.util.Enumeration;
-/**
- * @author tiangaochao
- * @version *
- * @date 2021/8/24 15:42 下午
- */
-@RestController
-@RequestMapping(value = "/entity")
-public class EntityController {
- private static final Log log = LogFactory.get();
- @Autowired
- private EntityService entityService;
- @Autowired
- private ComDSLValidate comDSLValidate;
-
- @PostMapping(value = "/v1/", produces = "application/json")
- public BaseResult EntityQuery(HttpServletRequest request, @Validated @RequestBody ComDSLObject dslObject) {
- Enumeration<String> parameterNames = request.getParameterNames();
- while (parameterNames.hasMoreElements()) {
- String param = parameterNames.nextElement();
- log.debug("实体推荐接口, 参数: queryString is {},params is {}", param, dslObject);
- if (!EnumUtils.isValidEnum(EntityQueryType.class, StringUtil.upperCase(param))
- || StringUtil.isNotEmpty(request.getParameter(param))) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),"Not Support"));
- }
- comDSLValidate.validation(dslObject);
- return entityService.getEntityInfo(param, dslObject);
- }
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),"Not Support"));
- }
-}
diff --git a/src/main/java/com/mesalab/services/controller/JobController.java b/src/main/java/com/mesalab/services/controller/JobController.java
deleted file mode 100644
index 89289834..00000000
--- a/src/main/java/com/mesalab/services/controller/JobController.java
+++ /dev/null
@@ -1,192 +0,0 @@
-package com.mesalab.services.controller;
-
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import com.mesalab.common.entity.BaseResult;
-import com.mesalab.common.entity.BaseResultGenerator;
-import com.mesalab.common.enums.ResultCodeEnum;
-import com.mesalab.common.enums.ResultStatusEnum;
-import com.mesalab.common.exception.BusinessException;
-import com.mesalab.qgw.constant.QGWMessageConst;
-import com.mesalab.qgw.exception.QGWBusinessException;
-import com.mesalab.qgw.service.DeferredResultHolder;
-import com.mesalab.qgw.service.MetadataService;
-import com.mesalab.services.configuration.JobConfig;
-import com.mesalab.services.service.JobService;
-import com.geedgenetworks.utils.StringUtil;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.web.bind.annotation.*;
-import org.springframework.web.context.request.async.DeferredResult;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import static com.mesalab.services.configuration.JobConfig.*;
-
-/**
- * TODO
- *
- * @Classname jobController
- * @Date 2021/11/24 3:54 下午
- * @Author wWei
- */
-@RestController
-@RequestMapping(value = "/query/job")
-public class JobController {
- private static final Log log = LogFactory.get();
-
- @Autowired
- JobService jobService;
- @Autowired
- JobConfig jobConfig;
- @Autowired
- MetadataService metadataService;
-
- @Autowired
- private DeferredResultHolder deferredResultHolder;
-
- @PostMapping(consumes = "application/json")
- public BaseResult commit(@RequestBody HashMap<String, Object> reqBody) {
- log.info("Commit Job, param is {}", reqBody);
- validateJobParams(reqBody);
- String type = String.valueOf(reqBody.get(JobConfig.KEY_QUERY_TYPE));
- if (JobConfig.FIELD_DISCOVERY.equals(type) || JobConfig.STATISTICS.equals(type)) {
- return jobService.commitAdHocQuery(reqBody);
- } else if (JobConfig.SAVED_QUERY.equals(type)) {
- return jobService.commitSavedQuery(reqBody);
- }
- throw new BusinessException(ResultStatusEnum.NOT_FOUND.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), "Not Support"));
- }
-
- @GetMapping(value = "/{jobId}/field_discovery/result")
- public DeferredResult<BaseResult> fieldDiscoveryResult(@PathVariable String jobId) {
- log.info("Long Polling Request,Job ID is {}", jobId);
- BaseResult baseResult = null;
- baseResult = jobService.getResultFieldDiscovery(jobId);
- DeferredResult<BaseResult> deferredResult =
- deferredResultHolder.newDeferredResult(jobId, jobConfig.getResponseTimeout(), baseResult);
- if (!jobConfig.isLongPollingEnabled()) {
- deferredResultHolder.handleDeferredData(deferredResult, baseResult);
- return deferredResult;
- }
- deferredResult.onTimeout(() -> {
- deferredResultHolder.handleDeferredData(deferredResult, jobService.getResultFieldDiscovery(jobId));
- });
-
- if (StringUtil.isNotEmpty(baseResult)
- && StringUtil.isNotEmpty(baseResult.getJob())
- && Boolean.valueOf(String.valueOf(baseResult.getJob().get(JobConfig.IS_DONE)))) {
- deferredResultHolder.handleDeferredData(deferredResult, baseResult);
- }
- return deferredResult;
- }
-
- @GetMapping(value = {"/{jobId}/statistics/result"})
- public DeferredResult<BaseResult> statisticsResult(@PathVariable String jobId) {
- log.info("Long Polling Request,Job ID is {}", jobId);
- BaseResult baseResult = null;
- baseResult = jobService.longTermResult(jobId);
- DeferredResult<BaseResult> deferredResult =
- deferredResultHolder.newDeferredResult(jobId, jobConfig.getResponseTimeout(), baseResult);
- if (!jobConfig.isLongPollingEnabled()) {
- deferredResultHolder.handleDeferredData(deferredResult, baseResult);
- return deferredResult;
- }
- deferredResult.onTimeout(() -> {
- deferredResultHolder.handleDeferredData(deferredResult, jobService.longTermResult(jobId));
- });
-
- if (StringUtil.isNotEmpty(baseResult)
- && StringUtil.isNotEmpty(baseResult.getJob())
- && Boolean.valueOf(String.valueOf(baseResult.getJob().get(JobConfig.IS_DONE)))) {
- deferredResultHolder.handleDeferredData(deferredResult, baseResult);
- }
- return deferredResult;
- }
-
- @GetMapping(value = "/{jobId}/saved_query/result")
- public BaseResult savedQueryResult(@PathVariable String jobId) {
- return jobService.getSavedQueryResult(jobId);
- }
-
- //未实现
- @GetMapping(value = "/{jobId}/field_discovery")
- public BaseResult fieldDiscoveryStatus(@PathVariable String jobId) {
- throw new BusinessException(ResultStatusEnum.NOT_FOUND.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), "Not Support"));
- }
-
- //未实现
- @GetMapping(value = "/{jobId}/statistics")
- public BaseResult statisticsStatus(@PathVariable String jobId) {
- throw new BusinessException(ResultStatusEnum.NOT_FOUND.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), "Not Support"));
- }
-
- @GetMapping(value = "/{jobId}/saved_query")
- public BaseResult savedQueryStatus(@PathVariable String jobId) {
- return jobService.getSavedQueryStatus(jobId);
- }
-
- //未实现
- @DeleteMapping(value = "/{jobId}/field_discovery")
- public BaseResult cancelFieldDiscovery(@PathVariable String jobId) {
- throw new BusinessException(ResultStatusEnum.NOT_FOUND.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), "Not Support"));
- }
-
- //未实现
- @DeleteMapping(value = "/{jobId}/statistics")
- public BaseResult cancelStatistics(@PathVariable String jobId) {
- throw new BusinessException(ResultStatusEnum.NOT_FOUND.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), "Not Support"));
- }
-
- @DeleteMapping(value = "/{jobId}/saved_query")
- public BaseResult cancelSavedQuery(@PathVariable String jobId) {
- log.info("Cancel Saved Query Job, ID is {}", jobId);
- return jobService.cancelSavedQuery(jobId);
- }
-
- @GetMapping(consumes = "application/json")
- public BaseResult batchStatus(@RequestBody HashMap<String, Object> reqBody) {
- String queryType = String.valueOf(reqBody.get(JobConfig.KEY_QUERY_TYPE));
- List<String> ids = (List<String>) reqBody.get(JobConfig.KEY_QUERY_JOB_IDS);
- if (SAVED_QUERY.equals(queryType)) {
- List<Map<String, Object>> result = jobService.batchSavedQueryStatus(ids);
- return BaseResultGenerator.success(result);
- }
- throw new BusinessException(ResultStatusEnum.NOT_FOUND.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), "Not Support"));
- }
-
- @GetMapping(value = "/result", consumes = "application/json")
- public BaseResult batchResult(@RequestBody HashMap<String, Object> reqBody) {
- String queryType = String.valueOf(reqBody.get(JobConfig.KEY_QUERY_TYPE));
- List<String> ids = (List<String>) reqBody.get(JobConfig.KEY_QUERY_JOB_IDS);
- if (SAVED_QUERY.equals(queryType)) {
- List<Map<String, Object>> result = jobService.batchSavedQueryResult(ids);
- return BaseResultGenerator.success(result);
- }
- throw new BusinessException(ResultStatusEnum.NOT_FOUND.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), "Not Support"));
- }
-
- private void validateJobParams(HashMap<String, Object> reqBody) {
- if (!reqBody.containsKey(JobConfig.KEY_QUERY_TYPE)) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.QUERY_TYPE_NOT_NULL));
- }
- if (!reqBody.containsKey(JobConfig.KEY_QUERY_DATA_SOURCE)) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.QUERY_DATA_SOURCE_NOT_NULL));
- }
- String dataSource = String.valueOf(reqBody.get(JobConfig.KEY_QUERY_DATA_SOURCE));
- if (!metadataService.getAllTable().contains(dataSource)) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.QUERY_DATA_SOURCE_ERROR));
- }
- }
-}
diff --git a/src/main/java/com/mesalab/services/controller/KBController.java b/src/main/java/com/mesalab/services/controller/KBController.java
new file mode 100644
index 00000000..7fe16e4b
--- /dev/null
+++ b/src/main/java/com/mesalab/services/controller/KBController.java
@@ -0,0 +1,157 @@
+package com.mesalab.services.controller;
+
+import cn.hutool.core.util.StrUtil;
+import cn.hutool.log.Log;
+import cn.hutool.log.LogFactory;
+import com.alibaba.fastjson2.JSON;
+import com.mesalab.common.entity.BaseResult;
+import com.mesalab.common.entity.BaseResultGenerator;
+import com.mesalab.common.enums.FileFormat;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
+import com.mesalab.common.exception.BusinessException;
+import com.mesalab.common.exception.CommonErrorCode;
+import com.mesalab.qgw.constant.QGWMessageConst;
+import com.mesalab.qgw.model.basic.AuditLog;
+import com.mesalab.services.common.entity.KnowledgeBaseRequest;
+import com.mesalab.services.service.KBService;
+import com.geedgenetworks.utils.StringUtil;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.web.bind.annotation.*;
+import org.springframework.web.multipart.MultipartFile;
+
+import javax.servlet.http.HttpServletRequest;
+import java.util.Arrays;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.Map;
+
+
+@RestController
+@RequestMapping(value = "/v1/knowledge_base")
+public class KBController {
+ private static final Log log = LogFactory.get();
+ @Autowired
+ KBService kbService;
+
+ @PostMapping(produces = "application/json", consumes = "multipart/form-data")
+ @AuditLog("KBController.publishKnowledgeBase")
+ public BaseResult publishKnowledgeBase(@RequestParam("file") MultipartFile file, HttpServletRequest request) {
+ try {
+ KnowledgeBaseRequest KBRequest = getKnowledgeBaseRequest(request);
+ validateRequest(KBRequest);
+ log.info("Knowledge Base File publish interface:param is :{}", KBRequest.toString());
+ return kbService.publishKnowledge(file, KBRequest);
+ } catch (Exception e) {
+ log.error("Knowledge Base File publish error: {}", e.getMessage());
+ return BaseResultGenerator.failure(HttpStatusCodeEnum.SERVER_ERROR.getCode(), "Knowledge Base File publish: " + e.getMessage());
+ }
+ }
+
+ @PutMapping(value = {"/{kb_Id}"},consumes = "multipart/form-data")
+ @AuditLog("KBController.updateKnowledgeBase")
+ public BaseResult updateKnowledgeBase(@PathVariable(value = "kb_Id",required = false) String kbId,@RequestPart(value = "file") MultipartFile file,HttpServletRequest request) {
+ try {
+ KnowledgeBaseRequest KBRequest = getKnowledgeBaseRequest(request);
+ if (StringUtil.isBlank(kbId)) {
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.KB_FILE_ID_CANNOT_BE_EMPTY));
+ }
+ if (StringUtil.isBlank(KBRequest.getVersion())) {
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.KB_FILE_VERSION_CANNOT_BE_EMPTY));
+ }
+ KBRequest.setKbId(kbId);
+ log.info("Knowledge Base File update interface:param is :{}", KBRequest.toString());
+ return kbService.updateKnowledge(file, KBRequest);
+ } catch (Exception e) {
+ log.error("Knowledge Base File update error: {}", e.getMessage());
+ return BaseResultGenerator.failure(HttpStatusCodeEnum.SERVER_ERROR.getCode(), "Knowledge Base File update error: " + e.getMessage());
+ }
+ }
+
+ @DeleteMapping(value = "/{kb_id}",consumes = "application/x-www-form-urlencoded")
+ @AuditLog("KBController.deleteKnowledgeBase")
+ public BaseResult deleteKnowledgeBase(@PathVariable(value = "kb_id") String kbId, @RequestParam(required = false) String version) {
+ try {
+ KnowledgeBaseRequest request = new KnowledgeBaseRequest();
+ if (StrUtil.isBlankIfStr(kbId)){
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.KB_FILE_ID_CANNOT_BE_EMPTY));
+ }
+ if (StrUtil.isNotBlank(version)){
+ request.setVersion(version);
+ }
+ request.setKbId(kbId);
+ log.info("Knowledge Base File delete interface,param is:{}",request.toString());
+ return kbService.deleteKnowledge(request);
+ } catch (Exception e) {
+ log.error("Knowledge Base File delete error: {}", e.getMessage());
+ return BaseResultGenerator.failure(HttpStatusCodeEnum.SERVER_ERROR.getCode(), "Knowledge Base File delete error:" + e.getMessage());
+ }
+ }
+
+ @PutMapping(value = "/{kb_id}/status",consumes = "application/x-www-form-urlencoded")
+ @AuditLog("KBController.updateStatusKnowledgeBase")
+ public BaseResult updateStatusKnowledgeBase(@PathVariable(value = "kb_id") String kbId, HttpServletRequest request) {
+ try {
+ KnowledgeBaseRequest KBRequest = getKnowledgeBaseRequest(request);
+ if (StrUtil.isBlankIfStr(kbId)){
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.KB_FILE_ID_CANNOT_BE_EMPTY));
+ }
+ if (StrUtil.isBlank(String.valueOf(KBRequest.getIsValid()))){
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.KB_FILE_IS_VALID_CANNOT_BE_EMPTY));
+ }
+ KBRequest.setKbId(kbId);
+ log.info("Knowledge Base File update status interface,param is:{}",KBRequest.toString());
+ return kbService.updateStatus(KBRequest);
+ } catch (Exception e) {
+ log.error("Knowledge Base File update status error: {}", e.getMessage());
+ return BaseResultGenerator.failure(HttpStatusCodeEnum.SERVER_ERROR.getCode(), "Knowledge Base File update status error: " + e.getMessage());
+ }
+ }
+
+ @GetMapping(produces = "application/json")
+ @AuditLog("KBController.getList")
+ public BaseResult getList(@RequestParam(required = false, value = "kb_id") String kbId, @RequestParam(required = false) String category) {
+ try {
+ log.info("Knowledge Base File get list interface,param is: kb_id= {},category = {}", kbId, category);
+ return kbService.getList(kbId, category);
+ } catch (BusinessException e) {
+ log.error("Knowledge Base File get list error:{}", e.getMessage());
+ return BaseResultGenerator.failure(HttpStatusCodeEnum.SERVER_ERROR.getCode(), "Knowledge Base File get list error: " + e.getMessage());
+ }
+ }
+
+ private static KnowledgeBaseRequest getKnowledgeBaseRequest(HttpServletRequest request) {
+ Map<String, String> paramMap = new HashMap<>();
+ Enumeration<String> parameterNames = request.getParameterNames();
+ while (parameterNames.hasMoreElements()) {
+ String paramName = parameterNames.nextElement();
+ String paramValue = request.getParameter(paramName);
+ paramMap.put(paramName, paramValue);
+ }
+ KnowledgeBaseRequest KBRequest = JSON.parseObject(JSON.toJSONString(JSON.toJSON(paramMap)), KnowledgeBaseRequest.class);
+ return KBRequest;
+ }
+ private KnowledgeBaseRequest validateRequest(KnowledgeBaseRequest request){
+ if (StrUtil.isBlankIfStr(request.getName())){
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.KB_FILE_NAME_CANNOT_BE_EMPTY));
+ }
+ if (StrUtil.isBlankIfStr(request.getFormat())){
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.KB_FILE_FORMAT_CANNOT_BE_EMPTY));
+ }else {
+ if (Arrays.stream(FileFormat.values()).noneMatch(o -> o.getValue().equalsIgnoreCase(request.getFormat()))){
+ throw new BusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.KB_FILE_FORMAT_NOT_SUPPORT));
+ }
+ }
+ if (StrUtil.isBlankIfStr(String.valueOf(request.getIsValid()))) {
+ request.setIsValid(1);
+ }
+ return request;
+ }
+}
diff --git a/src/main/java/com/mesalab/services/controller/KnowledgeBaseController.java b/src/main/java/com/mesalab/services/controller/KnowledgeBaseController.java
deleted file mode 100644
index e5358ed2..00000000
--- a/src/main/java/com/mesalab/services/controller/KnowledgeBaseController.java
+++ /dev/null
@@ -1,99 +0,0 @@
-package com.mesalab.services.controller;
-
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import com.mesalab.common.entity.BaseResult;
-import com.mesalab.common.entity.BaseResultGenerator;
-import com.mesalab.common.enums.ResultCodeEnum;
-import com.mesalab.common.enums.ResultStatusEnum;
-import com.mesalab.common.exception.BusinessException;
-import com.mesalab.services.common.entity.KnowledgeBase;
-import com.mesalab.services.service.KnowledgeBaseService;
-import com.geedgenetworks.utils.StringUtil;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.web.bind.annotation.*;
-import org.springframework.web.multipart.MultipartFile;
-
-
-@RestController
-@RequestMapping(value = "/knowledge_base/v1")
-public class KnowledgeBaseController {
- private static final Log log = LogFactory.get();
- @Autowired
- KnowledgeBaseService knowledgeService;
-
- @GetMapping(produces = "application/json")
- public BaseResult queryKnowledge() {
- try {
- return knowledgeService.queryKnowledge();
- } catch (BusinessException e) {
- log.error("get knowledge meta error:{}", e.getMessage());
- return BaseResultGenerator.failure(ResultStatusEnum.SERVER_ERROR.getCode(), "get knowledge meta error: " + e.getMessage());
- }
- }
-
-
- @PostMapping(produces = "application/json")
- public BaseResult publishKnowledge(@RequestParam("file") MultipartFile file, KnowledgeBase knowledge) {
- KnowledgeBase knowledgeBase = validateKnowledge(knowledge);
- try {
- return knowledgeService.publishKnowledge(file, knowledgeBase);
- } catch (Exception e) {
- log.error("publish knowledge error: {}", e.getMessage());
- return BaseResultGenerator.failure(ResultStatusEnum.SERVER_ERROR.getCode(), "publish knowledge error: " + e.getMessage());
- }
- }
-
- @PutMapping(value = {"/{id}", ""})
- public BaseResult updateKnowledge(@PathVariable(required = false) String id, @RequestPart(value = "file") MultipartFile file, KnowledgeBase knowledge) {
- try {
- if (StringUtil.isNotBlank(id)) {
- knowledge.setId(id);
- }
- return knowledgeService.updateKnowledge(file, knowledge);
- } catch (Exception e) {
- log.error("update knowledge error: {}", e);
- return BaseResultGenerator.failure(ResultStatusEnum.SERVER_ERROR.getCode(), "update knowledge error: " + e.getMessage());
- }
- }
-
- @DeleteMapping(value = {"/{id}", ""})
- public BaseResult deleteKnowledge(@PathVariable(required = false) String id, KnowledgeBase knowledgeBase) {
- try {
- return knowledgeService.deleteKnowledge(StringUtil.isNotBlank(id) ? id : knowledgeBase.getId());
- } catch (Exception e) {
- log.error("delete knowledge error: {}", e.getMessage());
- return BaseResultGenerator.failure(ResultStatusEnum.SERVER_ERROR.getCode(), "delete knowledge error:" + e.getMessage());
- }
- }
-
- @PatchMapping(value = "/{id}")
- public BaseResult toggleKnowledge(@PathVariable String id, KnowledgeBase knowledgeBase) {
- try {
- if (StringUtil.isNotBlank(id)) {
- knowledgeBase.setId(id);
- }
- return knowledgeService.toggleKnowledge(knowledgeBase);
- } catch (Exception e) {
- log.error("toggle knowledge error: {}", e.getMessage());
- return BaseResultGenerator.failure(ResultStatusEnum.SERVER_ERROR.getCode(), "toggle knowledge error:" + e.getMessage());
- }
- }
-
- private KnowledgeBase validateKnowledge(KnowledgeBase knowledge) {
- if (StringUtil.isEmpty(knowledge.getName())) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),"Name is required");
- }
- if (knowledge.getName().contains(":")) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(),ResultCodeEnum.PARAMETER_ERROR.getCode(),"Name cannot contain ':'");
- }
- if (knowledge.getName().contains(".")) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(),ResultCodeEnum.PARAMETER_ERROR.getCode(),"Name cannot contain '.'");
- }
- if (StringUtil.isEmpty(knowledge.getFormat())) {
- throw new BusinessException(ResultStatusEnum.BAD_REQUEST.getCode(),ResultCodeEnum.PARAMETER_ERROR.getCode(),"Format is required");
- }
- return knowledge;
- }
-
-}
diff --git a/src/main/java/com/mesalab/services/controller/RelationController.java b/src/main/java/com/mesalab/services/controller/RelationController.java
deleted file mode 100644
index 6de595ac..00000000
--- a/src/main/java/com/mesalab/services/controller/RelationController.java
+++ /dev/null
@@ -1,53 +0,0 @@
-package com.mesalab.services.controller;
-
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import com.mesalab.common.entity.BaseResult;
-import com.mesalab.common.enums.ResultCodeEnum;
-import com.mesalab.common.enums.ResultStatusEnum;
-import com.mesalab.common.exception.BusinessException;
-import com.mesalab.qgw.exception.QGWBusinessException;
-import com.mesalab.qgw.exception.QGWErrorCode;
-import com.mesalab.services.common.dsl.ComDSLObject;
-import com.mesalab.services.common.dsl.ComDSLValidate;
-import com.mesalab.services.common.enums.EntityQueryType;
-import com.mesalab.services.service.RelationService;
-import com.geedgenetworks.utils.StringUtil;
-import org.apache.commons.lang3.EnumUtils;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.validation.annotation.Validated;
-import org.springframework.web.bind.annotation.PostMapping;
-import org.springframework.web.bind.annotation.RequestBody;
-import org.springframework.web.bind.annotation.RequestMapping;
-import org.springframework.web.bind.annotation.RestController;
-
-import javax.servlet.http.HttpServletRequest;
-import java.util.Enumeration;
-
-@RestController
-@RequestMapping(value = "/relation")
-public class RelationController {
- private static final Log log = LogFactory.get();
- @Autowired
- private RelationService relationService;
- @Autowired
- private ComDSLValidate comDSLValidate;
-
- @PostMapping(value = "/v1/", produces = "application/json")
- public BaseResult relationQuery(HttpServletRequest request, @Validated @RequestBody ComDSLObject dslObject) {
- Enumeration<String> parameterNames = request.getParameterNames();
- while (parameterNames.hasMoreElements()) {
- String param = parameterNames.nextElement();
- log.debug("subscribe id 实时推荐接口, 参数: queryString is {},params is {}", param, dslObject);
- if (!EnumUtils.isValidEnum(EntityQueryType.class, StringUtil.upperCase(param))
- ||StringUtil.isNotEmpty(request.getParameter(param))) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),"Not Support"));
- }
- comDSLValidate.validation(dslObject);
- return relationService.getRealRelation(param, dslObject);
- }
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),"Not Support"));
- }
-}
diff --git a/src/main/java/com/mesalab/services/controller/SQLDatasetController.java b/src/main/java/com/mesalab/services/controller/SQLDatasetController.java
index 791d384c..a30c109d 100644
--- a/src/main/java/com/mesalab/services/controller/SQLDatasetController.java
+++ b/src/main/java/com/mesalab/services/controller/SQLDatasetController.java
@@ -1,22 +1,25 @@
package com.mesalab.services.controller;
-import cn.hutool.core.util.StrUtil;
+import cn.hutool.core.exceptions.ExceptionUtil;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.fasterxml.jackson.databind.PropertyNamingStrategy;
-import com.fasterxml.jackson.databind.annotation.JsonNaming;
import com.geedgenetworks.utils.StringUtil;
-import com.google.common.collect.Lists;
import com.mesalab.common.entity.BaseResult;
import com.mesalab.common.entity.BaseResultGenerator;
-import com.mesalab.common.enums.ResultCodeEnum;
-import com.mesalab.common.enums.ResultStatusEnum;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
+import com.mesalab.common.utils.sqlparser.AutoPeriodHelper;
import com.mesalab.qgw.constant.QGWMessageConst;
import com.mesalab.qgw.exception.QGWBusinessException;
-import com.mesalab.services.service.SQLDatasetService;
+import com.mesalab.common.exception.CommonErrorCode;
+import com.mesalab.qgw.model.basic.AuditLog;
+import com.mesalab.qgw.service.DatabaseService;
+import com.mesalab.services.service.SQLDatasetService;
+import net.sf.jsqlparser.JSQLParserException;
+import net.sf.jsqlparser.parser.CCJSqlParserUtil;
+import net.sf.jsqlparser.parser.JJTCCJSqlParserState;
+import net.sf.jsqlparser.statement.Statement;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
@@ -31,15 +34,15 @@ import java.util.*;
*/
@RestController
@RequestMapping(value = "/v1")
-public class SQLDatasetController
-{
+public class SQLDatasetController {
private static final Log log = LogFactory.get();
- @Autowired
- SQLDatasetService sqlDatasetService;
+ private DatabaseService databaseService;
+ private SQLDatasetService sqlDatasetService;
@DeleteMapping(value = "sql/query/{query_id}")
+ @AuditLog("DatabaseController.killQuery")
public BaseResult killQuery(@PathVariable("query_id") String queryId) {
- log.warn("停止查询, 参数: queryId is {}", queryId);
+        log.warn("kill query, param: queryId is {}", queryId);
if (StringUtil.isBlank(queryId)) {
return BaseResultGenerator.success4Message("ok");
}
@@ -48,18 +51,22 @@ public class SQLDatasetController
@RequestMapping(value = "sql/query/query_id", method = RequestMethod.POST)
+ @AuditLog("DatabaseController.getQueryId")
public BaseResult getQueryId(@RequestParam String query, @RequestParam(value = "result_id", required = false) Integer resultId) {
- log.info("获取当前SQL Query ID, 参数: query ={},result_id ={}", query, resultId);
+        log.info("get current SQL Query ID, param: query ={},result_id ={}", query, resultId);
if (StringUtil.isEmpty(query)) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.QUERY_CANNOT_BE_EMPTY));
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.QUERY_CANNOT_BE_EMPTY));
}
- String queryId;
- if (resultId != null) {
- queryId = sqlDatasetService.getCustomQueryId(resultId.toString(), query.toString());
- } else {
- queryId = sqlDatasetService.getCustomQueryId("", query.toString());
+ Statement statement;
+ try {
+ statement = CCJSqlParserUtil.parse(query);
+ } catch (JSQLParserException e) {
+ log.error("Error Parsing SQL: {}", ExceptionUtil.getRootCauseMessage(e));
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getMessage(), e.getMessage()));
}
+ String queryId = databaseService.getCustomQueryId(resultId == null ? null : resultId.toString(), AutoPeriodHelper.buildSqlGranularity(statement).toString());
List<Map<String, Object>> result = new ArrayList<>();
Map<String, Object> map = new HashMap<>(16);
map.put("queryId", queryId);
@@ -68,35 +75,21 @@ public class SQLDatasetController
}
@GetMapping(value = "sql/query/{query_id}/progress")
+ @AuditLog("DatabaseController.getProcessesByQueryId")
public BaseResult getProcessesByQueryId(@PathVariable("query_id") String queryId) {
- log.info("查询SQL任务执行状态, 参数: queryId is {}", queryId);
+ log.info("get query status, param: queryId is {}", queryId);
return sqlDatasetService.getProcessesByQueryId(queryId);
}
- @GetMapping(value = "/dataset")
- public BaseResult getDatasets(@RequestParam(value = "identifier_names",required = false) String datasetIds, @RequestParam(required = false) String category, @RequestParam(required = false) String title)
- {
- log.info("Get Datasets. IDs is: {}, category is: {}, title is: {}", datasetIds, category, title);
- return BaseResultGenerator.success(sqlDatasetService.getDatasets(Arrays.asList(StrUtil.split(datasetIds, ",")), category, title));
- }
- @GetMapping(value = "/dataset/{identifier_name}")
- public BaseResult getDataset(@PathVariable("identifier_name") String identifierName)
- {
- log.info("Get Dataset, ID is: {}", identifierName);
- return BaseResultGenerator.success(sqlDatasetService.getDataset(identifierName));
+ @Autowired
+ public void setDatabaseService(DatabaseService databaseService) {
+ this.databaseService = databaseService;
}
-
- @GetMapping(value = "/dataset/{identifier_name}/preview")
- public BaseResult getPreview(@PathVariable("identifier_name") String identifierName,@RequestParam(required = false) String option)
- {
- return sqlDatasetService.getPreview(identifierName,option);
+ @Autowired
+ public void setSQLDatasetService(SQLDatasetService sqlDatasetService) {
+ this.sqlDatasetService = sqlDatasetService;
}
- @GetMapping(value = "/dataset/global_variable")
- public BaseResult getVariable()
- {
- return BaseResultGenerator.success(sqlDatasetService.getVariable());
- }
} \ No newline at end of file
diff --git a/src/main/java/com/mesalab/services/controller/UnstructuredController.java b/src/main/java/com/mesalab/services/controller/UnstructuredController.java
deleted file mode 100644
index 6d73c285..00000000
--- a/src/main/java/com/mesalab/services/controller/UnstructuredController.java
+++ /dev/null
@@ -1,54 +0,0 @@
-package com.mesalab.services.controller;
-
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import com.mesalab.common.entity.BaseResult;
-import com.mesalab.common.enums.ResultCodeEnum;
-import com.mesalab.common.enums.ResultStatusEnum;
-import com.mesalab.qgw.exception.QGWBusinessException;
-import com.mesalab.services.common.dsl.ComDSLObject;
-import com.mesalab.services.common.dsl.ComDSLValidate;
-import com.mesalab.services.common.enums.EntityQueryType;
-import com.mesalab.services.common.enums.UnstructuredDataType;
-import com.mesalab.services.service.UnstructuredService;
-import com.geedgenetworks.utils.StringUtil;
-import org.apache.commons.lang3.EnumUtils;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.validation.annotation.Validated;
-import org.springframework.web.bind.annotation.PostMapping;
-import org.springframework.web.bind.annotation.RequestBody;
-import org.springframework.web.bind.annotation.RequestMapping;
-import org.springframework.web.bind.annotation.RestController;
-
-import javax.servlet.http.HttpServletRequest;
-import java.util.Enumeration;
-
-@RestController
-@RequestMapping(value = "/unstructured_data")
-public class UnstructuredController {
- private static final Log log = LogFactory.get();
-
- @Autowired
- private UnstructuredService unstructuredService;
- @Autowired
- private ComDSLValidate comDSLValidate;
-
- @PostMapping(value = "/v1", produces = "application/json")
- public BaseResult unstructuredDataQuery(HttpServletRequest request, @Validated @RequestBody ComDSLObject dslObject) {
- Enumeration<String> parameterNames = request.getParameterNames();
- while (parameterNames.hasMoreElements()) {
- String param = parameterNames.nextElement();
- log.debug("非结构化数据-文件路径列表获取接口, 参数: queryString is {},params is {}", param, dslObject);
- if (!EnumUtils.isValidEnum(UnstructuredDataType.class, StringUtil.upperCase(param))
- || StringUtil.isNotEmpty(request.getParameter(param))) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), "Not Support"));
- }
- comDSLValidate.validation(dslObject);
- return unstructuredService.getUnstructuredData(param, dslObject);
- }
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),"Not Support"));
- }
-
-}
diff --git a/src/main/java/com/mesalab/services/service/EntityService.java b/src/main/java/com/mesalab/services/service/EntityService.java
deleted file mode 100644
index e2257ae3..00000000
--- a/src/main/java/com/mesalab/services/service/EntityService.java
+++ /dev/null
@@ -1,50 +0,0 @@
-package com.mesalab.services.service;
-
-import com.mesalab.common.entity.BaseResult;
-import com.mesalab.services.common.dsl.ComDSLObject;
-
-import java.util.List;
-import java.util.Map;
-
-
-public interface EntityService {
-
- /**
- * Desc: TODO
- *
- * @param param
- * @param dslObject
- * @return {@link com.mesalab.common.entity.BaseResult}
- * @created by wWei
- * @date 2022/6/21 4:41 下午
- */
- BaseResult getEntityInfo(String param, ComDSLObject dslObject);
-
- /**
- * Desc: TODO
- *
- * @param data
- * @return {@link List< Map< String, Object>>}
- * @created by wWei
- * @date 2022/6/21 4:41 下午
- */
- List<Map<String, Object>> mergeSNI(List<Map<String, Object>> data);
-
- /**
- *
- * @param map
- * @Description 构建域名Hash表
- * @date 2022/7/8 13:56
- */
- Map<String, List<String>> buildDomainHashTable(Map<String, List<String>> map);
-
- /**
- *
- * @param domain
- * @return boolean
- * @Description 判断domain是否属于CDN域名列表
- * @date 2022/7/7 17:52
- */
- boolean inCDN(Map<String, List<String>> cdnMap, String domain);
-}
-
diff --git a/src/main/java/com/mesalab/services/service/KBService.java b/src/main/java/com/mesalab/services/service/KBService.java
new file mode 100644
index 00000000..556b65f9
--- /dev/null
+++ b/src/main/java/com/mesalab/services/service/KBService.java
@@ -0,0 +1,17 @@
+package com.mesalab.services.service;
+
+import com.mesalab.common.entity.BaseResult;
+import com.mesalab.services.common.entity.KnowledgeBaseRequest;
+import org.springframework.web.multipart.MultipartFile;
+
+public interface KBService {
+ BaseResult publishKnowledge(MultipartFile file, KnowledgeBaseRequest request);
+
+ BaseResult updateKnowledge(MultipartFile file, KnowledgeBaseRequest request);
+
+ BaseResult deleteKnowledge(KnowledgeBaseRequest request);
+
+ BaseResult updateStatus(KnowledgeBaseRequest request);
+
+ BaseResult getList(String kbId, String category);
+}
diff --git a/src/main/java/com/mesalab/services/service/KnowledgeBaseService.java b/src/main/java/com/mesalab/services/service/KnowledgeBaseService.java
deleted file mode 100644
index 4638c4c8..00000000
--- a/src/main/java/com/mesalab/services/service/KnowledgeBaseService.java
+++ /dev/null
@@ -1,21 +0,0 @@
-package com.mesalab.services.service;
-
-import com.mesalab.common.entity.BaseResult;
-import com.mesalab.services.common.entity.KnowledgeBase;
-import org.springframework.web.multipart.MultipartFile;
-
-/**
- * @Author wxs
- * @Date 2022/7/11
- */
-public interface KnowledgeBaseService {
- BaseResult queryKnowledge();
-
- BaseResult publishKnowledge(MultipartFile file, KnowledgeBase knowledge) throws Exception;
-
- BaseResult updateKnowledge(MultipartFile file, KnowledgeBase knowledgeBase) throws Exception;
-
- BaseResult deleteKnowledge(String id) throws Exception;
-
- BaseResult toggleKnowledge(KnowledgeBase knowledgeBase) throws Exception;
-}
diff --git a/src/main/java/com/mesalab/services/service/RelationService.java b/src/main/java/com/mesalab/services/service/RelationService.java
deleted file mode 100644
index e5f0ce1d..00000000
--- a/src/main/java/com/mesalab/services/service/RelationService.java
+++ /dev/null
@@ -1,10 +0,0 @@
-package com.mesalab.services.service;
-
-import com.mesalab.common.entity.BaseResult;
-import com.mesalab.services.common.dsl.ComDSLObject;
-
-
-public interface RelationService {
-
- BaseResult getRealRelation(String param, ComDSLObject dslObject);
-}
diff --git a/src/main/java/com/mesalab/services/service/SQLDatasetService.java b/src/main/java/com/mesalab/services/service/SQLDatasetService.java
index 20b8caaf..7878c99c 100644
--- a/src/main/java/com/mesalab/services/service/SQLDatasetService.java
+++ b/src/main/java/com/mesalab/services/service/SQLDatasetService.java
@@ -1,12 +1,7 @@
package com.mesalab.services.service;
-import com.google.common.collect.Lists;
import com.mesalab.common.entity.BaseResult;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-
/**
* SQL数据集市管理服务
*
@@ -14,8 +9,7 @@ import java.util.Map;
* @Date 2023/8/10 13:54
* @Author wWei
*/
-public interface SQLDatasetService
-{
+public interface SQLDatasetService {
/**
* Desc: 通过queryId查询SQL任务执行状态
@@ -29,69 +23,10 @@ public interface SQLDatasetService
/**
* 终止查询任务
+ *
* @param queryId
* @return
*/
BaseResult deleteQueryTask(String queryId);
- /**
- * 自定义查询ID: 由 Catalog(数据库类型): resultID+query 组成。
- *
- * @param resultId
- * @param query
- * @return {@link String}
- * @created by wWei
- * @date 2021/1/7 6:48 下午
- */
- String getCustomQueryId(String resultId, String query);
-
- /**
- * Desc: 获取动态变量
- *
- * @param
- * @return {@link List<LinkedHashMap>}
- * @created by wWei
- * @date 2023/8/16 19:39
- */
- List<LinkedHashMap> getVariable();
-
- /**
- * Desc: 获取单个SQL模版
- *
- * @param datasetId
- * @return {@link Map<String, Object>}
- * @created by wWei
- * @date 2023/8/17 09:45
- */
- Map<String, Object> getDataset(String datasetId);
-
- /**
- * Desc: 批量获取SQL模版
- *
- * @param
- * @return {@link Map<String, Object>}
- * @created by wWei
- * @date 2023/8/16 15:31
- */
- Map<String, Object> getDatasets(List<String> ids, String category, String title);
-
- /**
- * Desc: 结果预览
- *
- * @param datasetId
- * @return {@link BaseResult}
- * @created by wWei
- * @date 2023/8/16 19:33
- */
- BaseResult getPreview(String datasetId,String option);
-
- /**
- * Desc: 获取执行SQL
- *
- * @param
- * @return
- * @created by wanghao
- * @date 2023/9/16 19:33
- */
- String buildExecSQL(List<LinkedHashMap> variables, String sql);
}
diff --git a/src/main/java/com/mesalab/services/service/UnstructuredService.java b/src/main/java/com/mesalab/services/service/UnstructuredService.java
deleted file mode 100644
index e46f9897..00000000
--- a/src/main/java/com/mesalab/services/service/UnstructuredService.java
+++ /dev/null
@@ -1,12 +0,0 @@
-package com.mesalab.services.service;
-
-import com.mesalab.common.entity.BaseResult;
-import com.mesalab.services.common.dsl.ComDSLObject;
-
-import java.util.Map;
-
-public interface UnstructuredService {
- Map<String, Object> getUnstructuredFields();
-
- BaseResult getUnstructuredData(String param, ComDSLObject dslObject);
-}
diff --git a/src/main/java/com/mesalab/services/service/impl/EntityServiceImp.java b/src/main/java/com/mesalab/services/service/impl/EntityServiceImp.java
deleted file mode 100644
index 1ac01fd2..00000000
--- a/src/main/java/com/mesalab/services/service/impl/EntityServiceImp.java
+++ /dev/null
@@ -1,581 +0,0 @@
-package com.mesalab.services.service.impl;
-
-import cn.hutool.core.net.NetUtil;
-import cn.hutool.core.util.*;
-import cn.hutool.crypto.digest.DigestUtil;
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import com.alibaba.fastjson2.JSON;
-import com.alibaba.fastjson2.JSONWriter;
-import com.beust.jcommander.internal.Sets;
-import com.google.common.base.Splitter;
-import com.google.common.base.Stopwatch;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import com.fasterxml.jackson.core.type.TypeReference;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.net.InternetDomainName;
-import com.mesalab.common.entity.BaseResult;
-import com.mesalab.common.entity.BaseResultGenerator;
-import com.mesalab.common.enums.QueryFormatEnum;
-import com.mesalab.common.enums.ResultCodeEnum;
-import com.mesalab.common.enums.ResultStatusEnum;
-import com.mesalab.qgw.constant.QGWMessageConst;
-import com.mesalab.qgw.exception.QGWBusinessException;
-import com.mesalab.qgw.model.basic.HttpConfig;
-import com.mesalab.knowledge.enums.MatchEnum;
-import com.mesalab.qgw.model.basic.EntityConfigSource;
-import com.mesalab.qgw.service.MetadataService;
-import com.mesalab.qgw.service.impl.QueryServiceImpl;
-import com.mesalab.qgw.service.impl.HttpClientService;
-import com.mesalab.services.common.dsl.ComDSLObject;
-import com.mesalab.services.common.dsl.ComDSLParse;
-import com.mesalab.services.common.enums.EntityQueryType;
-import com.mesalab.services.common.property.SqlPropertySourceFactory;
-import com.mesalab.services.service.EntityService;
-import com.geedgenetworks.utils.*;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.context.EnvironmentAware;
-import org.springframework.context.annotation.PropertySource;
-import org.springframework.core.env.Environment;
-import org.springframework.stereotype.Service;
-
-import javax.annotation.PostConstruct;
-import java.io.UnsupportedEncodingException;
-import java.net.URLEncoder;
-import java.util.*;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.TimeUnit;
-import java.util.function.Function;
-import java.util.function.Predicate;
-import java.util.stream.Collectors;
-
-/**
- * @author tiangaochao
- * @create: 2021-08-24
- */
-@Service("entityService")
-@PropertySource(value = "classpath:http-sql-template.sql", factory = SqlPropertySourceFactory.class)
-public class EntityServiceImp implements EntityService, EnvironmentAware {
- private static final Log log = LogFactory.get();
-
- @Autowired
- private QueryServiceImpl queryService;
- @Autowired
- private HttpClientService httpClientServiceTemp;
- @Autowired
- private HttpConfig httpConfigTemp;
- @Autowired
- private EntityConfigSource entityConfigSourceTemp;
- @Autowired
- private MetadataService metadataService;
-
- private static HttpClientService httpClientService;
- private static HttpConfig httpConfig;
- private static EntityConfigSource entityConfigSource;
-
- @PostConstruct
- public void init() {
- httpClientService = this.httpClientServiceTemp;
- httpConfig = this.httpConfigTemp;
- entityConfigSource = this.entityConfigSourceTemp;
- }
-
- @Value("${server.port}")
- private int serverPort;
-
- private Environment env;
- private static String localHostAddress;
-
- static {
- localHostAddress = NetUtil.getLocalhostStr();
- }
-
- @Override
- public BaseResult getEntityInfo(String param, ComDSLObject comDSLObject) {
-
- BaseResult baseResult;
- String sql;
- if (param.equalsIgnoreCase(EntityQueryType.ACTIVECLIENTIP.getType())) {
- setDefaultValue(comDSLObject.getQuery());
- baseResult = getActiveClientIP(comDSLObject.getQuery());
- } else if (param.equalsIgnoreCase(EntityQueryType.TOPSERVERIP.getType())) {
- baseResult = getResultOfServerIP(comDSLObject.getQuery());
- } else if (param.equalsIgnoreCase(EntityQueryType.TOPSNI.getType())) {
- sql = generateTopSNI(comDSLObject.getQuery(), env.getProperty("ENTITY_TOP_SNI"));
- baseResult = query(sql);
-
- String totalSQL = generateSNITotal(comDSLObject.getQuery(), env.getProperty("ENTITY_SNI_TOTAL"));
- BaseResult baseResultTotal = query(totalSQL);
- int limit = StringUtil.isBlank(comDSLObject.getQuery().getLimit()) ? entityConfigSource.getTopSNIDefaultSize() : Integer.parseInt(comDSLObject.getQuery().getLimit());
- baseResult = build(limit, baseResult, baseResultTotal);
- } else {
- baseResult = BaseResultGenerator.failure(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(), "No match queryType");
- }
- if (!baseResult.isSuccess()) {
- BaseResult result = JSON.parseObject(baseResult.getMessage(), BaseResult.class);
- throw new QGWBusinessException(baseResult.getStatus(), baseResult.getCode(), result.getMessage());
- }
- return baseResult;
- }
-
- private BaseResult build(int limit, BaseResult baseResult, BaseResult baseResultTotal) {
- if (!baseResult.isSuccess() || !baseResultTotal.isSuccess()) {
- return baseResult.isSuccess() ? baseResultTotal : baseResult;
- }
- Stopwatch watch = Stopwatch.createStarted();
- List<Map<String, Object>> data = (List<Map<String, Object>>) baseResult.getData();
- List<Map<String, Object>> total = (List<Map<String, Object>>) baseResultTotal.getData();
- long totalBytes = StringUtil.isEmpty(total.get(0).get("bytes")) ? 0 : Long.parseLong(total.get(0).get("bytes").toString());
- long totalSessions = StringUtil.isEmpty(total.get(0).get("sessions")) ? 0 : Long.parseLong(total.get(0).get("sessions").toString());
- List<Map<String, Object>> result = mergeSNI(data);
-
- //sort
- result.sort((o1, o2) -> {
- long numThis = StringUtil.isEmpty(o2.get("sessions")) ? 0 : Long.parseLong(o2.get("sessions").toString());
- long numParam = StringUtil.isEmpty(o1.get("sessions")) ? 0 : Long.parseLong(o1.get("sessions").toString());
- if (numThis > numParam) {
- return 1;
- } else if (numThis < numParam) {
- return -1;
- } else {
- return 0;
- }
- });
-
- result = result.size() > limit ? result.subList(0, limit) : result;
-
- long sessionsMerge = 0;
- long bytesMerge = 0;
- long countCDN = 0;
-
- for (Map<String, Object> item : result) {
- bytesMerge += StringUtil.isEmpty(item.get("bytes")) ? 0 : Long.parseLong(item.get("bytes").toString());
- sessionsMerge += StringUtil.isEmpty(item.get("sessions")) ? 0 : Long.parseLong(item.get("sessions").toString());
- if (BooleanUtil.toBoolean(String.valueOf(item.get("isCDN")))) {
- countCDN++;
- }
- if (!StringUtil.startsWith(String.valueOf(item.get("ssl_sni")), "*")
- && !StringUtil.startsWith(String.valueOf(item.get("ssl_sni")), "$")) {
- item.replace("ssl_sni", "$".concat(String.valueOf(item.get("ssl_sni"))));
- }
- item.remove("sessions");
- item.remove("bytes");
- item.remove("isCDN");
- }
-
- baseResult.setMessage(StrUtil.format("Top {} SNI, {} of ssl bytes, {} of ssl sessions, {} popular CDNs",
- result.size(),
- totalBytes == 0 ? "-" : NumberUtil.decimalFormat("#.##%", (bytesMerge * 1.0 / totalBytes)),
- totalSessions == 0 ? "-" : NumberUtil.decimalFormat("#.##%", (sessionsMerge * 1.0 / totalSessions)),
- countCDN == 0 ? "-" : countCDN));
- baseResult.setData(result);
- Map statistics = baseResult.getStatistics();
- statistics.put("elapsed", watch.elapsed(TimeUnit.MILLISECONDS) + (StringUtil.isEmpty(statistics.get("elapsed")) ? 0 : Long.parseLong(statistics.get("elapsed").toString())));
- statistics.put("result_rows", result.size());
- statistics.put("result_bytes", JSON.toJSONBytes(baseResult.getData(), JSONWriter.Feature.LargeObject).length);
- return baseResult;
- }
-
- @Override
- public List<Map<String, Object>> mergeSNI(List<Map<String, Object>> data) {
- Map<String, List<String>> cdn = buildDomainHashTable(getCfgCDN());
- List<Map<String, Object>> dataRec = Lists.newArrayList();
- Map<String, Map<String, Object>> dataSLD = Maps.newHashMap();
- Map<String, Map<String, Object>> dataMerge = Maps.newHashMapWithExpectedSize(data.size() / 4);
- Iterator<Map<String, Object>> iterator = data.iterator();
- while (iterator.hasNext()) {
- Map<String, Object> datum = iterator.next();
- String value = String.valueOf(datum.get("ssl_sni"));
- if (StringUtil.isBlank(value) || isTSLDomain(value)) {
- continue;
- }
- String key = getTopPrivateDomain(value);
- //1. filter exception data
- if (StringUtil.isBlank(key)) {
- dataRec.add(datum);
- continue;
- }
- String domain = FormatUtils.getDomain(value);
- //2. filter CDN
- if (inCDN(cdn, domain)) {
- datum.put("isCDN", true);
- dataRec.add(datum);
- continue;
- }
-
- //3 filter TopPrivateDomain
- if (isSLDDomain(domain)) {
- Map<String, Object> item = dataSLD.get(key);
- if (StringUtil.isNotEmpty(item)) {
- long bytesBefore = StringUtil.isEmpty(item.get("bytes")) ? 0 : Long.parseLong(item.get("bytes").toString());
- long sessionsBefore = StringUtil.isEmpty(item.get("sessions")) ? 0 : Long.parseLong(item.get("sessions").toString());
- long bytesAfter = StringUtil.isEmpty(datum.get("bytes")) ? 0 : Long.parseLong(datum.get("bytes").toString());
- long sessionsAfter = StringUtil.isEmpty(datum.get("sessions")) ? 0 : Long.parseLong(datum.get("sessions").toString());
- datum.put("bytes", bytesBefore + bytesAfter);
- datum.put("sessions", sessionsBefore + sessionsAfter);
- }
- datum.put("ssl_sni", key);
- dataSLD.put(key, datum);
- continue;
- }
-
- //4. first add
- if (!dataMerge.containsKey(key)) {
- dataMerge.put(key, datum);
- continue;
- }
-
- //5. merge
- Map<String, Object> item = dataMerge.get(key);
- long bytesBefore = StringUtil.isEmpty(item.get("bytes")) ? 0 : Long.parseLong(item.get("bytes").toString());
- long sessionsBefore = StringUtil.isEmpty(item.get("sessions")) ? 0 : Long.parseLong(item.get("sessions").toString());
- long bytesAfter = StringUtil.isEmpty(datum.get("bytes")) ? 0 : Long.parseLong(datum.get("bytes").toString());
- long sessionsAfter = StringUtil.isEmpty(datum.get("sessions")) ? 0 : Long.parseLong(datum.get("sessions").toString());
- item.put("ssl_sni", "*.".concat(key));
- item.put("bytes", bytesBefore + bytesAfter);
- item.put("sessions", sessionsBefore + sessionsAfter);
- }
- List<Map<String, Object>> result = Lists.newArrayList();
- dataMerge.keySet().forEach(o -> result.add(dataMerge.get(o)));
- dataSLD.keySet().forEach(o -> result.add(dataSLD.get(o)));
- result.addAll(dataRec);
- return result;
- }
-
- @Override
- public Map<String, List<String>> buildDomainHashTable(Map<String, List<String>> cdn) {
- Map<String, List<String>> map = Maps.newHashMap();
- for (String key : cdn.keySet()) {
- List<String> list = cdn.get(key);
- if (StringUtil.isEmpty(list)) {
- continue;
- }
- list.forEach(o -> {
- String topPrivateDomain = getTopPrivateDomain(o);
- if (StringUtil.isEmpty(map.get(topPrivateDomain))) {
- map.put(topPrivateDomain, Lists.newArrayList(o));
- } else {
- map.get(topPrivateDomain).add(o);
- }
- });
- }
- return map;
- }
-
- @Override
- public boolean inCDN(Map<String, List<String>> cdnMap, String domain) {
- String topPrivateDomain = getTopPrivateDomain(domain);
- List<String> list = cdnMap.get(topPrivateDomain);
- if (StringUtil.isEmpty(list)) {
- return false;
- }
- List<String> param = Splitter.on(".").omitEmptyStrings().splitToList(domain);
- out:
- for (String cdn : list) {
- List<String> flag = Splitter.on(".").omitEmptyStrings().splitToList(cdn);
- if (flag.size() > param.size()) {
- continue;
- }
- for (int i = 1; i < flag.size() + 1; i++) {
- if (!String.valueOf(param.get(param.size() - i)).equalsIgnoreCase(String.valueOf(flag.get(flag.size() - i)))) {
- continue out;
- }
- }
- return true;
- }
- return false;
- }
-
- private String getTopPrivateDomain(String domain) {
- String topPrivateDomain = null;
- try {
- topPrivateDomain = FormatUtils.getTopPrivateDomain(domain);
- } catch (RuntimeException outException) {
- log.warn("Parse top-level domain exceptions, exception domain names:" + domain);
- }
- return topPrivateDomain;
- }
-
- public boolean isTSLDomain(String domain) {
- try {
- domain = FormatUtils.getDomain(domain);
- if (!StringUtil.isBlank(domain) && InternetDomainName.isValid(domain)) {
- InternetDomainName internetDomainName = InternetDomainName.from(domain);
- return StringUtil.isEmpty(internetDomainName.publicSuffix()) ? false : internetDomainName.isPublicSuffix();
- } else {
- return false;
- }
- } catch (RuntimeException exception) {
- log.warn("valid isTSL domain exceptions, exception domain names:" + domain);
- }
- return false;
- }
-
- public boolean isSLDDomain(String domain) {
- try {
- if (!StringUtil.isBlank(domain) && InternetDomainName.isValid(domain)) {
- InternetDomainName internetDomainName = InternetDomainName.from(domain);
- return internetDomainName.isTopPrivateDomain();
- } else {
- return false;
- }
- } catch (RuntimeException exception) {
- log.warn("valid isSLD domain exceptions, exception domain names:" + domain);
- }
- return false;
- }
-
-
- private Map<String, List<String>> getCfgCDN() {
- Object codeInfo = metadataService.getCfg("public_code_info.json");
- if (StringUtil.isNotEmpty(codeInfo)) {
- Map<String, Map<String, List<String>>> data = JSON.parseObject(codeInfo.toString(), Map.class);
- return Maps.newHashMap(data.get("CDN"));
- }
- return Maps.newHashMap();
- }
-
- private BaseResult getActiveClientIP(ComDSLObject.Query query) {
-
- ComDSLObject.Query.FilterBean filterBean = query.getParameters().getMatch().get(0);
- if (MatchEnum.EXACTLY.getType().equalsIgnoreCase(String.valueOf(filterBean.getType()))) {
- return query(generateActiveClientIP(query, env.getProperty("ENTITY_ACTIVE_CLIENT_IP"), query.getLimit()));
- } else {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.MATCH_TYPE));
- }
- }
-
- private void setDefaultValue(ComDSLObject.Query query) {
- if (StringUtil.isEmpty(query.getParameters().getIntervals())) {
- Date now = new Date();
- Date someMinute = DateUtils.getSomeMinute(now, -30);
- String end = DateUtils.getFormatDate(now, DateUtils.YYYY_MM_DD_HH24_MM_SS);
- String start = DateUtils.getFormatDate(someMinute, DateUtils.YYYY_MM_DD_HH24_MM_SS);
- List<String> list = Lists.newArrayList(start.concat("/").concat(end));
- query.getParameters().setIntervals(list);
- }
- query.setLimit(StringUtil.isEmpty(query.getLimit()) ? String.valueOf(entityConfigSource.getActiveClientIP()) : query.getLimit());
- }
-
- public boolean isValid(BaseResult baseResult, String[] intervals) {
- if (StringUtil.isEmpty(baseResult.getData())) {
- return false;
- }
- List<Map> data = (List<Map>) baseResult.getData();
- long updateTime = (long) (data.get(0).get("last_update_time"));
- long paramStartTime = DateUtils.convertStringToTimestamp(intervals[0], TimeConstants.YYYY_MM_DD_HH24_MM_SS);
- long paramEndTime = DateUtils.convertStringToTimestamp(intervals[1], TimeConstants.YYYY_MM_DD_HH24_MM_SS);
- if (paramStartTime >= updateTime - 30 * 60 && paramEndTime <= updateTime + 30 * 60) {
- return true;
- } else {
- return false;
- }
- }
-
- private BaseResult formatResult(BaseResult baseResult, String limit) {
- if (String.valueOf(ResultStatusEnum.SUCCESS.getCode()).equals(String.valueOf(baseResult.getStatus()))) {
- List<Object> alterationResultList = new ArrayList<>();
- List<Map> resultList = new ObjectMapper().convertValue(baseResult.getData(), new TypeReference<List<Map>>() {
- });
- resultList.forEach(r -> {
- if (ObjectUtil.isNotEmpty(r)) {
- List<Object> list = (List<Object>) JSON.parseObject((String)(r.get("client_ip")), Object.class);
- alterationResultList.addAll(list);
- }
- });
-
- List<Object> sortedAndDistinctList = alterationResultList.stream()
- .sorted(Comparator.comparingInt(x -> Integer.parseInt(String.valueOf(((Map) x).get("sessions")))).reversed())
- .filter(distinctByKey(x -> String.valueOf(((Map) x).get("common_client_ip"))))
- .collect(Collectors.toList());
-
- List<Object> dataResult = new ArrayList<>();
- for (Object o : sortedAndDistinctList) {
- if (dataResult.size() >= Integer.parseInt(limit)) {
- break;
- }
- Map clientIp = new HashMap();
- clientIp.put("client_ip", ((Map) o).get("common_client_ip"));
- dataResult.add(clientIp);
- }
- baseResult.getStatistics().put("result_rows", dataResult.size());
- baseResult.getStatistics().put("result_bytes", JSON.toJSONString(dataResult).getBytes().length);
- return BaseResultGenerator.generate(baseResult.getStatus(), baseResult.getCode(), baseResult.getMessage(), dataResult, baseResult.getMeta(), baseResult.getStatistics(), QueryFormatEnum.JSON.getValue());
- } else {
- baseResult = BaseResultGenerator.error(baseResult.getStatus(), baseResult.getMessage());
- }
- return baseResult;
- }
-
- /**
- * @param keyExtractor
- * @return java.util.function.Predicate<T>
- * @Description 自定义函数,根据value去重
- * @author wanghao
- * @date 2022/1/10 11:36
- */
- private static <T> Predicate<T> distinctByKey(Function<? super T, Object> keyExtractor) {
- Map<Object, Boolean> seen = new ConcurrentHashMap<>();
- return t -> seen.putIfAbsent(keyExtractor.apply(t), Boolean.TRUE) == null;
- }
-
- private BaseResult getResultOfServerIP(ComDSLObject.Query query) {
- String range = ComDSLParse.parseRange(query.getParameters().getRange());
- String timeInterval = getTimeInterval(query.getParameters(), -24 * 60);
- String datasource = query.getDataSource();
- String fieldWhere = timeInterval.concat(range);
-
- String tcpSessionSql = String.format(Objects.requireNonNull(env.getProperty("ENTITY_TCP_SESSION")), datasource, fieldWhere,
- StringUtil.isBlank(query.getLimit()) ? entityConfigSource.getTopServerIPByTCPSession() : query.getLimit());
- Map<String, String> tcpSessionResult = executeQuery(tcpSessionSql);
-
- String tcpClientIpsSql = String.format(Objects.requireNonNull(env.getProperty("ENTITY_TCP_UNIQ_CLIENT_IPS")), datasource, fieldWhere,
- StringUtil.isBlank(query.getLimit()) ? entityConfigSource.getTopServerIPByTCPUniqClientIP() : query.getLimit());
- Map<String, String> tcpClientIpResult = executeQuery(tcpClientIpsSql);
-
- String udpSessionSql = String.format(Objects.requireNonNull(env.getProperty("ENTITY_UDP_SESSION")), datasource, fieldWhere,
- StringUtil.isBlank(query.getLimit()) ? entityConfigSource.getTopServerIPByUDPBySession() : query.getLimit());
- Map<String, String> udpSessionResult = executeQuery(udpSessionSql);
-
- String udpClientIpsSql = String.format(Objects.requireNonNull(env.getProperty("ENTITY_UDP_UNIQ_CLIENT_IPS")), datasource, fieldWhere,
- StringUtil.isBlank(query.getLimit()) ? entityConfigSource.getTopServerIPByUDPUniqClientIP() : query.getLimit());
- Map<String, String> udpClientIpsResult = executeQuery(udpClientIpsSql);
-
- if (!String.valueOf(ResultStatusEnum.SUCCESS.getCode()).equals(tcpSessionResult.get("status"))
- || !String.valueOf(ResultStatusEnum.SUCCESS.getCode()).equals(tcpClientIpResult.get("status"))
- || !String.valueOf(ResultStatusEnum.SUCCESS.getCode()).equals(udpSessionResult.get("status"))
- || !String.valueOf(ResultStatusEnum.SUCCESS.getCode()).equals(udpClientIpsResult.get("status"))) {
- return BaseResultGenerator.error(
- "tcp by session result:" + tcpSessionResult
- + "tcp by client ips result:" + tcpClientIpResult
- + "udp by session result:" + udpSessionResult
- + "udp by client ips result:" + udpClientIpsResult
- );
- }
-
- HashSet<Map<String, String>> hashSet = new HashSet<>();
- getData(udpSessionResult).forEach(hashSet::add);
- getData(udpClientIpsResult).forEach(hashSet::add);
- getData(tcpSessionResult).forEach(hashSet::add);
- getData(tcpClientIpResult).forEach(hashSet::add);
-
- Set<Map<String, String>> data = Sets.newHashSet();
- if (StringUtil.isNotEmpty(query.getLimit()) && hashSet.size() > Integer.parseInt(query.getLimit())) {
- for (Map<String, String> item : hashSet) {
- if (data.size() >= Integer.parseInt(query.getLimit())) {
- break;
- }
- data.add(item);
- }
- } else {
- data = hashSet;
- }
- Map<String, Object> statistics = Maps.newHashMap();
- Map tcpSessionResultMap = JSON.parseObject(tcpSessionResult.get("result"), Map.class);
- Map tcpSessionStatistics = (Map) tcpSessionResultMap.get("statistics");
- Map tcpClientIpResultMap = JSON.parseObject(tcpClientIpResult.get("result"), Map.class);
- Map tcpClientIpStatistics = (Map) tcpClientIpResultMap.get("statistics");
- Map udpSessionResultMap = JSON.parseObject(udpSessionResult.get("result"), Map.class);
- Map udpSessionStatistics = (Map) udpSessionResultMap.get("statistics");
- Map udpClientIpResultMap = (Map) JSON.parseObject(udpClientIpsResult.get("result"), Map.class);
- Map udpClientIpStatistics = (Map) udpClientIpResultMap.get("statistics");
- for (Object key : tcpSessionStatistics.keySet()) {
- if ("result_bytes".equals(key.toString())) {
- statistics.put(key.toString(), JSON.toJSONString(data).getBytes().length);
- } else if ("result_rows".equals(key.toString())) {
- statistics.put(key.toString(), data.size());
- } else {
- statistics.put(String.valueOf(key), Long.parseLong(tcpSessionStatistics.get(key).toString())
- + Long.parseLong(tcpClientIpStatistics.get(key).toString())
- + Long.parseLong(udpSessionStatistics.get(key).toString())
- + Long.parseLong(udpClientIpStatistics.get(key).toString()));
- }
- }
- return BaseResultGenerator.success("ok", data, statistics);
- }
-
- public List<Map> getData(Map<String, String> result) {
- Map resultMap = JSON.parseObject(result.get("result"), Map.class);
- List<Map<String, Object>> dataList = (List<Map<String, Object>>) resultMap.get("data");
- List<Map> serverIpList = Lists.newArrayList();
- for (Map item : dataList) {
- Map<String, String> serverIpMap = Maps.newHashMap();
- serverIpMap.put("server_ip", String.valueOf(item.get("server_ip")));
- serverIpMap.put("vsys_id",String.valueOf(item.get("vsys_id")));
- serverIpList.add(serverIpMap);
- }
- return serverIpList;
- }
-
- public String generateActiveClientIP(ComDSLObject.Query query, String fileSql, String limit) {
- String range = ComDSLParse.parseRange(query.getParameters().getRange());
- String timeInterval = getTimeInterval(query.getParameters(), -30);
- String matchDate = ComDSLParse.parseMath(query.getParameters().getMatch());
- return String.format(fileSql, query.getDataSource(), timeInterval, range.concat(matchDate), limit);
- }
-
- private String getTimeInterval(ComDSLObject.Query.QueryBean parameters, int defaultInterval) {
- if (StringUtil.isEmpty(parameters.getIntervals())) {
- Date now = new Date();
- Date someMinute = DateUtils.getSomeMinute(now, defaultInterval);
- String end = DateUtils.getFormatDate(now, DateUtils.YYYY_MM_DD_HH24_MM_SS);
- String start = DateUtils.getFormatDate(someMinute, DateUtils.YYYY_MM_DD_HH24_MM_SS);
- List<String> list = Lists.newArrayList(start.concat("/").concat(end));
- parameters.setIntervals(list);
- }
- StringBuilder timeInterval = new StringBuilder();
- String[] intervals = getIntervals(parameters.getIntervals());
- timeInterval.append("recv_time >= UNIX_TIMESTAMP('").append(intervals[0]).append("') AND recv_time< UNIX_TIMESTAMP('").append(intervals[1]).append("')");
- return timeInterval.toString();
- }
-
- private String[] getIntervals(List<String> intervals) {
- return intervals.get(0).split("/");
- }
-
- public BaseResult query(String sql) {
- BaseResult baseResult;
- Map<String, String> result = executeQuery(sql);
- if (String.valueOf(ResultStatusEnum.SUCCESS.getCode()).equals(result.get("status"))) {
- Map resultMap = JSON.parseObject(result.get("result"), Map.class);
- List<Map> data = (List<Map>) resultMap.get("data");
- baseResult = BaseResultGenerator.success("ok", data, (Map) resultMap.get("statistics"));
- } else {
- baseResult = BaseResultGenerator.error(result.get("result"));
- }
- return baseResult;
- }
-
- public String generateTopSNI(ComDSLObject.Query query, String fileSql) {
- String range = ComDSLParse.parseRange(query.getParameters().getRange());
- String timeInterval = getTimeInterval(query.getParameters(), -24 * 60);
- String datasource = query.getDataSource();
- return String.format(fileSql, datasource, timeInterval.concat(range), entityConfigSource.getTopSNIDataset());
- }
- public String generateSNITotal(ComDSLObject.Query query, String fileSql) {
- String range = ComDSLParse.parseRange(query.getParameters().getRange());
- String timeInterval = getTimeInterval(query.getParameters(), -24 * 60);
- String datasource = query.getDataSource();
- return String.format(fileSql, datasource, timeInterval.concat(range));
- }
-
- private Map<String, String> executeQuery(String sql) {
- try {
- sql = URLEncoder.encode(sql, "utf-8").replaceAll("\\+", "%20");
- } catch (UnsupportedEncodingException e) {
- log.error("sql Encode error: ", e);
- }
-
- String queryURL = URLUtil.normalize(localHostAddress + ":" + serverPort + "/sql/?option=long_term&resultId=" + DigestUtil.md5Hex(sql) + "&query=");
- int socketTimeOut = httpConfig.getCkLongTermAccountSocketTimeOut();
- return httpClientService.httpGet(queryURL + sql, socketTimeOut);
- }
-
- @Override
- public void setEnvironment(Environment environment) {
- this.env = environment;
- }
-}
diff --git a/src/main/java/com/mesalab/services/service/impl/JobExecuteService.java b/src/main/java/com/mesalab/services/service/impl/JobExecuteService.java
index e3c4cb31..cbc01454 100644
--- a/src/main/java/com/mesalab/services/service/impl/JobExecuteService.java
+++ b/src/main/java/com/mesalab/services/service/impl/JobExecuteService.java
@@ -1,22 +1,32 @@
package com.mesalab.services.service.impl;
+import cn.hutool.core.bean.BeanUtil;
+import cn.hutool.core.collection.CollectionUtil;
+import cn.hutool.core.date.DatePattern;
+import cn.hutool.core.date.DateUtil;
+import cn.hutool.core.util.BooleanUtil;
+import cn.hutool.core.util.NumberUtil;
import cn.hutool.core.util.StrUtil;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
-import com.google.common.base.Stopwatch;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.jayway.jsonpath.JsonPath;
import com.mesalab.common.entity.BaseResult;
-import com.mesalab.common.enums.DBTypeEnum;
-import com.mesalab.common.enums.QueryOptionEnum;
+import com.mesalab.common.entity.DataTypeMapping;
+import com.mesalab.common.enums.DBEngineType;
+import com.mesalab.common.enums.QueryOption;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
+import com.mesalab.common.exception.BusinessException;
+import com.mesalab.common.utils.HazelcastInstanceMapUtil;
import com.mesalab.common.utils.sqlparser.SQLFunctionUtil;
import com.mesalab.common.utils.sqlparser.SQLHelper;
+import com.mesalab.qgw.constant.DslIdentifierNameConst;
import com.mesalab.qgw.exception.QGWBusinessException;
-import com.mesalab.qgw.model.basic.QueryProfile;
-import com.mesalab.qgw.service.QueryService;
-import com.mesalab.qgw.service.MetadataService;
+import com.mesalab.common.exception.CommonErrorCode;
+import com.mesalab.qgw.model.basic.*;
+import com.mesalab.qgw.service.*;
import com.mesalab.services.common.property.SqlPropertySourceFactory;
import com.mesalab.services.configuration.JobConfig;
import com.geedgenetworks.utils.DateUtils;
@@ -36,16 +46,22 @@ import org.springframework.context.EnvironmentAware;
import org.springframework.context.annotation.PropertySource;
import org.springframework.core.env.Environment;
import org.springframework.scheduling.annotation.Async;
+import org.springframework.scheduling.annotation.AsyncResult;
+import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import org.springframework.stereotype.Service;
+import java.io.Serializable;
import java.math.RoundingMode;
import java.text.DecimalFormat;
import java.util.*;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
-import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
-import static com.mesalab.services.service.impl.JobServiceImp.execute;
-import static com.mesalab.services.service.impl.JobServiceImp.executeUpdate;
+import static com.mesalab.services.service.impl.JobServiceImpl.execute;
+import static com.mesalab.services.service.impl.JobServiceImpl.executeUpdate;
/**
* TODO
@@ -58,43 +74,51 @@ import static com.mesalab.services.service.impl.JobServiceImp.executeUpdate;
@PropertySource(value = "classpath:job-sql-template.sql", factory = SqlPropertySourceFactory.class)
public class JobExecuteService implements EnvironmentAware {
private static final Log log = LogFactory.get();
- @Autowired
- Environment env;
- @Autowired
- QueryService queryService;
- @Autowired
- TaskExecuteService taskExecuteService;
- @Autowired
- MetadataService metadataService;
- @Autowired
- JobConfig jobCfg;
+ private Environment env;
+ private SQLSyncQueryService sqlSyncQueryService;
+ private TaskExecuteService taskExecuteService;
+ private DatabaseService databaseService;
+ private DSLService dslService;
+ private PacketCombineDslService packetCombineDslService;
+ private TrafficSpectrumDslService trafficSpectrumDslService;
+ private EngineConfigSource engineConfigSource;
+ private JobConfig jobCfg;
+ private final ThreadPoolTaskExecutor heavyResourceThreadPool;
+ private final ThreadPoolTaskExecutor lightWeightThreadPool;
+ private final ThreadPoolTaskExecutor taskThreadPool;
+
+ public JobExecuteService(ThreadPoolTaskExecutor heavyResourceThreadPool, ThreadPoolTaskExecutor lightWeightThreadPool, ThreadPoolTaskExecutor taskThreadPool) {
+ this.heavyResourceThreadPool = heavyResourceThreadPool;
+ this.lightWeightThreadPool = lightWeightThreadPool;
+ this.taskThreadPool = taskThreadPool;
+ }
- @Async("jobExecutor")
- public void addExecutorFieldDiscovery(String id, HashMap<String, Object> param) {
- Stopwatch watch = Stopwatch.createStarted();
+ @Async("lightWeightThreadPool")
+ public Future<Boolean> addExecutorFieldDiscovery(DSLQueryRequestParam request) {
try {
- JobInfo jobInfo = new JobInfo(id);
- String partitionKey = metadataService.getPartitionKey(param.get(JobConfig.QUERY_DATA_SOURCE).toString());
- Interval interval = getInterval("SELECT 1 FROM table WHERE " + param.get(JobConfig.KEY_CUSTOM_FIELD_DISCOVERY_FILTER).toString(), partitionKey);
+ markJobBegin(request.getId());
+ JobInfo jobInfo = new JobInfo(request.getId());
+ String partitionKey = databaseService.getPartitionKey(request.getDataSource());
+ Interval interval = getInterval("SELECT 1 FROM table WHERE " + request.getFilter(), partitionKey);
if (jobCfg.isTimeSlicingEnabled() && interval.isValid()) {
- executeSlicingFieldDiscoveryJob(jobInfo, param, interval, partitionKey);
+ executeSlicingFieldDiscoveryJob(request.getId(), jobInfo, request, interval, partitionKey);
} else {
- executeFieldDiscoveryJob(jobInfo, param, param.get(JobConfig.KEY_CUSTOM_FIELD_DISCOVERY_FILTER).toString());
+ executeFieldDiscoveryJob(request.getId(), jobInfo, request, request.getFilter());
}
} catch (Exception e) {
- executeUpdate(String.format(Objects.requireNonNull(env.getProperty("JOB_UPDATE_FIELD_VALUE")), JobConfig.DETAIL, JobConfig.IS_FAILED, id, true));
- log.error(e, "execute job error: jobId-{}", id);
+ markJobFailure(request.getId(), e.getMessage());
} finally {
- execute(String.format(Objects.requireNonNull(env.getProperty("JOB_UPDATE_PROGRESS")),
- id, true, 1, watch.elapsed(TimeUnit.MILLISECONDS)));
+ markJobCompletion(request.getId());
}
+ return new AsyncResult<>(true);
}
- @Async("jobExecutor")
+ @Deprecated
+ @Async("lightWeightThreadPool")
public void addExecutorStatistics(String id, HashMap<String, Object> param, Map<String, Object> property) {
JobInfo jobInfo = new JobInfo(id);
try {
- Interval interval = getInterval(param.get(JobConfig.KEY_CUSTOM_STATISTICS_SQL).toString(), metadataService.getPartitionKey(param.get(JobConfig.QUERY_DATA_SOURCE).toString()));
+ Interval interval = getInterval(param.get(JobConfig.KEY_CUSTOM_STATISTICS_SQL).toString(), databaseService.getPartitionKey(param.get(JobConfig.QUERY_DATA_SOURCE).toString()));
if (jobCfg.isTimeSlicingEnabled()
&& interval.isValid()
&& StringUtil.isNotEmpty(property.get(JobConfig.JOB_PROPERTY_K_OPTION))) {
@@ -118,8 +142,232 @@ public class JobExecuteService implements EnvironmentAware {
}
}
+ @Async("lightWeightThreadPool")
+ public void addExecutorSql(SqlQueryRequestParam request) {
+ try {
+ markJobBegin(request.getId());
+ BaseResult baseResult = sqlSyncQueryService.executeQuery(SQLQueryContext.builder()
+ .originalSQL(request.getStatement())
+ .option(request.isDryRun() ? QueryOption.SYNTAX_VALIDATION.getValue() : QueryOption.REAL_TIME.getValue())
+ .format(request.getOutputMode().getValue())
+ .build());
+ updateJobResultOnQueryCache(request.getId(), baseResult, null);
+ } catch (RuntimeException e) {
+ markJobFailure(request.getId(), e.getMessage());
+ } finally {
+ markJobCompletion(request.getId());
+ }
+ }
+
+ @Async("lightWeightThreadPool")
+ public void addExecutorDsl(DSLQueryRequestParam request) {
+ try {
+ markJobBegin(request.getId());
+ DSLQueryContext dslProfile = BeanUtil.copyProperties(request, DSLQueryContext.class);
+ updateJobResultOnQueryCache(request.getId(), dslService.execDsl(dslProfile, request.isDryRun()), null);
+ } catch (RuntimeException e) {
+ markJobFailure(request.getId(), e.getMessage());
+ } finally {
+ markJobCompletion(request.getId());
+ }
+ }
+
+ public BaseResult addDslExecutorPacketCombineWithoutCache(DSLQueryRequestParam request) {
+ try {
+ CompletableFuture<BaseResult> future = CompletableFuture.supplyAsync(() -> packetCombineDslService.run(request), heavyResourceThreadPool);
+ return future.get();
+ } catch (InterruptedException | ExecutionException | RuntimeException e) {
+ throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), "Job was Interrupted. message: " + e.getMessage()));
+ }
+ }
+
+ public BaseResult addDslExecutorTrafficSpectrumWithoutCache(DSLQueryRequestParam request) {
+ try {
+ CompletableFuture<BaseResult> future = CompletableFuture.supplyAsync(() -> trafficSpectrumDslService.run(request), lightWeightThreadPool);
+ return future.get();
+ } catch (InterruptedException | ExecutionException | RuntimeException e) {
+ throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), "Job was Interrupted. message: " + e.getMessage()));
+ }
+ }
+
+ @Async("lightWeightThreadPool")
+ public void addDslExecutorTrafficSpectrumWithCache(DSLQueryRequestParam request, CountDownLatch countDownLatch) {
+ try {
+ markJobBegin(request.getId());
+ Interval interval = buildDSLInterval(request);
+ if (DslIdentifierNameConst.TRAFFIC_SPECTRUM_CLIENT_IP_CONNECT_APPLICATION_USAGE.equalsIgnoreCase(request.getName())
+ && jobCfg.isTimeSlicingEnabled()
+ && interval.isValid()) {
+ JobInfo jobInfo = new JobInfo(request.getId());
+ Long end = interval.getEnd();
+ Long start = interval.getStart();
+ int timeSlicingInterval = jobCfg.getTimeSlicingInterval();
+ long endSlicing = end;
+ long startSlicing = start;
+ while (endSlicing > start) {
+ if (jobInfo.isTimeout()) {
+ log.error("execute job timeout: job-{}", jobInfo);
+ markJobFailure(request.getId(), "execute job timeout");
+ break;
+ }
+ QueryCache queryCache = HazelcastInstanceMapUtil.get(request.getId());
+ if (StrUtil.isEmptyIfStr(queryCache)) {
+ log.warn("cache expiration: job-{}", jobInfo);
+ break;
+ }
+ Map<String, Object> job = queryCache.getBaseResult().getJob();
+ if (BooleanUtil.toBoolean(String.valueOf(job.get(JobConfig.IS_DONE)))
+ || BooleanUtil.toBoolean(String.valueOf(job.get(JobConfig.IS_FAILED)))
+ || BooleanUtil.toBoolean(String.valueOf(job.get(JobConfig.IS_CANCELED)))) {
+ log.warn("job done or interrupt: job obj-{}", job);
+ break;
+ }
+ startSlicing = endSlicing - timeSlicingInterval;
+ if (startSlicing < start) {
+ startSlicing = start;
+ }
+ DecimalFormat format = getDecimalFormat();
+ double doneProgress = Double.parseDouble(format.format((end - startSlicing) * 1.0 / (end - start)));
+ DSLQueryRequestParam requestParamTemp = BeanUtil.copyProperties(request, DSLQueryRequestParam.class);
+ requestParamTemp.setIntervals(Lists.newArrayList(
+ DateUtil.format(new Date(startSlicing * 1000), DatePattern.UTC_PATTERN)
+ + "/"
+ + DateUtil.format(new Date(endSlicing * 1000), DatePattern.UTC_PATTERN)));
+ int sqrt = (int) Math.sqrt(engineConfigSource.getMaxCacheNum());
+ requestParamTemp.setLimit(String.valueOf(Math.max(sqrt, engineConfigSource.getTrafficSpectrumClientIPAppResultNum())));
+
+ BaseResult baseResult = trafficSpectrumDslService.run(requestParamTemp);
+ if (!baseResult.isSuccess()) {
+ markJobFailure(request.getId(), baseResult.getMessage());
+ break;
+ }
+ endSlicing = startSlicing;
+ BaseResult<Object> baseResultCache = queryCache.getBaseResult();
+ if (baseResultCache != null && StringUtil.isNotEmpty(baseResultCache.getData())) {
+ if (StringUtil.isEmpty(baseResult.getData())) {
+ continue;
+ }
+ List<Map<String, Object>> data1 = (List<Map<String, Object>>) baseResult.getData();
+ List<Map<String, Object>> data2 = (List<Map<String, Object>>) baseResultCache.getData();
+ Map<String, Object> map = trafficSpectrumDslService.mergeInternalExternalBipartiteGraph(data1.get(0), data2.get(0));
+ baseResultCache.setData(Lists.newArrayList(map));
+ updateJobResultOnQueryCache(request.getId(), baseResultCache, doneProgress);
+ } else {
+ updateJobResultOnQueryCache(request.getId(), baseResult, doneProgress);
+ }
+ }
+ } else {
+ updateJobResultOnQueryCache(request.getId(), trafficSpectrumDslService.run(request), null);
+ }
+ } catch (RuntimeException e) {
+ markJobFailure(request.getId(), e.getMessage());
+ } finally {
+ try {
+ markJobCompletion(request.getId());
+ } finally {
+ if (countDownLatch != null) {
+ countDownLatch.countDown();
+ }
+ }
+ }
+ }
+
+ private Interval buildDSLInterval(DSLQueryRequestParam request) {
+ List<String> intervals = request.getIntervals();
+ Interval interval = new Interval();
+ if (intervals != null && intervals.size() == 1) {
+ String[] split = intervals.get(0).split("/");
+ interval.setStart(DateUtil.parse(split[0]).getTime() / 1000);
+ interval.setEnd(DateUtil.parse(split[1]).getTime() / 1000);
+ }
+ return interval;
+ }
+
+ @Async("heavyResourceThreadPool")
+ public void addExecutorDslPacketCombineWithCache(DSLQueryRequestParam request, CountDownLatch countDownLatch) {
+ try {
+ markJobBegin(request.getId());
+ updateJobResultOnQueryCache(request.getId(), packetCombineDslService.run(request), null);
+ } catch (RuntimeException e) {
+ markJobFailure(request.getId(), e.getMessage());
+ } finally {
+ try {
+ markJobCompletion(request.getId());
+ } finally {
+ if (countDownLatch != null) {
+ countDownLatch.countDown();
+ }
+ }
+ }
+ }
+
+ private static void markJobBegin(String id) {
+ log.info("Async Query Job Start, id is: {}", id);
+ try {
+ HazelcastInstanceMapUtil.retrieveMap().lock(id);
+ QueryCache queryCache = HazelcastInstanceMapUtil.get(id);
+ Objects.requireNonNull(queryCache).getBaseResult().getJob().put(JobConfig.START_TIME, DateUtils.convertTimestampToString(System.currentTimeMillis() / 1000, DatePattern.UTC_PATTERN));
+ HazelcastInstanceMapUtil.put(id, queryCache);
+ } finally {
+ HazelcastInstanceMapUtil.retrieveMap().unlock(id);
+ }
+ }
+
+ public static void markJobFailure(String id, String reasonMessage) {
+ log.error("Async Query Job Error, message is: {}", reasonMessage);
+ try {
+ HazelcastInstanceMapUtil.retrieveMap().lock(id);
+ QueryCache queryCache = HazelcastInstanceMapUtil.get(id);
+ Map<String, Object> job = Objects.requireNonNull(queryCache).getBaseResult().getJob();
+ job.put(JobConfig.IS_FAILED, true);
+ job.put(JobConfig.REASON, reasonMessage);
+ HazelcastInstanceMapUtil.put(id, queryCache);
+ } finally {
+ HazelcastInstanceMapUtil.retrieveMap().unlock(id);
+ }
+ }
+
+ private static void markJobCompletion(String id) {
+ log.info("Async Query Job Done, id is: {}", id);
+ try {
+ HazelcastInstanceMapUtil.retrieveMap().lock(id);
+ QueryCache queryCache = HazelcastInstanceMapUtil.get(id);
+ Map<String, Object> job = Objects.requireNonNull(queryCache).getBaseResult().getJob();
+ job.put(JobConfig.IS_DONE, true);
+ job.put(JobConfig.DONE_PROGRESS, 1);
+ job.put(JobConfig.END_TIME, DateUtils.convertTimestampToString(System.currentTimeMillis() / 1000, DatePattern.UTC_PATTERN));
+ HazelcastInstanceMapUtil.put(id, queryCache);
+ } finally {
+ HazelcastInstanceMapUtil.retrieveMap().unlock(id);
+ }
+ }
+
+ private static void updateJobResultOnQueryCache(String id, BaseResult baseResult, Double doneProgress) {
+ if (!baseResult.isSuccess()) {
+ throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), baseResult.getMessage()));
+ }
+ try {
+ HazelcastInstanceMapUtil.retrieveMap().lock(id);
+ QueryCache queryCache = HazelcastInstanceMapUtil.get(id);
+ Map<String, Object> job = queryCache.getBaseResult().getJob();
+ if (doneProgress != null) {
+ job.put(JobConfig.DONE_PROGRESS, doneProgress);
+ }
+ baseResult.setJob(job);
+ queryCache.setBaseResult(baseResult);
+ HazelcastInstanceMapUtil.put(id, queryCache);
+ } finally {
+ HazelcastInstanceMapUtil.retrieveMap().unlock(id);
+ }
+ }
+
+
+ @Deprecated
private void executeLongTerm(JobInfo jobInfo, String cusSql, String countDataSource, String newFilter, String countFilter, boolean isSecBuild) throws JSQLParserException {
- if (isCancel(jobInfo.getId())) {
+ if (isCancelLongTerm(jobInfo.getId())) {
return;
}
@@ -140,42 +388,55 @@ public class JobExecuteService implements EnvironmentAware {
LongValue longValue = new LongValue(Long.parseLong(limit.getRowCount().toString()) * 10);
limit.setRowCount(longValue.getValue() > 10000 ? longValue : new LongValue(10000));
}
- List<Future<Boolean>> list = Lists.newArrayList(taskExecuteService.executeLongTerm(jobInfo.getId(), parseSQL.toString()));
- waitAsyncResultAndUpdateProcess(jobInfo, list);
+ List<Future<Boolean>> list = Lists.newArrayList(CompletableFuture.supplyAsync(() -> taskExecuteService.executeLongTerm(jobInfo.getId(), parseSQL.toString())));
+ waitAsyncResultAndUpdateProcessLongTerm(jobInfo, list);
}
- private void executeSlicingFieldDiscoveryJob(JobInfo jobInfo, Map<String, Object> param, Interval interval, String partitionKey) throws JSQLParserException {
+ private void executeSlicingFieldDiscoveryJob(String id, JobInfo jobInfo, DSLQueryRequestParam request, Interval interval, String partitionKey) throws ExecutionException, InterruptedException {
jobInfo.setTotalTimes((int) ((interval.getEnd() - interval.getStart()) / jobCfg.getTimeSlicingInterval() + 1));
while (true) {
jobInfo.setCurrentTimes(jobInfo.getCurrentTimes() + 1);
if (jobInfo.isTimeout()) {
log.error("execute job timeout: job-{}", jobInfo);
- executeUpdate(String.format(Objects.requireNonNull(env.getProperty("JOB_UPDATE_FIELD_VALUE")), JobConfig.DETAIL, JobConfig.IS_FAILED, jobInfo.getId(), true));
+ markJobFailure(id, "execute job timeout");
+ break;
+ }
+ QueryCache queryCache = HazelcastInstanceMapUtil.get(id);
+ if (StrUtil.isEmptyIfStr(queryCache)) {
+ log.warn("cache expiration: job-{}", jobInfo);
+ break;
+ }
+ Map<String, Object> job = queryCache.getBaseResult().getJob();
+ if (BooleanUtil.toBoolean(String.valueOf(job.get(JobConfig.IS_DONE)))
+ || BooleanUtil.toBoolean(String.valueOf(job.get(JobConfig.IS_FAILED)))
+ || BooleanUtil.toBoolean(String.valueOf(job.get(JobConfig.IS_CANCELED)))) {
+ log.warn("job done or interrupt: job obj-{}", job);
break;
}
long start = interval.getEnd() - jobCfg.getTimeSlicingInterval();
if (start > interval.getStart()) {
interval.setEnd(start);
String newFilter = String.format("(%s) AND ( %s > %s AND %s <= %s)",
- param.get(JobConfig.KEY_CUSTOM_FIELD_DISCOVERY_FILTER),
+ request.getFilter(),
partitionKey, interval.getEnd(), partitionKey, interval.getEnd() + jobCfg.getTimeSlicingInterval());
- executeFieldDiscoveryJob(jobInfo, param, newFilter);
+ executeFieldDiscoveryJob(id, jobInfo, request, newFilter);
} else {
String where = String.format("(%s) AND (%s >= %s AND %s <= %s)",
- param.get(JobConfig.KEY_CUSTOM_FIELD_DISCOVERY_FILTER),
+ request.getFilter(),
partitionKey, interval.getStart(), partitionKey, interval.getEnd());
- executeFieldDiscoveryJob(jobInfo, param, where);
+ executeFieldDiscoveryJob(id, jobInfo, request, where);
break;
}
}
}
+ @Deprecated
private void executeSlicingStatisticsJob(JobInfo jobInfo, Map<String, Object> param, Interval interval) throws JSQLParserException {
- String partitionKey = metadataService.getPartitionKey(param.get(JobConfig.QUERY_DATA_SOURCE).toString());
+ String partitionKey = databaseService.getPartitionKey(param.get(JobConfig.QUERY_DATA_SOURCE).toString());
List<Expression> timeFloorWithFill = SQLHelper.getFunctionParams(param.get(JobConfig.KEY_CUSTOM_STATISTICS_SQL).toString(), SQLFunctionUtil.TIME_FLOOR_WITH_FILL);
String per = StringUtil.isEmpty(timeFloorWithFill) ? "PT5M" : timeFloorWithFill.get(1).toString().substring(1, timeFloorWithFill.get(1).toString().length() - 1);
- String function = SQLFunctionUtil.translateCommonFun(SQLFunctionUtil.TIME_FLOOR_WITH_FILL + "(" + interval.getEnd() + ", '" + per + "', 'zero')", DBTypeEnum.CLICKHOUSE.getValue());
- BaseResult baseResult = queryService.executeQuery(QueryProfile.builder().query(StrUtil.format("SELECT {} AS stat_time FROM {} LIMIT 1", function, param.get(JobConfig.QUERY_DATA_SOURCE))).build());
+ String function = SQLFunctionUtil.translateCommonFun(SQLFunctionUtil.TIME_FLOOR_WITH_FILL + "(" + interval.getEnd() + ", '" + per + "', 'zero')", DBEngineType.CLICKHOUSE.getValue());
+ BaseResult baseResult = sqlSyncQueryService.executeQuery(SQLQueryContext.builder().originalSQL(StrUtil.format("SELECT {} AS stat_time FROM {} LIMIT 1", function, param.get(JobConfig.QUERY_DATA_SOURCE))).build());
List<Map<String, Object>> data = (List<Map<String, Object>>) baseResult.getData();
long statTime = Long.parseLong(data.get(0).get("stat_time").toString()) * 1000;
@@ -185,7 +446,7 @@ public class JobExecuteService implements EnvironmentAware {
//分片粒度 Max(per, TimeSlicingInterval)
long perSeconds = (dateList.get(0).getTime() - dateList.get(1).getTime()) / 1000;
long step = jobCfg.getTimeSlicingInterval() / perSeconds == 0 ? 1 : jobCfg.getTimeSlicingInterval() / perSeconds;
- int totalTimes = new Double(Math.ceil((dateList.size() - 1) * 1.0 / step)).intValue();
+ int totalTimes = Double.valueOf(Math.ceil((dateList.size() - 1) * 1.0 / step)).intValue();
jobInfo.setTotalTimes(totalTimes);
while (true) {
if (jobInfo.isTimeout()) {
@@ -208,68 +469,260 @@ public class JobExecuteService implements EnvironmentAware {
}
}
- private void executeFieldDiscoveryJob(JobInfo jobInfo, Map<String, Object> param, String newFilter) throws JSQLParserException {
+ // Runs field discovery for one time segment: resolves the metric expression from the
+ // datasource schema doc, skips the segment when it contains no rows, then fans out one
+ // async top-K task per requested field and hands the futures to
+ // waitAsyncResultAndUpdateProcess to merge into the cached result.
+ private void executeFieldDiscoveryJob(String id, JobInfo jobInfo, DSLQueryRequestParam request, String segmentFilter) throws ExecutionException, InterruptedException {
if (isCancel(jobInfo.getId())) {
return;
}
- String dataSource = param.get(JobConfig.QUERY_DATA_SOURCE).toString();
+ String dataSource = request.getDataSource();
+ // Default metric is a plain row count unless a custom metric/fn pair is requested.
String totalMetric = "count(*)";
- Object metric = param.get(JobConfig.KEY_CUSTOM_FIELD_DISCOVERY_METRIC);
+ Object metric = request.getCustomRequestParam().get(JobConfig.KEY_CUSTOM_FIELD_DISCOVERY_METRIC);
if (StringUtil.isNotEmpty(metric)) {
- Object fn = param.get(JobConfig.KEY_CUSTOM_FIELD_DISCOVERY_METRIC_FN);
- String measurements = metadataService.getValueByKeyInSchemaDoc(dataSource, "measurements");
- List<Map<String, String>> read = JsonPath.read(measurements, "$.aggregates['" + metric + "'][?(@.fn == '" + fn + "')]");
+ Object fn = request.getCustomRequestParam().get(JobConfig.KEY_CUSTOM_FIELD_DISCOVERY_METRIC_FN);
+ // Look up the (metric, fn) pair under field_discovery_metric in the schema doc;
+ // when found, build "fn(column)" as the aggregate to count against.
+ String measurements = databaseService.getValueByKeyInSchemaDoc(dataSource, "measurements");
+ List<Map<String, String>> read = JsonPath.read(measurements, "$.field_discovery_metric['" + metric + "'][?(@.fn == '" + fn + "')]");
if (!read.isEmpty()) {
String column = read.get(0).get("column");
totalMetric = read.get(0).get("fn") + "(" + column + ")";
}
}
- Map<String, Long> map = getCount(dataSource, newFilter, totalMetric);
+ Map<String, Long> map = getCount(dataSource, segmentFilter, totalMetric);
long currentCount = map.get("logCount");
+ // Nothing to discover in this segment — leave the cached result untouched.
if (currentCount == 0) {
return;
}
- long count = jobInfo.getCount() + currentCount;
- executeUpdate(String.format(Objects.requireNonNull(env.getProperty("JOB_UPDATE_FIELD_VALUE")), JobConfig.DETAIL, JobConfig.COUNT, jobInfo.getId(), count));
- jobInfo.setCount(count);
- List<Future<Boolean>> list = Lists.newArrayList();
- List<String> fields = (List<String>) param.get(JobConfig.KEY_CUSTOM_FIELD_DISCOVERY_FIELDS);
+ Map<String, Future<List<Map<String, Object>>>> taskCallbackList = Maps.newHashMap();
+ List<String> fields = (List<String>) request.getCustomRequestParam().get(JobConfig.KEY_CUSTOM_FIELD_DISCOVERY_FIELDS);
+
+ // One async discovery task per field, executed on the dedicated taskThreadPool
+ // (not the default ForkJoin common pool).
for (String field : fields) {
- list.add(taskExecuteService.executeFieldDiscovery(jobInfo.getId(), param.get(JobConfig.QUERY_DATA_SOURCE).toString(), newFilter, field,
- param.get(JobConfig.KEY_CUSTOM_FIELD_DISCOVERY_METRIC), param.get(JobConfig.KEY_CUSTOM_FIELD_DISCOVERY_METRIC_FN), map.get("totalMetric")));
+ taskCallbackList.put(field, CompletableFuture.supplyAsync(() -> taskExecuteService.executeFieldDiscovery(id, request.getDataSource(), field
+ , request.getCustomRequestParam().get(JobConfig.KEY_CUSTOM_FIELD_DISCOVERY_METRIC)
+ , request.getCustomRequestParam().get(JobConfig.KEY_CUSTOM_FIELD_DISCOVERY_METRIC_FN)
+ , segmentFilter), taskThreadPool));
}
- waitAsyncResultAndUpdateProcess(jobInfo, list);
+ waitAsyncResultAndUpdateProcess(id, jobInfo, taskCallbackList, request, map);
}
- private void waitAsyncResultAndUpdateProcess(JobInfo jobInfo, List<Future<Boolean>> taskList) {
- double taskCompletedProcess = 0;
+ // Polls the per-field discovery futures until all have been folded into the cached
+ // result (or the job times out). For each newly completed field it merges the fresh
+ // top-K rows with the previously cached top-K, re-sorts, truncates, and writes the
+ // field entry plus job progress back to the Hazelcast cache via updateResult.
+ private void waitAsyncResultAndUpdateProcess(String id, JobInfo jobInfo, Map<String, Future<List<Map<String, Object>>>> taskCallbackList, DSLQueryRequestParam request, Map<String, Long> mapCurrent) throws ExecutionException, InterruptedException {
+ // Fields whose results were already merged; loop ends when it covers every task.
+ List<String> updatedResult = Lists.newArrayList();
while (true) {
if (jobInfo.isTimeout()) {
- log.error("execute job timeout: job-{}", jobInfo);
- executeUpdate(String.format(Objects.requireNonNull(env.getProperty("JOB_UPDATE_FIELD_VALUE")), JobConfig.DETAIL, JobConfig.IS_FAILED, jobInfo.getId(), true));
- taskList.forEach(o -> o.cancel(true));
+ log.error("execute job timeout, job id is: {}", id);
+ markJobFailure(id, "execute job timeout");
+ // Best-effort cancellation of any still-running discovery tasks.
+ taskCallbackList.keySet().forEach(o -> taskCallbackList.get(o).cancel(true));
break;
}
- long completedTaskCount = taskList.stream().filter(Future::isDone).count();
+ if (updatedResult.size() >= taskCallbackList.size()) {
+ break;
+ }
+ long completedTaskCount = taskCallbackList.keySet().stream().filter(o -> taskCallbackList.get(o).isDone()).count();
+ // Skip the merge pass while fewer than 1% of tasks newly completed.
+ // NOTE(review): this `continue` spins without any sleep/backoff, so the loop
+ // busy-waits on a CPU core until the next completion — confirm this is intended.
+ if ((completedTaskCount - updatedResult.size()) * 1.0 / taskCallbackList.size() < 0.01 && completedTaskCount < taskCallbackList.size()) {
+ continue;
+ }
+ log.info("id :{}, done size: {}, task size: {}", id, completedTaskCount, taskCallbackList.size());
DecimalFormat format = getDecimalFormat();
+ // Progress within the overall job: current segment fraction minus the share of
+ // this segment's tasks that are still outstanding.
+ double jobCompletedProcess = Double.parseDouble(format.format(jobInfo.getCurrentTimes() * 1.0 / jobInfo.getTotalTimes() - ((1 - completedTaskCount * 1.0 / taskCallbackList.size()) / jobInfo.getTotalTimes())));
+ List<String> queryDoneField = taskCallbackList.keySet().stream().filter(o -> taskCallbackList.get(o).isDone()).collect(Collectors.toList());
+ for (String field : queryDoneField) {
+ if (updatedResult.contains(field)) {
+ continue;
+ }
+ updatedResult.add(field);
+ Long currentTotalMetric = mapCurrent.get("totalMetric");
+ // Defaults when no custom metric is configured: plain "count" semantics.
+ String fnDefault = "count";
+ String value = "count";
+ Object metric = request.getCustomRequestParam().get(JobConfig.KEY_CUSTOM_FIELD_DISCOVERY_METRIC);
+ Object fn = request.getCustomRequestParam().get(JobConfig.KEY_CUSTOM_FIELD_DISCOVERY_METRIC_FN);
+ if (StringUtil.isNotEmpty(metric)) {
+ String measurements = databaseService.getValueByKeyInSchemaDoc(request.getDataSource(), "measurements");
+ List<Map<String, String>> read = JsonPath.read(measurements, "$.field_discovery_metric['" + metric + "'][?(@.fn == '" + fn + "')]");
+ if (!read.isEmpty()) {
+ fnDefault = fn.toString();
+ value = read.get(0).get("value");
+ }
+ }
+ Future<List<Map<String, Object>>> listFuture = taskCallbackList.get(field);
+ List<Map<String, Object>> data = listFuture.get();
+ // Strip the internal FIELD_DISCOVERY_TOPK_METRIC_PREFIX from result keys so rows
+ // use the externally visible metric name.
+ data.forEach(x -> {
+ for (String k : x.keySet()) {
+ if (k.startsWith(JobConfig.FIELD_DISCOVERY_TOPK_METRIC_PREFIX)) {
+ Object v = x.get(k);
+ x.put(k.replace(JobConfig.FIELD_DISCOVERY_TOPK_METRIC_PREFIX, ""), v);
+ x.remove(k);
+ }
+ }
+ });
+ // The cached result is required to merge with earlier segments; if it expired,
+ // the whole job is failed and processing stops.
+ QueryCache queryCache = HazelcastInstanceMapUtil.get(id);
+ if (StrUtil.isEmptyIfStr(queryCache)) {
+ markJobFailure(id, "Cache expiration.");
+ log.error("field_discovery task query lastTopK Error: Cache expiration.");
+ return;
+ }
+ Object lastTotalMetric = 0;
+ if (StringUtil.isNotEmpty(queryCache.getBaseResult().getData())) {
+ List<Map> map = (List<Map>) queryCache.getBaseResult().getData();
+ for (Map map1 : map) {
+ if (CollectionUtil.isNotEmpty(map1) && map1.containsKey(field) && !StrUtil.isEmptyIfStr(map1.get(field))) {
+ Map item = (Map) map1.get(field);
+ List<Map<String, Object>> topk = (List<Map<String, Object>>) item.get("topk");
+ lastTotalMetric = item.get(value);
+ Map<String, String> metrics = Maps.newHashMap();
+ // "count" metrics are combined additively across segments; other fns
+ // keep their own aggregation (max/min/avg) in mergeData.
+ metrics.put(value, "count".equalsIgnoreCase(fnDefault) ? "sum" : fnDefault);
+ data = mergeData(data, topk, Lists.newArrayList("value"), metrics);
+ }
+
+ }
+ }
+ data = sortDataAndSetMaxSize(data, value, DataTypeMapping.LONG, false);
+ updateResult(id, field, currentTotalMetric, lastTotalMetric, value, data, jobCompletedProcess);
- double jobCompletedProcess = Double.parseDouble(format.format(jobInfo.getCurrentTimes() * 1.0 / jobInfo.getTotalTimes() - ((1 - completedTaskCount * 1.0 / taskList.size()) / jobInfo.getTotalTimes())));
- if (jobCompletedProcess - taskCompletedProcess >= 0.1 && jobCompletedProcess < 1) {
- taskCompletedProcess = jobCompletedProcess;
- execute(String.format(Objects.requireNonNull(env.getProperty("JOB_UPDATE_PROGRESS")),
- jobInfo.getId(), false, jobCompletedProcess, System.currentTimeMillis() - jobInfo.startTime));
}
- if (completedTaskCount >= taskList.size()) {
+ if (completedTaskCount >= taskCallbackList.size()) {
break;
}
}
}
+ // Writes one field's merged discovery result into the Hazelcast-cached BaseResult
+ // under the entry lock for this job id: builds {topk, distinct_count (capped at 100),
+ // <value> = currentTotalMetric + lastTotalMetric}, updates done_progress while < 1,
+ // and either creates the data list, replaces the existing field entry, or appends it.
+ private void updateResult(String id, String field, long currentTotalMetric, Object lastTotalMetric, String value, List<Map<String, Object>> topList, double jobCompletedProcess) {
+ Map<String, Object> map = Maps.newHashMap();
+ map.put("topk", topList);
+ map.put("distinct_count", Math.min(topList.size(), 100));
+ map.put(value, currentTotalMetric + Long.parseLong(lastTotalMetric.toString()));
+ try {
+ // Per-key lock so concurrent field updates for the same job do not clobber
+ // each other's read-modify-write of the cached result.
+ // NOTE(review): lock() is acquired inside the try; if lock() itself throws,
+ // the unlock() in finally runs without the lock held — confirm acceptable.
+ HazelcastInstanceMapUtil.retrieveMap().lock(id);
+ QueryCache queryCache = HazelcastInstanceMapUtil.get(id);
+ BaseResult<Object> baseResult = queryCache.getBaseResult();
+ if (jobCompletedProcess < 1) {
+ Map<String, Object> job = queryCache.getBaseResult().getJob();
+ job.put(JobConfig.DONE_PROGRESS, jobCompletedProcess);
+ }
+
+ Object data1 = baseResult.getData();
+ // First field written for this job: initialize the data list.
+ if (StrUtil.isEmptyIfStr(data1)) {
+ List<Object> list = Lists.newArrayList();
+ Map<Object, Object> item = Maps.newLinkedHashMap();
+ item.put(field, map);
+ list.add(item);
+ baseResult.setData(list);
+ HazelcastInstanceMapUtil.put(id, queryCache);
+ return;
+ }
+ // Replace the existing entry for this field, if any...
+ List<Map<String, Object>> list = (List<Map<String, Object>>) data1;
+ for (int i = 0; i < list.size(); i++) {
+ Map<String, Object> map1 = list.get(i);
+ if (map1.containsKey(field)) {
+ map1.put(field, map);
+ break;
+ }
+ }
+ // ...otherwise append a new one.
+ if (list.stream().noneMatch(o -> o.containsKey(field))) {
+ Map<String, Object> objectObjectHashMap = Maps.newHashMap();
+ objectObjectHashMap.put(field, map);
+ list.add(objectObjectHashMap);
+ }
+ // Re-put so the mutation is propagated to the distributed map.
+ HazelcastInstanceMapUtil.put(id, queryCache);
+ } finally {
+ HazelcastInstanceMapUtil.retrieveMap().unlock(id);
+ }
+ }
+
+ // Sorts rows by sortElement using a comparator matched to sortType (numeric types
+ // treat empty values as Long.MIN_VALUE so they sink to the bottom of a descending
+ // sort; anything else compares as strings), reverses for descending order, and
+ // truncates to the configured cache limit.
+ // NOTE(review): subList returns a view backed by the original list, not a copy —
+ // confirm callers never mutate the original afterwards.
+ private List<Map<String, Object>> sortDataAndSetMaxSize(List<Map<String, Object>> data, String sortElement, String sortType, boolean isAsc) {
+ if (DataTypeMapping.INT.equalsIgnoreCase(sortType) || DataTypeMapping.LONG.equalsIgnoreCase(sortType)) {
+ data.sort(Comparator.comparing(o -> Long.valueOf(StringUtil.isEmpty(o.get(sortElement)) ? Long.MIN_VALUE + "" : o.get(sortElement).toString())));
+ } else if (DataTypeMapping.FLOAT.equalsIgnoreCase(sortType) || DataTypeMapping.DOUBLE.equalsIgnoreCase(sortType)) {
+ data.sort(Comparator.comparing(o -> Double.valueOf(StringUtil.isEmpty(o.get(sortElement)) ? Long.MIN_VALUE + "" : o.get(sortElement).toString())));
+ } else {
+ data.sort(Comparator.comparing(o -> String.valueOf(o.get(sortElement))));
+ }
+ if (!isAsc) {
+ Collections.reverse(data);
+ }
+ return data.size() > engineConfigSource.getMaxCacheNum() ? data.subList(0, engineConfigSource.getMaxCacheNum()) : data;
+ }
+
+ // Merges two row lists into one, grouping rows by the concatenation of their
+ // dimension values and combining each metric column with the action named in
+ // `metrics` ("sum" | "max" | "min" | "avg"). Null metric values are coalesced;
+ // non-numeric values cause a BusinessException.
+ // NOTE(review): the group key joins dimension values with no delimiter, so
+ // ("ab","c") and ("a","bc") collide — confirm dimensions make this impossible.
+ private List<Map<String, Object>> mergeData(List<Map<String, Object>> data1, List<Map<String, Object>> date2, List<String> dimensions, Map<String, String> metrics) {
+ Collection<Map<String, Object>> data = CollectionUtil.addAll(data1, date2);
+ Map<String, Map<String, Object>> result = Maps.newHashMap();
+ for (Map<String, Object> datum : data) {
+ StringBuilder key = new StringBuilder();
+ dimensions.forEach(k -> key.append(datum.get(k)));
+ // First row for this key becomes the accumulator.
+ if (!result.containsKey(key.toString())) {
+ result.put(key.toString(), datum);
+ continue;
+ }
+ Map<String, Object> item = result.get(key.toString());
+ for (String k : metrics.keySet()) {
+ String action = metrics.get(k);
+ Object o1 = item.get(k);
+ Object o2 = datum.get(k);
+ // Coalesce when either side is empty: keep the non-empty value, else null.
+ if (StringUtil.isEmpty(o1) || StringUtil.isEmpty(o2)) {
+ item.put(k, (StringUtil.isEmpty(o1) && StringUtil.isEmpty(o2)) ? null : StringUtil.isEmpty(o1) ? o2 : o1);
+ continue;
+ }
+ Number number1 = NumberUtil.parseNumber(o1.toString());
+ Number number2 = NumberUtil.parseNumber(o2.toString());
+ if (action.equals("sum")) {
+ item.put(k, NumberUtil.add(number1, number2));
+ continue;
+ }
+ // Floating-point branch when either operand parses as a double.
+ if (NumberUtil.isDouble(o1.toString()) || NumberUtil.isDouble(o2.toString())) {
+ double v1 = Double.parseDouble(number1.toString());
+ double v2 = Double.parseDouble(number2.toString());
+ switch (action) {
+ case "max":
+ item.put(k, NumberUtil.max(v1, v2));
+ break;
+ case "min":
+ item.put(k, NumberUtil.min(v1, v2));
+ break;
+ case "avg":
+ // NOTE(review): averages the two values directly; for pre-aggregated
+ // averages this is not row-count weighted — confirm intended.
+ item.put(k, NumberUtil.div(NumberUtil.add(v1, v2), 2));
+ break;
+ }
+ } else if (NumberUtil.isLong(o1.toString()) || NumberUtil.isLong(o2.toString())) {
+ long v1 = Long.parseLong(number1.toString());
+ long v2 = Long.parseLong(number2.toString());
+ switch (action) {
+ case "max":
+ item.put(k, NumberUtil.max(v1, v2));
+ break;
+ case "min":
+ item.put(k, NumberUtil.min(v1, v2));
+ break;
+ case "avg":
+ item.put(k, NumberUtil.div(NumberUtil.add(v1, v2), 2));
+ break;
+ }
+ } else {
+ log.error("task merge data error, data is: {}, {}", data1, date2);
+ throw new BusinessException("task merge data error.");
+ }
+ }
+ }
+ return new ArrayList<>(result.values());
+ }
+
+ // Returns true when the job should stop: either its cache entry is gone (expired or
+ // never created) or the client has not polled within jobCfg.getInteractiveTimeout(),
+ // in which case the cached job is also marked IS_CANCELED.
private boolean isCancel(String id) {
- QueryProfile build = QueryProfile.builder().query(String.format(Objects.requireNonNull(env.getProperty("FIELD_RESULT")), JobConfig.DETAIL, JobConfig.LAST_QUERY_TIME, id)).
- option(QueryOptionEnum.REAL_TIME.getValue()).build();
- BaseResult baseResult = queryService.executeQuery(build);
+ QueryCache queryCache = HazelcastInstanceMapUtil.get(id);
+ if (queryCache == null) {
+ log.info("Can't find the cache, job id is: {}", id);
+ return true;
+ }
+ long currentTime = System.currentTimeMillis();
+ // Interactive jobs are abandoned when the client stops polling for results.
+ if (currentTime - queryCache.getLatestQueryTimeMs() > jobCfg.getInteractiveTimeout()) {
+ queryCache.getBaseResult().getJob().put(JobConfig.IS_CANCELED, true);
+ HazelcastInstanceMapUtil.put(id, queryCache);
+ log.info("Interactive timeout, job id is: {}", id);
+ return true;
+ }
+ return false;
+ }
+
+ @Deprecated
+ private boolean isCancelLongTerm(String id) {
+ SQLQueryContext build = SQLQueryContext.builder().originalSQL(String.format(Objects.requireNonNull(env.getProperty("FIELD_RESULT")), JobConfig.DETAIL, JobConfig.LAST_QUERY_TIME, id)).
+ option(QueryOption.REAL_TIME.getValue()).build();
+ BaseResult baseResult = sqlSyncQueryService.executeQuery(build);
if (baseResult.isSuccess() && (StringUtil.isNotEmpty(baseResult.getData()))) {
List<Map<String, Object>> list = (List<Map<String, Object>>) baseResult.getData();
long statTime = Long.parseLong(list.get(0).get("value").toString());
@@ -281,10 +734,36 @@ public class JobExecuteService implements EnvironmentAware {
return false;
}
+ // Legacy DB-backed wait loop kept for long-term jobs: polls Boolean task futures,
+ // persists a failure flag and cancels tasks on timeout, and pushes progress updates
+ // in >= 0.1 increments via the JOB_UPDATE_PROGRESS SQL template. Superseded by the
+ // Hazelcast-based waitAsyncResultAndUpdateProcess above.
+ @Deprecated
+ private void waitAsyncResultAndUpdateProcessLongTerm(JobInfo jobInfo, List<Future<Boolean>> taskList) {
+ double taskCompletedProcess = 0;
+ while (true) {
+ if (jobInfo.isTimeout()) {
+ log.error("execute job timeout: job-{}", jobInfo);
+ executeUpdate(String.format(Objects.requireNonNull(env.getProperty("JOB_UPDATE_FIELD_VALUE")), JobConfig.DETAIL, JobConfig.IS_FAILED, jobInfo.getId(), true));
+ taskList.forEach(o -> o.cancel(true));
+ break;
+ }
+ long completedTaskCount = taskList.stream().filter(Future::isDone).count();
+ DecimalFormat format = getDecimalFormat();
+
+ double jobCompletedProcess = Double.parseDouble(format.format(jobInfo.getCurrentTimes() * 1.0 / jobInfo.getTotalTimes() - ((1 - completedTaskCount * 1.0 / taskList.size()) / jobInfo.getTotalTimes())));
+ // Only persist progress when it advanced by at least 10% and is not yet complete.
+ if (jobCompletedProcess - taskCompletedProcess >= 0.1 && jobCompletedProcess < 1) {
+ taskCompletedProcess = jobCompletedProcess;
+ execute(String.format(Objects.requireNonNull(env.getProperty("JOB_UPDATE_PROGRESS")),
+ jobInfo.getId(), false, jobCompletedProcess, System.currentTimeMillis() - jobInfo.startTime));
+ }
+ if (completedTaskCount >= taskList.size()) {
+ break;
+ }
+ }
+ }
+
+
private Map<String, Long> getCount(String logType, String filter, String totalMetric) {
String sql = String.format(Objects.requireNonNull(env.getProperty("JOB_LOG_COUNT")),
totalMetric, logType, StrUtil.isBlankIfStr(filter) ? "" : "WHERE ".concat(filter));
- BaseResult baseResult = queryService.executeQuery(QueryProfile.builder().query(sql).build());
+ BaseResult baseResult = sqlSyncQueryService.executeQuery(SQLQueryContext.builder().originalSQL(sql).build());
if (!baseResult.isSuccess()) {
log.error("Job-get log count failed, message: {}", baseResult.getMessage());
throw new QGWBusinessException(baseResult.getStatus(), baseResult.getCode(), "Job-get log count failed, message: " + baseResult.getMessage());
@@ -296,7 +775,7 @@ public class JobExecuteService implements EnvironmentAware {
return map;
}
- private DecimalFormat getDecimalFormat() {
+ private static DecimalFormat getDecimalFormat() {
DecimalFormat format = new DecimalFormat("#0.####");
format.applyPattern("0.0000");
format.setRoundingMode(RoundingMode.FLOOR);
@@ -330,7 +809,7 @@ public class JobExecuteService implements EnvironmentAware {
private PlainSelect getWhereOuter(String sql) throws JSQLParserException {
Select parse = (Select) CCJSqlParserUtil.parse(sql);
- return (PlainSelect) parse.getSelectBody();
+ return (PlainSelect) parse.getSelectBody();
}
private Interval parseInterval(List<Expression> list, Interval interval, String partitionKey) {
@@ -382,7 +861,7 @@ public class JobExecuteService implements EnvironmentAware {
}
private long getCriticalValue(String str) {
- return Long.parseLong(SQLFunctionUtil.getQueryValue("toUnixTimestamp(" + str + ")", DBTypeEnum.CLICKHOUSE.getValue()));
+ return Long.parseLong(SQLFunctionUtil.getQueryValue("toUnixTimestamp(" + str + ")", DBEngineType.CLICKHOUSE.getValue()));
}
@Data
@@ -398,7 +877,7 @@ public class JobExecuteService implements EnvironmentAware {
}
@Data
- class JobInfo {
+ class JobInfo implements Serializable {
private String id;
private long startTime = System.currentTimeMillis();
private long count = 0;
@@ -414,8 +893,48 @@ public class JobExecuteService implements EnvironmentAware {
}
}
+ // Setter-based Spring injection for the service's collaborators. setEnvironment was
+ // previously the EnvironmentAware override; it is now a plain @Autowired setter.
- @Override
+ @Autowired
public void setEnvironment(Environment environment) {
this.env = environment;
}
+
+ @Autowired
+ public void setSqlSyncQueryService(SQLSyncQueryService sqlSyncQueryService) {
+ this.sqlSyncQueryService = sqlSyncQueryService;
+ }
+
+ @Autowired
+ public void setTaskExecuteService(TaskExecuteService taskExecuteService) {
+ this.taskExecuteService = taskExecuteService;
+ }
+
+ @Autowired
+ public void setDatabaseService(DatabaseService databaseService) {
+ this.databaseService = databaseService;
+ }
+
+ @Autowired
+ public void setDslService(DSLService dslService) {
+ this.dslService = dslService;
+ }
+
+ @Autowired
+ public void setPacketCombineDslService(PacketCombineDslService packetCombineDslService) {
+ this.packetCombineDslService = packetCombineDslService;
+ }
+
+ @Autowired
+ public void setTrafficSpectrumDslService(TrafficSpectrumDslService trafficSpectrumDslService) {
+ this.trafficSpectrumDslService = trafficSpectrumDslService;
+ }
+
+ @Autowired
+ public void setEngineConfigSource(EngineConfigSource engineConfigSource) {
+ this.engineConfigSource = engineConfigSource;
+ }
+
+ @Autowired
+ public void setJobCfg(JobConfig jobCfg) {
+ this.jobCfg = jobCfg;
+ }
}
diff --git a/src/main/java/com/mesalab/services/service/impl/JobServiceImp.java b/src/main/java/com/mesalab/services/service/impl/JobServiceImpl.java
index 6a2c0b7a..8f45d0f1 100644
--- a/src/main/java/com/mesalab/services/service/impl/JobServiceImp.java
+++ b/src/main/java/com/mesalab/services/service/impl/JobServiceImpl.java
@@ -1,6 +1,7 @@
package com.mesalab.services.service.impl;
import cn.hutool.core.collection.CollectionUtil;
+import cn.hutool.core.date.DatePattern;
import cn.hutool.core.util.BooleanUtil;
import cn.hutool.core.util.StrUtil;
import cn.hutool.crypto.digest.DigestUtil;
@@ -8,6 +9,7 @@ import cn.hutool.json.JSONUtil;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.alibaba.fastjson2.JSON;
+import com.geedgenetworks.utils.DateUtils;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.jfinal.plugin.activerecord.Db;
@@ -19,15 +21,12 @@ import com.mesalab.common.exception.BusinessException;
import com.mesalab.common.utils.SavedQueryResultUtils;
import com.mesalab.common.utils.sqlparser.SQLFunctionUtil;
import com.mesalab.common.utils.sqlparser.SQLHelper;
-import com.mesalab.qgw.constant.QGWMessageConst;
import com.mesalab.qgw.exception.QGWBusinessException;
-import com.mesalab.qgw.exception.QGWErrorCode;
-import com.mesalab.qgw.model.basic.QueryProfile;
+import com.mesalab.common.exception.CommonErrorCode;
+import com.mesalab.qgw.model.basic.SQLQueryContext;
import com.mesalab.qgw.model.basic.HBaseAPISource;
-import com.mesalab.qgw.service.QueryService;
-import com.mesalab.qgw.service.MetadataService;
-import com.mesalab.services.common.enums.MetricFunction;
-import com.mesalab.services.common.enums.MetricType;
+import com.mesalab.qgw.service.SQLSyncQueryService;
+import com.mesalab.qgw.service.DatabaseService;
import com.mesalab.services.configuration.JobConfig;
import com.mesalab.services.service.JobService;
import com.mesalab.services.common.property.SqlPropertySourceFactory;
@@ -60,13 +59,13 @@ import java.util.stream.Collectors;
*/
@Service("jobService")
@PropertySource(value = "classpath:job-sql-template.sql", factory = SqlPropertySourceFactory.class)
-public class JobServiceImp implements JobService, EnvironmentAware {
+public class JobServiceImpl implements JobService, EnvironmentAware {
private static final Log log = LogFactory.get();
private static final String URL_PREFIX = "jdbc:phoenix:";
@Autowired
- QueryService queryService;
+ SQLSyncQueryService sqlSyncQueryService;
@Autowired
- MetadataService metadataService;
+ DatabaseService databaseService;
@Autowired
JobExecuteService jobExecuteService;
@Autowired
@@ -103,24 +102,22 @@ public class JobServiceImp implements JobService, EnvironmentAware {
+ // Commits an ad-hoc query job. The job id is now a full md5 of the request body
+ // (deduplicates identical submissions); an existing job short-circuits to its
+ // resource links, otherwise the job is initialized per query type.
@Override
public BaseResult commitAdHocQuery(HashMap<String, Object> reqBody) {
validateAdHocQuery(reqBody);
- String id = DigestUtil.md5Hex16(String.valueOf(reqBody.get(JobConfig.KEY_QUERY_TYPE)).toLowerCase())
- .concat(JobConfig.DELIMIT)
- .concat(DigestUtil.md5Hex16(String.valueOf(reqBody)));
+ String id = DigestUtil.md5Hex(String.valueOf(reqBody));
String queryType = String.valueOf(reqBody.get(JobConfig.KEY_QUERY_TYPE));
if (isExist(id)) {
log.info("Hoc Job already exist, ID is {}, params is {}", id, JSON.toJSONString(reqBody));
- return BaseResultGenerator.successCreate(buildJobResource(id, queryType));
+ return BaseResultGenerator.successCreate(buildJobResource(id));
}
log.info("Add Hoc Job, ID is {}, params is {}", id, JSON.toJSONString(reqBody));
Map<String, Object> property = Maps.newLinkedHashMap();
if (JobConfig.FIELD_DISCOVERY.equals(queryType)) {
initFieldDiscoveryJob(id, reqBody, property);
- jobExecuteService.addExecutorFieldDiscovery(id, reqBody);
+ // NOTE(review): the field-discovery executor call is commented out, so this
+ // branch only initializes the job without scheduling execution — confirm a
+ // different trigger now starts field-discovery jobs.
+ //jobExecuteService.addExecutorFieldDiscovery(id, reqBody);
} else if (JobConfig.STATISTICS.equals(queryType)) {
initStatisticsJob(id, reqBody, property);
jobExecuteService.addExecutorStatistics(id, reqBody, property);
}
- return BaseResultGenerator.successCreate(buildJobResource(id, queryType));
+ return BaseResultGenerator.successCreate(buildJobResource(id));
}
private void initStatisticsJob(String id, HashMap<String, Object> reqBody, Map<String, Object> property) {
@@ -151,39 +148,31 @@ public class JobServiceImp implements JobService, EnvironmentAware {
validateReport(map);
String sql = JSONUtil.quote(map.get(JobConfig.KEY_CUSTOM_SAVED_QUERY_SQL).toString(), false).replace("'", "\\'");
String id = getSameJob(sql);
- String queryType = String.valueOf(map.get(JobConfig.KEY_QUERY_TYPE));
if (StringUtil.isNotBlank(id)) {
- return BaseResultGenerator.successCreate(buildJobResource(id, queryType));
+ return BaseResultGenerator.successCreate(buildJobResource(id));
}
map.put("UUID", System.currentTimeMillis());
- id = DigestUtil.md5Hex16(JobConfig.SAVED_QUERY)
- .concat(JobConfig.DELIMIT)
- .concat(DigestUtil.md5Hex16(String.valueOf(map)));
+ id = DigestUtil.md5Hex(String.valueOf(map));
long currentTime = System.currentTimeMillis() / 1000;
Db.update(String.format(Objects.requireNonNull(env.getProperty("SAVED_QUERY_JOB_INIT")), id, sql, currentTime, currentTime));
- return BaseResultGenerator.successCreate(buildJobResource(id, queryType));
+ return BaseResultGenerator.successCreate(buildJobResource(id));
}
+ // Looks up one saved-query job row (now via Db.find returning Records instead of
+ // raw Object arrays), 400s when no row matches, and wraps the status map into a
+ // BaseResult's job section.
@Override
public BaseResult getSavedQueryStatus(String jobId) {
- Map<String, Object> job = buildJobStatus(jobId);
- return BaseResultGenerator.success("ok", Lists.newArrayList(job));
- }
-
- private Map<String, Object> buildJobStatus(String jobId) {
- List<Object> query = Db.query(String.format(Objects.requireNonNull(env.getProperty("SAVED_QUERY_JOB_STATUS")), jobId, 1));
+ List<Record> query = Db.find(String.format(Objects.requireNonNull(env.getProperty("SAVED_QUERY_JOB_STATUS")), jobId, 1));
if (CollectionUtil.isEmpty(query)) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(), "No job found with the given ID");
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(), "Not Find Matching Job.");
}
- return buildJobStatus(Arrays.asList((Object[]) query.get(0)));
+ Map<String, Object> jobInfo = buildJobStatus(query.get(0).getColumns());
+ return BaseResultGenerator.success(null, jobInfo, null, null, null);
}
@Override
public BaseResult getSavedQueryResult(String jobId) {
BaseResult statusBaseResult = getSavedQueryStatus(jobId);
if (statusBaseResult.isSuccess()) {
- List<HashMap<String, Object>> statusData = (List<HashMap<String, Object>>) statusBaseResult.getData();
- HashMap<String, Object> jobInfo = statusData.get(0);
+ Map<String, Object> jobInfo = (Map<String, Object>) statusBaseResult.getJob();
boolean isDone = Boolean.parseBoolean(jobInfo.get(JobConfig.IS_DONE).toString());
boolean isCanceled = Boolean.parseBoolean(jobInfo.get(JobConfig.IS_CANCELED).toString());
boolean isFailed = Boolean.parseBoolean(jobInfo.get(JobConfig.IS_FAILED).toString());
@@ -191,26 +180,26 @@ public class JobServiceImp implements JobService, EnvironmentAware {
List<BaseResult> resultList = buildBaseResults(Lists.newArrayList(jobId), Lists.newArrayList(jobInfo));
return resultList.get(0);
} else {
- return BaseResultGenerator.success(null, jobInfo, null, null);
+ return BaseResultGenerator.success(null, jobInfo, statusBaseResult.getOutputMode(), null, null);
}
}
- throw new BusinessException(ResultStatusEnum.SERVER_ERROR.getCode(), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(),
- String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(), "Get job status error: " + statusBaseResult.getMessage()));
+ throw new BusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), "Get job status error: " + statusBaseResult.getMessage()));
}
private List<BaseResult> buildBaseResults(List<String> ids, List<Map<String, Object>> jobStatus) {
BaseResult<Object> resultBaseResult = SavedQueryResultUtils.getByRowKey(ids);
if (!resultBaseResult.isSuccess()) {
- throw new BusinessException(ResultStatusEnum.SERVER_ERROR.getCode(), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(),
- String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(), "Get job result error: " + resultBaseResult.getMessage()));
+ throw new BusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), "Get job result error: " + resultBaseResult.getMessage()));
}
List<Map<String, Object>> resultData = (List<Map<String, Object>>) resultBaseResult.getData();
List<BaseResult> resultList = Lists.newArrayList();
for (Map<String, Object> data : resultData) {
BaseResult result;
Map map = JSON.parseObject((String) data.get("result"), Map.class);
- result = BaseResultGenerator.generate(Integer.parseInt(String.valueOf(map.get("status"))), ResultCodeEnum.SUCCESS.getCode(), String.valueOf(map.get("message")),
- map.get("data"), map.get("meta"), (Map<String, Object>) (map.get(JobConfig.STATISTICS)), QueryFormatEnum.JSON.getValue());
+ result = BaseResultGenerator.generate(Integer.parseInt(String.valueOf(map.get("status"))), ResultCodeEnum.SUCCESS.getCode(), true, String.valueOf(map.get("message")),
+ map.get("data"), map.get("meta"), (Map<String, Object>) (map.get(JobConfig.STATISTICS)), OutputMode.JSON.getValue());
String rowKey = String.valueOf(data.get("ROWKEY"));
jobStatus.forEach(o -> {
if (StrUtil.equals(rowKey, String.valueOf(o.get(JobConfig.JOB_ID)))) {
@@ -225,23 +214,23 @@ public class JobServiceImp implements JobService, EnvironmentAware {
+ // Cancels a saved query by flipping its is_valid column to 0; responds 202 Accepted.
@Override
public BaseResult cancelSavedQuery(String jobId) {
Db.update(String.format(Objects.requireNonNull(env.getProperty("SAVED_QUERY_JOB_UPDATE")), "is_valid", 0, jobId));
- return BaseResultGenerator.success(ResultStatusEnum.ACCEPTED.getCode(), "ok", null);
+ return BaseResultGenerator.success(HttpStatusCodeEnum.ACCEPTED.getCode(), "ok", null);
}
+ // Batch variant of the status lookup: fetches all requested job rows in one query
+ // and fails with 404 listing the ids that had no matching row.
@Override
public List<Map<String, Object>> batchSavedQueryStatus(List<String> ids) {
- List<Object> data = Db.query(String.format(Objects.requireNonNull(env.getProperty("SAVED_QUERY_JOB_STATUS")), StrUtil.join("', '", ids), ids.size()));
+ List<Record> data = Db.find(String.format(Objects.requireNonNull(env.getProperty("SAVED_QUERY_JOB_STATUS")), StrUtil.join("', '", ids), ids.size()));
List<Map<String, Object>> result = Lists.newArrayList();
data.forEach(x -> {
- Map<String, Object> job = buildJobStatus(Arrays.asList((Object[]) x));
+ Map<String, Object> job = buildJobStatus(x.getColumns());
result.add(job);
});
+ // Fewer rows than requested ids means some jobs do not exist — report which.
if (result.size() != ids.size()) {
List<String> resultIds = Lists.newArrayList();
result.forEach(o -> resultIds.add(String.valueOf(o.get(JobConfig.JOB_ID))));
List<String> collect = ids.stream().filter(item -> !resultIds.contains(item)).collect(Collectors.toList());
- throw new BusinessException(ResultStatusEnum.NOT_FOUND.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), "Not Find Jobs: " + StrUtil.join(",", collect)));
+ throw new BusinessException(HttpStatusCodeEnum.NOT_FOUND.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), "Not Find Jobs: " + StrUtil.join(",", collect)));
}
return result;
}
@@ -272,8 +261,8 @@ public class JobServiceImp implements JobService, EnvironmentAware {
}
return resultList;
}
- throw new BusinessException(ResultStatusEnum.SERVER_ERROR.getCode(), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(),
- String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(), "Get job status error: " + statusList));
+ throw new BusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), "Get job status error: " + statusList));
}
@Override
@@ -286,17 +275,22 @@ public class JobServiceImp implements JobService, EnvironmentAware {
return buildLongTermResult(id);
}
-
- private Map<String, Object> buildJobStatus(List<Object> list) {
+ // Maps a saved-query DB row (column map) to the API job-status shape: is_done is
+ // derived from state == "DONE", done_progress is capped at 1.0, is_canceled is the
+ // negation of is_valid, start/end times are formatted as UTC strings when present,
+ // and HATEOAS-style status/result links are attached.
+ private Map<String, Object> buildJobStatus(Map<String, Object> data) {
Map<String, Object> job = Maps.newLinkedHashMap();
- job.put(JobConfig.JOB_ID, list.get(8));
- job.put("is_done", "DONE".equalsIgnoreCase(String.valueOf(list.get(0))));
- job.put("done_progress", Math.min(Double.parseDouble(String.valueOf(list.get(1))), 1));
- job.put("is_canceled", !BooleanUtil.toBoolean(String.valueOf(list.get(3))));
- job.put("is_failed", BooleanUtil.toBoolean(String.valueOf(list.get(2))));
- job.put("start_time", list.get(4));
- job.put("end_time", list.get(5));
- job.put("count", list.get(6));
+ job.put(JobConfig.JOB_ID, data.get("job_id"));
+ job.put(JobConfig.IS_DONE, "DONE".equalsIgnoreCase(String.valueOf(data.get("state"))));
+ job.put(JobConfig.DONE_PROGRESS, Math.min(Double.parseDouble(String.valueOf(data.get("done_progress"))), 1.0));
+ // is_valid = 0 (see cancelSavedQuery) maps to is_canceled = true.
+ job.put(JobConfig.IS_CANCELED, !BooleanUtil.toBoolean(String.valueOf(data.get("is_valid"))));
+ job.put(JobConfig.IS_FAILED, BooleanUtil.toBoolean(String.valueOf(data.get("is_failed"))));
+ job.put(JobConfig.REASON, data.get("result_message"));
+ job.put(JobConfig.START_TIME, Optional.ofNullable(data.get("start_time")).map(s -> DateUtils.convertTimestampToString(Long.parseLong(data.get("start_time").toString()), DatePattern.UTC_PATTERN))
+ .orElse(null));
+ job.put(JobConfig.END_TIME, Optional.ofNullable(data.get("end_time")).map(s -> DateUtils.convertTimestampToString(Long.parseLong(data.get("end_time").toString()), DatePattern.UTC_PATTERN))
+ .orElse(null));
+ Map<String, Object> links = Maps.newLinkedHashMap();
+ links.put(JobConfig.LINKS_STATUS, "/v1/query/job/" + job.get(JobConfig.JOB_ID));
+ links.put(JobConfig.LINKS_RESULT, "/v1/query/job/" + job.get(JobConfig.JOB_ID) + "/result");
+ job.put(JobConfig.LINKS, links);
return job;
}
@@ -337,7 +331,7 @@ public class JobServiceImp implements JobService, EnvironmentAware {
job.put(JobConfig.DONE_PROGRESS, StringUtil.isEmpty(map.get(JobConfig.DONE_PROGRESS)) ? 0 : map.get(JobConfig.DONE_PROGRESS));
job.put(JobConfig.IS_CANCELED, StringUtil.isEmpty(map.get(JobConfig.IS_CANCELED)) ? false : map.get(JobConfig.IS_CANCELED));
job.put(JobConfig.IS_FAILED, StringUtil.isEmpty(map.get(JobConfig.IS_FAILED)) ? false : map.get(JobConfig.IS_FAILED));
- return BaseResultGenerator.success(statistics, job, meta, resultList);
+ return BaseResultGenerator.success(statistics, job, null, meta, resultList);
}
private BaseResult<List<Map<String, Object>>> buildResultFieldDiscovery(String id) {
@@ -375,15 +369,15 @@ public class JobServiceImp implements JobService, EnvironmentAware {
job.put(JobConfig.DONE_PROGRESS, StringUtil.isEmpty(map.get(JobConfig.DONE_PROGRESS)) ? 0 : map.get(JobConfig.DONE_PROGRESS));
job.put(JobConfig.IS_CANCELED, StringUtil.isEmpty(map.get(JobConfig.IS_CANCELED)) ? false : map.get(JobConfig.IS_CANCELED));
job.put(JobConfig.IS_FAILED, StringUtil.isEmpty(map.get(JobConfig.IS_FAILED)) ? false : map.get(JobConfig.IS_FAILED));
- return BaseResultGenerator.success(statistics, job, null, resultList);
+ return BaseResultGenerator.success(statistics, job, OutputMode.JSON.getValue(), null, resultList);
}
private boolean isExist(String id) {
String sql = String.format(Objects.requireNonNull(env.getProperty("JOB_DETAIL")), id);
- BaseResult baseResult = queryService.executeQuery(QueryProfile.builder().query(sql).build());
+ BaseResult baseResult = sqlSyncQueryService.executeQuery(SQLQueryContext.builder().originalSQL(sql).build());
if (!baseResult.isSuccess()) {
log.error("Query job status error: {}", baseResult.getMessage());
- throw new QGWBusinessException(ResultStatusEnum.SERVER_ERROR.getCode(), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(), "Query job status error: " + baseResult.getMessage());
+ throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), "Query job status error: " + baseResult.getMessage());
}
List<Map<String, Object>> data = (List<Map<String, Object>>) baseResult.getData();
Map<String, Object> map;
@@ -403,9 +397,9 @@ public class JobServiceImp implements JobService, EnvironmentAware {
property.put(JobConfig.JOB_PROPERTY_TYPE, JobConfig.FIELD_DISCOVERY);
List<String> fields = (List<String>) body.get(JobConfig.KEY_CUSTOM_FIELD_DISCOVERY_FIELDS);
List<String> interim = Lists.newArrayList(fields);
- BaseResult baseResult = queryService.executeQuery(QueryProfile.builder().query(env.getProperty("JOB_GET_FIELD_DISCOVERY_ALL_COLUMN")).option(QueryOptionEnum.REAL_TIME.getValue()).build());
+ BaseResult baseResult = sqlSyncQueryService.executeQuery(SQLQueryContext.builder().originalSQL(env.getProperty("JOB_GET_FIELD_DISCOVERY_ALL_COLUMN")).option(QueryOption.REAL_TIME.getValue()).build());
if (!baseResult.isSuccess()) {
- throw new QGWBusinessException(ResultStatusEnum.SERVER_ERROR.getCode(), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(),
+ throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
"Init job error: get field_discovery all column error: ".concat(baseResult.getMessage()));
}
List<Map<String, Object>> meta = (List<Map<String, Object>>) baseResult.getMeta();
@@ -526,8 +520,8 @@ public class JobServiceImp implements JobService, EnvironmentAware {
property.put(JobConfig.JOB_PROPERTY_K_OPTION, JobConfig.JOB_PROPERTY_V_OPTION_TOP);
}
} catch (JSQLParserException | RuntimeException e) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), "long_term job init error: ".concat(e.getMessage())));
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), "long_term job init error: ".concat(e.getMessage())));
}
}
@@ -569,32 +563,32 @@ public class JobServiceImp implements JobService, EnvironmentAware {
private PlainSelect checkParam(String sql, net.sf.jsqlparser.statement.Statement statement) {
SelectBody selectBody;
if (!(statement instanceof Select) || !((selectBody = ((Select) statement).getSelectBody()) instanceof PlainSelect)) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
- String.format(QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getMessage(), "long_term job init error, not support, SQL is: " + sql));
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getMessage(), "long_term job init error, not support, SQL is: " + sql));
}
PlainSelect plainSelect = (PlainSelect) selectBody;
PlainSelect plainSelectInner = SQLHelper.getSelectInner(sql);
if (StringUtil.isEmpty(plainSelect.getGroupBy()) && StringUtil.isEmpty(plainSelectInner.getGroupBy())) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
- String.format(QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getMessage(), "long_term job init error, not support non-aggregate SQL is: " + sql));
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getMessage(), "long_term job init error, not support non-aggregate SQL is: " + sql));
}
return plainSelect;
}
private List<Map<String, Object>> validationData(String id) {
- BaseResult baseResult = queryService.executeQuery(
- QueryProfile.builder().option(QueryOptionEnum.REAL_TIME.getValue())
- .query(String.format(Objects.requireNonNull(env.getProperty("JOB_RESULT")), id))
+ BaseResult baseResult = sqlSyncQueryService.executeQuery(
+ SQLQueryContext.builder().option(QueryOption.REAL_TIME.getValue())
+ .originalSQL(String.format(Objects.requireNonNull(env.getProperty("JOB_RESULT")), id))
.build());
if (!baseResult.isSuccess()) {
- throw new QGWBusinessException(ResultStatusEnum.SERVER_ERROR.getCode(), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(),
- String.format(ResultCodeEnum.UNKNOWN_EXCEPTION.getMessage(), "get Job Result Error: ".concat(baseResult.getMessage())));
+ throw new QGWBusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.UNKNOWN_EXCEPTION.getMessage(), "get Job Result Error: ".concat(baseResult.getMessage())));
}
List<Map<String, Object>> data = (List<Map<String, Object>>) baseResult.getData();
if (StringUtil.isEmpty(data) || StringUtil.isEmpty(data.get(0).get(JobConfig.JOB_PROPERTY))) {
- throw new QGWBusinessException(ResultStatusEnum.NOT_FOUND.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), "Not Found This Job: ".concat(id)));
+ throw new QGWBusinessException(HttpStatusCodeEnum.NOT_FOUND.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), "Not Found This Job: ".concat(id)));
}
return data;
}
@@ -606,36 +600,24 @@ public class JobServiceImp implements JobService, EnvironmentAware {
private void validateAdHocQuery(HashMap<String, Object> body) {
Object type = body.get(JobConfig.KEY_QUERY_TYPE);
if (JobConfig.FIELD_DISCOVERY.equals(type)) {
- validCustomParamsOfFieldDiscovery(body);
- List<String> fields = (List<String>) body.get(JobConfig.KEY_CUSTOM_FIELD_DISCOVERY_FIELDS);
- body.put(JobConfig.KEY_CUSTOM_FIELD_DISCOVERY_FIELDS, fields.stream().distinct().collect(Collectors.toList()));
+ //validCustomParamsOfFieldDiscovery(body);
+ //List<String> fields = (List<String>) body.get(JobConfig.KEY_CUSTOM_FIELD_DISCOVERY_FIELDS);
+ //body.put(JobConfig.KEY_CUSTOM_FIELD_DISCOVERY_FIELDS, fields.stream().distinct().collect(Collectors.toList()));
syntaxValidationFieldDiscovery(body);
} else if (JobConfig.STATISTICS.equals(type)) {
syntaxValidationStatistics(body);
}
}
- private void validCustomParamsOfFieldDiscovery(HashMap<String, Object> body) {
- Object metric = body.get(JobConfig.KEY_CUSTOM_FIELD_DISCOVERY_METRIC);
- if (StringUtil.isEmpty(metric)) {
- return;
- }
- boolean validMetric = MetricType.isValid(String.valueOf(metric));
- boolean validFn = MetricFunction.isValid(String.valueOf(body.get(JobConfig.KEY_CUSTOM_FIELD_DISCOVERY_METRIC_FN)));
- if (!validMetric || !validFn) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.QUERY_CUSTOM_FIELD_DISCOVERY_ERROR));
- }
- }
private void validateReport(HashMap<String, Object> map) {
- QueryProfile queryProfile = QueryProfile.builder().query(String.valueOf(map.get(JobConfig.KEY_CUSTOM_SAVED_QUERY_SQL)))
- .option(QueryOptionEnum.SYNTAX_PARSE.getValue())
+ SQLQueryContext queryProfile = SQLQueryContext.builder().originalSQL(String.valueOf(map.get(JobConfig.KEY_CUSTOM_SAVED_QUERY_SQL)))
+ .option(QueryOption.SYNTAX_PARSE.getValue())
.build();
- BaseResult baseResult = queryService.executeQuery(queryProfile);
+ BaseResult baseResult = sqlSyncQueryService.executeQuery(queryProfile);
if (!baseResult.isSuccess()) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), baseResult.getMessage()));
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), baseResult.getMessage()));
}
}
@@ -652,20 +634,20 @@ public class JobServiceImp implements JobService, EnvironmentAware {
List field = (List) map.get("custom.field_discovery.fields");
Object filter = map.get("custom.field_discovery.filter");
String validationSql = String.format(syntaxValidation, String.join(",", field), map.get("query.data_source"), StringUtil.isEmpty(filter) ? " 1 = 1" : filter);
- QueryProfile queryProfile = QueryProfile.builder().query(validationSql).option(QueryOptionEnum.SYNTAX_VALIDATION.getValue()).build();
- BaseResult baseResult = queryService.executeQuery(queryProfile);
+ SQLQueryContext queryProfile = SQLQueryContext.builder().originalSQL(validationSql).option(QueryOption.SYNTAX_VALIDATION.getValue()).build();
+ BaseResult baseResult = sqlSyncQueryService.executeQuery(queryProfile);
if (!baseResult.isSuccess()) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), baseResult.getMessage()));
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), baseResult.getMessage()));
}
}
private void syntaxValidationStatistics(HashMap<String, Object> body) {
String sql = String.valueOf(body.get(JobConfig.KEY_CUSTOM_STATISTICS_SQL));
- BaseResult baseResult = queryService.executeQuery(QueryProfile.builder().query(sql).option(QueryOptionEnum.SYNTAX_VALIDATION.getValue()).build());
+ BaseResult baseResult = sqlSyncQueryService.executeQuery(SQLQueryContext.builder().originalSQL(sql).option(QueryOption.SYNTAX_VALIDATION.getValue()).build());
if (!baseResult.isSuccess()) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), baseResult.getMessage()));
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), baseResult.getMessage()));
}
}
@@ -674,7 +656,7 @@ public class JobServiceImp implements JobService, EnvironmentAware {
java.sql.Statement statement = null;
int i;
try {
- log.info("{} engine execute update: {}", DBTypeEnum.HBASE.getValue(), sql);
+ log.info("{} engine execute update: {}", DBEngineType.HBASE.getValue(), sql);
PROPERTIES.setProperty("hbase.rpc.timeout", hBaseAPISource.getRpcTimeout());
PROPERTIES.setProperty("hbase.client.scanner.timeout.period", hBaseAPISource.getRpcTimeout());
PROPERTIES.setProperty("phoenix.query.timeoutMs", hBaseAPISource.getRpcTimeout());
@@ -684,18 +666,17 @@ public class JobServiceImp implements JobService, EnvironmentAware {
i = statement.executeUpdate(sql);
conn.commit();
} catch (RuntimeException | SQLException e) {
- log.error("{} engine error: {}", DBTypeEnum.HBASE.getValue(), e);
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getCode(), e.getMessage());
+ log.error("{} engine error: {}", DBEngineType.HBASE.getValue(), e);
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getCode(), e.getMessage());
} finally {
close(statement, conn);
}
return i == 1;
}
- private Map<String, Object> buildJobResource(String id, String queryType) {
+ private Map<String, Object> buildJobResource(String id) {
Map<String, Object> result = Maps.newHashMap();
result.put(JobConfig.JOB_ID, id);
- result.put(JobConfig.JOB_RESOURCE_PATH, id.concat("/").concat(queryType));
return result;
}
@@ -703,7 +684,7 @@ public class JobServiceImp implements JobService, EnvironmentAware {
Connection conn = null;
java.sql.Statement statement = null;
try {
- log.info("DB Engine is :{}, Execute is: {}", DBTypeEnum.HBASE.getValue(), sql);
+ log.info("DB Engine is :{}, Execute is: {}", DBEngineType.HBASE.getValue(), sql);
PROPERTIES.setProperty("hbase.rpc.timeout", hBaseAPISource.getRpcTimeout());
PROPERTIES.setProperty("hbase.client.scanner.timeout.period", hBaseAPISource.getRpcTimeout());
PROPERTIES.setProperty("phoenix.query.timeoutMs", hBaseAPISource.getRpcTimeout());
@@ -713,9 +694,9 @@ public class JobServiceImp implements JobService, EnvironmentAware {
statement.execute(sql);
conn.commit();
} catch (RuntimeException | SQLException e) {
- log.error("DB Engine is :{}, Query is error: {}", DBTypeEnum.HBASE.getValue(), e);
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
- String.format(QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getMessage(), e.getMessage()));
+ log.error("DB Engine is :{}, Query is error: {}", DBEngineType.HBASE.getValue(), e);
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getMessage(), e.getMessage()));
} finally {
diff --git a/src/main/java/com/mesalab/services/service/impl/KBServiceImp.java b/src/main/java/com/mesalab/services/service/impl/KBServiceImp.java
new file mode 100644
index 00000000..465de54d
--- /dev/null
+++ b/src/main/java/com/mesalab/services/service/impl/KBServiceImp.java
@@ -0,0 +1,362 @@
+package com.mesalab.services.service.impl;
+
+import cn.hutool.core.collection.CollectionUtil;
+import cn.hutool.core.map.MapUtil;
+import cn.hutool.core.util.StrUtil;
+import cn.hutool.core.util.XmlUtil;
+import cn.hutool.crypto.digest.DigestUtil;
+import cn.hutool.log.Log;
+import cn.hutool.log.LogFactory;
+import com.geedgenetworks.utils.DateUtils;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.mesalab.common.entity.BaseResult;
+import com.mesalab.common.entity.BaseResultGenerator;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
+import com.mesalab.common.exception.BusinessException;
+import com.mesalab.common.exception.CommonErrorCode;
+import com.mesalab.common.utils.RandomUtil;
+import com.mesalab.qgw.model.basic.HttpConfig;
+import com.mesalab.qgw.model.basic.HttpResponseResult;
+import com.mesalab.qgw.service.impl.HttpClientServiceV2;
+import com.mesalab.services.common.entity.KnowledgeBaseRequest;
+import com.mesalab.services.common.entity.KnowledgeConstant;
+import com.mesalab.services.configuration.HosConfig;
+import com.mesalab.services.service.KBService;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+import org.springframework.web.multipart.MultipartFile;
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.lang.reflect.Field;
+import java.util.*;
+import java.util.stream.Collectors;
+
+@Service("kbService")
+public class KBServiceImp implements KBService {
+ private static final Log log = LogFactory.get();
+ @Autowired
+ private HosConfig hosConfig;
+ @Autowired
+ private HttpClientServiceV2 httpClientServiceV2;
+ @Autowired
+ private HttpConfig httpConfig;
+ private static final String X_HOS_METADATA_PREFIX = "x-hos-meta-";
+ private static final String INITIAL_VERSION = "1.0";
+ private static final String NAME = "Name";
+ private static final String CATEGORY = "Category";
+ private static final String ORIGIN_URL = "OriginUrl";
+ private static final String GENERATED_TIME = "GeneratedTime";
+ private static final String LAST_UPDATE_TIME = "LastUpdateTime";
+ private static final String FORMAT = "Format";
+ private static final String SHA_256 = "Sha256";
+ private static final String VERSION = "Version";
+ private static final String IS_VALID = "IsValid";
+ private static final String KEY = "Key";
+ private static final String SIZE = "Size";
+ private static final String USER_DEFINED_META = "UserDefinedMeta";
+ private static final String KEY_COUNT = "KeyCount";
+ private static final String CONTENTS = "Contents";
+ private static final String KB_ID = "KbId";
+
+ @Override
+ public BaseResult publishKnowledge(MultipartFile file, KnowledgeBaseRequest request) {
+ List<Map<String, Object>> dataList;
+ if (StrUtil.isBlank(request.getKbId()) && StrUtil.isBlank(request.getVersion())) {
+ request.setKbId(RandomUtil.getUUID());
+ request.setVersion(INITIAL_VERSION);
+ } else {
+ if (StrUtil.isBlankIfStr(request.getVersion())) {
+ request.setVersion(generateLatestVersion(request));
+ }
+ }
+ List<String> modifiedVersionList = Lists.newArrayList();
+ publishFile(file, request);
+ modifiedVersionList.add(request.getVersion());
+ generateLatestFile(file, request);
+ modifiedVersionList.add(KnowledgeConstant.LATEST);
+ String getFileListUrl = hosConfig.getUri().concat("/").concat(hosConfig.getBucket()).concat("?").concat("prefix").concat("=").concat(request.getKbId());
+ dataList = filterResult("version", modifiedVersionList, generateResult(getContentList(getFileListUrl)));
+ return BaseResultGenerator.success(dataList);
+ }
+
+
+ @Override
+ public BaseResult updateKnowledge(MultipartFile file, KnowledgeBaseRequest request) {
+ List<Map<String, Object>> dataList = Lists.newArrayList();
+ Map<String, String> headers = buildHeaderMap(request);
+ publishFile(file, request);
+ dataList.add(generateSingleResult(request, headers));
+ return BaseResultGenerator.success(dataList);
+ }
+
+ @Override
+ public BaseResult deleteKnowledge(KnowledgeBaseRequest request) {
+ List<Map<String, Object>> dataList = Lists.newArrayList();
+ if (StrUtil.isBlankIfStr(request.getVersion())) {
+ String getFileListUrl = hosConfig.getUri().concat("/").concat(hosConfig.getBucket()).concat("?").concat("prefix").concat("=").concat(request.getKbId());
+ dataList = generateResult(getContentList(getFileListUrl));
+ deleteFileById(request, getContentList(getFileListUrl));
+ } else {
+ deleteFile(request);
+ dataList.add(generateSingleResult(request, Maps.newHashMap()));
+ }
+ return BaseResultGenerator.success(dataList);
+ }
+
+ @Override
+ public BaseResult updateStatus(KnowledgeBaseRequest request) {
+ List<Map<String, Object>> dataList = Lists.newArrayList();
+ if (StrUtil.isBlankIfStr(request.getVersion())) {
+ String getFileListUrl = hosConfig.getUri().concat("/").concat(hosConfig.getBucket()).concat("?").concat("prefix").concat("=").concat(request.getKbId());
+ dataList = generateResult(getContentList(getFileListUrl));
+ updateFileStatusById(request, getContentList(getFileListUrl));
+ } else {
+ updateFileStatus(request, buildHeaderMap(request));
+ dataList.add(generateSingleResult(request, buildHeaderMap(request)));
+ }
+ return BaseResultGenerator.success(dataList);
+ }
+
+
+ @Override
+ public BaseResult getList(String kbId, String category) {
+ List<Map<String, Object>> dataList;
+ StringBuilder getListUrl = new StringBuilder();
+ if (StrUtil.isNotBlank(kbId)) {
+ getListUrl.append(hosConfig.getUri()).append("/").append(hosConfig.getBucket()).append("?prefix=").append(kbId).append("_");
+ if (StrUtil.isNotBlank(category)) {
+ getListUrl.append("&").append(X_HOS_METADATA_PREFIX).append("category=").append(category);
+ }
+ dataList = filterResult("kb_id", Arrays.asList(kbId), generateFileListResult(getContentList(getListUrl.toString())));
+ } else {
+ getListUrl.append(hosConfig.getUri()).append("/").append(hosConfig.getBucket());
+ if (StrUtil.isNotBlank(category)) {
+ getListUrl.append("?").append(X_HOS_METADATA_PREFIX).append("category=").append(category);
+ }
+ dataList = generateFileListResult(getContentList(getListUrl.toString()));
+ }
+ return BaseResultGenerator.success(dataList);
+ }
+
+ private void generateLatestFile(MultipartFile file, KnowledgeBaseRequest request) {
+ request.setVersion(KnowledgeConstant.LATEST);
+ String currentDate = DateUtils.getCurrentDate("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
+ request.setOriginUrl(hosConfig.getUri().concat("/").concat(hosConfig.getBucket()).concat("/").concat(request.getKbId().concat("_").concat("latest")));
+ request.setGeneratedTime(StrUtil.isBlank(request.getGeneratedTime()) ? currentDate : request.getGeneratedTime());
+ request.setLastUpdateTime(StrUtil.isBlank(request.getLastUpdateTime()) ? currentDate : request.getLastUpdateTime());
+ publishFile(file, request);
+ }
+
+ private String generateLatestVersion(KnowledgeBaseRequest request) {
+ String getLatestFileUrl = hosConfig.getUri().concat("/").concat(hosConfig.getBucket()).concat("?").concat("prefix").concat("=").concat(request.getKbId());
+ List<Map<String, String>> contentList = getContentList(getLatestFileUrl);
+ int majorMax = 1;
+ int minorMax = 0;
+ if (CollectionUtil.isNotEmpty(contentList)) {
+ for (Map contents : contentList) {
+ Map<String, String> udfMeta = (Map<String, String>) contents.get(USER_DEFINED_META);
+ if (MapUtil.isNotEmpty(udfMeta)) {
+ if (!String.valueOf(udfMeta.get(VERSION)).equals(KnowledgeConstant.LATEST)) {
+ String[] versionParts = udfMeta.get(VERSION).split("\\.");
+ int major = Integer.parseInt(versionParts[0]);
+ int minor = Integer.parseInt(versionParts[1]);
+ majorMax = Math.max(majorMax, major);
+ minorMax = Math.max(minorMax, minor);
+ }
+ }
+ }
+ }
+ return incrementVersion(majorMax + "." + minorMax);
+ }
+
+ private List<Map<String, String>> getContentList(String getLatestFileUrl) {
+ List<Map<String, String>> contentList = Lists.newArrayList();
+ Map<String, String> headers = new HashMap<>();
+ headers.put(KnowledgeConstant.TOKEN, hosConfig.getToken());
+ HttpResponseResult httpResponseResult = httpClientServiceV2.get(getLatestFileUrl, 60000, headers);
+ Map<String, Object> resultMap = XmlUtil.xmlToMap(String.valueOf(httpResponseResult.getResponseBody()));
+ int count = Integer.parseInt(String.valueOf(resultMap.get(KEY_COUNT)));
+ switch (count) {
+ case 0:
+ return contentList;
+ case 1:
+ contentList.add((Map<String, String>) resultMap.get(CONTENTS));
+ break;
+ default:
+ contentList = (List<Map<String, String>>) resultMap.get(CONTENTS);
+ }
+ return contentList;
+ }
+
+
+ private Map<String, String> buildHeaderMap(KnowledgeBaseRequest request) {
+ Map<String, String> headers = Maps.newHashMap();
+ String currentDate = DateUtils.getCurrentDate("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
+ request.setOriginUrl(StrUtil.isBlank(request.getOriginUrl()) ? hosConfig.getUri().concat("/").concat(hosConfig.getBucket()).concat("/").concat(request.getKbId().concat("_").concat(request.getVersion())) : request.getOriginUrl());
+ request.setGeneratedTime(StrUtil.isBlank(request.getGeneratedTime()) ? currentDate : request.getGeneratedTime());
+ request.setLastUpdateTime(StrUtil.isBlank(request.getLastUpdateTime()) ? currentDate : request.getLastUpdateTime());
+ headers.put(KnowledgeConstant.TOKEN, hosConfig.getToken());
+ Field[] declaredFields = request.getClass().getDeclaredFields();
+ for (Field field : declaredFields) {
+ field.setAccessible(true);
+ String fieldName = field.getName();
+ if (!fieldName.equals("kb_id")) {
+ try {
+ Object o = field.get(request);
+ if (o != null) {
+ headers.put(X_HOS_METADATA_PREFIX.concat(StrUtil.toSymbolCase(fieldName, '-')), o.toString());
+ }
+ } catch (IllegalAccessException e) {
+ throw new RuntimeException(e);
+ }
+ }
+ }
+ return headers;
+ }
+
+ private void publishFile(MultipartFile file, KnowledgeBaseRequest request) {
+ String putFileUrl = hosConfig.getUri().concat("/").concat(hosConfig.getBucket()).concat("/").concat(request.getKbId().concat("_").concat(request.getVersion()));
+ try {
+ request.setSha256(DigestUtil.sha256Hex(file.getBytes()));
+ Map<String, String> headers = buildHeaderMap(request);
+ InputStream in = new ByteArrayInputStream(file.getBytes());
+ HttpResponseResult put = httpClientServiceV2.put(putFileUrl, in, httpConfig.getServerResponseTimeOut(), headers);
+ if (HttpStatusCodeEnum.SUCCESS.getCode() != put.getStatusCode()) {
+ log.error("knowledge base file upload failed.");
+ throw new BusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), "knowledge base file upload failed.");
+ }
+ } catch (IOException e) {
+ throw new BusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), e.getMessage());
+ }
+ }
+
+ private void deleteFileById(KnowledgeBaseRequest request, List<Map<String, String>> contentList) {
+ for (Map contents : contentList) {
+ Map<String, String> udfMeta = (Map<String, String>) contents.get(USER_DEFINED_META);
+ request.setVersion(String.valueOf(udfMeta.get(VERSION)));
+ deleteFile(request);
+ }
+ }
+
+ private void deleteFile(KnowledgeBaseRequest request) {
+ String fileName = request.getKbId().concat("_").concat(request.getVersion());
+ String deleteFileUrl = hosConfig.getUri().concat("/").concat(hosConfig.getBucket()).concat("/").concat(fileName);
+ Map<String, String> headers = Maps.newHashMap();
+ headers.put(KnowledgeConstant.TOKEN, hosConfig.getToken());
+ try {
+ httpClientServiceV2.delete(deleteFileUrl, httpConfig.getServerRequestTimeOut(), headers);
+ } catch (RuntimeException e) {
+ log.error("knowledge base file delete error: {}", e.getMessage());
+ throw new BusinessException(e.getMessage());
+ }
+ }
+
+ private void updateFileStatusById(KnowledgeBaseRequest request, List<Map<String, String>> contentList) {
+ for (Map contents : contentList) {
+ Map<String, String> udfMeta = (Map<String, String>) contents.get(USER_DEFINED_META);
+ request.setVersion(String.valueOf(udfMeta.get(VERSION)));
+ updateFileStatus(request, buildHeaderMap(request));
+ }
+ }
+
+ private void updateFileStatus(KnowledgeBaseRequest request, Map<String, String> headers) {
+ String fileName = request.getKbId().concat("_").concat(request.getVersion());
+ String updateStatusFileUrl = hosConfig.getUri().concat("/").concat(hosConfig.getBucket()).concat("/").concat(fileName).concat("?metadata");
+ HttpResponseResult put = httpClientServiceV2.put(updateStatusFileUrl, null, httpConfig.getServerResponseTimeOut(), headers);
+ if (put.getStatusCode() != HttpStatusCodeEnum.SUCCESS.getCode()) {
+ log.error("knowledge base file update status failed.");
+ throw new BusinessException(HttpStatusCodeEnum.SERVER_ERROR.getCode(), CommonErrorCode.UNKNOWN_EXCEPTION.getCode(), "knowledge base file update status failed.");
+ }
+ }
+
+ private List<Map<String, Object>> generateResult(List<Map<String, String>> contentList) {
+ List<Map<String, Object>> dataList = Lists.newArrayList();
+ if (CollectionUtil.isNotEmpty(contentList)) {
+ for (Map contents : contentList) {
+ Map<String, Object> map1 = Maps.newHashMap();
+ Map<String, String> udfMeta = (Map<String, String>) contents.get(USER_DEFINED_META);
+ map1.put("version", String.valueOf(udfMeta.get(VERSION)));
+ map1.put("url", String.valueOf(udfMeta.get(ORIGIN_URL)));
+ map1.put("generated_time", String.valueOf(udfMeta.get(GENERATED_TIME)));
+ map1.put("last_update_time", String.valueOf(udfMeta.get(LAST_UPDATE_TIME)));
+ map1.put("kb_id", String.valueOf(udfMeta.get(KB_ID)));
+ dataList.add(map1);
+ }
+ }
+ return dataList;
+ }
+
+ private Map<String, Object> generateSingleResult(KnowledgeBaseRequest request, Map<String, String> headers) {
+ Map<String, Object> dataMap = Maps.newHashMap();
+ String currentDate = DateUtils.getCurrentDate("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
+ dataMap.put("version", request.getVersion());
+ dataMap.put("url", hosConfig.getUri().concat("/").concat(hosConfig.getBucket()).concat("/").concat(request.getKbId().concat("_").concat(request.getVersion())));
+ dataMap.put("generated_time", StrUtil.isNotBlank(headers.get(X_HOS_METADATA_PREFIX.concat("generated-time"))) ? headers.get(X_HOS_METADATA_PREFIX.concat("generated-time")) : currentDate);
+ dataMap.put("last_update_time", StrUtil.isNotBlank(headers.get(X_HOS_METADATA_PREFIX.concat("last-update-time"))) ? headers.get(X_HOS_METADATA_PREFIX.concat("last-update-time")) : currentDate);
+ dataMap.put("kb_id", request.getKbId());
+ return dataMap;
+ }
+
+ private List<Map<String, Object>> generateFileListResult(List<Map<String, String>> contentList) {
+ List<Map<String, Object>> dataList = Lists.newArrayList();
+ if (CollectionUtil.isNotEmpty(contentList)) {
+ for (Map contents : contentList) {
+ Map<String, Object> map1 = Maps.newHashMap();
+ if (MapUtil.isNotEmpty((Map<?, ?>) contents.get(USER_DEFINED_META))) {
+ Map<String, String> udfMeta = (Map<String, String>) contents.get(USER_DEFINED_META);
+ map1.put("kb_id", StrUtil.isBlankIfStr(udfMeta.get(KB_ID)) ? "" : udfMeta.get(KB_ID));
+ map1.put("name", StrUtil.isBlankIfStr(udfMeta.get(NAME)) ? "" : udfMeta.get(NAME));
+ map1.put("category", StrUtil.isBlankIfStr(udfMeta.get(CATEGORY)) ? "" : udfMeta.get(CATEGORY));
+ map1.put("path", StrUtil.isBlankIfStr(udfMeta.get(ORIGIN_URL)) ? "" : udfMeta.get(ORIGIN_URL));
+ map1.put("generated_time", StrUtil.isBlankIfStr(udfMeta.get(GENERATED_TIME)) ? "" : udfMeta.get(GENERATED_TIME));
+ map1.put("last_update_time", StrUtil.isBlankIfStr(udfMeta.get(LAST_UPDATE_TIME)) ? "" : udfMeta.get(LAST_UPDATE_TIME));
+ map1.put("format", StrUtil.isBlankIfStr(udfMeta.get(FORMAT)) ? "" : udfMeta.get(FORMAT));
+ map1.put("sha256", StrUtil.isBlankIfStr(udfMeta.get(SHA_256)) ? "" : udfMeta.get(SHA_256));
+ map1.put("version", StrUtil.isBlankIfStr(udfMeta.get(VERSION)) ? "" : udfMeta.get(VERSION));
+ map1.put("is_valid", StrUtil.isBlankIfStr(udfMeta.get(IS_VALID)) ? 1 : Integer.parseInt(udfMeta.get(IS_VALID)));
+ } else {
+ map1.put("name", "");
+ map1.put("category", "");
+ map1.put("path", "");
+ map1.put("generated_time", "");
+ map1.put("last_update_time", "");
+ map1.put("format", "");
+ map1.put("sha256", "");
+ map1.put("version", "");
+ map1.put("is_valid", 1);
+ map1.put("kb_id", StrUtil.subBefore(String.valueOf(contents.get(KEY)), "_", true));
+ }
+ map1.put("size", StrUtil.isBlankIfStr(contents.get(SIZE)) ? 0 : Integer.parseInt(String.valueOf(contents.get(SIZE))));
+ dataList.add(map1);
+ }
+ }
+ return dataList;
+ }
+
+ private List<Map<String, Object>> filterResult(String filterKey, List<String> filterValues, List<Map<String, Object>> list) {
+ return list.stream()
+ .filter(map -> filterValues.contains(map.get(filterKey)))
+ .collect(Collectors.toList());
+ }
+
+ private String incrementVersion(String version) {
+ String[] parts = version.split("\\.");
+ int[] nums = new int[parts.length];
+ for (int i = 0; i < parts.length; i++) {
+ nums[i] = Integer.parseInt(parts[i]);
+ }
+
+ int idx = nums.length - 1;
+ nums[idx]++;
+
+ StringBuilder sb = new StringBuilder();
+ for (int num : nums) {
+ sb.append(num);
+ sb.append('.');
+ }
+ sb.setLength(sb.length() - 1);
+ return sb.toString();
+ }
+}
diff --git a/src/main/java/com/mesalab/services/service/impl/KnowledgeBaseServiceImpl.java b/src/main/java/com/mesalab/services/service/impl/KnowledgeBaseServiceImpl.java
deleted file mode 100644
index 09a28305..00000000
--- a/src/main/java/com/mesalab/services/service/impl/KnowledgeBaseServiceImpl.java
+++ /dev/null
@@ -1,516 +0,0 @@
-package com.mesalab.services.service.impl;
-
-import cn.hutool.core.collection.CollectionUtil;
-import cn.hutool.core.io.FileUtil;
-import cn.hutool.core.io.file.FileMode;
-import cn.hutool.core.text.StrFormatter;
-import cn.hutool.core.util.NumberUtil;
-import cn.hutool.core.util.XmlUtil;
-import cn.hutool.crypto.digest.DigestUtil;
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import com.alibaba.fastjson2.JSON;
-import com.alibaba.nacos.api.config.ConfigService;
-import com.alibaba.nacos.api.exception.NacosException;
-import com.google.common.collect.Maps;
-import com.mesalab.common.entity.BaseResult;
-import com.mesalab.common.entity.BaseResultGenerator;
-import com.mesalab.common.enums.ResultCodeEnum;
-import com.mesalab.common.enums.ResultStatusEnum;
-import com.mesalab.common.exception.BusinessException;
-import com.mesalab.common.nacos.NacosConst;
-import com.mesalab.qgw.model.basic.HttpConfig;
-import com.mesalab.qgw.service.impl.HttpClientService;
-import com.mesalab.services.common.entity.KnowledgeBase;
-import com.mesalab.services.common.entity.KnowledgeConstant;
-import com.mesalab.services.configuration.HosConfig;
-import com.mesalab.services.service.KnowledgeBaseService;
-import com.geedgenetworks.utils.DateUtils;
-import com.geedgenetworks.utils.StringUtil;
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.curator.framework.CuratorFramework;
-import org.apache.curator.framework.recipes.locks.InterProcessMutex;
-import org.apache.http.Header;
-import org.apache.http.HttpResponse;
-import org.apache.http.message.BasicHeader;
-import org.springframework.beans.BeanUtils;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Service;
-import org.springframework.web.multipart.MultipartFile;
-import org.w3c.dom.Document;
-import org.w3c.dom.Element;
-
-import javax.xml.xpath.XPathConstants;
-import java.io.*;
-import java.util.*;
-import java.util.concurrent.TimeUnit;
-import java.util.stream.Collectors;
-
-
-@Service
-public class KnowledgeBaseServiceImpl implements KnowledgeBaseService {
-
- private static final Log log = LogFactory.get();
-
- @Autowired
- private ConfigService pubConfigService;
-
- @Autowired
- private HosConfig hosConfig;
-
- @Autowired
- private HttpClientService httpClientService;
-
- @Autowired
- private HttpConfig httpConfig;
-
- @Autowired
- private CuratorFramework client;
-
- private static final String ZK_LOCK_PATH = "/zkLock";
- private static final String HOS_ETAG = "ETag";
- private static final String HOS_PART = "Part";
- private static final String HOS_PART_NUM = "PartNumber";
- private static final String HOS_COMPLETE_MULTIPART_UPLOAD = "CompleteMultipartUpload";
-
-
- @Override
- public BaseResult queryKnowledge() {
- List<KnowledgeBase> knowledgeBaseList = queryKnowledgeBaseList();
- return BaseResultGenerator.success(knowledgeBaseList);
- }
-
- @Override
- public BaseResult publishKnowledge(MultipartFile multipartFile, KnowledgeBase knowledge) throws Exception {
- InterProcessMutex lock = new InterProcessMutex(client, ZK_LOCK_PATH);
- try {
- String id = null;
- if (lock.acquire(10, TimeUnit.SECONDS)) {
- List<KnowledgeBase> knowledgeBaseList = queryKnowledgeBaseList();
- String expirePath = buildKnowledgeBaseList(multipartFile, knowledge, knowledgeBaseList);
- saveKnowledgeBaseList(knowledgeBaseList);
- id = knowledge.getId();
- if (StringUtil.isNotEmpty(expirePath)) {
- Header header = new BasicHeader(KnowledgeConstant.TOKEN, hosConfig.getToken());
- httpClientService.httpDelete(expirePath, httpConfig.getServerResponseTimeOut(), header);
- }
- }
- Map<String, Object> map = Maps.newHashMap();
- map.put("id", id);
- return BaseResultGenerator.success(map);
- } catch (BusinessException e) {
- log.error("publish knowledge error: {}", e.getMessage());
- throw new BusinessException(e.getStatus(), e.getCode(), e.getMessage());
- } finally {
- lock.release();
- }
- }
-
- @Override
- public BaseResult updateKnowledge(MultipartFile file, KnowledgeBase knowledgeBase) throws Exception {
- InterProcessMutex lock = new InterProcessMutex(client, ZK_LOCK_PATH);
- try {
- // 尝试获取锁,最长等待10s,超时放弃获取
- if (lock.acquire(10, TimeUnit.SECONDS)) {
- List<KnowledgeBase> knowledgeBaseList = queryKnowledgeBaseList();
- String expirePath = null;
- String newPath = null;
- String fileSha256 = DigestUtil.sha256Hex(file.getBytes());
- String currentDate = DateUtils.getCurrentDate("yyyy-MM-dd'T'HH:mm:ssZ");
- for (KnowledgeBase knowledgeBaseRep : knowledgeBaseList) {
- if (StringUtil.equals(knowledgeBase.getId(), knowledgeBaseRep.getId())) {
- if (StringUtil.isBlank(newPath)) {
- newPath = uploadFile(file);
- expirePath = knowledgeBaseRep.getPath();
- }
- if (StringUtil.isNotBlank(knowledgeBase.getName())) {
- knowledgeBaseRep.setName(knowledgeBase.getName());
- }
- if (StringUtil.isNotBlank(knowledgeBase.getFormat())) {
- knowledgeBaseRep.setFormat(knowledgeBase.getFormat());
- }
- if (StringUtil.isNotBlank(knowledgeBase.getType())) {
- knowledgeBaseRep.setType(knowledgeBase.getType());
- }
- if (StringUtil.isNotEmpty(knowledgeBase.getIsValid())) {
- knowledgeBaseRep.setIsValid(knowledgeBase.getIsValid());
- }
- if (StringUtil.isNotBlank(knowledgeBase.getOriginUrl())) {
- knowledgeBaseRep.setOriginUrl(knowledgeBase.getOriginUrl());
- }
- knowledgeBaseRep.setPath(newPath);
- knowledgeBaseRep.setSha256(fileSha256);
- knowledgeBaseRep.setSize(file.getSize());
- knowledgeBaseRep.setUpdateTime(currentDate);
-
- }
- }
- if (StringUtil.isNotBlank(newPath)) {
- saveKnowledgeBaseList(knowledgeBaseList);
- Header header = new BasicHeader(KnowledgeConstant.TOKEN, hosConfig.getToken());
- httpClientService.httpDelete(expirePath, httpConfig.getServerResponseTimeOut(), header);
- }
- }
- } catch (RuntimeException e) {
- log.error("update knowledge error: {}", e.getMessage());
- throw new BusinessException(e.getMessage());
- } finally {
- lock.release();
- }
- Map<String, Object> map = Maps.newHashMap();
- map.put("id", knowledgeBase.getId());
- return BaseResultGenerator.success(map);
- }
-
- @Override
- public BaseResult deleteKnowledge(String id) throws Exception {
- KnowledgeBase knowledgeBase = null;
- InterProcessMutex lock = new InterProcessMutex(client, ZK_LOCK_PATH);
- try {
- if (lock.acquire(10, TimeUnit.SECONDS)) {
- List<KnowledgeBase> knowledgeBaseList = queryKnowledgeBaseList();
- int originalSize = knowledgeBaseList.size();
- Iterator<KnowledgeBase> iterator = knowledgeBaseList.iterator();
- while (iterator.hasNext()) {
- knowledgeBase = iterator.next();
- if (StringUtil.equals(id, knowledgeBase.getId())) {
- iterator.remove();
- }
- }
- if (originalSize > knowledgeBaseList.size()) {
- saveKnowledgeBaseList(knowledgeBaseList);
- String path = knowledgeBase.getPath();
- Header header = new BasicHeader(KnowledgeConstant.TOKEN, hosConfig.getToken());
- httpClientService.httpDelete(path, httpConfig.getServerResponseTimeOut(), header);
- }
- }
- } catch (RuntimeException e) {
- log.error("delete knowledge error: {}", e.getMessage());
- throw new BusinessException(e.getMessage());
- } finally {
- lock.release();
- }
-
-
- Map map = new HashMap();
- map.put("id", id);
- return BaseResultGenerator.success(map);
- }
-
- @Override
- public BaseResult toggleKnowledge(KnowledgeBase knowledgeBase) throws Exception {
- InterProcessMutex lock = new InterProcessMutex(client, ZK_LOCK_PATH);
- try {
- if (lock.acquire(10, TimeUnit.SECONDS)) {
- List<KnowledgeBase> knowledgeBaseList = queryKnowledgeBaseList();
- for (int i = 0; i < knowledgeBaseList.size(); i++) {
- KnowledgeBase knowledgeBaseRep = knowledgeBaseList.get(i);
- if (StringUtil.equals(knowledgeBase.getId(), knowledgeBaseRep.getId())) {
- knowledgeBaseRep.setIsValid(knowledgeBase.getIsValid());
- knowledgeBaseList.set(i,knowledgeBaseRep);
- }
- }
- saveKnowledgeBaseList(knowledgeBaseList);
- }
- } catch (RuntimeException e) {
- log.error("toggle knowledge error: {}", e.getMessage());
- throw new BusinessException(e.getMessage());
- } finally {
- lock.release();
- }
-
- Map<String, String> map = Maps.newHashMap();
- map.put("id", knowledgeBase.getId());
- return BaseResultGenerator.success(map);
- }
-
- public List<KnowledgeBase> queryKnowledgeBaseList() {
- List<KnowledgeBase> knowledgeBaseList = new ArrayList<>();
- try {
- String config = pubConfigService.getConfig(NacosConst.KNOWLEDGE_BASE_DATA_ID, NacosConst.DEFAULT_GROUP, httpConfig.getServerRequestTimeOut());
- if (StringUtil.isNotEmpty(config)) {
- knowledgeBaseList = JSON.parseArray(config, KnowledgeBase.class);
- knowledgeBaseList = knowledgeBaseList.stream().sorted(Comparator.comparing(KnowledgeBase::getUpdateTime).reversed()).collect(Collectors.toList());
- }
- } catch (NacosException e) {
- log.error("get knowledge meta error: {}", e.getMessage());
- throw new BusinessException(ResultStatusEnum.SERVER_ERROR.getCode(),ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(),e.getMessage());
- }
- return knowledgeBaseList;
- }
-
-
- private String buildKnowledgeBaseList(MultipartFile file, KnowledgeBase knowledge, List<KnowledgeBase> knowledgeBaseList) throws Exception {
- if (StringUtil.isEmpty(knowledge.getIsValid())) {
- knowledge.setIsValid(KnowledgeConstant.VALID);
- }
- String newPath = uploadFile(file);
- knowledge.setPath(newPath);
- knowledge.setSha256(DigestUtil.sha256Hex(file.getBytes()));
- knowledge.setSize(file.getSize());
- knowledge.setUpdateTime(DateUtils.getCurrentDate("yyyy-MM-dd'T'HH:mm:ssZ"));
-
- if (StringUtils.isNotBlank(knowledge.getId()) && knowledgeBaseList.stream().anyMatch(o -> StringUtil.equals(o.getId(), knowledge.getId()))) {
- KnowledgeBase knowledgeRep = new KnowledgeBase();
- String expirePath = null;
- for (KnowledgeBase item : knowledgeBaseList) {
- if (StringUtil.equals(knowledge.getId(), item.getId())) {
- expirePath = item.getPath();
- BeanUtils.copyProperties(item, knowledgeRep);
- break;
- }
- }
- for (KnowledgeBase item : knowledgeBaseList) {
- if (StringUtil.equals(knowledge.getId(), item.getId())) {
- knowledge.setVersion(item.getVersion());
- if (StringUtil.equals(item.getVersion(), KnowledgeConstant.LATEST)) {
- BeanUtils.copyProperties(knowledge, item);
- item.setVersion(KnowledgeConstant.LATEST);
- } else {
- BeanUtils.copyProperties(knowledge, item);
- }
- }
- if (StringUtil.equals(item.getName(), knowledgeRep.getName()) && StringUtil.equals(item.getType(), knowledgeRep.getType()) && StringUtil.equals(item.getVersion(), KnowledgeConstant.LATEST)) {
- BeanUtils.copyProperties(knowledge, item);
- item.setVersion(KnowledgeConstant.LATEST);
- }
- }
- return expirePath;
- } else {
- String lastVersion = queryLastKnowledgeVersion(knowledgeBaseList, knowledge);
- String version = generateVersion(lastVersion);
- if (StringUtil.isBlank(knowledge.getId())) {
- knowledge.setId(DigestUtil.md5Hex16(knowledge.getType()).concat("-").concat(DigestUtil.md5Hex16(knowledge.getName().concat(knowledge.getFormat()).concat(version))));
- }
- knowledge.setVersion(version);
- knowledgeBaseList.add(knowledge);
- addKnowledgeLatest(knowledge, knowledgeBaseList);
- return null;
- }
- }
-
- private void addKnowledgeLatest(KnowledgeBase knowledge, List<KnowledgeBase> knowledgeBaseList) {
- boolean hadLatestRep = knowledgeBaseList.stream().anyMatch(o -> StringUtil.equals(o.getName(), knowledge.getName())
- && StringUtil.equals(o.getType(), knowledge.getType())
- && StringUtil.equals(o.getVersion(), KnowledgeConstant.LATEST));
- if (hadLatestRep) {
- for (KnowledgeBase item : knowledgeBaseList) {
- if (!StringUtil.equals(item.getName(), knowledge.getName())
- || !StringUtil.equals(item.getType(), knowledge.getType())
- || !StringUtil.equals(item.getVersion(), KnowledgeConstant.LATEST)) {
- continue;
- }
- BeanUtils.copyProperties(knowledge, item);
- item.setVersion(KnowledgeConstant.LATEST);
- }
- } else {
- KnowledgeBase knowledgeLatest = new KnowledgeBase();
- BeanUtils.copyProperties(knowledge, knowledgeLatest);
- knowledgeLatest.setVersion(KnowledgeConstant.LATEST);
- knowledgeBaseList.add(knowledgeLatest);
- }
- }
-
- private String queryLastKnowledgeVersion(List<KnowledgeBase> knowledgeBaseList, KnowledgeBase knowledge) {
- if (CollectionUtil.isEmpty(knowledgeBaseList)) {
- return null;
- }
- knowledgeBaseList = knowledgeBaseList.stream().sorted(Comparator.comparing(KnowledgeBase::getName)
- .thenComparingInt(o -> {
- String version = o.getVersion();
- int i = version.indexOf("_");
- String dateVersion = i > 0 ? version.substring(0, i) : "0";
- return Integer.parseInt(
- NumberUtil.isInteger(dateVersion) ? dateVersion : "0");
- }
- )
- .thenComparingInt(o -> {
- String version = o.getVersion();
- int i = version.indexOf("_");
- String serial = i > 0 && i < version.length() - 1 ? version.substring(i + 1) : "0";
- return Integer.parseInt(
- NumberUtil.isInteger(serial) ? serial : "0");
- }
- )
- .reversed()).collect(Collectors.toList());
- for (KnowledgeBase knowledgeBase : knowledgeBaseList) {
- if (!StringUtil.equals(knowledgeBase.getVersion(), KnowledgeConstant.LATEST)
- && StringUtil.equals(knowledgeBase.getType(), knowledge.getType())
- && StringUtil.equals(knowledgeBase.getName(), knowledge.getName())) {
- return knowledgeBase.getVersion();
- }
- }
- return null;
- }
-
- private void saveKnowledgeBaseList(List<KnowledgeBase> knowledgeBaseList) throws BusinessException, InterruptedException {
-
- try {
- boolean b = pubConfigService.publishConfig(NacosConst.KNOWLEDGE_BASE_DATA_ID, NacosConst.DEFAULT_GROUP, JSON.toJSONString(knowledgeBaseList));
- if (!b) {
- throw new BusinessException(ResultStatusEnum.SERVER_ERROR.getCode(),ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(),"update knowledge meta error.");
- }
- } catch (RuntimeException | NacosException e) {
- log.error("update knowledge meta error: {}", e.getMessage());
- throw new BusinessException(ResultStatusEnum.SERVER_ERROR.getCode(),ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(),e.getMessage());
- }
- while (true) {
- Thread.sleep(100);
- String local = JSON.toJSONString(knowledgeBaseList.stream().sorted(Comparator.comparing(KnowledgeBase::getUpdateTime).reversed()).collect(Collectors.toList()));
- String nacos = JSON.toJSONString(queryKnowledgeBaseList());
- if (StringUtil.equals(local, nacos)) {
- break;
- }
- }
- }
-
-
- public String uploadFile(MultipartFile file) {
- String uri = hosConfig.getUri();
- String bucket = hosConfig.getBucket();
- Header header = new BasicHeader(KnowledgeConstant.TOKEN, hosConfig.getToken());
- String[] split = file.getOriginalFilename().split("\\.");
- String url = uri.concat("/").concat(bucket).concat("/").concat(UUID.randomUUID().toString())
- .concat("-").concat(Base64.getEncoder().encodeToString(split[0].getBytes())).concat(".").concat(split[1]);
- try {
- if (file.getBytes().length < hosConfig.getMultiUploadStartSize()) {
- InputStream in = new ByteArrayInputStream(file.getBytes());
- HttpResponse httpResponse = httpClientService.httpPutStream(url, in, httpConfig.getServerResponseTimeOut(), header);
- if (StringUtil.isEmpty(httpResponse) || httpResponse.getStatusLine().getStatusCode() != ResultStatusEnum.SUCCESS.getCode()) {
- log.error("execute upload file failed.");
- throw new BusinessException(ResultStatusEnum.SERVER_ERROR.getCode(),ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(),"upload file failed.");
- }
- } else {
- File fileTemp = null;
- try {
- fileTemp = new File(System.currentTimeMillis() + file.getOriginalFilename());
- String resBody = createMultipartUpload(url, header);
- Document resXml = XmlUtil.parseXml(resBody);
- Object uploadId = XmlUtil.getByXPath("//InitiateMultipartUploadResult/UploadId", resXml, XPathConstants.STRING);
- Map<Integer, String> parts = Maps.newHashMap();
- FileUtils.copyInputStreamToFile(file.getInputStream(), fileTemp);
- RandomAccessFile randomAccessFile = FileUtil.createRandomAccessFile(fileTemp, FileMode.r);
- long totalLength = randomAccessFile.length();
- long hadReadLength = 0;
- int count = 1;
- while (hadReadLength < totalLength) {
- byte[] waitUploadBytes = new byte[hosConfig.getMultiUploadPartSize() <= (totalLength - hadReadLength) ? hosConfig.getMultiUploadPartSize().intValue() : (int) (totalLength - hadReadLength)];
- hadReadLength = hadReadLength + randomAccessFile.read(waitUploadBytes);
- InputStream inputStream = new ByteArrayInputStream(waitUploadBytes);
- HttpResponse httpResponse = uploadPart(url, uploadId, count, inputStream, header);
- Header[] eTags = httpResponse.getHeaders(HOS_ETAG);
- parts.put(count++, eTags[0].getValue());
- }
- Document completeBody = buildCompleteXmlReqBody(parts);
- completeMultipartUpload(url, uploadId, completeBody, header);
- } catch (IOException | RuntimeException ex) {
- log.error("execute upload part file failed.");
- throw new BusinessException(ResultStatusEnum.SERVER_ERROR.getCode(),ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(),ex.getMessage());
- } finally {
- if (fileTemp != null && fileTemp.exists()) {
- FileUtil.del(fileTemp);
- }
- }
- }
- } catch (IOException e) {
- throw new BusinessException(ResultStatusEnum.SERVER_ERROR.getCode(),ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(),e.getMessage());
- }
- return url;
- }
-
- private String createMultipartUpload(String url, Header header) throws BusinessException {
- String repStr = httpClientService.httpPost(url + "?uploads", null, header);
- if ("-1".equals(repStr)) {
- throw new BusinessException("create multipart upload failed");
- }
- return repStr;
- }
-
- private HttpResponse uploadPart(String url, Object uploadId, int times, InputStream inputStream, Header header) throws RuntimeException {
- String urlParams = StrFormatter.format("?partNumber={}&uploadId={}", times, uploadId);
- HttpResponse httpResponse = httpClientService.httpPutStream(url.concat(urlParams), inputStream, httpConfig.getServerResponseTimeOut(), header);
- if (StringUtil.isEmpty(httpResponse) || httpResponse.getStatusLine().getStatusCode() != ResultStatusEnum.SUCCESS.getCode()) {
- log.error("upload part file failed.");
- throw new BusinessException("upload part file failed.");
- }
- return httpResponse;
- }
-
- private void completeMultipartUpload(String url, Object uploadId, Document resXmlBody, Header header) throws RuntimeException {
- String repStr = httpClientService.httpPost(url.concat(StrFormatter.format("?uploadId={}", uploadId)), XmlUtil.toStr(resXmlBody), header);
- if ("-1".equals(repStr)) {
- throw new BusinessException("complete multipart upload failed");
- }
- }
-
- private Document buildCompleteXmlReqBody(Map<Integer, String> parts) {
- Document xml = XmlUtil.createXml();
- xml.setXmlStandalone(true);
- Element document = xml.createElement(HOS_COMPLETE_MULTIPART_UPLOAD);
- for (Integer partNum : parts.keySet()) {
- Element part = xml.createElement(HOS_PART);
- Element eTag = xml.createElement(HOS_ETAG);
- Element partNumber = xml.createElement(HOS_PART_NUM);
- eTag.setTextContent(parts.get(partNum));
- partNumber.setTextContent(String.valueOf(partNum));
- part.appendChild(eTag);
- part.appendChild(partNumber);
- document.appendChild(part);
- }
- xml.appendChild(document);
- return xml;
- }
-
- private KnowledgeBase saveLatestKnowledge(KnowledgeBase knowledgeBase) {
- KnowledgeBase base = new KnowledgeBase();
- BeanUtils.copyProperties(knowledgeBase, base);
- base.setVersion(KnowledgeConstant.LATEST);
- return base;
- }
-
- private Boolean updateLatestKnowledge(List<KnowledgeBase> knowledgeBaseList, KnowledgeBase knowledgeBase) {
-
- String name = knowledgeBase.getName();
- String type = knowledgeBase.getType();
-
- for (int i = 0; i < knowledgeBaseList.size(); i++) {
-
- KnowledgeBase knowledge = knowledgeBaseList.get(i);
- if (StringUtil.equals(knowledge.getName(), name) && StringUtil.equals(knowledge.getType(), type)
- && StringUtil.equals(knowledge.getVersion(), KnowledgeConstant.LATEST)) {
- KnowledgeBase base = new KnowledgeBase();
- BeanUtils.copyProperties(knowledge, base);
- base.setId(knowledgeBase.getId());
- base.setPath(knowledgeBase.getPath());
- base.setSize(knowledgeBase.getSize());
- base.setSha256(knowledgeBase.getSha256());
- base.setOriginUrl(knowledgeBase.getOriginUrl());
- base.setUpdateTime(knowledgeBase.getUpdateTime());
- base.setIsValid(knowledgeBase.getIsValid());
- knowledgeBaseList.set(i, base);
- return false;
- }
- }
- return true;
- }
-
- public String generateVersion(String lastVersion) {
-
- String date = DateUtils.getCurrentDate("yyMMdd");
- if (StringUtils.isEmpty(lastVersion)) {
- return date.concat("_1");
- } else {
- String[] str = lastVersion.split("_");
- String index = str[1];
- if (StringUtil.equals(date, str[0])) {
- return date.concat("_").concat(String.valueOf(Integer.parseInt(index) + 1));
- } else {
- return date.concat("_1");
- }
- }
-
- }
-
-
-}
diff --git a/src/main/java/com/mesalab/services/service/impl/RelationServiceImpl.java b/src/main/java/com/mesalab/services/service/impl/RelationServiceImpl.java
deleted file mode 100644
index 4f37ff63..00000000
--- a/src/main/java/com/mesalab/services/service/impl/RelationServiceImpl.java
+++ /dev/null
@@ -1,326 +0,0 @@
-package com.mesalab.services.service.impl;
-
-
-import cn.hutool.core.collection.CollectionUtil;
-import cn.hutool.core.util.ObjectUtil;
-import cn.hutool.crypto.digest.DigestUtil;
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import com.alibaba.fastjson2.JSON;
-import com.fasterxml.jackson.core.type.TypeReference;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.collect.Lists;
-import com.mesalab.common.entity.BaseResult;
-import com.mesalab.common.entity.BaseResultGenerator;
-import com.mesalab.common.enums.ResultCodeEnum;
-import com.mesalab.common.enums.ResultStatusEnum;
-import com.mesalab.knowledge.enums.MatchEnum;
-import com.mesalab.qgw.exception.QGWBusinessException;
-import com.mesalab.qgw.model.basic.QueryProfile;
-import com.mesalab.qgw.model.basic.EntityConfigSource;
-import com.mesalab.qgw.service.QueryService;
-import com.mesalab.services.common.dsl.ComDSLObject;
-import com.mesalab.services.common.dsl.ComDSLParse;
-import com.mesalab.services.common.enums.EntityQueryType;
-import com.mesalab.services.common.property.SqlPropertySourceFactory;
-import com.mesalab.services.service.RelationService;
-import com.geedgenetworks.utils.DateUtils;
-import com.geedgenetworks.utils.StringUtil;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.context.EnvironmentAware;
-import org.springframework.context.annotation.PropertySource;
-import org.springframework.core.env.Environment;
-import org.springframework.stereotype.Service;
-
-import java.util.*;
-import java.util.stream.Collectors;
-
-import static java.util.stream.Collectors.groupingBy;
-
-@Service
-@PropertySource(value = "classpath:http-sql-template.sql", factory = SqlPropertySourceFactory.class)
-public class RelationServiceImpl implements RelationService , EnvironmentAware {
-
-
- @Value("${arango.maxrows}")
- private String defLimit;//查询最大条数
- @Autowired
- private QueryService queryService;
- @Autowired
- private EntityConfigSource entityConfigSource;
-
- private Environment env;
- private static final Log log = LogFactory.get();
-
- @Override
- public BaseResult getRealRelation(String param, ComDSLObject comDSLObject) {
- BaseResult baseResult;
- if (param.equals(EntityQueryType.SUBSCRIBERIDPOOL.getType())) {
- baseResult = formatResult(queryService.executeQuery(queryBuild(comDSLObject.getQuery())));
- } else if (param.equals(EntityQueryType.GTPC.getType())) {
- String querySql = generateQuerySql(comDSLObject.getQuery());
- QueryProfile queryProfile = QueryProfile.builder().option("real_time").query(querySql).build();
- baseResult = deduplicationData(queryService.executeQuery(queryProfile),comDSLObject.getQuery());
- } else {
- baseResult = BaseResultGenerator.failure(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(), "No match queryType");
- }
- if (!baseResult.isSuccess()) {
- BaseResult result = JSON.parseObject(baseResult.getMessage(), BaseResult.class);
- throw new QGWBusinessException(baseResult.getStatus(), baseResult.getCode(), result.getMessage());
- }
- return baseResult;
- }
-
-
- private QueryProfile queryBuild(ComDSLObject.Query query) {
- String range = ComDSLParse.parseRange(query.getParameters().getRange());
- List<ComDSLObject.Query.FilterBean> match = getMatchAfterTranslation(query.getParameters().getMatch());
- String fieldWhere = ComDSLParse.parseMath(match);
- String timeWhere = ComDSLParse.parseInterval(query, "last_update_time", true);
- String sql = String.format(Objects.requireNonNull(env.getProperty("RELATION_SUBSCRIBER_ID")), timeWhere.concat(range).concat(fieldWhere), ObjectUtil.isEmpty(query.getLimit()) ? defLimit : query.getLimit());
- log.info("real time relation subscribe id query sql: {}", sql);
- return QueryProfile.builder().option("real_time").query(sql).build();
- }
-
- private List<ComDSLObject.Query.FilterBean> getMatchAfterTranslation(List<ComDSLObject.Query.FilterBean> match) {
- if (ObjectUtil.isEmpty(match)) {
- return Lists.newArrayList();
- }
- for (ComDSLObject.Query.FilterBean filterBean : match) {
- if (StringUtil.equals(MatchEnum.EXACTLY.getType(), filterBean.getType())) {
- filterBean.setType(MatchEnum.PREFIX.getType());
- filterBean.setFieldKey("ROWKEY");
- List<String> fieldValues = filterBean.getFieldValues();
- fieldValues.replaceAll(s -> DigestUtil.md5Hex(s).concat("|"));
- } else {
- filterBean.setFieldKey("account");
- }
- }
- return match;
- }
-
- private String generateQuerySql(ComDSLObject.Query query) {
-
- List<ComDSLObject.Query.FilterBean> match = query.getParameters().getMatch();
- StringBuffer fieldWhere = new StringBuffer();
- if (ObjectUtil.isNotEmpty(match)){
- StringBuffer filter = new StringBuffer();
- for (ComDSLObject.Query.FilterBean filterBean : match) {
- match = new ArrayList<>();
- if (StringUtil.equals(MatchEnum.REGEX.getType(), filterBean.getType())) {
- Map<String, ComDSLObject.Query.FilterBean> map = transformData(filterBean);
- for (Map.Entry<String, ComDSLObject.Query.FilterBean> entry : map.entrySet()) {
- ComDSLObject.Query.FilterBean bean = entry.getValue();
- match.add(generateRowKey(bean));
- }
- filter.append(ComDSLParse.parseMath(match));
- } else {
- match.add(generateRowKey(filterBean));
- filter.append(ComDSLParse.parseMath(match));
- }
-
- }
- StringBuffer sb = new StringBuffer(" AND (");
- List<String> list = match.stream().filter(e -> !StringUtil.equals(e.getFieldKey(), "ROWKEY")).map(e -> e.getFieldKey()).distinct().collect(Collectors.toList());
- for (String str : list) {
- if (StringUtil.equals(str,"imei")){
- sb.append("ROWKEY like '0%'");
- } else if (StringUtil.equals(str,"imsi")){
- sb.append("ROWKEY like '1%'");
- } else if (StringUtil.equals(str,"phone_number")){
- sb.append("ROWKEY like '2%'");
- } else if (StringUtil.equals(str,"apn")){
- sb.append("ROWKEY like '3%'");
- }
- sb.append(" OR ");
- }
-
- if (CollectionUtil.isNotEmpty(list)) {
- fieldWhere = sb.delete(sb.length() - 3, sb.length()).append(")").append(filter);
- } else {
- fieldWhere = filter;
- }
- }
- String range = ComDSLParse.parseRange(query.getParameters().getRange());
-
- String sql = String.format(Objects.requireNonNull(env.getProperty("GTPC_KNOWLEDGE_BASE")), range.concat(fieldWhere.toString()), entityConfigSource.getTopGTPCDefaultSize());
- log.info("real time relation gtpc query sql: {}", sql);
- return sql;
- }
-
- private ComDSLObject.Query.FilterBean generateRowKey(ComDSLObject.Query.FilterBean filterBean) {
-
-
- //Phone Number,APN 倒排
- String fieldKey = filterBean.getFieldKey();
- List<String> fieldValues = filterBean.getFieldValues();
- String type = filterBean.getType();
- //imei,imsi前缀匹配查询时,应使用Row Key进行筛选
- if (StringUtil.equals(type, MatchEnum.PREFIX.getType())
- && (StringUtil.equals(fieldKey, "imei") || StringUtil.equals(fieldKey, "imsi"))) {
-
- if (StringUtil.equals(fieldKey, "imei")) {
- for (int i = 0; i < fieldValues.size(); i++) {
- fieldValues.set(i,"0".concat(fieldValues.get(i)));
- }
- //phone_number,apn 后缀匹配查询时,应使用Row Key进行筛选
- } else if (StringUtil.equals(fieldKey, "imsi")) {
- for (int i = 0; i < fieldValues.size(); i++) {
- fieldValues.set(i,"1".concat(fieldValues.get(i)));
- }
- //phone_number,apn 后缀匹配查询时,应使用Row Key进行筛选
- }
- filterBean.setFieldKey("ROWKEY");
- //phone_number,apn 后缀匹配查询时,应使用Row Key进行筛选
- } else if (StringUtil.equals(type, MatchEnum.SUFFIX.getType())
- && (StringUtil.equals(fieldKey, "phone_number") || StringUtil.equals(fieldKey, "apn"))) {
- for (int i = 0; i < fieldValues.size(); i++) {
- fieldValues.set(i, new StringBuffer(fieldValues.get(i)).reverse().toString());
- }
- if (StringUtil.equals(fieldKey, "phone_number")) {
- for (int i = 0; i < fieldValues.size(); i++) {
- fieldValues.set(i,"2".concat(fieldValues.get(i)));
- }
- //phone_number,apn 后缀匹配查询时,应使用Row Key进行筛选
- } else if (StringUtil.equals(fieldKey, "apn")) {
- for (int i = 0; i < fieldValues.size(); i++) {
- fieldValues.set(i,"3".concat(fieldValues.get(i)));
- }
- //phone_number,apn 后缀匹配查询时,应使用Row Key进行筛选
- }
- filterBean.setFieldKey("ROWKEY");
- filterBean.setType(MatchEnum.PREFIX.getType());
- }
-
- return filterBean;
- }
-
- private Map<String, ComDSLObject.Query.FilterBean> transformData(ComDSLObject.Query.FilterBean filterBean) {
-
- Map<String, ComDSLObject.Query.FilterBean> map = new HashMap<>();
- List<String> fieldValues = filterBean.getFieldValues();
- String fieldKey = filterBean.getFieldKey();
- for (String value : fieldValues) {
- String type = getTypeByValue(value);
- String val = value.replaceAll("\\$", "").replaceAll("\\*", "");
- ComDSLObject.Query.FilterBean filter = map.get(type);
- if (ObjectUtil.isEmpty(filter)) {
- filter = new ComDSLObject.Query.FilterBean();
- List<String> fieldValueList = new ArrayList<>();
- filter.setType(type);
- filter.setFieldKey(fieldKey);
- fieldValueList.add(val);
- filter.setFieldValues(fieldValueList);
- map.put(type, filter);
- } else {
- filter.getFieldValues().add(val);
- }
- }
- return map;
- }
-
- private String getTypeByValue(String value) {
-
- String type;
- if (value.startsWith("$")) {
- type = MatchEnum.EXACTLY.getType();
- } else if (value.startsWith("*") && !value.endsWith("*")) {
- type = MatchEnum.SUFFIX.getType();
- } else if (value.endsWith("*") && !value.startsWith("*")) {
- type = MatchEnum.PREFIX.getType();
- } else if (value.startsWith("*") && value.endsWith("*")) {
- type = MatchEnum.SUBSTRING.getType();
- } else {
- type = MatchEnum.SUBSTRING.getType();
- }
- return type;
-
- }
-
- /**
- * 排序
- * @param mapList
- * @param key
- * @return
- */
- private List<Map<String,Object>> sortByKey(List<Map<String,Object>> mapList ,String key){
- Collections.sort(mapList, new Comparator<Map<String, Object>>() {
- @Override
- public int compare(Map<String, Object> o1, Map<String, Object> o2) {
- String name1 = o1.get(key).toString();
- String name2 = o2.get(key).toString();
- return name2.compareTo(name1);
- }
- });
- return mapList;
- }
-
-
- private BaseResult formatResult(BaseResult baseResult) {
-
- if (String.valueOf(ResultStatusEnum.SUCCESS.getCode()).equals(String.valueOf(baseResult.getStatus()))) {
- baseResult.setMeta(null);
- List<Map<String,Object>> resultList = new ObjectMapper().convertValue(baseResult.getData(), new TypeReference<List<Map<String,Object>>>() {
- });
- LinkedHashMap<Object, List<Map<String,Object>>> listLinkedHashMap = resultList.stream()
- .collect(groupingBy(e -> e.get("VSYS_ID") + "_" + e.get("IP"), LinkedHashMap::new, Collectors.toList()));
- List<Map<String,Object>> list = new ArrayList<>();
- for (Map.Entry<Object, List<Map<String,Object>>> entry : listLinkedHashMap.entrySet()) {
- List<Map<String, Object>> mapList = sortByKey(entry.getValue(), "LAST_FOUND_TIME");
- Map<String, Object> map = mapList.get(0);
- map.put("FIRST_FOUND_TIME", DateUtils.convertTimestampToString(Long.parseLong(String.valueOf(map.get("FIRST_FOUND_TIME"))), DateUtils.YYYY_MM_DD_HH24_MM_SS));
- map.put("LAST_FOUND_TIME", DateUtils.convertTimestampToString(Long.parseLong(String.valueOf(map.get("LAST_FOUND_TIME"))), DateUtils.YYYY_MM_DD_HH24_MM_SS));
- list.add(map);
- }
-
- return BaseResultGenerator.success(list);
- } else {
- baseResult = BaseResultGenerator.error(baseResult.getStatus(),baseResult.getMessage());
- }
- return baseResult;
- }
-
- private BaseResult deduplicationData(BaseResult baseResult, ComDSLObject.Query query) {
- Object data = baseResult.getData();
- if (ObjectUtil.isNotEmpty(data)) {
- List<Map<String, Object>> mapList = (List<Map<String, Object>>) data;
- List<Map<String, Object>> deduplicationPhoneList = removeRepeatMapByKey(mapList, "phone_number");
- List<Map<String, Object>> deduplicationIMSIList = removeRepeatMapByKey(deduplicationPhoneList, "imsi");
- List<Map<String, Object>> list = removeRepeatMapByKey(deduplicationIMSIList, "teid");
- String limit = query.getLimit();
- if (StringUtil.isNotEmpty(limit) && Long.parseLong(limit) < list.size()) {
- baseResult.setData(list.subList(0, Integer.parseInt(query.getLimit())));
- } else {
- baseResult.setData(list);
- }
- }
- return baseResult;
- }
-
- /**
- * 根据map中的某个key 去除List中重复的map
- * @param resultList
- * @param mapKey
- * @return list
- */
-
- public List<Map<String, Object>> removeRepeatMapByKey(List<Map<String, Object>> resultList, String mapKey) {
-
- LinkedHashMap<Object, List<Map<String,Object>>> listLinkedHashMap = resultList.stream()
- .collect(groupingBy(e -> e.get("vsys_id") + "_" + e.get(mapKey), LinkedHashMap::new, Collectors.toList()));
- List<Map<String,Object>> list = new ArrayList<>();
- for (Map.Entry<Object, List<Map<String,Object>>> entry : listLinkedHashMap.entrySet()) {
- List<Map<String, Object>> mapList = sortByKey(entry.getValue(), "last_update_time");
- Map<String, Object> map = mapList.get(0);
- list.add(map);
- }
- return list;
-
- }
-
- @Override
- public void setEnvironment(Environment environment) {
- this.env = environment;
- }
-}
diff --git a/src/main/java/com/mesalab/services/service/impl/SQLDatasetServiceImp.java b/src/main/java/com/mesalab/services/service/impl/SQLDatasetServiceImp.java
deleted file mode 100644
index 475e4b83..00000000
--- a/src/main/java/com/mesalab/services/service/impl/SQLDatasetServiceImp.java
+++ /dev/null
@@ -1,421 +0,0 @@
-package com.mesalab.services.service.impl;
-
-import cn.hutool.core.util.StrUtil;
-import cn.hutool.core.util.URLUtil;
-import cn.hutool.crypto.digest.DigestUtil;
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import com.alibaba.fastjson2.JSON;
-import com.geedgenetworks.utils.Encodes;
-import com.geedgenetworks.utils.StringUtil;
-import com.google.common.base.CaseFormat;
-import com.google.common.base.Stopwatch;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import com.jayway.jsonpath.JsonPath;
-import com.jfinal.plugin.activerecord.Db;
-import com.jfinal.plugin.activerecord.Record;
-import com.mesalab.common.entity.BaseResult;
-import com.mesalab.common.entity.BaseResultGenerator;
-import com.mesalab.common.enums.*;
-import com.mesalab.common.nacos.NacosConst;
-import com.mesalab.common.utils.sqlparser.SQLHelper;
-import com.mesalab.common.utils.sqlparser.SQLVisitorUtil;
-import com.mesalab.qgw.constant.QGWMessageConst;
-import com.mesalab.qgw.dialect.Dialect;
-import com.mesalab.qgw.dialect.HbaseDialect;
-import com.mesalab.qgw.exception.QGWBusinessException;
-import com.mesalab.qgw.exception.QGWErrorCode;
-import com.mesalab.qgw.model.basic.*;
-import com.mesalab.qgw.service.MetadataService;
-import com.mesalab.qgw.service.QueryService;
-import com.mesalab.qgw.service.impl.HttpClientService;
-import com.mesalab.services.common.property.SqlPropertySourceFactory;
-import com.mesalab.services.service.SQLDatasetService;
-import lombok.Data;
-import net.sf.jsqlparser.JSQLParserException;
-import net.sf.jsqlparser.parser.CCJSqlParserUtil;
-import net.sf.jsqlparser.statement.select.Select;
-import org.apache.commons.collections.CollectionUtils;
-import org.apache.http.NameValuePair;
-import org.apache.http.client.utils.URLEncodedUtils;
-import org.apache.http.util.EntityUtils;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.context.annotation.PropertySource;
-import org.springframework.core.env.Environment;
-import org.springframework.http.ResponseEntity;
-import org.springframework.stereotype.Service;
-
-import java.io.IOException;
-import java.lang.reflect.Constructor;
-import java.nio.charset.Charset;
-import java.util.*;
-import java.util.concurrent.TimeUnit;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-/**
- * TODO
- *
- * @Classname SQLDatasetServiceImp
- * @Date 2023/8/10 13:54
- * @Author wWei
- */
-@Service("SQLDatasetService")
-@PropertySource(value = "classpath:http-sql-template.sql", factory = SqlPropertySourceFactory.class)
-public class SQLDatasetServiceImp
- implements SQLDatasetService
-{
- private static final Log log = LogFactory.get();
- private static Pattern pFieldVariable = Pattern.compile("\\$\\{(metric|dimension)_(.*?)\\}", Pattern.CASE_INSENSITIVE);
- private static Pattern pLeftRightFlag = Pattern.compile("\\[\\[(.*?)\\]\\]", Pattern.CASE_INSENSITIVE);
- private final static String TEMPLATE = "template";
- private final static String EXECUTE_ENGINE = "execute_engine";
-
- public final static String queryIdSeparator = ":";
-
- @Autowired
- Environment env;
- @Autowired
- MetadataService metadataService;
- @Autowired
- QueryService queryService;
- @Autowired
- private HttpClientService httpClientService;
- @Autowired
- private HttpConfig httpConfig;
- @Autowired
- private ClickHouseHttpSource clickHouseHttpSource;
- @Autowired
- private DruidIoHttpSource druidIoHttpSource;
-
- @Override
- public BaseResult getProcessesByQueryId(String queryId) {
- Dialect dialect = buildAndGetDialect(queryId);
- return dialect.getProcesses(queryId);
- }
-
- @Override
- public BaseResult deleteQueryTask(String queryId) {
- Dialect dialect = buildAndGetDialect(queryId);
- return dialect.executeKillQuery(queryId);
- }
-
- @Override
- public String getCustomQueryId(String resultId, String query) {
- String tableName = SQLHelper.getTableName(query).get(0);
- String dbType = metadataService.getDBTypeByTableName(tableName);
- return DigestUtil.md5Hex(dbType) + queryIdSeparator + DigestUtil.md5Hex(resultId + query.trim());
- }
-
- @Override
- public List<LinkedHashMap> getVariable()
- {
- Object codeInfo = metadataService.getCfg(NacosConst.SQL_DATASETS_VARIABLES);
- if (StringUtil.isNotEmpty(codeInfo)) {
- Object json = JSON.toJSON(codeInfo);
- return JSON.parseArray(json.toString(), LinkedHashMap.class);
- }
- return Lists.newArrayList();
- }
-
- @Override
- public Map<String, Object> getDataset(String datasetId)
- {
- List<Map<String, Object>> results = new ArrayList<>();
- List<Record> list = Db.find(buildGetDatasetSQL(Lists.newArrayList(datasetId), null, null));
- if (CollectionUtils.isEmpty(list)) {
- return Maps.newHashMap();
- }
- list.forEach(record -> results.add(record.getColumns()));
- Map<String, Object> result = results.get(0);
- log.info("SQL Dataset, ID is: {}, SQL is: {}", datasetId, result);
- return result;
- }
-
- @Override
- public Map<String, Object> getDatasets(List<String> ids, String category, String title)
- {
- String sql = buildGetDatasetSQL(ids, category, title);
- List<Map<String, Object>> data = new ArrayList<>();
- List<Record> list = Db.find(sql);
- if (CollectionUtils.isEmpty(list)) {
- return Maps.newHashMap();
- }
- list.forEach(record -> data.add(record.getColumns()));
- Map<String, Object> result = Maps.newHashMap();
- result.put("list", data);
- return result;
- }
-
- @Override
- public BaseResult getPreview(String datasetId, String option) {
- Map<String, Object> dataset = getDataset(datasetId);
- if (dataset.isEmpty() || StrUtil.isBlankIfStr(dataset.get(TEMPLATE))) {
- return BaseResultGenerator.success();
- }
- String sqlTemplate = String.valueOf(dataset.get(TEMPLATE));
- String execute_engine = String.valueOf(dataset.get(EXECUTE_ENGINE));
- String sql = buildExecSQL(getVariable(), sqlTemplate);
- log.info("Dataset Preview, ID is: {}, Exec SQL is: {}", datasetId, sql);
- if (DiagnosisOptionEnum.EXECUTION.getValue().equalsIgnoreCase(option) || StrUtil.isBlank(option)) {
- if (DBTypeEnum.CLICKHOUSE.getValue().equalsIgnoreCase(execute_engine)) {
- return generateClickHouseBaseResult(sql);
- }
- if (DBTypeEnum.DRUID.getValue().equalsIgnoreCase(execute_engine)) {
- return generateDruidBaseResult(sql);
- }
- if (DBTypeEnum.HBASE.getValue().equalsIgnoreCase(execute_engine)) {
- return generateHbaseBaseResult(sql);
- }
- return getBaseResultByEngine(QueryProfile.builder().option(QueryOptionEnum.REAL_TIME.getValue()).query(sql).build());
- } else if (DiagnosisOptionEnum.PARSE.getValue().equalsIgnoreCase(option)) {
- return getBaseResultByEngine(QueryProfile.builder().option(QueryOptionEnum.SYNTAX_PARSE.getValue()).query(sql).build());
- } else if (DiagnosisOptionEnum.VALIDATION.getValue().equalsIgnoreCase(option)) {
- return getBaseResultByEngine(QueryProfile.builder().option(QueryOptionEnum.SYNTAX_VALIDATION.getValue()).query(sql).build());
- } else {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(),
- ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.OPTION_IS_NOT_SUPPORTED));
- }
- }
-
-
- @Override
- public String buildExecSQL(List<LinkedHashMap> variables, String sql)
- {
- Matcher matcher = pLeftRightFlag.matcher(sql);
- while (matcher.find()) {
- sql = processOptionalClause(sql, "[[", "]]");
- }
- sql = processFieldVariable(sql);
- for (LinkedHashMap linkedHashMap : variables) {
- String variable = String.valueOf(linkedHashMap.get("key"));
- String def = String.valueOf(linkedHashMap.get("default"));
- if ("filter".equals(variable) && StringUtil.isBlank(def)) {
- def = " 1 = 1";
- }
- String parameter = "${".concat(variable).concat("}");
- sql = sql.replace(parameter, def);
- }
- return sql;
- }
-
- public Dialect buildAndGetDialect(String queryId) {
- QueryProfile queryProfile = new QueryProfile();
- queryProfile.setDbType(parseDBType(queryId));
- return getDialect(queryProfile);
- }
-
- private String parseDBType(String queryId) {
- for (DBTypeEnum dbTypeEnum : DBTypeEnum.values()) {
- if (queryId.startsWith(DigestUtil.md5Hex(dbTypeEnum.getValue()).concat(queryIdSeparator))) {
- return dbTypeEnum.getValue();
- }
- }
- if (queryId.contains(queryIdSeparator)) {
- throw new QGWBusinessException(ResultStatusEnum.NOT_FOUND.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(),QGWMessageConst.QUERY_ID_IS_NOT_SUPPORTED));
- }
- return DBTypeEnum.CLICKHOUSE.getValue();
- }
-
- private Dialect getDialect(QueryProfile queryProfile) {
- Dialect dialect;
- try {
- Class dialectClazz = Class.forName("com.mesalab.qgw.dialect." +
- CaseFormat.LOWER_CAMEL.to(CaseFormat.UPPER_CAMEL, queryProfile.getDbType()) + "Dialect");
- Constructor constructor = dialectClazz.getConstructor(QueryProfile.class);
- dialect = (Dialect) constructor.newInstance(queryProfile);
- } catch (ReflectiveOperationException | RuntimeException e) {
- log.error("Dialect conversion instance exception:{}", e);
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
- String.format(QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getMessage(),e.getMessage()));
- }
- return dialect;
- }
-
- private String buildGetDatasetSQL(List<String> ids, String category, String title)
- {
- List<String> filterList = Lists.newArrayList();
- if (!ids.isEmpty()) {
- filterList.add(" identifier_name IN ( '".concat(String.join("', '", ids)).concat("')"));
- }
- if (StrUtil.isNotBlank(category)) {
- filterList.add(" category = '".concat(category.replace("'", "\\'")).concat("'"));
- }
- if (StrUtil.isNotBlank(title)) {
- filterList.add(" title = '".concat(title.replace("'", "\\'")).concat("'"));
- }
- String filter = String.join(" AND ", filterList);
- return String.format(Objects.requireNonNull(env.getProperty("SQL_DATASETS")), StrUtil.isNotBlank(filter) ? "WHERE ".concat(filter) : "");
- }
-
- private BaseResult getBaseResultByEngine(QueryProfile queryProfile) {
- return queryService.executeQuery(queryProfile);
- }
-
- private BaseResult generateDruidBaseResult(String sql) {
- BaseResult baseResult;
- Object dataObject;
- Stopwatch watch = Stopwatch.createUnstarted();
- watch.start();
- Map<String, String> results = druidExecuteHttpPost(sql);
- if (Integer.parseInt(results.get("status")) == ResultStatusEnum.SUCCESS.getCode()) {
- baseResult = BaseResultGenerator.generate(Integer.valueOf(results.get("status")), ResultCodeEnum.SUCCESS.getCode(), "ok",
- (List<Object>) JSON.parseObject(results.get("result"), Object.class), null, null, QueryFormatEnum.JSON.getValue());
- dataObject = baseResult.getData();
- List<Object> allResults = (List<Object>) dataObject;
- Map<String, Object> statistics = Maps.newLinkedHashMap();
- statistics.put("elapsed", watch.elapsed(TimeUnit.MILLISECONDS));
- statistics.put("rows_read", allResults.size());
- baseResult.setData(allResults);
- baseResult.setStatistics(statistics);
- } else {
- baseResult = BaseResultGenerator.generate(Integer.parseInt(results.get("status")), QGWErrorCode.SQL_BUILDER_EXCEPTION.getCode(), results.get("message"),
- results.get("result"), null, null, QueryFormatEnum.JSON.getValue());
- dataObject = baseResult.getData();
- baseResult.setMessage("SQL Execution Error: ".concat(JsonPath.read(dataObject.toString(), "$.errorMessage")));
- }
- return baseResult;
- }
-
-
- private Map<String, String> druidExecuteHttpPost(String sql) {
- String queryURL = URLUtil.normalize(druidIoHttpSource.getUrl());
- DruidQueryParam druidQueryParam = new DruidQueryParam();
- druidQueryParam.setQuery(sql);
- druidQueryParam.getContext().put("skipEmptyBuckets", druidIoHttpSource.getSkipEmptyBuckets());
- druidQueryParam.setResultFormat("object");
- log.info("DB Engine is :{}, Execute Query is: {}", DBTypeEnum.DRUID.getValue(), JSON.toJSONString(druidQueryParam));
- int socketTimeOut = httpConfig.getDruidSocketTimeOut();
- return httpClientService.httpPost(queryURL, JSON.toJSONString(druidQueryParam), socketTimeOut);
- }
-
-
- /**
- * @param sql
- * @return com.mesalab.common.base.BaseResult
- * @Description ck SQL测试集在ClickHouse数据源查询并获取查询结果
- * @author wanghao
- * @date 2021/9/9 17:39
- */
- private BaseResult generateClickHouseBaseResult(String sql) {
- BaseResult baseResult;
- Object dataObject;
- Map<String, String> results = clickHouseExecuteHttpPost(sql);
- if (Integer.parseInt(results.get("status")) == ResultStatusEnum.SUCCESS.getCode()) {
- baseResult = BaseResultGenerator.generate(Integer.parseInt(results.get("status")), ResultCodeEnum.SUCCESS.getCode(), "ok",
- JSON.parseObject(results.get("result"), Map.class), null, null, QueryFormatEnum.JSON.getValue());
- dataObject = baseResult.getData();
- Map<String, Object> resultMap = (Map<String, Object>) dataObject;
- Map<String, Object> statisticMap = (Map<String, Object>) resultMap.get("statistics");
- Map<String, Object> statistics = Maps.newLinkedHashMap();
- statistics.put("elapsed", Math.round(Double.parseDouble(String.valueOf(statisticMap.get("elapsed"))) * 1000));
- statistics.put("rows_read", statisticMap.get("rows_read"));
- baseResult.setData(resultMap.get("data"));
- baseResult.setStatistics(statistics);
- } else {
- baseResult = BaseResultGenerator.generate(Integer.parseInt(results.get("status")), QGWErrorCode.SQL_BUILDER_EXCEPTION.getCode(), null,
- results.get("result"), null, null, QueryFormatEnum.JSON.getValue());
- dataObject = baseResult.getData();
- baseResult.setStatus((baseResult.getStatus() < 500) || (baseResult.getStatus() > 400) ? ResultStatusEnum.SERVER_ERROR.getCode() : baseResult.getStatus());
- baseResult.setMessage("SQL Execution Error: ".concat(JsonPath.read(dataObject.toString(), "$.errorMessage")));
- }
- return baseResult;
- }
-
- private Map<String, String> clickHouseExecuteHttpPost(String sql) {
- String ckQuery = Encodes.urlEncode(sql);
- String queryURL = URLUtil.normalize(clickHouseHttpSource.getUrl() + "/?");
- StringBuilder queryParamBuilder = new StringBuilder("user=")
- .append(clickHouseHttpSource.getRealTimeAccountUserName()).append("&")
- .append("password=").append(clickHouseHttpSource.getRealTimeAccountPin()).append("&")
- .append("database=").append(clickHouseHttpSource.getDbName());
- ClickHouseHttpQuery clickHouseHttpQuery = new ClickHouseHttpQuery();
- clickHouseHttpQuery.setSocketTimeOut(httpConfig.getCkRealTimeAccountSocketTimeOut());
- clickHouseHttpQuery.setQueryParameter(queryParamBuilder.toString());
- log.info("DB Engine is :{}, Execute Http Query is: {}", DBTypeEnum.CLICKHOUSE.getValue(), JSON.toJSONString(clickHouseHttpQuery));
- List<NameValuePair> values = URLEncodedUtils.parse(clickHouseHttpQuery.getQueryParameter(), Charset.forName("UTF-8"));
- return httpClientService.httpPost(queryURL + URLEncodedUtils.format(values, "utf-8"), sql.concat(" FORMAT ").concat(QueryFormatEnum.JSON.getValue().toUpperCase()), clickHouseHttpQuery.getSocketTimeOut());
- }
-
-
-
- /**
- * @param sql
- * @return com.mesalab.common.base.BaseResult
- * @Description hbase SQL测试集在hbase数据源查询并获取查询结果
- * @author wanghao
- * @date 2021/11/11 10:01
- */
- private BaseResult generateHbaseBaseResult(String sql) {
- BaseResult baseResult = null;
- HbaseDialect hbaseDialect = new HbaseDialect(queryBuild(sql));
- baseResult = hbaseDialect.generateBaseResult(converter(sql), Optional.of("OK"));
- return baseResult;
- }
-
- private String converter(String sql) {
- try {
- Select select = (Select) CCJSqlParserUtil.parse(sql);
- select.getSelectBody().accept(SQLVisitorUtil.getVisitorOfEscapeMetadataWithDoubleQuote());
- return select.toString();
- } catch (JSQLParserException | RuntimeException e) {
- log.error("SQL Syntax Error: Converter SQL Syntax Error, SQL is:{}, Error is:{}", sql, e);
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
- String.format(QGWErrorCode.SQL_SYNTAX_PARSE_EXCEPTION.getMessage(),e.getMessage()));
- }
- }
-
- private QueryProfile queryBuild(String sql) {
- return QueryProfile.builder().format(QueryFormatEnum.JSON.getValue()).query(sql).build();
- }
-
-
- private String processFieldVariable(String str)
- {
- Matcher matcher = pFieldVariable.matcher(str);
- StringBuffer sb = new StringBuffer();
- while (matcher.find()) {
- matcher.appendReplacement(sb, matcher.group(2));
- }
- matcher.appendTail(sb);
- return sb.toString();
- }
-
- private String processOptionalClause(String str, String leftFlag, String rightFlag)
- {
- String left = "";
- String right = "";
- String center = "";
- boolean leftMark = false;
- for (int i = 0; i < str.length(); i++) {
- String element = str.substring(i, i + 1);
- if (leftFlag.startsWith(element) && leftFlag.equals(str.substring(i, i + leftFlag.length()))) {
- left = str.substring(0, i);
- leftMark = true;
- continue;
- }
- if (leftMark) {
- if (rightFlag.startsWith(element) && rightFlag.equals(str.substring(i, i + rightFlag.length()))) {
- right = str.substring(i + rightFlag.length());
- center = str.substring(left.length(), str.length() - (right.length()));
- break;
- }
- }
- }
- if (StrUtil.isNotEmpty(center)) {
- String substring = center.substring(leftFlag.length(), center.length() - leftFlag.length());
- str = left.concat(substring).concat(right);
- }
- return str;
- }
- @Data
- class DruidQueryParam {
- private String query;
- private Map<String, String> context = Maps.newHashMap();
- private String resultFormat;
- }
-
-}
diff --git a/src/main/java/com/mesalab/services/service/impl/SQLDatasetServiceImpl.java b/src/main/java/com/mesalab/services/service/impl/SQLDatasetServiceImpl.java
new file mode 100644
index 00000000..82580f21
--- /dev/null
+++ b/src/main/java/com/mesalab/services/service/impl/SQLDatasetServiceImpl.java
@@ -0,0 +1,88 @@
+package com.mesalab.services.service.impl;
+
+import cn.hutool.crypto.digest.DigestUtil;
+import cn.hutool.log.Log;
+import cn.hutool.log.LogFactory;
+import com.google.common.base.CaseFormat;
+import com.mesalab.common.entity.BaseResult;
+import com.mesalab.common.enums.*;
+import com.mesalab.common.utils.sqlparser.SQLHelper;
+import com.mesalab.qgw.constant.QGWMessageConst;
+import com.mesalab.qgw.dialect.Dialect;
+import com.mesalab.qgw.exception.QGWBusinessException;
+import com.mesalab.common.exception.CommonErrorCode;
+import com.mesalab.qgw.model.basic.*;
+import com.mesalab.qgw.service.DatabaseService;
+import com.mesalab.services.common.property.SqlPropertySourceFactory;
+import com.mesalab.services.service.SQLDatasetService;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.annotation.PropertySource;
+import org.springframework.stereotype.Service;
+
+import java.lang.reflect.Constructor;
+
+/**
+ * TODO
+ *
+ * @Classname SQLDatasetServiceImp
+ * @Date 2023/8/10 13:54
+ * @Author wWei
+ */
+@Service("SQLDatasetService")
+@PropertySource(value = "classpath:http-sql-template.sql", factory = SqlPropertySourceFactory.class)
+public class SQLDatasetServiceImpl
+ implements SQLDatasetService {
+ private static final Log log = LogFactory.get();
+
+ public final static String QUERY_ID_SEPARATOR = ":";
+
+ @Autowired
+ private DatabaseService databaseService;
+
+
+ @Override
+ public BaseResult getProcessesByQueryId(String queryId) {
+ Dialect dialect = buildAndGetDialect(queryId);
+ return dialect.getProcesses(queryId);
+ }
+
+ @Override
+ public BaseResult deleteQueryTask(String queryId) {
+ Dialect dialect = buildAndGetDialect(queryId);
+ return dialect.executeKillQuery(queryId);
+ }
+
+ public Dialect buildAndGetDialect(String queryId) {
+ SQLQueryContext queryContext = new SQLQueryContext();
+ queryContext.setDbEngine(parseDBEngineType(queryId));
+ return getDialect(queryContext);
+ }
+
+ private String parseDBEngineType(String queryId) {
+ for (DBEngineType dbTypeEnum : DBEngineType.values()) {
+ if (queryId.startsWith(DigestUtil.md5Hex(dbTypeEnum.getValue()).concat(QUERY_ID_SEPARATOR))) {
+ return dbTypeEnum.getValue();
+ }
+ }
+ if (queryId.contains(QUERY_ID_SEPARATOR)) {
+ throw new QGWBusinessException(HttpStatusCodeEnum.NOT_FOUND.getCode(), CommonErrorCode.PARAMETER_ERROR.getCode(),
+ String.format(CommonErrorCode.PARAMETER_ERROR.getMessage(), QGWMessageConst.QUERY_ID_IS_NOT_SUPPORTED));
+ }
+ return DBEngineType.CLICKHOUSE.getValue();
+ }
+
+ private Dialect getDialect(SQLQueryContext queryContext) {
+ Dialect dialect;
+ try {
+ Class dialectClazz = Class.forName("com.mesalab.qgw.dialect." +
+ CaseFormat.LOWER_CAMEL.to(CaseFormat.UPPER_CAMEL, queryContext.getDbEngine()) + "Dialect");
+ Constructor constructor = dialectClazz.getConstructor(SQLQueryContext.class);
+ dialect = (Dialect) constructor.newInstance(queryContext);
+ } catch (ReflectiveOperationException | RuntimeException e) {
+ log.error("Dialect conversion instance exception:{}", e);
+ throw new QGWBusinessException(HttpStatusCodeEnum.BAD_REQUEST.getCode(), CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getCode(),
+ String.format(CommonErrorCode.BAD_REQUEST_SQL_SYNTAX_PARSE_EXCEPTION.getMessage(), e.getMessage()));
+ }
+ return dialect;
+ }
+}
diff --git a/src/main/java/com/mesalab/services/service/impl/TaskExecuteService.java b/src/main/java/com/mesalab/services/service/impl/TaskExecuteService.java
index 78a16931..733f7fc9 100644
--- a/src/main/java/com/mesalab/services/service/impl/TaskExecuteService.java
+++ b/src/main/java/com/mesalab/services/service/impl/TaskExecuteService.java
@@ -7,35 +7,30 @@ import cn.hutool.core.util.StrUtil;
import cn.hutool.json.JSONUtil;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
-import com.alibaba.fastjson.JSONArray;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.jayway.jsonpath.JsonPath;
import com.mesalab.common.entity.BaseResult;
import com.mesalab.common.entity.DataTypeMapping;
-import com.mesalab.common.enums.QueryOptionEnum;
+import com.mesalab.common.enums.QueryOption;
import com.mesalab.common.exception.BusinessException;
-import com.mesalab.qgw.model.basic.QueryProfile;
+import com.mesalab.qgw.model.basic.SQLQueryContext;
import com.mesalab.qgw.model.basic.EngineConfigSource;
-import com.mesalab.qgw.service.QueryService;
-import com.mesalab.qgw.service.MetadataService;
+import com.mesalab.qgw.service.SQLSyncQueryService;
+import com.mesalab.qgw.service.DatabaseService;
import com.mesalab.services.common.property.SqlPropertySourceFactory;
import com.mesalab.services.configuration.JobConfig;
import com.geedgenetworks.utils.StringUtil;
-import org.apache.avro.LogicalType;
-import org.apache.avro.Schema;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.EnvironmentAware;
import org.springframework.context.annotation.PropertySource;
import org.springframework.core.env.Environment;
-import org.springframework.scheduling.annotation.Async;
-import org.springframework.scheduling.annotation.AsyncResult;
import org.springframework.stereotype.Service;
import java.util.*;
-import java.util.concurrent.Future;
-import static com.mesalab.services.service.impl.JobServiceImp.executeUpdate;
+import static com.mesalab.services.service.impl.JobExecuteService.markJobFailure;
+import static com.mesalab.services.service.impl.JobServiceImpl.executeUpdate;
/**
* TODO
@@ -49,78 +44,51 @@ import static com.mesalab.services.service.impl.JobServiceImp.executeUpdate;
public class TaskExecuteService implements EnvironmentAware {
private static final Log log = LogFactory.get();
@Autowired
- Environment env;
+ private Environment env;
@Autowired
- QueryService queryService;
+ SQLSyncQueryService sqlSyncQueryService;
@Autowired
- MetadataService metadataService;
+ DatabaseService databaseService;
@Autowired
EngineConfigSource engineConfigSource;
- @Async("taskExecutor")
- public Future<Boolean> executeFieldDiscovery(String id, String logType, String filter, String field, Object metric, Object fn, long currentTotalMetric) {
+ public List<Map<String, Object>> executeFieldDiscovery(String id, String dataSource, String field, Object metric, Object fn, String filter) {
+ String fnDefault = "count";
+ String column = "*";
+ String value = "count";
try {
- String fnDefault = "count";
- String column = "*";
- String value = "count";
if (StringUtil.isNotEmpty(metric)) {
- String measurements = metadataService.getValueByKeyInSchemaDoc(logType, "measurements");
- List<Map<String, String>> read = JsonPath.read(measurements, "$.aggregates['" + metric + "'][?(@.fn == '" + fn + "')]");
+ String measurements = databaseService.getValueByKeyInSchemaDoc(dataSource, "measurements");
+ List<Map<String, String>> read = JsonPath.read(measurements, "$.field_discovery_metric['" + metric + "'][?(@.fn == '" + fn + "')]");
if (!read.isEmpty()) {
fnDefault = fn.toString();
column = read.get(0).get("column");
value = read.get(0).get("value");
}
}
-
- BaseResult currentTop = queryService.executeQuery(QueryProfile.builder()
- .option(QueryOptionEnum.REAL_TIME.getValue())
- .query(buildTopSQL(logType, filter, field, fnDefault, column, value))
+ BaseResult currentTop = sqlSyncQueryService.executeQuery(SQLQueryContext.builder()
+ .option(QueryOption.REAL_TIME.getValue())
+ .originalSQL(buildTopSQL(dataSource, filter, field, fnDefault, column, value))
.build());
if (!currentTop.isSuccess()) {
- setJobFailed(id);
- log.error("Field-Discovery task query currentTopK Error: ", currentTop.getMessage());
- return new AsyncResult<>(false);
+ markJobFailure(id, "field_discovery task of Job error. message: " + currentTop.getMessage());
+ log.error("field_discovery task of Job id is: {}, query currentTopK Error: {}", id, currentTop.getMessage());
+ return Lists.newArrayList();
}
List<Map<String, Object>> data = (List<Map<String, Object>>) currentTop.getData();
-
- BaseResult lastTop = queryService.executeQuery(QueryProfile.builder()
- .option(QueryOptionEnum.REAL_TIME.getValue())
- .query(String.format(Objects.requireNonNull(env.getProperty("FIELD_RESULT")), JobConfig.FIELD_DISCOVERY, field, id))
- .build());
- if (!lastTop.isSuccess()) {
- setJobFailed(id);
- log.error("Field-Discovery task query lastTopK Error: ", lastTop.getMessage());
- return new AsyncResult<>(false);
- }
- Object lastTotalMetric = 0;
- if (StringUtil.isNotEmpty(lastTop.getData())) {
- List<Map> map = (List<Map>) lastTop.getData();
- if (StringUtil.isNotEmpty(map.get(0).get("value"))) {
- Map item = JSONUtil.toBean(String.valueOf(map.get(0).get("value")), Map.class);
- if (StringUtil.isNotEmpty(item)) {
- List<Map<String, Object>> topk = (List<Map<String, Object>>) item.get("topk");
- lastTotalMetric = item.get(value);
- Map<String, String> metrics = Maps.newHashMap();
- metrics.put(value, "count".equalsIgnoreCase(fnDefault) ? "sum" : fnDefault);
- data = mergeData((List<Map<String, Object>>) JSONArray.parse(JSONUtil.toJsonStr(data)), topk, Lists.newArrayList("value"), metrics);
- }
- }
- }
- data = sortDataAndSetMaxSize(data, value, DataTypeMapping.LONG, false);
- updateResult(id, field, currentTotalMetric, lastTotalMetric, value, data);
+ return Lists.newArrayList(data);
} catch (RuntimeException ex) {
- log.error("Field-Discovery task execute error, message is: {}", ex.getMessage());
- setJobFailed(id);
+ log.error("field_discovery task of Job id is: {}, execute error, message is: {}", id, ex.getMessage());
+ markJobFailure(id, ex.getMessage());
}
- return new AsyncResult<>(true);
+ return Lists.newArrayList();
}
- @Async("taskExecutor")
- public Future<Boolean> executeLongTerm(String id, String sql) {
+ @Deprecated
+ public Boolean executeLongTerm(String id, String sql) {
try {
- BaseResult lastResult = queryService.executeQuery(QueryProfile.builder().query(sql).build());
- BaseResult preResult = queryService.executeQuery(QueryProfile.builder().query(String.format(Objects.requireNonNull(env.getProperty("JOB_RESULT")), id)).build());
+ BaseResult lastResult = sqlSyncQueryService.executeQuery(SQLQueryContext.builder().originalSQL(sql).build());
+ BaseResult preResult = sqlSyncQueryService.executeQuery(SQLQueryContext.builder().originalSQL(String.format(Objects.requireNonNull(env.getProperty("JOB_RESULT")), id)).build());
List<Map<String, Object>> lastData = (List<Map<String, Object>>) lastResult.getData();
List<Map<String, Object>> lastMeta = (List<Map<String, Object>>) lastResult.getMeta();
Map preDataFirst = ((List<Map>) preResult.getData()).get(0);
@@ -138,21 +106,23 @@ public class TaskExecuteService implements EnvironmentAware {
executeUpdate(String.format(Objects.requireNonNull(env.getProperty("JOB_UPDATE_FIELD_VALUE_SINGLE_QUOTATION")), JobConfig.LONG_TERM_COLUMN_FAMILY, JobConfig.LONG_TERM_RESULT, id, JSONUtil.quote(JSONUtil.toJsonStr(result), false).replace("'", "\\'")));
} catch (RuntimeException ex) {
log.error("Statistics task execute error, message is: {}", ex.getMessage());
- setJobFailed(id);
+ setJobFailedLongTerm(id);
}
- return new AsyncResult<>(true);
+ return true;
}
- private void setJobFailed(String id) {
+ @Deprecated
+ private void setJobFailedLongTerm(String id) {
executeUpdate(String.format(Objects.requireNonNull(env.getProperty("JOB_UPDATE_FIELD_VALUE")), JobConfig.DETAIL, JobConfig.IS_FAILED, id, true));
}
+
private void topOrRareData(List<Map<String, Object>> lastData, List<Map<String, Object>> lastMeta, Map preDataFirst, Map<String, Object> valueMap) {
List<Map<String, Object>> preData;
List<String> dimensions = JsonPath.read(JSONUtil.toJsonStr(preDataFirst.get(JobConfig.JOB_PROPERTY)), "$.sub_option.dimension");
Map<String, String> metrics = JsonPath.read(JSONUtil.toJsonStr(preDataFirst.get(JobConfig.JOB_PROPERTY)), "$.sub_option.metric");
Map<String, Boolean> isAsc = JsonPath.read(JSONUtil.toJsonStr(preDataFirst.get(JobConfig.JOB_PROPERTY)), "$.sub_option.isAsc");
- String sortKey = isAsc.keySet().stream().findFirst().get();
+ String sortKey = isAsc.keySet().stream().findFirst().orElse(null);
if (StringUtil.isNotEmpty(preDataFirst.get(JobConfig.LONG_TERM_RESULT))) {
Map<String, Object> item = JSONUtil.toBean(preDataFirst.get(JobConfig.LONG_TERM_RESULT).toString(), Map.class);
preData = (List<Map<String, Object>>) item.get("data");
@@ -160,7 +130,7 @@ public class TaskExecuteService implements EnvironmentAware {
lastData = mergeData(lastData, preData, dimensions, metrics);
Map<String, String> meta = Maps.newHashMap();
lastMeta.forEach(o -> meta.put(String.valueOf(o.get("name")), String.valueOf(o.get("type"))));
- lastData = sortDataAndSetMaxSize(lastData, sortKey, meta.get(sortKey), isAsc.values().stream().findFirst().get());
+ lastData = sortDataAndSetMaxSize(lastData, sortKey, meta.get(sortKey), isAsc.values().stream().findFirst().orElse(true));
}
valueMap.put("data", lastData);
valueMap.put("meta", lastMeta);
@@ -175,7 +145,7 @@ public class TaskExecuteService implements EnvironmentAware {
Map<String, Boolean> isAsc = JsonPath.read(JSONUtil.toJsonStr(preDataFirst.get(JobConfig.JOB_PROPERTY)), "$.sub_option.isAsc");
Map<String, String> meta = Maps.newHashMap();
lastMeta.forEach(o -> meta.put(String.valueOf(o.get("name")), String.valueOf(o.get("type"))));
- lastData = appendData(lastData, preData, isAsc.values().stream().findFirst().get());
+ lastData = appendData(lastData, preData, isAsc.values().stream().findFirst().orElse(true));
}
valueMap.put("meta", lastMeta);
valueMap.put("data", lastData);
@@ -267,34 +237,32 @@ public class TaskExecuteService implements EnvironmentAware {
}
private String buildTopSQL(String tableName, String filter, String fieldName, String fn, String column, String value) {
- Schema.Field field = metadataService.getSchemaByName(tableName).getField(fieldName);
- if (StringUtil.isNotEmpty(field)) {
- Schema schema = field.schema();
- String type = schema.getType().getName();
- String logicalType = schema.getProp(LogicalType.LOGICAL_TYPE_PROP);
- if (StrUtil.equalsIgnoreCase(Schema.Type.ARRAY.getName(), type)
- || StrUtil.equalsIgnoreCase(Schema.Type.ARRAY.getName(), logicalType)) {
- return String.format(Objects.requireNonNull(env.getProperty("JOB_TOPK_ARRAY")),
- "count".equalsIgnoreCase(fn) ? "SUM" : fn, value, fieldName, fn + "(" + column + ")", tableName, fieldName, StrUtil.isBlankIfStr(filter) ? "" : " AND ".concat(filter), value);
- } else if (StrUtil.equalsIgnoreCase("bit", type)
- || StrUtil.equalsIgnoreCase("bit", logicalType)) {
-
- return String.format(Objects.requireNonNull(env.getProperty("JOB_TOPK_BIT")),
- "count".equalsIgnoreCase(fn) ? "SUM" : fn, value, fieldName, fn + "(" + column + ")", tableName, StrUtil.isBlankIfStr(filter) ? "" : " WHERE ".concat(filter), value);
+ List<Map> fields = (List<Map>) databaseService.getSchemaByName(tableName).get("fields");
+ for (Map next : fields) {
+ if (next.get("name").equals(fieldName)) {
+ String type;
+ String logicalType;
+ if (next.get("type") instanceof String) {
+ type = String.valueOf(next.get("type"));
+ logicalType = StrUtil.EMPTY;
+ } else {
+ Map typeMap = (Map) next.get("type");
+ type = String.valueOf(typeMap.get("type"));
+ logicalType = StrUtil.isNotBlank(String.valueOf(typeMap.get("logicalType"))) ? String.valueOf(typeMap.get("logicalType")) : StrUtil.EMPTY;
+ }
+ if (StrUtil.equalsIgnoreCase("array", type)
+ || StrUtil.equalsIgnoreCase("array", logicalType)) {
+ return String.format(Objects.requireNonNull(env.getProperty("JOB_TOPK_ARRAY")),
+ "count".equalsIgnoreCase(fn) ? "SUM" : fn, JobConfig.FIELD_DISCOVERY_TOPK_METRIC_PREFIX, value, fieldName,fieldName,fieldName, fn + "(" + column + ")", tableName, StrUtil.isBlankIfStr(filter) ? "" : " WHERE ".concat(filter), JobConfig.FIELD_DISCOVERY_TOPK_METRIC_PREFIX, value, engineConfigSource.getFieldDiscoveryTopK());
+ } else if (StrUtil.equalsIgnoreCase("bit", type)
+ || StrUtil.equalsIgnoreCase("bit", logicalType)) {
+ return String.format(Objects.requireNonNull(env.getProperty("JOB_TOPK_BIT")),
+ "count".equalsIgnoreCase(fn) ? "SUM" : fn, JobConfig.FIELD_DISCOVERY_TOPK_METRIC_PREFIX, value, fieldName, fn + "(" + column + ")", tableName, StrUtil.isBlankIfStr(filter) ? "" : " WHERE ".concat(filter), JobConfig.FIELD_DISCOVERY_TOPK_METRIC_PREFIX, value, engineConfigSource.getFieldDiscoveryTopK());
+ }
}
-
}
return String.format(Objects.requireNonNull(env.getProperty("JOB_TOPK_DEFAULT")),
- fieldName, fn, column, value, tableName, StrUtil.isBlankIfStr(filter) ? "" : " WHERE ".concat(filter), value);
- }
-
- private void updateResult(String id, String field, long currentTotalMetric, Object lastTotalMetric, String value, List<Map<String, Object>> topList) {
- Map<String, Object> map = Maps.newHashMap();
- map.put("topk", topList);
- map.put("distinct_count", Math.min(topList.size(), 100));
- map.put(value, currentTotalMetric + Long.parseLong(lastTotalMetric.toString()));
- String result = JSONUtil.quote(JSONUtil.toJsonStr(map), false).replace("'", "\\'");
- executeUpdate(String.format(Objects.requireNonNull(env.getProperty("JOB_UPDATE_FIELD_VALUE_SINGLE_QUOTATION")), JobConfig.FIELD_DISCOVERY, field, id, result));
+ fieldName, fn, column, JobConfig.FIELD_DISCOVERY_TOPK_METRIC_PREFIX, value, tableName, StrUtil.isBlankIfStr(filter) ? "" : " WHERE ".concat(filter), JobConfig.FIELD_DISCOVERY_TOPK_METRIC_PREFIX, value, engineConfigSource.getFieldDiscoveryTopK());
}
@Override
diff --git a/src/main/java/com/mesalab/services/service/impl/UnstructuredServiceImpl.java b/src/main/java/com/mesalab/services/service/impl/UnstructuredServiceImpl.java
deleted file mode 100644
index e3fad8d0..00000000
--- a/src/main/java/com/mesalab/services/service/impl/UnstructuredServiceImpl.java
+++ /dev/null
@@ -1,222 +0,0 @@
-package com.mesalab.services.service.impl;
-
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import com.alibaba.fastjson2.JSON;
-import com.clearspring.analytics.util.Lists;
-import com.google.common.base.Stopwatch;
-import com.google.common.collect.Maps;
-import com.mesalab.common.entity.BaseResult;
-import com.mesalab.common.entity.BaseResultGenerator;
-import com.mesalab.common.enums.QueryFormatEnum;
-import com.mesalab.common.enums.ResultCodeEnum;
-import com.mesalab.common.enums.ResultStatusEnum;
-import com.mesalab.qgw.constant.QGWMessageConst;
-import com.mesalab.qgw.exception.QGWBusinessException;
-import com.mesalab.qgw.model.basic.QueryProfile;
-import com.mesalab.qgw.model.basic.ClickHouseHttpSource;
-import com.mesalab.qgw.model.basic.EngineConfigSource;
-import com.mesalab.qgw.service.QueryService;
-import com.mesalab.qgw.service.MetadataService;
-import com.mesalab.services.common.dsl.ComDSLObject;
-import com.mesalab.services.common.enums.UnstructuredDataType;
-import com.mesalab.services.common.property.SqlPropertySourceFactory;
-import com.mesalab.services.service.UnstructuredService;
-import com.geedgenetworks.utils.StringUtil;
-import org.apache.avro.Schema;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.context.EnvironmentAware;
-import org.springframework.context.annotation.PropertySource;
-import org.springframework.core.env.Environment;
-import org.springframework.stereotype.Service;
-
-
-import java.util.*;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.TimeUnit;
-import java.util.function.Function;
-import java.util.function.Predicate;
-import java.util.stream.Collectors;
-
-@Service("UnstructuredService")
-@PropertySource(value = "classpath:http-sql-template.sql", factory = SqlPropertySourceFactory.class)
-public class UnstructuredServiceImpl implements UnstructuredService, EnvironmentAware {
-
- private static final Log log = LogFactory.get();
- private final static Map<String, String> fileType = Maps.newHashMap();
- private static final String COMMONFILE = "common";
- private static final String[] tables = new String[]{"session_record", "security_event", "transaction_record"};
- static {
- fileType.put("common_packet_capture_file", UnstructuredDataType.PCAP.getType());
- fileType.put("rtp_pcap_path", UnstructuredDataType.PCAP.getType());
- fileType.put("packet_url", UnstructuredDataType.PCAP.getType());
- fileType.put("mail_eml_file", UnstructuredDataType.MAIL.getType());
- fileType.put("http_request_body", UnstructuredDataType.HTTP.getType());
- fileType.put("http_response_body", UnstructuredDataType.HTTP.getType());
- }
-
- @Autowired
- private MetadataService metadataService;
- @Autowired
- private ClickHouseHttpSource clickHouseHttpSource;
- @Autowired
- private EngineConfigSource engineConfigSource;
- @Autowired
- private QueryService queryService;
- private Environment env;
-
- @Override
- public Map<String, Object> getUnstructuredFields() {
- Map<String, Object> fileTypeInSchema = Maps.newHashMap();
- List<String> allTable = metadataService.getAllTable();
- for (String tableName : allTable) {
- Schema schema = metadataService.getSchemaByName(tableName);
- Map schemaMap = JSON.parseObject(schema.toString(), Map.class);
- List<String> fieldList = new ArrayList<>();
- if (!clickHouseHttpSource.getDbName().equals(schema.getNamespace())) continue;
- List<Map> fields = (List<Map>) schemaMap.get("fields");
- for (Map field : fields) {
- Map schemaDoc = JSON.parseObject(field.get("doc").toString(), Map.class);
- if (schemaDoc.containsKey("constraints") && schemaDoc.get("visibility").equals("enabled")) {
- Map<String, String> SchemaConstraints = (Map<String, String>) schemaDoc.get("constraints");
- String typeStr = SchemaConstraints.containsKey("type") ? SchemaConstraints.get("type") : StringUtil.EMPTY;
- if (typeStr.equals("file") || typeStr.equals("files")) {
- fieldList.add((String) field.get("name"));
- }
- }
- }
-
- Map<String, List<String>> fileTypeMap = Maps.newHashMap();
- if (fieldList.size() != 0) {
- for (String field : fieldList) {
- String fileTypeStr = StringUtil.isNotEmpty(fileType.get(field)) ? fileType.get(field).toString() : COMMONFILE;
- if (fileTypeMap.containsKey(fileTypeStr)) {
- List<String> preList = fileTypeMap.get(fileTypeStr);
- preList.add(field);
- fileTypeMap.put(fileTypeStr, preList);
- } else {
- List<String> newList = new ArrayList<>();
- newList.add(field);
- fileTypeMap.put(fileTypeStr, newList);
- }
- }
- }
- if (!fileTypeMap.isEmpty()) {
- fileTypeInSchema.put(tableName, fileTypeMap);
- }
- }
- return fileTypeInSchema;
- }
-
- @Override
- public BaseResult getUnstructuredData(String param, ComDSLObject dslObject) {
- Stopwatch watch = Stopwatch.createStarted();
- List<Map> dataList = Lists.newArrayList();
- String dataSource = dslObject.getQuery().getDataSource();
- validationDataSource(dataSource);
- Map<String, Object> fileTypeInSchema = getUnstructuredFields();
- if (!fileTypeInSchema.containsKey(dataSource) && !dataSource.equals(clickHouseHttpSource.getDbName())) {
- return build(dataList, watch);
- }
- int limit = StringUtil.isBlank(dslObject.getQuery().getLimit()) ? engineConfigSource.getDefaultResultNum() : Integer.parseInt(dslObject.getQuery().getLimit());
- if (!param.equalsIgnoreCase(UnstructuredDataType.ALL.getType())) {
- if (!dataSource.equals(clickHouseHttpSource.getDbName())) {
- dataList = getData(fileTypeInSchema, limit, param, dslObject, dataSource);
- } else {
- for (String tableName : fileTypeInSchema.keySet()) {
- dataList.addAll(getData(fileTypeInSchema, limit, param, dslObject, tableName));
- }
- }
- } else {
- if (!dataSource.equals(clickHouseHttpSource.getDbName())) {
- Map<String, List<String>> fileType = (Map<String, List<String>>) fileTypeInSchema.get(dataSource);
- for (String s : fileType.keySet()) {
- dataList.addAll(getData(fileTypeInSchema, limit, s, dslObject, dataSource));
- }
- } else {
- for (String tableName : fileTypeInSchema.keySet()) {
- Map<String, Object> fileType = (Map<String, Object>) fileTypeInSchema.get(tableName);
- for (String s : fileType.keySet()) {
- dataList.addAll(getData(fileTypeInSchema, limit, s, dslObject, tableName));
- }
- }
- }
- }
-
- List<Map> distinctList = dataList.stream()
- .filter(distinctByValue(x -> x.get("file_path")))
- .collect(Collectors.toList());
-
- distinctList = distinctList.size() > limit ? distinctList.subList(0, Integer.parseInt(dslObject.getQuery().getLimit())) : distinctList;
- return build(distinctList, watch);
- }
-
- private BaseResult build(List<Map> dataList, Stopwatch watch) {
- BaseResult baseResult;
- Map statistics = Maps.newHashMap();
- statistics.put("elapsed", watch.elapsed(TimeUnit.MILLISECONDS));
- statistics.put("result_rows", dataList.size());
- statistics.put("result_bytes", dataList.toString().getBytes().length);
- baseResult = BaseResultGenerator.success("ok", dataList, statistics);
- return baseResult;
- }
-
- private List<Map> getData(Map<String, Object> fileTypeMap, int limit, String param, ComDSLObject dslObject, String dataSource) {
- List<Map> dataResult = Lists.newArrayList();
- Map<String, List<String>> tableFileType = (Map<String, List<String>>) fileTypeMap.get(dataSource);
- if (tableFileType.containsKey(param)) {
- List<String> listFields = tableFileType.get(param);
- for (String field : listFields) {
- String sql = String.format(
- Arrays.asList(tables).contains(dataSource) ? Objects.requireNonNull(env.getProperty("UNSTRUCTURED_DATA_PATH")) : Objects.requireNonNull(env.getProperty("UNSTRUCTURED_DATA_PATH_OLD"))
- , field, dataSource
- , generateWhereOfTime(Arrays.asList(tables).contains(dataSource) ? "recv_time" : "common_recv_time", dslObject.getQuery())
- , field, limit);
- BaseResult result = queryService.executeQuery(queryBuild(sql));
- if (!result.isSuccess()) {
- log.error("Query unstructured data status error: {}", result.getMessage());
- throw new QGWBusinessException(ResultStatusEnum.SERVER_ERROR.getCode(), ResultCodeEnum.UNKNOWN_EXCEPTION.getCode(), "Query unstructured data status error: " + result.getMessage());
- }
- List<Map> datalist = (List<Map>) result.getData();
- for (Map data : datalist) {
- data.put("log_type", dataSource);
- dataResult.add(data);
- }
- }
- }
- return dataResult;
- }
-
- private String generateWhereOfTime(String timeKey, ComDSLObject.Query query) {
- String[] intervals = getIntervals(query.getParameters().getIntervals());
- String whereOfTime = timeKey + " >= UNIX_TIMESTAMP('" + intervals[0] + "') AND " + timeKey + " < UNIX_TIMESTAMP('" + intervals[1] + "')";
- return whereOfTime;
- }
-
- private String[] getIntervals(List<String> intervals) {
- return intervals.get(0).split("/");
- }
-
- private static <T> Predicate<T> distinctByValue(Function<? super T, Object> keyExtractor) {
- Map<Object, Boolean> seen = new ConcurrentHashMap<>();
- return t -> seen.putIfAbsent(keyExtractor.apply(t), Boolean.TRUE) == null;
- }
-
-
- private QueryProfile queryBuild(String sql) {
- return QueryProfile.builder().format(QueryFormatEnum.JSON.getValue()).query(sql).build();
- }
-
- private void validationDataSource(String dataSource) {
- List<String> allTable = metadataService.getAllTable();
- if (!allTable.contains(dataSource) && !dataSource.equals(clickHouseHttpSource.getDbName())) {
- throw new QGWBusinessException(ResultStatusEnum.BAD_REQUEST.getCode(), ResultCodeEnum.PARAMETER_ERROR.getCode(),
- String.format(ResultCodeEnum.PARAMETER_ERROR.getMessage(), QGWMessageConst.DATASOURCE_NOT_EXIST));
- }
- }
-
- @Override
- public void setEnvironment(Environment environment) {
- this.env = environment;
- }
-}
diff --git a/src/main/resources/dsl-sql-template.sql b/src/main/resources/dsl-sql-template.sql
index 4533a2bc..9906ba88 100644
--- a/src/main/resources/dsl-sql-template.sql
+++ b/src/main/resources/dsl-sql-template.sql
@@ -4,9 +4,6 @@ SELECT SUM(asymmetric_c2s_flows + asymmetric_s2c_flows) AS asymmetric_flows, SUM
#sql("APPLICATION_AND_PROTOCOL_SUMMARY_TRAFFIC_STAT")
SELECT SUM(fragmentation_packets) AS fragmentation_packets, SUM(total_bytes) AS total_bytes, SUM(total_packets) AS total_packets, SUM(total_sessions) AS total_sessions, AVG(total_bytes) * 8 / $granularity_seconds AS data_rate FROM ( SELECT SUM(c2s_fragments + s2c_fragments) AS fragmentation_packets, SUM(c2s_bytes + s2c_bytes) AS total_bytes, SUM(c2s_pkts + s2c_pkts) AS total_packets, SUM(sessions) AS total_sessions FROM $table WHERE $intervals_and_filter AND protocol_stack_id = 'ETHERNET' GROUP BY TIME_FLOOR_WITH_FILL(UNIX_TIMESTAMP(__time), '$granularity')) LIMIT 1
#end
-#sql("APPLICATION_AND_PROTOCOL_SUMMARY_UNIQUE_CLIENT_IP_STAT")
-SELECT APPROX_COUNT_DISTINCT_DS_HLL(client_ip_sketch) AS uniq_client_ip FROM $table WHERE $intervals_and_filter AND protocol_stack_id = 'ETHERNET' LIMIT 1
-#end
#sql("APPLICATION_AND_PROTOCOL_SUMMARY_TCP_STAT")
SELECT SUM(c2s_tcp_retransmitted_pkts + s2c_tcp_retransmitted_pkts) AS tcp_retransmissions_packets, SUM(c2s_pkts + s2c_pkts) AS tcp_total_packets FROM $table WHERE $intervals_and_filter AND RIGHT(protocol_stack_id, 4) = '.TCP' LIMIT 1
#end
@@ -17,23 +14,38 @@ SELECT SUM(c2s_bytes + s2c_bytes) as unknown_app_bytes FROM $table WHERE $interv
SELECT protocol_stack_id, SUM(sessions) as sessions,SUM(c2s_bytes) as c2s_bytes, SUM(c2s_pkts) as c2s_pkts, SUM(s2c_bytes) as s2c_bytes, SUM(s2c_pkts) as s2c_pkts FROM $table WHERE $intervals_and_filter GROUP BY protocol_stack_id
#end
#sql("APPLICATION_AND_PROTOCOL_TREE_THROUGHPUT")
-SELECT FROM_UNIXTIME(TIME_FLOOR_WITH_FILL(UNIX_TIMESTAMP(__time), '$granularity', 'zero')) as stat_time, protocol_stack_id as type, sum(c2s_bytes + s2c_bytes) as bytes from $table where $intervals_and_filter group by FROM_UNIXTIME(TIME_FLOOR_WITH_FILL(UNIX_TIMESTAMP(__time), '$granularity', 'zero')), protocol_stack_id order by stat_time asc
+SELECT FROM_UNIXTIME(TIME_FLOOR_WITH_FILL(UNIX_TIMESTAMP(__time), '$granularity', 'zero')) as stat_time, protocol_stack_id as type, sum(c2s_bytes + s2c_bytes) as bytes, SUM(c2s_bytes + s2c_bytes) * 8 / $granularity_seconds AS bit_rate from $table where $intervals_and_filter group by FROM_UNIXTIME(TIME_FLOOR_WITH_FILL(UNIX_TIMESTAMP(__time), '$granularity', 'zero')), protocol_stack_id order by stat_time asc
#end
#sql("APPLICATION_AND_PROTOCOL_TOP_APPS")
-SELECT app_name as app_name, SUM(bytes) as bytes, SUM(sessions) as sessions, MAX(bytes_rate) as max_rate, AVG(bytes_rate) as avg_rate FROM (SELECT app_name as app_name, SUM(sessions) as sessions, SUM(c2s_bytes + s2c_bytes) as bytes, SUM(c2s_bytes + s2c_bytes) * 8 / $granularity_seconds as bytes_rate FROM $table WHERE $intervals_and_filter AND notEmpty(app_name) GROUP BY app_name ORDER BY bytes DESC ) GROUP BY app_name ORDER BY bytes DESC $limit
+SELECT app_name as app_name, SUM(bytes) as bytes, SUM(sessions) as sessions, MAX(bit_rate) as max_rate, SUM(bytes) * 8/$interval_seconds as avg_rate FROM (SELECT app_name as app_name, SUM(sessions) as sessions, SUM(c2s_bytes + s2c_bytes) as bytes, SUM(c2s_bytes + s2c_bytes) * 8 / $granularity_seconds as bit_rate FROM $table WHERE $intervals_and_filter AND notEmpty(app_name) GROUP BY app_name, TIME_FLOOR_WITH_FILL(UNIX_TIMESTAMP(__time), '$granularity') ORDER BY bytes DESC ) GROUP BY app_name ORDER BY bytes DESC $limit
#end
#sql("APPLICATION_AND_PROTOCOL_APP_RELATED_INTERNAL_IPS")
SELECT if(bitAnd(flags, 8) = 8, client_ip, server_ip) AS ip, SUM(sent_bytes + received_bytes) AS bytes FROM $table WHERE $intervals_and_filter GROUP BY ip ORDER BY bytes DESC $limit
#end
#sql("APPLICATION_AND_PROTOCOL_APP_THROUGHPUT")
-SELECT FROM_UNIXTIME(TIME_FLOOR_WITH_FILL(UNIX_TIMESTAMP(__time),'$granularity','zero')) AS stat_time, app_name, SUM(c2s_bytes + s2c_bytes) AS bytes FROM $table WHERE $intervals_and_filter GROUP BY FROM_UNIXTIME(TIME_FLOOR_WITH_FILL(UNIX_TIMESTAMP(__time),'$granularity','zero')), app_name ORDER BY stat_time ASC
+SELECT FROM_UNIXTIME(TIME_FLOOR_WITH_FILL(UNIX_TIMESTAMP(__time),'$granularity','zero')) AS stat_time, app_name, SUM(c2s_bytes + s2c_bytes) AS bytes, SUM(c2s_bytes + s2c_bytes) * 8 / $granularity_seconds AS bit_rate FROM $table WHERE $intervals_and_filter GROUP BY FROM_UNIXTIME(TIME_FLOOR_WITH_FILL(UNIX_TIMESTAMP(__time),'$granularity','zero')), app_name ORDER BY stat_time ASC
#end
#sql("APPLICATION_AND_PROTOCOL_APP_SUMMARY")
SELECT app_name, SUM(sessions) AS sessions, SUM(c2s_bytes + s2c_bytes) AS bytes, SUM(s2c_bytes) AS received_bytes, SUM(c2s_bytes) AS sent_bytes, SUM(c2s_pkts + s2c_pkts) AS packets, SUM(c2s_pkts) AS sent_packets, SUM(s2c_pkts) AS received_packets FROM $table WHERE $intervals_and_filter GROUP BY app_name
#end
-#sql("REAL_TIME_DATA_ANALYTICS_SUBSCRIBER_ID_RELATE_IP")
-SELECT account AS SUBSCRIBER_ID, framed_ip AS IP, first_found_time AS FIRST_FOUND_TIME, last_update_time AS LAST_FOUND_TIME, vsys_id as VSYS_ID FROM $table WHERE $intervals_and_filter AND acct_status_type != 2 $limit
+#sql("DATAPATH_TELEMETRY_RECORD")
+SELECT * FROM $table WHERE $intervals_and_filter ORDER BY timestamp_us ASC
+#end
+#sql("TRAFFIC_SPECTRUM_SUMMARY")
+SELECT direction, SUM(bytes) AS total_bytes, SUM(sessions) AS total_sessions, SUM(pkts) AS total_packets, SUM(IF(app = 'unknown', bytes, 0)) AS unknown_app_bytes, SUM(asymmetric_c2s_flows + asymmetric_s2c_flows) AS asymmetric_flows, SUM(c2s_fragments + s2c_fragments) AS fragmentation_packets FROM $table WHERE $intervals_and_filter GROUP BY direction
+#end
+#sql("TRAFFIC_SPECTRUM_UNIQ_IP")
+SELECT COUNT(DISTINCT(IF(direction = 'Outbound', client_ip, NULL))) AS internal_uniq_client_ip, COUNT(DISTINCT(IF(direction = 'Outbound', server_ip, NULL))) AS external_uniq_server_ip, COUNT(DISTINCT(IF(direction = 'Inbound', server_ip, NULL))) AS internal_uniq_server_ip, COUNT(DISTINCT(IF(direction = 'Inbound', client_ip, NULL))) AS external_uniq_client_ip FROM $table WHERE $intervals_and_filter
+#end
+#sql("TRAFFIC_SPECTRUM_APP_DISTRIBUTION_SERVER_IP")
+SELECT server_ip as value, SUM(bytes) as bytes, SUM(sessions) as sessions, SUM(pkts) as packets FROM $table WHERE $intervals_and_filter GROUP BY server_ip ORDER BY bytes DESC $limit
+#end
+#sql("TRAFFIC_SPECTRUM_APP_DISTRIBUTION_SERVER_DOMAIN")
+SELECT server_domain as value, SUM(bytes) as bytes, SUM(sessions) as sessions, SUM(pkts) as packets FROM $table WHERE $intervals_and_filter AND notEmpty(server_domain) GROUP BY server_domain ORDER BY bytes DESC $limit
+#end
+#sql("TRAFFIC_SPECTRUM_CLIENT_IP_CONNECT_APPLICATION_USAGE")
+SELECT direction, SUM(temp_sessions) AS sessions, SUM(temp_bytes) AS bytes, SUM(temp_packets) AS packets, MAX(temp_rate) AS max_rate, ROUND(SUM(temp_bytes) * 8 /$interval_seconds) AS avg_rate, client_ip AS client_ip, client_country AS client_country, app AS app, app_category AS app_category FROM( SELECT toUnixTimestamp(toDateTime(toStartOfInterval(toDateTime(recv_time), INTERVAL $granularity_seconds SECOND))) AS stat_time, IF(client_ip IN ( SELECT client_ip FROM $table WHERE $intervals_and_filter AND notEmpty(app) GROUP BY client_ip,app ORDER BY SUM(bytes) DESC $limit), client_ip, 'Other') AS client_ip, IF(client_country IN ( SELECT anyLast(client_country) FROM $table WHERE $intervals_and_filter AND notEmpty(app) GROUP BY client_ip,app ORDER BY SUM(bytes) DESC $limit), client_country, 'Other') AS client_country, IF(app IN ( SELECT app FROM $table WHERE $intervals_and_filter AND notEmpty(app) GROUP BY client_ip,app ORDER BY SUM(bytes) DESC $limit), app, 'Other') AS app, IF(app_category IN ( SELECT anyLast(app_category) FROM $table WHERE $intervals_and_filter AND notEmpty(app) GROUP BY client_ip,app ORDER BY SUM(bytes) DESC $limit), app_category, 'Other') AS app_category, direction, SUM(sessions) AS temp_sessions, SUM(bytes) AS temp_bytes, SUM(pkts) AS temp_packets, ROUND(SUM(bytes) * 8 / $granularity_seconds) AS temp_rate FROM $table WHERE $intervals_and_filter GROUP BY stat_time, client_ip, client_country, app, app_category, direction) GROUP BY client_ip, client_country, app, app_category, direction ORDER BY bytes DESC
#end
-#sql("REAL_TIME_DATA_ANALYTICS_MOBILE_IDENTITY_RELATE_TEID")
-SELECT uplink_teid AS teid, apn, phone_number, imsi, imei, last_update_time,vsys_id FROM $table WHERE $intervals_and_filter AND msg_type = 1 and uplink_teid !=0 order by last_update_time desc $limit
+#sql("TRAFFIC_SPECTRUM_NETWORK_THROUGHPUT_TREND")
+SELECT FROM_UNIXTIME(TIME_FLOOR_WITH_FILL(recv_time, '$granularity', 'zero')) AS stat_time, ROUND(RATE(bytes,$granularity_seconds,1)) * 8 AS avg_bits_per_sec, ROUND(RATE(bytes,$granularity_seconds,1)) AS avg_bytes_per_sec, ROUND(RATE(pkts,$granularity_seconds,1)) AS avg_pkts_per_sec, ROUND(RATE(sessions,$granularity_seconds,1)) AS avg_sessions_per_sec FROM $table WHERE $intervals_and_filter GROUP BY stat_time ORDER BY stat_time ASC $limit
#end \ No newline at end of file
diff --git a/src/main/resources/dsl-validation.json b/src/main/resources/dsl-validation.json
index f54eb056..715ac00f 100644
--- a/src/main/resources/dsl-validation.json
+++ b/src/main/resources/dsl-validation.json
@@ -1,12 +1,11 @@
{
+ "$schema": "http://json-schema.org/draft-07/schema#",
"title": "服务推荐",
"description": "查询",
"type": "object",
"properties": {
"clientId": {
"description": "唯一标识符",
- "maximum": 2147483647,
- "minimum": -2147483648,
"type": [
"null",
"integer"
@@ -21,12 +20,10 @@
"properties": {
"queryType": {
"description": "查询类型",
- "maxLength": 65535,
"type": "string"
},
"dataSource": {
"description": "查询数据源",
- "maxLength": 65535,
"type": "string"
},
"parameters": {
@@ -47,19 +44,16 @@
"properties": {
"type": {
"description": "匹配符号",
- "maxLength": 65535,
"type": "string"
},
"fieldKey": {
"description": "查询字段",
- "maxLength": 65535,
"type": "string"
},
"fieldValues": {
"description": "查询参数",
"type": "array",
"items": {
- "maxLength": 65535,
"type": "string"
}
}
@@ -76,19 +70,16 @@
"properties": {
"type": {
"description": "匹配符号",
- "maxLength": 65535,
"type": "string"
},
"fieldKey": {
"description": "查询字段",
- "maxLength": 65535,
"type": "string"
},
"fieldValues": {
"description": "查询参数",
"type": "array",
"items": {
- "maxLength": 65535,
"type": "string"
}
}
@@ -108,21 +99,16 @@
"properties": {
"type": {
"description": "匹配符号",
- "maxLength": 65535,
"type": "string"
},
"fieldKey": {
"description": "查询字段",
- "maxLength": 65535,
"type": "string"
},
"fieldValues": {
"description": "查询参数",
"type": "array",
"items": {
- "maximum": 2147483647,
- "minimum": -2147483648,
- "maxLength": 65535,
"type": [
"integer",
"string"
@@ -141,21 +127,16 @@
"properties": {
"type": {
"description": "匹配符号",
- "maxLength": 65535,
"type": "string"
},
"fieldKey": {
"description": "查询字段",
- "maxLength": 65535,
"type": "string"
},
"fieldValues": {
"description": "查询参数",
"type": "array",
"items": {
- "maximum": 2147483647,
- "minimum": -2147483648,
- "maxLength": 65535,
"type": [
"integer",
"string"
@@ -169,11 +150,9 @@
"description": "查询时间",
"type": "array",
"items": {
- "maxLength": 65535,
"type": "string"
},
"additionalItems": {
- "maxLength": 65535,
"type": "string"
}
},
@@ -186,12 +165,10 @@
"properties": {
"type": {
"description": "匹配符号",
- "maxLength": 65535,
"type": "string"
},
"fieldKey": {
"description": "排序字段",
- "maxLength": 65535,
"type": "string"
}
}
@@ -202,12 +179,10 @@
"properties": {
"type": {
"description": "匹配符号",
- "maxLength": 65535,
"type": "string"
},
"fieldKey": {
"description": "排序字段",
- "maxLength": 65535,
"type": "string"
}
}
@@ -215,9 +190,6 @@
},
"limit": {
"description": "查询条数",
- "maxLength": 65535,
- "maximum": 2147483647,
- "minimum": 0,
"type": [
"integer",
"string"
diff --git a/src/main/resources/http-sql-template.sql b/src/main/resources/http-sql-template.sql
index c81bfb98..4be71868 100644
--- a/src/main/resources/http-sql-template.sql
+++ b/src/main/resources/http-sql-template.sql
@@ -1,109 +1,9 @@
-#sql("NETWORK_OVERVIEW_ASYMMETRIC_FLOWS_STAT")
-SELECT SUM(asymmetric_c2s_flows + asymmetric_s2c_flows) AS asymmetric_flows, SUM(closed_sessions) AS total_session_used_on_asymmetric_flows FROM traffic_general_stat WHERE %s %s LIMIT 1
-#end
-
-#sql("NETWORK_OVERVIEW_STAT")
-SELECT APPROX_COUNT_DISTINCT_DS_HLL(client_ip_sketch) AS uniq_client_ip, SUM(c2s_fragments + s2c_fragments) AS fragmentation_packets, SUM(c2s_bytes + s2c_bytes) AS total_bytes, SUM(c2s_pkts + s2c_pkts) AS total_packets, SUM(sessions) AS total_sessions, (SUM(c2s_bytes + s2c_bytes) * 8)/(%s -%s) AS data_rate FROM %s WHERE %s %s AND protocol_stack_id = '%s' LIMIT 1
-#end
-
-#sql("NETWORK_OVERVIEW_TCP_STAT")
-SELECT SUM(c2s_tcp_retransmitted_pkts + s2c_tcp_retransmitted_pkts) AS tcp_retransmissions_packets, SUM(c2s_pkts + s2c_pkts) AS tcp_total_packets FROM %s WHERE %s %s AND RIGHT(protocol_stack_id, 4) = '.TCP' LIMIT 1
-#end
-
-#sql("NETWORK_OVERVIEW_APP_STAT")
-SELECT SUM(c2s_bytes + s2c_bytes) as unknown_app_bytes FROM %s WHERE %s %s AND app_name = 'unknown' LIMIT 1
-#end
-
-#sql("PROTOCOL_TREE_SUMMARY")
-SELECT protocol_stack_id, SUM(sessions) as sessions,SUM(c2s_bytes) as c2s_bytes, SUM(c2s_pkts) as c2s_pkts, SUM(s2c_bytes) as s2c_bytes, SUM(s2c_pkts) as s2c_pkts FROM %s WHERE %s %s GROUP BY protocol_stack_id
-#end
-
-#sql("PROTOCOL_DATA_RATE_SUMMARY")
-(SELECT TIME_FORMAT(MILLIS_TO_TIMESTAMP( 1000 * TIME_FLOOR_WITH_FILL(TIMESTAMP_TO_MILLIS(__time)/1000, '%s', 'zero')), 'yyyy-MM-dd HH:mm:ss') as stat_time, protocol_stack_id as type, sum(c2s_bytes + s2c_bytes) as bytes from %s where %s %s and protocol_stack_id = '%s' group by TIME_FORMAT(MILLIS_TO_TIMESTAMP( 1000 * TIME_FLOOR_WITH_FILL(TIMESTAMP_TO_MILLIS(__time)/1000, '%s', 'zero')), 'yyyy-MM-dd HH:mm:ss'), protocol_stack_id order by stat_time asc)
-union all
-(SELECT TIME_FORMAT(MILLIS_TO_TIMESTAMP( 1000 * TIME_FLOOR_WITH_FILL(TIMESTAMP_TO_MILLIS(__time)/1000, '%s', 'zero')), 'yyyy-MM-dd HH:mm:ss') as stat_time, protocol_stack_id as type, sum(c2s_bytes + s2c_bytes) as bytes from %s where %s %s and protocol_stack_id like CONCAT('%s','.%s') and LENGTH(protocol_stack_id) = LENGTH(REPLACE(protocol_stack_id,'.','')) + 1 + %s group by TIME_FORMAT(MILLIS_TO_TIMESTAMP( 1000 * TIME_FLOOR_WITH_FILL(TIMESTAMP_TO_MILLIS(__time)/1000, '%s', 'zero')), 'yyyy-MM-dd HH:mm:ss'), protocol_stack_id order by stat_time asc)
-#end
-
-#sql("APP_DATA_SUMMARY")
-SELECT app_name as app_name, SUM(bytes) as bytes, SUM(sessions) as sessions, MAX(bytes_rate) as max_rate, AVG(bytes_rate) as avg_rate FROM (SELECT app_name as app_name, SUM(sessions) as sessions, SUM(c2s_bytes + s2c_bytes) as bytes, SUM(c2s_bytes + s2c_bytes) * 8 / %s as bytes_rate FROM %s WHERE %s %s AND notEmpty(app_name) GROUP BY app_name ORDER BY bytes DESC ) GROUP BY app_name ORDER BY bytes DESC %s
-#end
-
-#sql("APP_INTERNAL_IP_SUMMARY")
-SELECT if(bitAnd(flags, 8) = 8, client_ip, server_ip) AS ip, SUM(sent_bytes + received_bytes) AS bytes FROM %s WHERE %s %s GROUP BY ip ORDER BY bytes DESC %s
-#end
-
-#sql("APP_DATA_RATE_SUMMARY")
-SELECT TIME_FORMAT(MILLIS_TO_TIMESTAMP( 1000 * TIME_FLOOR_WITH_FILL(TIMESTAMP_TO_MILLIS(__time)/ 1000, '%s', 'zero')), 'yyyy-MM-dd HH:mm:ss') as stat_time, app_name , sum(c2s_bytes + s2c_bytes) as bytes from %s where %s %s group by TIME_FORMAT(MILLIS_TO_TIMESTAMP( 1000 * TIME_FLOOR_WITH_FILL(TIMESTAMP_TO_MILLIS(__time)/ 1000, '%s', 'zero')), 'yyyy-MM-dd HH:mm:ss'), app_name order by stat_time asc
-#end
-
-#sql("APP_TRAFFIC_SUMMARY")
-SELECT app_name, SUM(sessions) as sessions, sum(c2s_bytes + s2c_bytes) as bytes, sum(s2c_bytes) as received_bytes, sum(c2s_bytes) as sent_bytes, sum(c2s_pkts + s2c_pkts) as packets, sum(c2s_pkts) as sent_packets, sum(s2c_pkts) as received_packets from %s where %s %s group by app_name
-#end
-
-#sql("ENTITY_ACTIVE_CLIENT_IP")
-select client_ip as client_ip, vsys_id as vsys_id from %s where %s %s AND notEmpty(client_ip) GROUP BY client_ip, app, vsys_id ORDER BY COUNT(1) DESC LIMIT %s
-#end
-
-#sql("ENTITY_UDP_SESSION")
-select server_ip as server_ip,vsys_id as vsys_id ,COUNT(1) as sessions from %s where %s AND (arrayElement(splitByString('.',protocol_path),length(splitByString('.',protocol_path))) = 'UDP') and server_port in (53 , 443) group by server_ip,vsys_id order by sessions desc limit %s
-#end
-
-#sql("ENTITY_UDP_UNIQ_CLIENT_IPS")
-select server_ip as server_ip, vsys_id as vsys_id from %s where %s AND (arrayElement(splitByString('.',protocol_path),length(splitByString('.',protocol_path))) = 'UDP') group by server_ip,vsys_id order by COUNT(DISTINCT(client_ip)) desc limit %s
-#end
-
-#sql("ENTITY_TCP_SESSION")
-select server_ip as server_ip,vsys_id as vsys_id ,COUNT(1) as sessions from %s where %s AND (arrayElement(splitByString('.',protocol_path),length(splitByString('.',protocol_path))) = 'TCP') group by server_ip,vsys_id order by sessions desc limit %s
-#end
-
-#sql("ENTITY_TCP_UNIQ_CLIENT_IPS")
-select server_ip as server_ip, vsys_id as vsys_id from %s where %s AND (arrayElement(splitByString('.',protocol_path),length(splitByString('.',protocol_path))) = 'TCP') group by server_ip, vsys_id order by COUNT(DISTINCT(client_ip)) desc limit %s
-#end
-
-#sql("TOP_ENTITY_TCP_UNIQ_CLIENT_IPS")
-select COUNT(DISTINCT(client_ip)) as client_ips from %s where %s AND (arrayElement(splitByString('.',protocol_path),length(splitByString('.',protocol_path))) = 'TCP') AND server_ip in (select server_ip from %s as cc where %s AND (arrayElement(splitByString('.',protocol_path),length(splitByString('.',protocol_path))) = 'TCP') group by server_ip order by COUNT(DISTINCT(client_ip)) desc limit %s)
-#end
-
-#sql("TOP_ENTITY_UDP_UNIQ_CLIENT_IPS")
-select COUNT(DISTINCT(client_ip)) as client_ips from %s where %s AND (arrayElement(splitByString('.',protocol_path),length(splitByString('.',protocol_path))) = 'UDP') AND server_ip in (select server_ip from %s as cc where %s AND (arrayElement(splitByString('.',protocol_path),length(splitByString('.',protocol_path))) = 'UDP') group by server_ip order by COUNT(DISTINCT(client_ip)) desc limit %s)
-#end
-
-#sql("TOTAL_ENTITY_UDP_SESSION")
-select COUNT(1) as sessions from %s where %s AND (arrayElement(splitByString('.',protocol_path),length(splitByString('.',protocol_path))) = 'UDP') and server_port in (53 , 443) limit 1
-#end
-
-#sql("TOTAL_ENTITY_UDP_UNIQ_CLIENT_IPS")
-select COUNT(DISTINCT(client_ip)) as uniq_client_ips from %s where %s AND (arrayElement(splitByString('.',protocol_path),length(splitByString('.',protocol_path))) = 'UDP') limit 1
-#end
-
-#sql("TOTAL_ENTITY_TCP_SESSION")
-select COUNT(1) as sessions from %s where %s AND (arrayElement(splitByString('.',protocol_path),length(splitByString('.',protocol_path))) = 'TCP') limit 1
-#end
-
-#sql("TOTAL_ENTITY_TCP_UNIQ_CLIENT_IPS")
-select COUNT(DISTINCT(client_ip)) as uniq_client_ips from %s where %s AND (arrayElement(splitByString('.',protocol_path),length(splitByString('.',protocol_path))) = 'TCP') limit 1
-#end
-
-#sql("ENTITY_TOP_SNI")
-select ssl_sni, SUM(sent_bytes + received_bytes) AS bytes, COUNT(1) AS sessions,vsys_id as vsys_id from %s where %s and notEmpty(ssl_sni) and decoded_as = 'SSL' group by ssl_sni,vsys_id order by COUNT(1) desc limit %s
-#end
-
-#sql("ENTITY_SNI_TOTAL")
-select SUM(sent_bytes + received_bytes) AS bytes, COUNT(1) AS sessions from %s where %s and decoded_as = 'SSL' limit 1
-#end
-
#sql("SYSTEM_STORAGE_QUOTA")
-SELECT type, SUM(used_size) as used_size, SUM(max_size) * 7 / 10 as max_size, TIME_FORMAT(MILLIS_TO_TIMESTAMP(ANY_VALUE(last_storage) * 1000), 'YYYY-MM-dd') as first_storage FROM
- (SELECT log_type as type, LATEST(used_size) as used_size, LATEST(max_size) as max_size, LATEST(last_storage) as last_storage FROM sys_storage_log WHERE __time >= CURRENT_TIMESTAMP - INTERVAL '1' HOUR AND data_center != '' GROUP BY data_center, log_type)
-GROUP BY type
+SELECT log_type AS type, SUM(used_size) AS used_size, SUM(total_allocated_size) AS total_allocated_size, MIN(since_time) AS since_time FROM (SELECT log_type, used_size, total_allocated_size, DATE(FROM_UNIXTIME(since_time)) AS since_time, ROW_NUMBER() OVER (PARTITION BY log_type ORDER BY generated_time DESC) AS row_num FROM sys_storage_event WHERE generated_time >= UNIX_TIMESTAMP(DATE_SUB(NOW(), INTERVAL 1 WEEK)) ) AS subquery WHERE row_num = 1 GROUP BY log_type
#end
#sql("SYSTEM_DAILY_TREND_OF_STORAGE")
-select TIME_FORMAT(__time, 'YYYY-MM-dd') as stat_time, log_type as type, sum(aggregate_size) as used_size from sys_storage_log where __time >= '%s' and __time < '%s' group by TIME_FORMAT(__time, 'YYYY-MM-dd'), log_type
-#end
-
-#sql("RELATION_SUBSCRIBER_ID")
-SELECT account AS SUBSCRIBER_ID,framed_ip AS IP,first_found_time AS FIRST_FOUND_TIME,last_update_time AS LAST_FOUND_TIME,vsys_id as VSYS_ID FROM tsg_galaxy.relation_account_framedip WHERE 1=1 %s AND acct_status_type != 2 LIMIT %s
+SELECT DATE(FROM_UNIXTIME(generated_time)) AS stat_time, log_type AS type, sum( bytes ) AS used_size FROM sys_storage_event WHERE generated_time >= UNIX_TIMESTAMP('%s') AND generated_time < UNIX_TIMESTAMP('%s') GROUP BY stat_time, type ORDER BY stat_time ASC
#end
#sql("SCHEMA_STORAGE_SIZE")
@@ -114,25 +14,10 @@ SELECT name AS field, IFNULL(SUM(data_compressed_bytes), 0) AS bytes FROM column
SELECT name, engine_full AS table_ttl FROM tables WHERE name ='%s_local' AND "database" = '%s' AND "engine" ='MergeTree'
#end
-#sql("GTPC_KNOWLEDGE_BASE")
-SELECT uplink_teid AS teid, apn, phone_number, imsi, imei, last_update_time,vsys_id FROM tsg_galaxy.gtpc_knowledge_base WHERE msg_type = 1 and uplink_teid !=0 %s order by last_update_time desc limit %s
-#end
-
#sql("TABLE_INDEX_KEY")
SELECT any(splitByString(', ',sorting_key)) AS index_key FROM tables_cluster where name = '%s_local' AND "database" = '%s'
#end
-#sql("UNSTRUCTURED_DATA_PATH")
-SELECT log_id AS log_id,%s AS file_path FROM %s where %s AND notEmpty(%s) ORDER BY recv_time DESC LIMIT %s
-#end
-
-#sql("UNSTRUCTURED_DATA_PATH_OLD")
-SELECT common_log_id AS log_id,%s AS file_path FROM %s where %s AND notEmpty(%s) ORDER BY common_recv_time DESC LIMIT %s
-#end
-
#sql("SQL_DATASETS")
-SELECT id, identifier_name, category, execute_engine, type, template, description, generated_time, last_update_time FROM dataset %s ORDER BY last_update_time DESC
+SELECT id, identifier_name, category, backend_engine, type, template, description, generated_time, last_update_time FROM dataset %s ORDER BY last_update_time DESC
#end
-
-#sql("SQL_DATASETS_CATEGORY")
-SELECT category FROM dataset GROUP BY category
diff --git a/src/main/resources/job-sql-template.sql b/src/main/resources/job-sql-template.sql
index 36ef3c40..5be4a538 100644
--- a/src/main/resources/job-sql-template.sql
+++ b/src/main/resources/job-sql-template.sql
@@ -17,13 +17,13 @@ SELECT count(*) AS logCount, %s AS totalMetric FROM %s %s LIMIT 1
UPSERT INTO "tsg_galaxy"."job_result"( ROWKEY, "detail"."is_done", "detail"."done_progress", "detail"."duration_time") VALUES('%s', %s, %s, %s)
#end
#sql("JOB_TOPK_DEFAULT")
-SELECT %s AS value, %s(%s) AS %s FROM %s %s GROUP BY value ORDER BY %s DESC limit 10000
+SELECT %s AS value, %s(%s) AS %s%s FROM %s %s GROUP BY value ORDER BY %s%s DESC limit %s
#end
#sql("JOB_TOPK_ARRAY")
-SELECT value, %s(metric) AS %s FROM (SELECT arrayJoin(items) AS value, metric FROM (SELECT %s AS items, %s AS metric FROM %s WHERE notEmpty(%s) %s GROUP BY items )) GROUP BY value ORDER BY %s DESC LIMIT 10000
+SELECT value, %s(metric) AS %s%s FROM (SELECT arrayJoin(items) AS value, metric FROM (SELECT IF(empty(%s), arrayPushBack(%s, NULL), %s) AS items, %s AS metric FROM %s %s GROUP BY items )) GROUP BY value ORDER BY %s%s DESC LIMIT %s
#end
#sql("JOB_TOPK_BIT")
-SELECT value, %s(metric) AS %s FROM (SELECT arrayJoin(items) AS value, metric FROM (SELECT bitmaskToArray(%s) AS items, %s AS metric FROM %s %s GROUP BY items )) GROUP BY value ORDER BY %s DESC LIMIT 10000
+SELECT value, %s(metric) AS %s%s FROM (SELECT arrayJoin(items) AS value, metric FROM (SELECT bitmaskToArray(%s) AS items, %s AS metric FROM %s %s GROUP BY items )) GROUP BY value ORDER BY %s%s DESC LIMIT %s
#end
#sql("JOB_UPDATE_FIELD_VALUE_SINGLE_QUOTATION")
UPSERT into "tsg_galaxy"."job_result" (ROWKEY, "%s"."%s") values('%s', '%s')
@@ -50,7 +50,7 @@ SELECT %s FROM %s WHERE %s LIMIT 1
INSERT INTO saved_query_job (job_id, query_sql, state, done_progress, is_failed, result_message, elapsed, rows_read, bytes_read, result_rows, result_bytes, is_valid, start_time, end_time, last_update_time, generated_time ) VALUES ('%s', '%s', 'PENDING', 0, 0, NULL, NULL, NULL, NULL, NULL, NULL, 1, NULL, NULL, '%s', '%s')
#end
#sql("SAVED_QUERY_JOB_STATUS")
-SELECT state, done_progress, is_failed, is_valid, start_time, end_time, rows_read, query_sql, job_id FROM saved_query_job WHERE job_id IN ('%s') LIMIT %s
+SELECT state, done_progress, is_failed, result_message, is_valid, start_time, end_time, rows_read, query_sql, job_id FROM saved_query_job WHERE job_id IN ('%s') LIMIT %s
#end
#sql("SAVED_QUERY_JOB_SAME")
SELECT job_id AS id FROM saved_query_job WHERE is_valid = 1 AND state IN ( 'PENDING', 'RUNNING') AND is_failed = 0 AND query_sql = '%s' LIMIT 1
diff --git a/src/main/resources/schema-syntax-validation.json b/src/main/resources/schema-syntax-validation.json
new file mode 100644
index 00000000..2870eba3
--- /dev/null
+++ b/src/main/resources/schema-syntax-validation.json
@@ -0,0 +1,120 @@
+{
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "properties": {
+ "type": {
+ "enum": ["record","enum","array","map","union","fixed"]
+ },
+ "doc": {
+ "properties": {
+ "schema_query": {
+ "properties": {
+ "time": {
+ "items": {
+ "enum": [
+ "$ENUM_VALUES$"
+ ]
+ },
+ "uniqueItems": true
+ },
+ "dimensions": {
+ "items": {
+ "enum": [
+ "$ENUM_VALUES$"
+ ]
+ },
+ "uniqueItems": true
+ },
+ "metrics": {
+ "items": {
+ "enum": [
+ "$ENUM_VALUES$"
+ ]
+ },
+ "uniqueItems": true
+ },
+ "filters": {
+ "items": {
+ "enum": [
+ "$ENUM_VALUES$"
+ ]
+ },
+ "uniqueItems": true
+ },
+ "details": {
+ "patternProperties": {
+ ".*": {
+ "items": {
+ "enum": [
+ "$ENUM_VALUES$"
+ ]
+ },
+ "uniqueItems": true
+ }
+ }
+ }
+ }
+ },
+ "decoded_as": {
+ "patternProperties": {
+ ".*": {
+ "patternProperties": {
+ ".*": {
+ "items": {
+ "enum": [
+ "$ENUM_VALUES$"
+ ]
+ },
+ "uniqueItems": true
+ }
+ }
+ }
+ }
+ }
+ },
+ "message": {
+ "enum" : "{0}: described field not found in $.fields[*].name"
+ }
+ },
+ "fields": {
+ "items": {
+ "properties": {
+ "type": {
+ "oneOf": [
+ {
+ "type": "string",
+ "enum": ["null", "boolean", "int", "long", "float", "double", "bytes", "string"]
+ },
+ {
+ "type": "object",
+ "properties": {
+ "type": {
+ "enum": ["array", "null", "boolean", "int", "long", "float", "double", "bytes", "string"]
+ },
+ "items": {
+ "enum": ["null", "boolean", "int", "long", "float", "double", "bytes", "string"]
+ }
+ },
+ "required": ["type"]
+ }
+ ]
+ },
+ "doc": {
+ "required": [
+ "visibility"
+ ]
+ }
+ },
+ "required": [
+ "name",
+ "type"
+ ]
+ }
+ }
+ },
+ "required": [
+ "type",
+ "name",
+ "namespace",
+ "fields"
+ ]
+} \ No newline at end of file
diff --git a/src/test/java/com/mesalab/GalaxyQGWApplicationTests.java b/src/test/java/com/mesalab/GalaxyQGWApplicationTests.java
index 276c3fa3..fd89b068 100644
--- a/src/test/java/com/mesalab/GalaxyQGWApplicationTests.java
+++ b/src/test/java/com/mesalab/GalaxyQGWApplicationTests.java
@@ -9,6 +9,7 @@ import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.boot.web.server.LocalServerPort;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.junit4.SpringRunner;
@@ -18,12 +19,15 @@ import java.util.Map;
import java.util.Objects;
@RunWith(SpringRunner.class)
-@SpringBootTest(classes = {GalaxyQGWApplication.class}, webEnvironment = SpringBootTest.WebEnvironment.DEFINED_PORT)
+@SpringBootTest(classes = {GalaxyQGWApplication.class}, webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT)
@EnableAutoConfiguration
@ActiveProfiles("test")
public class GalaxyQGWApplicationTests {
private static final Log log = LogFactory.get();
+ @LocalServerPort
+ protected int testPort;
+
static {
System.setProperty("jasypt.encryptor.password", "galaxy");
System.setProperty("JM.SNAPSHOT.PATH", "config");
diff --git a/src/test/java/com/mesalab/knowledge/JsonSchemaTest.java b/src/test/java/com/mesalab/knowledge/JsonSchemaTest.java
deleted file mode 100644
index ec78f262..00000000
--- a/src/test/java/com/mesalab/knowledge/JsonSchemaTest.java
+++ /dev/null
@@ -1,107 +0,0 @@
-package com.mesalab.knowledge;
-
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import com.fasterxml.jackson.databind.JsonNode;
-import com.github.fge.jackson.JsonLoader;
-import com.github.fge.jsonschema.core.exceptions.ProcessingException;
-import com.github.fge.jsonschema.core.report.ProcessingMessage;
-import com.github.fge.jsonschema.core.report.ProcessingReport;
-import com.github.fge.jsonschema.main.JsonSchema;
-import com.github.fge.jsonschema.main.JsonSchemaFactory;
-import com.mesalab.GalaxyQGWApplicationTests;
-import org.apache.commons.lang3.Validate;
-import org.junit.Test;
-import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
-
-import java.io.IOException;
-import java.util.Iterator;
-
-/**
- * @description:
- * @author: zhq
- * @create: 2020-07-31
- **/
-@EnableAutoConfiguration
-public class JsonSchemaTest extends GalaxyQGWApplicationTests {
- private static final Log log = LogFactory.get();
-
- @Test
- public void jsonTest() {
- //创建jsonschema工厂
- String jsonStr = "{\n" +
- " \"clientId\": null,\n" +
- " \"query\": {\n" +
- " \"queryType\": \"iplearning\",\n" +
- " \"dataSource\": \"IP_LEARNING_VIEW\",\n" +
- " \"parameters\": {\n" +
- " \"match\": [\n" +
- " {\n" +
- " \"type\": \"substring\",\n" +
- " \"fieldKey\": \"FQDN_NAME\",\n" +
- " \"fieldValues\": \n" +
- " [\"360\"]\n" +
- " \n" +
- " }\n" +
- " ],\n" +
- " \"range\": [\n" +
- " {\n" +
- " \"type\": \"ge\",\n" +
- " \"fieldKey\": \"PROTOCOL\",\n" +
- " \"fieldValues\": [\n" +
- " \"HTTP\"\n" +
- " ]\n" +
- " },\n" +
- " {\n" +
- " \"type\": \"eq\",\n" +
- " \"fieldKey\": \"DEPTH\",\n" +
- " \"fieldValues\": [\n" +
- " 1\n" +
- " ]\n" +
- " },\n" +
- " {\n" +
- " \"type\": \"ge\",\n" +
- " \"fieldKey\": \"UNIQ_CIP\",\n" +
- " \"fieldValues\": \n" +
- " [5]\n" +
- " \n" +
- " }\n" +
- " ],\n" +
- " \"intervals\": [\n" +
- " \"2020-07-01 00:00:00/2020-08-02 00:00:00\"]\n" +
- " ,\n" +
- " \"limit\": \"15\"\n" +
- " }\n" +
- " }\n" +
- "}";
- try {
- JsonSchemaFactory factory = JsonSchemaFactory.byDefault();
- //通过jsonschemaFactory获取jsonnode对象
- JsonNode schemaNode = JsonLoader.fromResource("/dsl-validation.json");
- //通过jsonstr字符串获取对应的jsonnode对象
- JsonNode dataNode = JsonLoader.fromString(jsonStr);
- JsonSchema jsonSchema = factory.getJsonSchema(schemaNode);
- //使用json-schema-validator中的jsonschema对象的validate方法对数据进行校验
- //获取处理的报告信息
- ProcessingReport processingReport = jsonSchema.validate(dataNode);
- //获取完整的报告信息
- Iterator<ProcessingMessage> iterator = processingReport.iterator();
- StringBuffer sb = new StringBuffer();
- while (iterator.hasNext()) {
- ProcessingMessage next = iterator.next();
- JsonNode jsonNode = next.asJson();
- sb.append("pointer on ");
- sb.append(jsonNode.get("instance").get("pointer"));
- sb.append(", ");
- sb.append(next.getMessage());
- sb.append(". ");
- }
- //判断校验是否成功,如果为true成功
- Validate.isTrue(processingReport.isSuccess(), sb.toString());
- } catch (ProcessingException e) {
- log.error("ProcessingException information: ", e.getMessage());
- } catch (IOException e) {
- log.error("IOException information: ", e.getMessage());
- }
- }
-}
diff --git a/src/test/java/com/mesalab/qgw/service/ApplicationAndProtocolTest.java b/src/test/java/com/mesalab/qgw/service/ApplicationAndProtocolTest.java
new file mode 100644
index 00000000..dffa9959
--- /dev/null
+++ b/src/test/java/com/mesalab/qgw/service/ApplicationAndProtocolTest.java
@@ -0,0 +1,113 @@
+package com.mesalab.qgw.service;
+
+import com.mesalab.GalaxyQGWApplicationTests;
+import com.mesalab.common.entity.BaseResult;
+import com.mesalab.qgw.controller.QueryController;
+import com.mesalab.qgw.model.basic.DSLQueryRequestParam;
+import org.junit.Assert;
+import org.junit.Test;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
+import org.springframework.web.context.request.async.DeferredResult;
+
+import java.util.Map;
+
+/**
+ * @Author ww
+ * @Date 2024/1/30
+ */
+@EnableAutoConfiguration
+public class ApplicationAndProtocolTest extends GalaxyQGWApplicationTests {
+
+ @Autowired
+ private QueryController queryController;
+
+ @Test
+ public void testAppAndProtocolSummary() {
+ DSLQueryRequestParam body = jsonToInParameter("parameters/applicationAndProtocolTest.json", "application_and_protocol_summary", DSLQueryRequestParam.class);
+ BaseResult commitResult = queryController.createDSLQuery(body);
+ Assert.assertTrue(commitResult.getMessage(), commitResult.getStatus() == 201);
+ String id = ((Map<String, Object>) commitResult.getJob()).get("job_id").toString();
+ DeferredResult<BaseResult> getResult = new DeferredResult<>();
+ while (true) {
+ if (getResult.isSetOrExpired()) {
+ break;
+ }
+ getResult = queryController.getJobStatusById(id, 0);
+ }
+ BaseResult result = (BaseResult) getResult.getResult();
+ Assert.assertTrue(result.getMessage(), result.getStatus() == 200);
+ }
+
+ @Test
+ public void testAppAndProtocolTreeComposition() {
+ DSLQueryRequestParam body = jsonToInParameter("parameters/applicationAndProtocolTest.json", "application_and_protocol_tree_composition", DSLQueryRequestParam.class);
+ BaseResult commitResult = queryController.createDSLQuery(body);
+ Assert.assertTrue(commitResult.getMessage(), commitResult.getStatus() == 201);
+ String id = ((Map<String, Object>) commitResult.getJob()).get("job_id").toString();
+ DeferredResult<BaseResult> getResult = new DeferredResult<>();
+ while (true) {
+ if (getResult.isSetOrExpired()) {
+ break;
+ }
+ getResult = queryController.getJobResultById(id, 0);
+ }
+ BaseResult result = (BaseResult) getResult.getResult();
+ Assert.assertTrue(result.getMessage(), result.getStatus() == 200);
+ }
+
+ @Test
+ public void testAppAndProtocolTreeThroughput() {
+ DSLQueryRequestParam body = jsonToInParameter("parameters/applicationAndProtocolTest.json", "application_and_protocol_tree_throughput", DSLQueryRequestParam.class);
+ BaseResult commitResult = queryController.createDSLQuery(body);
+ Assert.assertTrue(commitResult.getMessage(), commitResult.getStatus() == 201);
+ String id = ((Map<String, Object>) commitResult.getJob()).get("job_id").toString();
+ DeferredResult<BaseResult> getResult = new DeferredResult<>();
+ while (true) {
+ if (getResult.isSetOrExpired()) {
+ break;
+ }
+ getResult = queryController.getJobResult(id, 0);
+ }
+ BaseResult result = (BaseResult) getResult.getResult();
+ Assert.assertTrue(result.getMessage(), result.getStatus() == 200);
+ }
+
+ @Test
+ public void testAppAndProtocolTopApp() {
+ DSLQueryRequestParam body = jsonToInParameter("parameters/applicationAndProtocolTest.json", "application_and_protocol_top_apps", DSLQueryRequestParam.class);
+ BaseResult commitResult = queryController.createDSLQuery(body);
+ Assert.assertTrue(commitResult.getMessage(), commitResult.getStatus() == 201);
+ String id = ((Map<String, Object>) commitResult.getJob()).get("job_id").toString();
+ DeferredResult<BaseResult> getResult = new DeferredResult<>();
+ while (true) {
+ if (getResult.isSetOrExpired()) {
+ break;
+ }
+ getResult = queryController.getJobStatus(id, 0);
+ }
+ BaseResult result = (BaseResult) getResult.getResult();
+ Assert.assertTrue(result.getMessage(), result.getStatus() == 200);
+ }
+
+ @Test
+ public void testAppAndProtocolTreeAppSummaryOneshot() {
+ DSLQueryRequestParam body = jsonToInParameter("parameters/applicationAndProtocolTest.json", "application_and_protocol_app_summary", DSLQueryRequestParam.class);
+ BaseResult commitResult = queryController.createDSLQuery(body);
+ Assert.assertTrue(commitResult.getMessage(), commitResult.getStatus() == 200);
+ }
+
+ @Test
+ public void testAppAndProtocolAppRelatedInternalIpsOneshot() {
+ DSLQueryRequestParam body = jsonToInParameter("parameters/applicationAndProtocolTest.json", "application_and_protocol_app_related_internal_ips", DSLQueryRequestParam.class);
+ BaseResult commitResult = queryController.createDSLQuery(body);
+ Assert.assertTrue(commitResult.getMessage(), commitResult.getStatus() == 200);
+ }
+
+ @Test
+ public void testAppAndProtocolAppThroughputOneshot() {
+ DSLQueryRequestParam body = jsonToInParameter("parameters/applicationAndProtocolTest.json", "application_and_protocol_app_throughput", DSLQueryRequestParam.class);
+ BaseResult commitResult = queryController.createDSLQuery(body);
+ Assert.assertTrue(commitResult.getMessage(), commitResult.getStatus() == 200);
+ }
+}
diff --git a/src/test/java/com/mesalab/qgw/service/AutoPeriodDSLTest.java b/src/test/java/com/mesalab/qgw/service/AutoPeriodDSLTest.java
new file mode 100644
index 00000000..cabe8c5a
--- /dev/null
+++ b/src/test/java/com/mesalab/qgw/service/AutoPeriodDSLTest.java
@@ -0,0 +1,204 @@
+package com.mesalab.qgw.service;
+
+import cn.hutool.core.date.DateTime;
+import cn.hutool.core.date.DateUtil;
+import cn.hutool.log.Log;
+import cn.hutool.log.LogFactory;
+import com.google.common.collect.Lists;
+import com.mesalab.GalaxyQGWApplicationTests;
+import com.mesalab.common.utils.sqlparser.AutoPeriodHelper;
+import com.mesalab.qgw.model.basic.DSLQueryRequestParam;
+import org.junit.Assert;
+import org.junit.Test;
+import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
+
+/**
+ * TODO
+ *
+ * @Classname AutoPeriodTest
+ * @Date 2024/6/20 09:24
+ * @Author wWei
+ */
+@EnableAutoConfiguration
+public class AutoPeriodDSLTest extends GalaxyQGWApplicationTests {
+ private static final Log log = LogFactory.get();
+ private static final DateTime now;
+
+ static {
+ now = new DateTime();
+ }
+
+ @Test
+ public void testDSLLast5Minutes() {
+ DSLQueryRequestParam dslQueryRequestParam = jsonToInParameter("parameters/dslAutoGranularityTest.json", "application_and_protocol_summary_auto", DSLQueryRequestParam.class);
+ String endTime = DateUtil.format(now, "yyyy-MM-dd HH:mm:ss");
+ String startTime = DateUtil.format(DateUtil.offsetMinute(now, -5), "yyyy-MM-dd HH:mm:ss");
+ dslQueryRequestParam.setIntervals(Lists.newArrayList(startTime + "/" + endTime));
+ AutoPeriodHelper.buildDslGranularity(dslQueryRequestParam);
+ String granularity = dslQueryRequestParam.getGranularity();
+ log.info(granularity);
+ Assert.assertEquals("ERROR: don't expect period", "PT1S", granularity);
+ }
+
+ @Test
+ public void testDSLLast5MinutesISO8601() {
+ DSLQueryRequestParam dslQueryRequestParam = jsonToInParameter("parameters/dslAutoGranularityTest.json", "application_and_protocol_summary_auto", DSLQueryRequestParam.class);
+ String endTime = DateUtil.format(now, "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
+ String startTime = DateUtil.format(DateUtil.offsetMinute(now, -5), "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
+ dslQueryRequestParam.setIntervals(Lists.newArrayList(startTime + "/" + endTime));
+ AutoPeriodHelper.buildDslGranularity(dslQueryRequestParam);
+ String granularity = dslQueryRequestParam.getGranularity();
+ log.info(granularity);
+ Assert.assertEquals("ERROR: don't expect period", "PT1S", granularity);
+ }
+
+ @Test
+ public void testDSLLast30Minutes() {
+ DSLQueryRequestParam dslQueryRequestParam = jsonToInParameter("parameters/dslAutoGranularityTest.json", "application_and_protocol_summary_auto", DSLQueryRequestParam.class);
+ String endTime = DateUtil.format(now, "yyyy-MM-dd HH:mm:ss");
+ String startTime = DateUtil.format(DateUtil.offsetMinute(now, -30), "yyyy-MM-dd HH:mm:ss");
+ dslQueryRequestParam.setIntervals(Lists.newArrayList(startTime + "/" + endTime));
+ AutoPeriodHelper.buildDslGranularity(dslQueryRequestParam);
+ String granularity = dslQueryRequestParam.getGranularity();
+ log.info(granularity);
+ Assert.assertEquals("ERROR: don't expect period", "PT30S", granularity);
+ }
+
+ @Test
+ public void testDSLLast1Hour() {
+ DSLQueryRequestParam dslQueryRequestParam = jsonToInParameter("parameters/dslAutoGranularityTest.json", "application_and_protocol_summary_auto", DSLQueryRequestParam.class);
+ String endTime = DateUtil.format(now, "yyyy-MM-dd HH:mm:ss");
+ String startTime = DateUtil.format(DateUtil.offsetHour(now, -1), "yyyy-MM-dd HH:mm:ss");
+ dslQueryRequestParam.setIntervals(Lists.newArrayList(startTime + "/" + endTime));
+ AutoPeriodHelper.buildDslGranularity(dslQueryRequestParam);
+ String granularity = dslQueryRequestParam.getGranularity();
+ log.info(granularity);
+ Assert.assertEquals("ERROR: don't expect period", "PT30S", granularity);
+ }
+
+ @Test
+ public void testDSLLast6Hour() {
+ DSLQueryRequestParam dslQueryRequestParam = jsonToInParameter("parameters/dslAutoGranularityTest.json", "application_and_protocol_summary_auto", DSLQueryRequestParam.class);
+ String endTime = DateUtil.format(now, "yyyy-MM-dd HH:mm:ss");
+ String startTime = DateUtil.format(DateUtil.offsetHour(now, -6), "yyyy-MM-dd HH:mm:ss");
+ dslQueryRequestParam.setIntervals(Lists.newArrayList(startTime + "/" + endTime));
+ AutoPeriodHelper.buildDslGranularity(dslQueryRequestParam);
+ String granularity = dslQueryRequestParam.getGranularity();
+ log.info(granularity);
+ Assert.assertEquals("ERROR: don't expect period", "PT1M", granularity);
+ }
+
+ @Test
+ public void testDSLLast12Hour() {
+ DSLQueryRequestParam dslQueryRequestParam = jsonToInParameter("parameters/dslAutoGranularityTest.json", "application_and_protocol_summary_auto", DSLQueryRequestParam.class);
+ String endTime = DateUtil.format(now, "yyyy-MM-dd HH:mm:ss");
+ String startTime = DateUtil.format(DateUtil.offsetHour(now, -12), "yyyy-MM-dd HH:mm:ss");
+ dslQueryRequestParam.setIntervals(Lists.newArrayList(startTime + "/" + endTime));
+ AutoPeriodHelper.buildDslGranularity(dslQueryRequestParam);
+ String granularity = dslQueryRequestParam.getGranularity();
+ log.info(granularity);
+ Assert.assertEquals("ERROR: don't expect period", "PT1M", granularity);
+ }
+
+ @Test
+ public void testDSLLast1Day() {
+ DSLQueryRequestParam dslQueryRequestParam = jsonToInParameter("parameters/dslAutoGranularityTest.json", "application_and_protocol_summary_auto", DSLQueryRequestParam.class);
+ String endTime = DateUtil.format(now, "yyyy-MM-dd HH:mm:ss");
+ String startTime = DateUtil.format(DateUtil.offsetDay(now, -1), "yyyy-MM-dd HH:mm:ss");
+ dslQueryRequestParam.setIntervals(Lists.newArrayList(startTime + "/" + endTime));
+ AutoPeriodHelper.buildDslGranularity(dslQueryRequestParam);
+ String granularity = dslQueryRequestParam.getGranularity();
+ log.info(granularity);
+ Assert.assertEquals("ERROR: don't expect period", "PT5M", granularity);
+ }
+
+ @Test
+ public void testDSLLast2Day() {
+ DSLQueryRequestParam dslQueryRequestParam = jsonToInParameter("parameters/dslAutoGranularityTest.json", "application_and_protocol_summary_auto", DSLQueryRequestParam.class);
+ String endTime = DateUtil.format(now, "yyyy-MM-dd HH:mm:ss");
+ String startTime = DateUtil.format(DateUtil.offsetDay(now, -2), "yyyy-MM-dd HH:mm:ss");
+ dslQueryRequestParam.setIntervals(Lists.newArrayList(startTime + "/" + endTime));
+ AutoPeriodHelper.buildDslGranularity(dslQueryRequestParam);
+ String granularity = dslQueryRequestParam.getGranularity();
+ log.info(granularity);
+ Assert.assertEquals("ERROR: don't expect period", "PT5M", granularity);
+ }
+
+ @Test
+ public void testDSLLast3Day() {
+ DSLQueryRequestParam dslQueryRequestParam = jsonToInParameter("parameters/dslAutoGranularityTest.json", "application_and_protocol_summary_auto", DSLQueryRequestParam.class);
+ String endTime = DateUtil.format(now, "yyyy-MM-dd HH:mm:ss");
+ String startTime = DateUtil.format(DateUtil.offsetDay(now, -3), "yyyy-MM-dd HH:mm:ss");
+ dslQueryRequestParam.setIntervals(Lists.newArrayList(startTime + "/" + endTime));
+ AutoPeriodHelper.buildDslGranularity(dslQueryRequestParam);
+ String granularity = dslQueryRequestParam.getGranularity();
+ log.info(granularity);
+ Assert.assertEquals("ERROR: don't expect period", "PT5M", granularity);
+ }
+
+ @Test
+ public void testDSLLast1Week() {
+ DSLQueryRequestParam dslQueryRequestParam = jsonToInParameter("parameters/dslAutoGranularityTest.json", "application_and_protocol_summary_auto", DSLQueryRequestParam.class);
+ String endTime = DateUtil.format(now, "yyyy-MM-dd HH:mm:ss");
+ String startTime = DateUtil.format(DateUtil.offsetWeek(now, -1), "yyyy-MM-dd HH:mm:ss");
+ dslQueryRequestParam.setIntervals(Lists.newArrayList(startTime + "/" + endTime));
+ AutoPeriodHelper.buildDslGranularity(dslQueryRequestParam);
+ String granularity = dslQueryRequestParam.getGranularity();
+ log.info(granularity);
+ Assert.assertEquals("ERROR: don't expect period", "PT1H", granularity);
+ }
+
+ @Test
+ public void testDSLLast30Day() {
+ DSLQueryRequestParam dslQueryRequestParam = jsonToInParameter("parameters/dslAutoGranularityTest.json", "application_and_protocol_summary_auto", DSLQueryRequestParam.class);
+ String endTime = DateUtil.format(now, "yyyy-MM-dd HH:mm:ss");
+ String startTime = DateUtil.format(DateUtil.offsetDay(now, -30), "yyyy-MM-dd HH:mm:ss");
+ dslQueryRequestParam.setIntervals(Lists.newArrayList(startTime + "/" + endTime));
+ AutoPeriodHelper.buildDslGranularity(dslQueryRequestParam);
+ String granularity = dslQueryRequestParam.getGranularity();
+ log.info(granularity);
+ Assert.assertEquals("ERROR: don't expect period", "PT1H", granularity);
+ }
+
+ @Test
+ public void testDSLLast365Day() {
+ DSLQueryRequestParam dslQueryRequestParam = jsonToInParameter("parameters/dslAutoGranularityTest.json", "application_and_protocol_summary_auto", DSLQueryRequestParam.class);
+ String endTime = DateUtil.format(now, "yyyy-MM-dd HH:mm:ss");
+ String startTime = DateUtil.format(DateUtil.offsetDay(now, -365), "yyyy-MM-dd HH:mm:ss");
+ dslQueryRequestParam.setIntervals(Lists.newArrayList(startTime + "/" + endTime));
+ AutoPeriodHelper.buildDslGranularity(dslQueryRequestParam);
+ String granularity = dslQueryRequestParam.getGranularity();
+ log.info(granularity);
+ Assert.assertEquals("ERROR: don't expect period", "P1D", granularity);
+ }
+
+ @Test
+ public void testDSLConst() {
+ DSLQueryRequestParam dslQueryRequestParam = jsonToInParameter("parameters/dslAutoGranularityTest.json", "application_and_protocol_summary_const", DSLQueryRequestParam.class);
+ AutoPeriodHelper.buildDslGranularity(dslQueryRequestParam);
+ String granularity = dslQueryRequestParam.getGranularity();
+ log.info(granularity);
+ Assert.assertEquals("ERROR: don't expect period", "PT1S", granularity);
+ }
+
+ @Test
+ public void testDSLAutoRange() {
+ DSLQueryRequestParam dslQueryRequestParam = jsonToInParameter("parameters/dslAutoGranularityTest.json", "application_and_protocol_summary_auto_const_range", DSLQueryRequestParam.class);
+ AutoPeriodHelper.buildDslGranularity(dslQueryRequestParam);
+ String granularity = dslQueryRequestParam.getGranularity();
+ log.info(granularity);
+ Assert.assertEquals("ERROR: don't expect period", "P1D", granularity);
+ }
+
+ @Test
+ public void testDSLTrafficSpectrumNetworkThroughputTrend() {
+ DSLQueryRequestParam dslQueryRequestParam = jsonToInParameter("parameters/dslAutoGranularityTest.json", "traffic_spectrum_network_throughput_trend_auto", DSLQueryRequestParam.class);
+ String endTime = DateUtil.format(now, "yyyy-MM-dd HH:mm:ss");
+ String startTime = DateUtil.format(DateUtil.offsetHour(now, -1), "yyyy-MM-dd HH:mm:ss");
+ dslQueryRequestParam.setIntervals(Lists.newArrayList(startTime + "/" + endTime));
+ AutoPeriodHelper.buildDslGranularity(dslQueryRequestParam);
+ String granularity = dslQueryRequestParam.getGranularity();
+ log.info(granularity);
+ Assert.assertEquals("ERROR: don't expect period", "PT1M", granularity);
+ }
+}
diff --git a/src/test/java/com/mesalab/qgw/service/AutoPeriodSQLTest.java b/src/test/java/com/mesalab/qgw/service/AutoPeriodSQLTest.java
new file mode 100644
index 00000000..214d0554
--- /dev/null
+++ b/src/test/java/com/mesalab/qgw/service/AutoPeriodSQLTest.java
@@ -0,0 +1,227 @@
+package com.mesalab.qgw.service;
+
+import cn.hutool.core.date.DateTime;
+import cn.hutool.core.date.DateUtil;
+import cn.hutool.log.Log;
+import cn.hutool.log.LogFactory;
+import com.mesalab.GalaxyQGWApplicationTests;
+import com.mesalab.common.utils.sqlparser.AutoPeriodHelper;
+import net.sf.jsqlparser.JSQLParserException;
+import net.sf.jsqlparser.parser.CCJSqlParserUtil;
+import net.sf.jsqlparser.statement.Statement;
+import org.junit.Assert;
+import org.junit.Test;
+import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
+
+/**
+ * Verifies that AutoPeriodHelper.buildSqlGranularity rewrites CHART_GRANULARITY and SAMPLE_GRANULARITY placeholders into concrete ISO-8601 periods for various time ranges.
+ *
+ * @Classname AutoPeriodSQLTest
+ * @Date 2024/6/20 09:24
+ * @Author wWei
+ */
+@EnableAutoConfiguration
+public class AutoPeriodSQLTest extends GalaxyQGWApplicationTests {
+ private static final Log log = LogFactory.get();
+ private static final DateTime now;
+
+ static {
+ now = new DateTime();
+ }
+
+ private static final String originalSQL = "SELECT\n" +
+ " FROM_UNIXTIME(TIME_FLOOR_WITH_FILL(stat_time, CHART_GRANULARITY('${start_time}', '${end_time}'), 'zero')) AS stat_time,\n" +
+ " AVG( in_pkts_per_sec ) AS avg_in_pkts_per_sec\n" +
+ "FROM\n" +
+ " (\n" +
+ " SELECT\n" +
+ " TIME_FLOOR_WITH_FILL(UNIX_TIMESTAMP(__time), SAMPLE_GRANULARITY('${start_time}', '${end_time}')) AS stat_time,\n" +
+ " RATE( in_pkts, SAMPLE_GRANULARITY('${start_time}', '${end_time}'), 1) AS in_pkts_per_sec\n" +
+ " FROM\n" +
+ " traffic_general_stat\n" +
+ " WHERE\n" +
+ " __time >= '${start_time}'\n" +
+ " AND __time < '${end_time}'\n" +
+ " AND vsys_id IN ( 1)\n" +
+ " AND ( 1 = 1 )\n" +
+ " GROUP BY\n" +
+ " TIME_FLOOR_WITH_FILL(UNIX_TIMESTAMP(__time), SAMPLE_GRANULARITY('${start_time}', '${end_time}'))\n" +
+ " HAVING TIME_FLOOR_WITH_FILL(UNIX_TIMESTAMP(__time), SAMPLE_GRANULARITY('${start_time}', '${end_time}')) >= '${start_time}'\n" +
+ " ORDER BY TIME_FLOOR_WITH_FILL(UNIX_TIMESTAMP(__time), SAMPLE_GRANULARITY('${start_time}', '${end_time}')) DESC\n" +
+ " )\n" +
+ "GROUP BY\n" +
+ " FROM_UNIXTIME(TIME_FLOOR_WITH_FILL(stat_time, CHART_GRANULARITY('${start_time}', '${end_time}'), 'zero'))\n" +
+ "ORDER BY\n" +
+ " stat_time ASC\n" +
+ "LIMIT 10";
+
+ @Test
+ public void testSQLLast5Minutes() throws JSQLParserException {
+ String endTime = DateUtil.format(now, "yyyy-MM-dd HH:mm:ss");
+ String startTime = DateUtil.format(DateUtil.offsetMinute(now, -5), "yyyy-MM-dd HH:mm:ss");
+ String afterReplaceSQL = originalSQL.replace("${start_time}", startTime);
+ afterReplaceSQL = afterReplaceSQL.replace("${end_time}", endTime);
+ Statement statement = CCJSqlParserUtil.parse(afterReplaceSQL);
+ afterReplaceSQL = AutoPeriodHelper.buildSqlGranularity(statement).toString();
+ log.info(afterReplaceSQL);
+ Assert.assertFalse("ERROR: contains CHART/SAMPLE_GRANULARITY", afterReplaceSQL.contains("CHART_GRANULARITY") || afterReplaceSQL.contains("SAMPLE_GRANULARITY"));
+ Assert.assertTrue("ERROR: not contains PT1S", afterReplaceSQL.contains("PT1S"));
+ }
+
+ @Test
+ public void testSQLLast5MinutesISO8601() throws JSQLParserException {
+ String endTime = DateUtil.format(now, "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
+ String startTime = DateUtil.format(DateUtil.offsetMinute(now, -5), "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
+ String afterReplaceSQL = originalSQL.replace("${start_time}", startTime);
+ afterReplaceSQL = afterReplaceSQL.replace("${end_time}", endTime);
+ Statement statement = CCJSqlParserUtil.parse(afterReplaceSQL);
+ afterReplaceSQL = AutoPeriodHelper.buildSqlGranularity(statement).toString();
+ log.info(afterReplaceSQL);
+ Assert.assertFalse("ERROR: contains CHART/SAMPLE_GRANULARITY", afterReplaceSQL.contains("CHART_GRANULARITY") || afterReplaceSQL.contains("SAMPLE_GRANULARITY"));
+ Assert.assertTrue("ERROR: not contains PT1S", afterReplaceSQL.contains("PT1S"));
+ }
+
+ @Test
+ public void testSQLLast30Minutes() throws JSQLParserException {
+ String endTime = DateUtil.format(now, "yyyy-MM-dd HH:mm:ss");
+ String startTime = DateUtil.format(DateUtil.offsetMinute(now, -30), "yyyy-MM-dd HH:mm:ss");
+ String afterReplaceSQL = originalSQL.replace("${start_time}", startTime);
+ afterReplaceSQL = afterReplaceSQL.replace("${end_time}", endTime);
+ Statement statement = CCJSqlParserUtil.parse(afterReplaceSQL);
+ afterReplaceSQL = AutoPeriodHelper.buildSqlGranularity(statement).toString();
+ log.info(afterReplaceSQL);
+ Assert.assertFalse("ERROR: contains CHART/SAMPLE_GRANULARITY", afterReplaceSQL.contains("CHART_GRANULARITY") || afterReplaceSQL.contains("SAMPLE_GRANULARITY"));
+ Assert.assertTrue("ERROR: don't expect period", afterReplaceSQL.contains("PT1S") && afterReplaceSQL.contains("PT30S"));
+ }
+
+ @Test
+ public void testSQLLast1Hour() throws JSQLParserException {
+ String endTime = DateUtil.format(now, "yyyy-MM-dd HH:mm:ss");
+ String startTime = DateUtil.format(DateUtil.offsetHour(now, -1), "yyyy-MM-dd HH:mm:ss");
+ String afterReplaceSQL = originalSQL.replace("${start_time}", startTime);
+ afterReplaceSQL = afterReplaceSQL.replace("${end_time}", endTime);
+ Statement statement = CCJSqlParserUtil.parse(afterReplaceSQL);
+ afterReplaceSQL = AutoPeriodHelper.buildSqlGranularity(statement).toString();
+ log.info(afterReplaceSQL);
+ Assert.assertFalse("ERROR: contains CHART/SAMPLE_GRANULARITY", afterReplaceSQL.contains("CHART_GRANULARITY") || afterReplaceSQL.contains("SAMPLE_GRANULARITY"));
+ Assert.assertTrue("ERROR: don't expect period", afterReplaceSQL.contains("PT1S") && afterReplaceSQL.contains("PT30S"));
+ }
+
+ @Test
+ public void testSQLLast6Hour() throws JSQLParserException {
+ String endTime = DateUtil.format(now, "yyyy-MM-dd HH:mm:ss");
+ String startTime = DateUtil.format(DateUtil.offsetHour(now, -6), "yyyy-MM-dd HH:mm:ss");
+ String afterReplaceSQL = originalSQL.replace("${start_time}", startTime);
+ afterReplaceSQL = afterReplaceSQL.replace("${end_time}", endTime);
+ Statement statement = CCJSqlParserUtil.parse(afterReplaceSQL);
+ afterReplaceSQL = AutoPeriodHelper.buildSqlGranularity(statement).toString();
+ log.info(afterReplaceSQL);
+ Assert.assertFalse("ERROR: contains CHART/SAMPLE_GRANULARITY", afterReplaceSQL.contains("CHART_GRANULARITY") || afterReplaceSQL.contains("SAMPLE_GRANULARITY"));
+ Assert.assertTrue("ERROR: don't expect period", afterReplaceSQL.contains("PT30S") && afterReplaceSQL.contains("PT1M"));
+ }
+
+    @Test
+    public void testSQLLast12Hour() throws JSQLParserException {
+        String endTime = DateUtil.format(now, "yyyy-MM-dd HH:mm:ss");
+        String startTime = DateUtil.format(DateUtil.offsetHour(now, -12), "yyyy-MM-dd HH:mm:ss");
+        String afterReplaceSQL = originalSQL.replace("${start_time}", startTime);
+        afterReplaceSQL = afterReplaceSQL.replace("${end_time}", endTime);
+        Statement statement = CCJSqlParserUtil.parse(afterReplaceSQL);
+        afterReplaceSQL = AutoPeriodHelper.buildSqlGranularity(statement).toString();
+        log.info(afterReplaceSQL);
+        Assert.assertFalse("ERROR: contains CHART/SAMPLE_GRANULARITY", afterReplaceSQL.contains("CHART_GRANULARITY") || afterReplaceSQL.contains("SAMPLE_GRANULARITY"));
+        Assert.assertTrue("ERROR: don't expect period", afterReplaceSQL.contains("PT30S") && afterReplaceSQL.contains("PT1M"));
+    }
+
+ @Test
+ public void testSQLLast1Day() throws JSQLParserException {
+ String endTime = DateUtil.format(now, "yyyy-MM-dd HH:mm:ss");
+ String startTime = DateUtil.format(DateUtil.offsetDay(now, -1), "yyyy-MM-dd HH:mm:ss");
+ String afterReplaceSQL = originalSQL.replace("${start_time}", startTime);
+ afterReplaceSQL = afterReplaceSQL.replace("${end_time}", endTime);
+ Statement statement = CCJSqlParserUtil.parse(afterReplaceSQL);
+ afterReplaceSQL = AutoPeriodHelper.buildSqlGranularity(statement).toString();
+ log.info(afterReplaceSQL);
+ Assert.assertFalse("ERROR: contains CHART/SAMPLE_GRANULARITY", afterReplaceSQL.contains("CHART_GRANULARITY") || afterReplaceSQL.contains("SAMPLE_GRANULARITY"));
+ Assert.assertTrue("ERROR: don't expect period", afterReplaceSQL.contains("PT1M") && afterReplaceSQL.contains("PT5M"));
+ }
+
+ @Test
+ public void testSQLLast2Day() throws JSQLParserException {
+ String endTime = DateUtil.format(now, "yyyy-MM-dd HH:mm:ss");
+ String startTime = DateUtil.format(DateUtil.offsetDay(now, -2), "yyyy-MM-dd HH:mm:ss");
+ String afterReplaceSQL = originalSQL.replace("${start_time}", startTime);
+ afterReplaceSQL = afterReplaceSQL.replace("${end_time}", endTime);
+ Statement statement = CCJSqlParserUtil.parse(afterReplaceSQL);
+ afterReplaceSQL = AutoPeriodHelper.buildSqlGranularity(statement).toString();
+ log.info(afterReplaceSQL);
+ Assert.assertFalse("ERROR: contains CHART/SAMPLE_GRANULARITY", afterReplaceSQL.contains("CHART_GRANULARITY") || afterReplaceSQL.contains("SAMPLE_GRANULARITY"));
+ Assert.assertTrue("ERROR: don't expect period", afterReplaceSQL.contains("PT1M") && afterReplaceSQL.contains("PT5M"));
+ }
+
+ @Test
+ public void testSQLLast3Day() throws JSQLParserException {
+ String endTime = DateUtil.format(now, "yyyy-MM-dd HH:mm:ss");
+ String startTime = DateUtil.format(DateUtil.offsetDay(now, -3), "yyyy-MM-dd HH:mm:ss");
+ String afterReplaceSQL = originalSQL.replace("${start_time}", startTime);
+ afterReplaceSQL = afterReplaceSQL.replace("${end_time}", endTime);
+ Statement statement = CCJSqlParserUtil.parse(afterReplaceSQL);
+ afterReplaceSQL = AutoPeriodHelper.buildSqlGranularity(statement).toString();
+ log.info(afterReplaceSQL);
+ Assert.assertFalse("ERROR: contains CHART/SAMPLE_GRANULARITY", afterReplaceSQL.contains("CHART_GRANULARITY") || afterReplaceSQL.contains("SAMPLE_GRANULARITY"));
+ Assert.assertTrue("ERROR: don't expect period", afterReplaceSQL.contains("PT1M") && afterReplaceSQL.contains("PT5M"));
+ }
+
+ @Test
+ public void testSQLLast1Week() throws JSQLParserException {
+ String endTime = DateUtil.format(now, "yyyy-MM-dd HH:mm:ss");
+ String startTime = DateUtil.format(DateUtil.offsetWeek(now, -1), "yyyy-MM-dd HH:mm:ss");
+ String afterReplaceSQL = originalSQL.replace("${start_time}", startTime);
+ afterReplaceSQL = afterReplaceSQL.replace("${end_time}", endTime);
+ Statement statement = CCJSqlParserUtil.parse(afterReplaceSQL);
+ afterReplaceSQL = AutoPeriodHelper.buildSqlGranularity(statement).toString();
+ log.info(afterReplaceSQL);
+ Assert.assertFalse("ERROR: contains CHART/SAMPLE_GRANULARITY", afterReplaceSQL.contains("CHART_GRANULARITY") || afterReplaceSQL.contains("SAMPLE_GRANULARITY"));
+ Assert.assertTrue("ERROR: don't expect period", afterReplaceSQL.contains("PT5M") && afterReplaceSQL.contains("PT1H"));
+ }
+
+ @Test
+ public void testSQLLast1Month() throws JSQLParserException {
+ String endTime = DateUtil.format(now, "yyyy-MM-dd HH:mm:ss");
+ String startTime = DateUtil.format(DateUtil.offsetDay(now, -30), "yyyy-MM-dd HH:mm:ss");
+ String afterReplaceSQL = originalSQL.replace("${start_time}", startTime);
+ afterReplaceSQL = afterReplaceSQL.replace("${end_time}", endTime);
+ Statement statement = CCJSqlParserUtil.parse(afterReplaceSQL);
+ afterReplaceSQL = AutoPeriodHelper.buildSqlGranularity(statement).toString();
+ log.info(afterReplaceSQL);
+ Assert.assertFalse("ERROR: contains CHART/SAMPLE_GRANULARITY", afterReplaceSQL.contains("CHART_GRANULARITY") || afterReplaceSQL.contains("SAMPLE_GRANULARITY"));
+ Assert.assertTrue("ERROR: don't expect period", afterReplaceSQL.contains("PT5M") && afterReplaceSQL.contains("PT1H"));
+ }
+
+ @Test
+ public void testSQLLast365Day() throws JSQLParserException {
+ String endTime = DateUtil.format(now, "yyyy-MM-dd HH:mm:ss");
+ String startTime = DateUtil.format(DateUtil.offsetDay(now, -365), "yyyy-MM-dd HH:mm:ss");
+ String afterReplaceSQL = originalSQL.replace("${start_time}", startTime);
+ afterReplaceSQL = afterReplaceSQL.replace("${end_time}", endTime);
+ Statement statement = CCJSqlParserUtil.parse(afterReplaceSQL);
+ afterReplaceSQL = AutoPeriodHelper.buildSqlGranularity(statement).toString();
+ log.info(afterReplaceSQL);
+ Assert.assertFalse("ERROR: contains CHART/SAMPLE_GRANULARITY", afterReplaceSQL.contains("CHART_GRANULARITY") || afterReplaceSQL.contains("SAMPLE_GRANULARITY"));
+ Assert.assertTrue("ERROR: don't expect period", afterReplaceSQL.contains("PT1H") && afterReplaceSQL.contains("P1D"));
+ }
+
+ @Test
+ public void testSQLDrillDown() throws JSQLParserException {
+ String sql = "SELECT FROM_UNIXTIME(TIME_FLOOR_WITH_FILL(UNIX_TIMESTAMP(__time), CHART_GRANULARITY('${start_time}', '${end_time}'), 'zero')) AS \"Time\", device_group AS \"Device Group\", RATE(sessions, CHART_GRANULARITY('${start_time}', '${end_time}'), 1) AS \"Sessions\" FROM druid.statistics_rule WHERE __time >= '${start_time}' AND __time < '${end_time}' AND rule_id = 787368 AND template_id = 40010 AND chart_id = 44044 AND version = 1 AND device_group IN (SELECT device_group FROM statistics_rule WHERE __time >= '${start_time}' AND __time < '${end_time}' AND rule_id = 787368 AND template_id = 40010 AND chart_id = 44044 AND version = 1 AND statistics_rule.vsys_id IN (1) GROUP BY device_group ORDER BY RATE(sessions, CHART_GRANULARITY('${start_time}', '${end_time}'), 1) DESC LIMIT 10) AND statistics_rule.vsys_id IN (1) GROUP BY device_group, FROM_UNIXTIME(TIME_FLOOR_WITH_FILL(UNIX_TIMESTAMP(__time), CHART_GRANULARITY('${start_time}', '${end_time}'), 'zero')) ORDER BY \"Time\" ASC LIMIT 65536";
+ String endTime = DateUtil.format(now, "yyyy-MM-dd HH:mm:ss");
+ String startTime = DateUtil.format(DateUtil.offsetHour(now, -1), "yyyy-MM-dd HH:mm:ss");
+ String afterReplaceSQL = sql.replace("${start_time}", startTime);
+ afterReplaceSQL = afterReplaceSQL.replace("${end_time}", endTime);
+ Statement statement = CCJSqlParserUtil.parse(afterReplaceSQL);
+ afterReplaceSQL = AutoPeriodHelper.buildSqlGranularity(statement).toString();
+ log.info(afterReplaceSQL);
+ Assert.assertFalse("ERROR: contains CHART/SAMPLE_GRANULARITY", afterReplaceSQL.contains("CHART_GRANULARITY") || afterReplaceSQL.contains("SAMPLE_GRANULARITY"));
+ Assert.assertTrue("ERROR: don't expect period", afterReplaceSQL.contains("PT30S"));
+ }
+}
diff --git a/src/test/java/com/mesalab/qgw/service/AvroSchemaFormatTest.java b/src/test/java/com/mesalab/qgw/service/AvroSchemaFormatTest.java
deleted file mode 100644
index efffa34d..00000000
--- a/src/test/java/com/mesalab/qgw/service/AvroSchemaFormatTest.java
+++ /dev/null
@@ -1,40 +0,0 @@
-package com.mesalab.qgw.service;
-
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import com.mesalab.GalaxyQGWApplicationTests;
-import com.mesalab.common.entity.BaseResult;
-import org.apache.http.HttpStatus;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
-
-import static org.junit.Assert.assertEquals;
-
-@EnableAutoConfiguration
-public class AvroSchemaFormatTest extends GalaxyQGWApplicationTests {
- private static final Log log = LogFactory.get();
- @Autowired
- public DiagnosisService diagnosisService;
- @Before
- public void testBefore() {
- log.info("=================================================SCHEMA TEST BEGIN=================================================");
- }
-
- @After
- public void testAfter() {
- log.info("=================================================SCHEMA TEST END=================================================");
- }
-
- /**
- * 测试schema
- */
- @Test
- public void testCheckSchema() {
- BaseResult baseResult = diagnosisService.validateSchema();
- assertEquals("Schema format failure.", String.valueOf(baseResult.getStatus()), String.valueOf(HttpStatus.SC_OK));
- }
-
-}
diff --git a/src/test/java/com/mesalab/qgw/service/ClickHouseTest.java b/src/test/java/com/mesalab/qgw/service/ClickHouseTest.java
index 4e9dd11d..f6500835 100644
--- a/src/test/java/com/mesalab/qgw/service/ClickHouseTest.java
+++ b/src/test/java/com/mesalab/qgw/service/ClickHouseTest.java
@@ -8,7 +8,7 @@ import cn.hutool.log.LogFactory;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.mesalab.GalaxyQGWApplicationTests;
-import com.mesalab.common.enums.DBTypeEnum;
+import com.mesalab.common.enums.DBEngineType;
import com.mesalab.qgw.benchmark.DialectWriter;
import net.sf.jsqlparser.JSQLParserException;
import net.sf.jsqlparser.parser.CCJSqlParserUtil;
@@ -20,7 +20,6 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import java.io.File;
-import java.io.IOException;
import java.util.List;
import java.util.Optional;
//clickhouse-benchmark -i 93 --host 127.0.0.1 --port 9001 --user default --password "ceiec2019" --database tsg_galaxy_v3 < /root/session_record_queries_20210113203103.sql
@@ -38,7 +37,7 @@ public class ClickHouseTest extends GalaxyQGWApplicationTests {
@Test
public void testSqlParser() {
- String pocFile = dialectWriter.buildPocSQL(Lists.newLinkedList(), DBTypeEnum.CLICKHOUSE.getValue(), null);
+ String pocFile = dialectWriter.buildPocSQL(Lists.newLinkedList(), DBEngineType.CLICKHOUSE.getValue(), null);
Optional<File> ckTestFile = Optional.of(new File(pocFile));
if (ckTestFile.isPresent()) {
if (!ckTestFile.get().isFile()) {
diff --git a/src/test/java/com/mesalab/qgw/service/DruidTest.java b/src/test/java/com/mesalab/qgw/service/DruidTest.java
index a8b4fb51..55ffd62a 100644
--- a/src/test/java/com/mesalab/qgw/service/DruidTest.java
+++ b/src/test/java/com/mesalab/qgw/service/DruidTest.java
@@ -8,7 +8,7 @@ import cn.hutool.log.LogFactory;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.mesalab.GalaxyQGWApplicationTests;
-import com.mesalab.common.enums.DBTypeEnum;
+import com.mesalab.common.enums.DBEngineType;
import com.mesalab.qgw.benchmark.DialectWriter;
import net.sf.jsqlparser.JSQLParserException;
import net.sf.jsqlparser.parser.CCJSqlParserUtil;
@@ -20,7 +20,6 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import java.io.File;
-import java.io.IOException;
import java.util.List;
import java.util.Optional;
@@ -35,7 +34,7 @@ public class DruidTest extends GalaxyQGWApplicationTests {
@Test
public void testSqlParser() {
- String pocFile = dialectWriter.buildPocSQL(Lists.newLinkedList(), DBTypeEnum.DRUID.getValue(), null);
+ String pocFile = dialectWriter.buildPocSQL(Lists.newLinkedList(), DBEngineType.DRUID.getValue(), null);
Optional<File> ckTestFile = Optional.of(new File(pocFile));
if (ckTestFile.isPresent()) {
if (!ckTestFile.get().isFile()) {
diff --git a/src/test/java/com/mesalab/qgw/service/EngineTest.java b/src/test/java/com/mesalab/qgw/service/EngineTest.java
index 65629173..b69d3e8a 100644
--- a/src/test/java/com/mesalab/qgw/service/EngineTest.java
+++ b/src/test/java/com/mesalab/qgw/service/EngineTest.java
@@ -4,7 +4,6 @@ import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import com.mesalab.GalaxyQGWApplicationTests;
import com.mesalab.common.entity.BaseResult;
-import com.mesalab.common.enums.DiagnosisOptionEnum;
import org.apache.http.HttpStatus;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
@@ -24,16 +23,5 @@ public class EngineTest extends GalaxyQGWApplicationTests {
assertEquals("Schema validate failure.", String.valueOf(baseResult.getStatus()), String.valueOf(HttpStatus.SC_OK));
}
- @Test
- public void testMetadata() {
- BaseResult baseResult = diagnosisService.validateMetadata();
- assertEquals("Metadata validate failure.", String.valueOf(baseResult.getStatus()), String.valueOf(HttpStatus.SC_OK));
- }
-
- @Test
- public void testPocSql() {
- BaseResult baseResult = diagnosisService.runPocSQL(false, DiagnosisOptionEnum.PARSE.getValue(), null);
- assertEquals("POC sql parse failure.", String.valueOf(baseResult.getStatus()), String.valueOf(HttpStatus.SC_OK));
- }
}
diff --git a/src/test/java/com/mesalab/qgw/service/EntityTest.java b/src/test/java/com/mesalab/qgw/service/EntityTest.java
deleted file mode 100644
index 2c5f498d..00000000
--- a/src/test/java/com/mesalab/qgw/service/EntityTest.java
+++ /dev/null
@@ -1,66 +0,0 @@
-package com.mesalab.qgw.service;
-
-import com.mesalab.GalaxyQGWApplicationTests;
-import com.mesalab.common.entity.BaseResult;
-import com.mesalab.services.common.dsl.ComDSLObject;
-import com.mesalab.services.service.EntityService;
-import com.mesalab.services.service.RelationService;
-import org.junit.Assert;
-import org.junit.Test;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
-
-/**
- * @Author wxs
- * @Date 2022/9/28
- */
-@EnableAutoConfiguration
-public class EntityTest extends GalaxyQGWApplicationTests {
-
- @Autowired
- private EntityService entityService;
-
- @Autowired
- private RelationService relationService;
-
- @Test
- public void activeClientIp() {
-
- ComDSLObject dslObject = jsonToInParameter("parameters/entityTest.json", "activeClientIp", ComDSLObject.class);
- BaseResult entityInfo = entityService.getEntityInfo("activeclientip", dslObject);
- Assert.assertTrue(entityInfo.getMessage(), entityInfo.getStatus() == 200);
-
- }
-
- @Test
- public void topServerIp() {
-
- ComDSLObject dslObject = jsonToInParameter("parameters/entityTest.json", "topServerIp", ComDSLObject.class);
- BaseResult entityInfo = entityService.getEntityInfo("topserverip", dslObject);
- Assert.assertTrue(entityInfo.getMessage(), entityInfo.getStatus() == 200);
-
- }
-
- @Test
- public void topSni() {
-
- ComDSLObject dslObject = jsonToInParameter("parameters/entityTest.json", "topSni", ComDSLObject.class);
- BaseResult entityInfo = entityService.getEntityInfo("topsni", dslObject);
- Assert.assertTrue(entityInfo.getMessage(), entityInfo.getStatus() == 200);
-
- }
-
- @Test
- public void subScriberidPool() {
- ComDSLObject dslObject = jsonToInParameter("parameters/entityTest.json", "subScriberidPool", ComDSLObject.class);
- BaseResult entityInfo = relationService.getRealRelation("subscriberidpool", dslObject);
- Assert.assertTrue(entityInfo.getMessage(), entityInfo.getStatus() == 200);
- }
-
- @Test
- public void gtpc() {
- ComDSLObject dslObject = jsonToInParameter("parameters/entityTest.json", "gtpc", ComDSLObject.class);
- BaseResult entityInfo = relationService.getRealRelation("gtpc", dslObject);
- Assert.assertTrue(entityInfo.getMessage(), entityInfo.getStatus() == 200);
- }
-}
diff --git a/src/test/java/com/mesalab/qgw/service/ExampleDataTest.java b/src/test/java/com/mesalab/qgw/service/ExampleDataTest.java
new file mode 100644
index 00000000..c0f985c3
--- /dev/null
+++ b/src/test/java/com/mesalab/qgw/service/ExampleDataTest.java
@@ -0,0 +1,62 @@
+package com.mesalab.qgw.service;
+
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.mesalab.GalaxyQGWApplicationTests;
+import com.mesalab.common.entity.DataTypeMapping;
+import com.mesalab.common.utils.sqlparser.ExampleDataHelper;
+import com.mesalab.common.utils.sqlparser.SelectItemHelper;
+import com.mesalab.qgw.constant.DataTypeConst;
+import com.mesalab.qgw.constant.MetaConst;
+import lombok.extern.slf4j.Slf4j;
+import net.sf.jsqlparser.JSQLParserException;
+import org.junit.Assert;
+import org.junit.Test;
+import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
+
+import java.util.List;
+import java.util.Map;
+
+
+@Slf4j
+@EnableAutoConfiguration
+public class ExampleDataTest extends GalaxyQGWApplicationTests {
+
+ @Test
+ public void testSelectItemHelper() throws JSQLParserException {
+ String sql = "SELECT filed_1 AS \"Field A\", SUM(field_2) AS \"SUM(field_2)\", MAX(field_3) FROM table GROUP BY filed_1 ORDER BY \"Field A\" DESC LIMIT 10";
+ Map<String, SelectItemHelper.AliasObject> selectItem = SelectItemHelper.getSelectItem(sql);
+ Assert.assertEquals("ok", 3, selectItem.size());
+ }
+
+ @Test
+ public void testExampleDataHelper() {
+ List<Map<String, String>> meta = initMeta();
+ List<Object> objects = ExampleDataHelper.buildExampleData(ExampleDataHelper.QUERY_TYPE_GROUP_BY, meta, 1L, null, true);
+ Assert.assertEquals("ok", 1, objects.size());
+ }
+
+ private static List<Map<String, String>> initMeta() {
+ List<Map<String, String>> meta = Lists.newArrayList();
+ Map<String, String> item1 = Maps.newHashMap();
+ item1.put(MetaConst.META_NAME, "Field A");
+ item1.put(MetaConst.META_TYPE, DataTypeMapping.STRING);
+ item1.put(MetaConst.META_CATEGORY, MetaConst.META_CATEGORY_DIMENSION);
+ item1.put(MetaConst.META_DATA_TYPE, DataTypeConst.IP);
+ meta.add(item1);
+
+ Map<String, String> item2 = Maps.newHashMap();
+ item2.put(MetaConst.META_NAME, "SUM(field_2)");
+ item2.put(MetaConst.META_TYPE, DataTypeMapping.LONG);
+ item2.put(MetaConst.META_CATEGORY, MetaConst.META_CATEGORY_METRIC);
+ meta.add(item2);
+
+ Map<String, String> item3 = Maps.newHashMap();
+ item3.put(MetaConst.META_NAME, "MAX(field_3)");
+ item3.put(MetaConst.META_TYPE, DataTypeMapping.LONG);
+ item3.put(MetaConst.META_CATEGORY, MetaConst.META_CATEGORY_METRIC);
+ meta.add(item3);
+ return meta;
+ }
+
+}
diff --git a/src/test/java/com/mesalab/qgw/service/FieldDiscoveryTest.java b/src/test/java/com/mesalab/qgw/service/FieldDiscoveryTest.java
new file mode 100644
index 00000000..db0ba405
--- /dev/null
+++ b/src/test/java/com/mesalab/qgw/service/FieldDiscoveryTest.java
@@ -0,0 +1,114 @@
+package com.mesalab.qgw.service;
+
+import com.mesalab.GalaxyQGWApplicationTests;
+import com.mesalab.common.entity.BaseResult;
+import com.mesalab.qgw.controller.QueryController;
+import com.mesalab.qgw.model.basic.DSLQueryRequestParam;
+import org.junit.Assert;
+import org.junit.Test;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
+import org.springframework.web.context.request.async.DeferredResult;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * @Author ww
+ * @Date 2024/1/30
+ */
+@EnableAutoConfiguration
+public class FieldDiscoveryTest extends GalaxyQGWApplicationTests {
+
+ @Autowired
+ private QueryController queryController;
+ private static final List<String> fields = new ArrayList<>();
+
+ static {
+ fields.add("flags");
+ fields.add("security_rule_list");
+ fields.add("client_port");
+ fields.add("server_ip");
+ }
+
+ @Test
+ public void testFieldDiscoveryDefault() {
+ DSLQueryRequestParam body = jsonToInParameter("parameters/fieldDiscoveryTest.json", "field_discovery_default", DSLQueryRequestParam.class);
+ body.getCustomRequestParam().put("custom.field_discovery.fields", fields);
+ BaseResult commitResult = queryController.createDSLQuery(body);
+ Assert.assertTrue(commitResult.getMessage(), commitResult.getStatus() == 201);
+ String id = ((Map<String, Object>) commitResult.getJob()).get("job_id").toString();
+ DeferredResult<BaseResult> getResult = new DeferredResult<>();
+ while (true) {
+ if (getResult.isSetOrExpired()) {
+ break;
+ }
+ getResult = queryController.getJobStatusById(id, 0);
+ }
+ BaseResult result = (BaseResult) getResult.getResult();
+ Assert.assertTrue(result.getMessage(), result.getStatus() == 200);
+ }
+
+ @Test
+ public void testFieldDiscoverySessions() {
+ DSLQueryRequestParam body = jsonToInParameter("parameters/fieldDiscoveryTest.json", "field_discovery_sessions", DSLQueryRequestParam.class);
+ body.getCustomRequestParam().put("custom.field_discovery.fields", fields);
+ BaseResult commitResult = queryController.createDSLQuery(body);
+ Assert.assertTrue(commitResult.getMessage(), commitResult.getStatus() == 201);
+ String id = ((Map<String, Object>) commitResult.getJob()).get("job_id").toString();
+ DeferredResult<BaseResult> getResult = new DeferredResult<>();
+ while (true) {
+ if (getResult.isSetOrExpired()) {
+ break;
+ }
+ getResult = queryController.getJobResultById(id, 0);
+ }
+ BaseResult result = (BaseResult) getResult.getResult();
+ Assert.assertTrue(result.getMessage(), result.getStatus() == 200);
+ }
+
+ @Test
+ public void testFieldDiscoveryBytes() {
+ DSLQueryRequestParam body = jsonToInParameter("parameters/fieldDiscoveryTest.json", "field_discovery_bytes", DSLQueryRequestParam.class);
+ body.getCustomRequestParam().put("custom.field_discovery.fields", fields);
+ BaseResult commitResult = queryController.createDSLQuery(body);
+ Assert.assertTrue(commitResult.getMessage(), commitResult.getStatus() == 201);
+ String id = ((Map<String, Object>) commitResult.getJob()).get("job_id").toString();
+ DeferredResult<BaseResult> getResult = new DeferredResult<>();
+ while (true) {
+ if (getResult.isSetOrExpired()) {
+ break;
+ }
+ getResult = queryController.getJobResult(id, 0);
+ }
+ BaseResult result = (BaseResult) getResult.getResult();
+ Assert.assertTrue(result.getMessage(), result.getStatus() == 200);
+ }
+
+ @Test
+ public void testFieldDiscoveryIncomingBytes() {
+ DSLQueryRequestParam body = jsonToInParameter("parameters/fieldDiscoveryTest.json", "field_discovery_incoming_bytes", DSLQueryRequestParam.class);
+ body.getCustomRequestParam().put("custom.field_discovery.fields", fields);
+ BaseResult commitResult = queryController.createDSLQuery(body);
+ Assert.assertTrue(commitResult.getMessage(), commitResult.getStatus() == 201);
+ String id = ((Map<String, Object>) commitResult.getJob()).get("job_id").toString();
+ DeferredResult<BaseResult> getResult = new DeferredResult<>();
+ while (true) {
+ if (getResult.isSetOrExpired()) {
+ break;
+ }
+ getResult = queryController.getJobStatus(id, 0);
+ }
+ BaseResult result = (BaseResult) getResult.getResult();
+ Assert.assertTrue(result.getMessage(), result.getStatus() == 200);
+ }
+
+ @Test
+ public void testFieldDiscoveryOutgoingBytes() {
+ DSLQueryRequestParam body = jsonToInParameter("parameters/fieldDiscoveryTest.json", "field_discovery_outgoing_bytes", DSLQueryRequestParam.class);
+ body.getCustomRequestParam().put("custom.field_discovery.fields", fields);
+ BaseResult commitResult = queryController.createDSLQuery(body);
+ Assert.assertTrue(commitResult.getMessage(), commitResult.getStatus() == 201);
+ }
+}
diff --git a/src/test/java/com/mesalab/qgw/service/HttpClientServiceTest.java b/src/test/java/com/mesalab/qgw/service/HttpClientServiceTest.java
new file mode 100644
index 00000000..d761f4fc
--- /dev/null
+++ b/src/test/java/com/mesalab/qgw/service/HttpClientServiceTest.java
@@ -0,0 +1,24 @@
+package com.mesalab.qgw.service;
+
+import com.mesalab.GalaxyQGWApplicationTests;
+import com.mesalab.common.enums.HttpStatusCodeEnum;
+import com.mesalab.qgw.service.impl.HttpClientServiceV2;
+import org.junit.Assert;
+import org.junit.Test;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
+
+@EnableAutoConfiguration
+public class HttpClientServiceTest extends GalaxyQGWApplicationTests {
+
+ @SuppressWarnings("SpringJavaInjectionPointsAutowiringInspection")
+ @Autowired
+ private HttpClientServiceV2 httpClientServiceV2;
+
+ @Test
+ public void testHttpGet() {
+ Assert.assertTrue(httpClientServiceV2.get("https://www.baidu.com", 1000).getStatusCode()==200);
+ Assert.assertTrue(httpClientServiceV2.get("http://www.baidu.com", 1).getStatusCode() == HttpStatusCodeEnum.GATEWAY_TIMEOUT.getCode());
+ Assert.assertTrue(httpClientServiceV2.get("http://fake.www.baidu.com", 1000).getStatusCode() == HttpStatusCodeEnum.BAD_GATEWAY.getCode());
+ }
+}
diff --git a/src/test/java/com/mesalab/qgw/service/JobTest.java b/src/test/java/com/mesalab/qgw/service/JobTest.java
deleted file mode 100644
index 4df5e0fd..00000000
--- a/src/test/java/com/mesalab/qgw/service/JobTest.java
+++ /dev/null
@@ -1,98 +0,0 @@
-package com.mesalab.qgw.service;
-
-import com.mesalab.GalaxyQGWApplicationTests;
-import com.mesalab.common.entity.BaseResult;
-import com.mesalab.services.controller.JobController;
-import org.junit.Assert;
-import org.junit.Test;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
-import org.springframework.web.context.request.async.DeferredResult;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-/**
- * @Author wxs
- * @Date 2022/9/28
- */
-@EnableAutoConfiguration
-public class JobTest extends GalaxyQGWApplicationTests {
-
- @Autowired
- private JobController jobController;
-
- @Test
- public void fieldDiscoveryDefault() {
- HashMap param = jsonToInParameter("parameters/jobTest.json", "field_discovery_default", HashMap.class);
- BaseResult committed = jobController.commit(param);
- Assert.assertTrue(committed.getMessage(), committed.getStatus() == 201);
- String id = ((List<Map>) committed.getData()).get(0).get("job_id").toString();
- DeferredResult<BaseResult> baseResultDeferredResult = jobController.statisticsResult(id);
- if(baseResultDeferredResult.hasResult()){
- BaseResult result = (BaseResult) baseResultDeferredResult.getResult();
- Assert.assertTrue(result.getMessage(), result.getStatus() == 200);
- }
- }
-
- @Test
- public void fieldDiscoveryBytes() {
- HashMap param = jsonToInParameter("parameters/jobTest.json", "field_discovery_bytes", HashMap.class);
- BaseResult committed = jobController.commit(param);
- Assert.assertTrue(committed.getMessage(), committed.getStatus() == 201);
- String id = ((List<Map>) committed.getData()).get(0).get("job_id").toString();
- DeferredResult<BaseResult> baseResultDeferredResult = jobController.statisticsResult(id);
- if(baseResultDeferredResult.hasResult()){
- BaseResult result = (BaseResult) baseResultDeferredResult.getResult();
- Assert.assertTrue(result.getMessage(), result.getStatus() == 200);
- }
- }
-
- @Test
- public void longTerm() {
- HashMap param = jsonToInParameter("parameters/jobTest.json", "long_term", HashMap.class);
- BaseResult committed = jobController.commit(param);
- Assert.assertTrue(committed.getMessage(), committed.getStatus() == 201);
- String id = ((List<Map>) committed.getData()).get(0).get("job_id").toString();
- DeferredResult<BaseResult> baseResultDeferredResult = jobController.statisticsResult(id);
- if(baseResultDeferredResult.hasResult()){
- BaseResult result = (BaseResult) baseResultDeferredResult.getResult();
- Assert.assertTrue(result.getMessage(), result.getStatus() == 200);
- }
-
- }
-
- @Test
- public void report() {
- HashMap param = jsonToInParameter("parameters/jobTest.json", "report", HashMap.class);
- BaseResult committed = jobController.commit(param);
- Assert.assertTrue(committed.getMessage(), committed.getStatus() == 201);
- String id = ((List<Map>) committed.getData()).get(0).get("job_id").toString();
- BaseResult result = jobController.savedQueryStatus(id);
- Assert.assertTrue(result.getMessage(), result.getStatus() == 200);
- BaseResult cancelled = jobController.cancelSavedQuery(id);
- Assert.assertTrue(cancelled.getMessage(), cancelled.getStatus() == 202);
-
- }
-
- @Test
- public void statistics() {
- HashMap param = jsonToInParameter("parameters/jobTest.json", "statistics_top", HashMap.class);
- BaseResult committed = jobController.commit(param);
- Assert.assertTrue(committed.getMessage(), committed.getStatus() == 201);
- String id = ((List<Map>) committed.getData()).get(0).get("job_id").toString();
- DeferredResult<BaseResult> baseResultDeferredResult = jobController.statisticsResult(id);
- if(baseResultDeferredResult.hasResult()){
- BaseResult result = (BaseResult) baseResultDeferredResult.getResult();
- Assert.assertTrue(result.getMessage(), result.getStatus() == 200);
- }
- }
-
-// @Test
-// public void getResultReport() {
-// BaseResult result = jobController.getResultReport(jobId);
-// Assert.assertTrue(result.getMessage(), result.getStatus() == 200);
-// }
-
-}
diff --git a/src/test/java/com/mesalab/qgw/service/JsonSchemaValidatorTest.java b/src/test/java/com/mesalab/qgw/service/JsonSchemaValidatorTest.java
new file mode 100644
index 00000000..a4352c20
--- /dev/null
+++ b/src/test/java/com/mesalab/qgw/service/JsonSchemaValidatorTest.java
@@ -0,0 +1,86 @@
+package com.mesalab.qgw.service;
+
+
+import cn.hutool.core.io.resource.ClassPathResource;
+import cn.hutool.core.io.resource.Resource;
+import com.mesalab.GalaxyQGWApplicationTests;
+import com.mesalab.common.utils.JsonSchemaValidator;
+import com.mesalab.qgw.exception.QGWBusinessException;
+import org.apache.commons.io.IOUtils;
+import org.junit.Assert;
+import org.junit.Test;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
+import org.springframework.test.context.TestPropertySource;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.Map;
+
+
+@EnableAutoConfiguration
+@TestPropertySource("classpath:")
+public class JsonSchemaValidatorTest extends GalaxyQGWApplicationTests {
+ @Autowired
+ DatabaseService databaseService;
+
+ @Test
+ public void testJsonSchema() {
+ try {
+ JsonSchemaValidator jsonSchemaValidator = JsonSchemaValidator.getInstance();
+ Map<String, Object> schemaMap = databaseService.getSchemaInfo("fields", "session_record", false);
+ boolean isValidSuccess = jsonSchemaValidator
+ .addRule("schema-syntax-validation.json", "schema")
+ .validateSchema(com.alibaba.fastjson2.JSON.toJSONString(schemaMap));
+ Assert.assertTrue(isValidSuccess);
+ } catch (QGWBusinessException e) {
+ Assert.fail(e.getMessage());
+ }
+ }
+
+ @Test
+ public void testInvalidJsonSchema() {
+ boolean isValidError = true;
+ try {
+ JsonSchemaValidator jsonSchemaValidator = JsonSchemaValidator.getInstance();
+ Resource resource = new ClassPathResource("examples/invalidSessionRecordTest.json");
+ String jsonStr = new String(IOUtils.toByteArray(resource.getStream()), StandardCharsets.UTF_8);
+ jsonSchemaValidator
+ .addRule("schema-syntax-validation.json", "schema")
+ .validateSchema(jsonStr);
+ } catch (Exception e) {
+ isValidError = false;
+ }
+ Assert.assertFalse(isValidError);
+ }
+
+ @Test
+ public void testDSLRequest() {
+ try {
+ JsonSchemaValidator jsonSchemaValidator = JsonSchemaValidator.getInstance();
+ Resource resource = new ClassPathResource("examples/validDSLRequestTest.json");
+ String jsonStr = new String(IOUtils.toByteArray(resource.getStream()), StandardCharsets.UTF_8);
+ boolean isValidSuccess = jsonSchemaValidator
+ .addRule("dsl-validation.json", "dsl")
+ .validateDSL(jsonStr);
+ Assert.assertTrue(isValidSuccess);
+ } catch (QGWBusinessException | IOException e) {
+ Assert.fail(e.getMessage());
+ }
+ }
+
+ @Test
+ public void testInvalidDSLRequest() {
+ boolean isValidError = true;
+ try {
+ JsonSchemaValidator jsonSchemaValidator = JsonSchemaValidator.getInstance();
+ Resource resource = new ClassPathResource("examples/invalidDSLRequestTest.json");
+ String jsonStr = new String(IOUtils.toByteArray(resource.getStream()), StandardCharsets.UTF_8);
+ jsonSchemaValidator
+ .addRule("dsl-validation.json", "dsl")
+ .validateDSL(jsonStr);
+ } catch (Exception e) {
+ isValidError = false;
+ }
+ Assert.assertFalse(isValidError);
+ }
+}
diff --git a/src/test/java/com/mesalab/qgw/service/KnowledgeBaseTest.java b/src/test/java/com/mesalab/qgw/service/KnowledgeBaseTest.java
index 583f2e6f..2164e03b 100644
--- a/src/test/java/com/mesalab/qgw/service/KnowledgeBaseTest.java
+++ b/src/test/java/com/mesalab/qgw/service/KnowledgeBaseTest.java
@@ -2,10 +2,9 @@ package com.mesalab.qgw.service;
import com.mesalab.GalaxyQGWApplicationTests;
import com.mesalab.common.entity.BaseResult;
-import com.mesalab.services.common.entity.KnowledgeBase;
+import com.mesalab.services.common.entity.KnowledgeBaseRequest;
import com.mesalab.services.common.entity.KnowledgeConstant;
-import com.mesalab.services.service.KnowledgeBaseService;
-import org.apache.http.entity.ContentType;
+import com.mesalab.services.service.KBService;
import org.junit.Assert;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
@@ -15,7 +14,6 @@ import org.springframework.web.multipart.MultipartFile;
import java.io.File;
import java.io.FileInputStream;
-import java.util.Map;
/**
* @Author wxs
@@ -23,58 +21,53 @@ import java.util.Map;
*/
@EnableAutoConfiguration
public class KnowledgeBaseTest extends GalaxyQGWApplicationTests {
+
@Autowired
- private KnowledgeBaseService knowledgeService;
+ private KBService kbService;
+
- private String id;
@Test
public void query() {
- BaseResult result = knowledgeService.queryKnowledge();
+ BaseResult result = kbService.getList(null, null);
Assert.assertTrue(result.getMessage(), result.getStatus() == 200);
}
@Test
public void cycle() throws Exception {
publish();
- update();
- toggle();
+ updateFile();
+ updateStatus();
delete();
+
}
private void publish() throws Exception {
- KnowledgeBase knowledgeBase = jsonToInParameter("parameters/knowledgeBase.json", "publishTest", KnowledgeBase.class);
- File file = new File(KnowledgeConstant.DAT + File.separator + KnowledgeConstant.IP_BUILTIN + KnowledgeConstant.MMDB);
+ KnowledgeBaseRequest knowledgeBase = jsonToInParameter("parameters/knowledgeBase.json", "publish", KnowledgeBaseRequest.class);
+ File file = new File(KnowledgeConstant.DAT + File.separator + KnowledgeConstant.ASN_BUILTIN + KnowledgeConstant.MMDB);
FileInputStream inputStream = new FileInputStream(file);
- MultipartFile multipartFile = new MockMultipartFile(file.getName(), file.getName(),
- ContentType.APPLICATION_OCTET_STREAM.toString(), inputStream);
- BaseResult result = knowledgeService.publishKnowledge(multipartFile, knowledgeBase);
+ MultipartFile multipartFile = new MockMultipartFile(file.getName(), inputStream);
+ BaseResult result = kbService.publishKnowledge(multipartFile, knowledgeBase);
Assert.assertTrue(result.getMessage(), result.getStatus() == 200);
- id = ((Map) result.getData()).get("id").toString();
}
- private void update() throws Exception {
- File file = new File(KnowledgeConstant.DAT + File.separator + KnowledgeConstant.IP_BUILTIN + KnowledgeConstant.MMDB);
+ private void updateFile() throws Exception {
+ KnowledgeBaseRequest knowledgeBase = jsonToInParameter("parameters/knowledgeBase.json", "update", KnowledgeBaseRequest.class);
+ File file = new File(KnowledgeConstant.DAT + File.separator + KnowledgeConstant.ASN_BUILTIN + KnowledgeConstant.MMDB);
FileInputStream inputStream = new FileInputStream(file);
- MultipartFile multipartFile = new MockMultipartFile(file.getName(), file.getName(),
- ContentType.APPLICATION_OCTET_STREAM.toString(), inputStream);
- KnowledgeBase knowledgeBase = new KnowledgeBase();
- knowledgeBase.setId(id);
- BaseResult result = knowledgeService.updateKnowledge(multipartFile, knowledgeBase);
+ MultipartFile multipartFile = new MockMultipartFile(file.getName(), inputStream);
+ BaseResult result = kbService.updateKnowledge(multipartFile, knowledgeBase);
Assert.assertTrue(result.getMessage(), result.getStatus() == 200);
}
- private void toggle() throws Exception{
- KnowledgeBase knowledgeBase = new KnowledgeBase();
- knowledgeBase.setId(id);
- knowledgeBase.setIsValid(KnowledgeConstant.VALID);
- BaseResult result = knowledgeService.toggleKnowledge(knowledgeBase);
+ private void updateStatus() {
+ KnowledgeBaseRequest knowledgeBase = jsonToInParameter("parameters/knowledgeBase.json", "update_status", KnowledgeBaseRequest.class);
+ BaseResult result = kbService.updateStatus(knowledgeBase);
Assert.assertTrue(result.getMessage(), result.getStatus() == 200);
}
- private void delete() throws Exception {
- KnowledgeBase knowledgeBase = new KnowledgeBase();
- knowledgeBase.setId(id);
- BaseResult result = knowledgeService.deleteKnowledge(id);
+ private void delete() {
+ KnowledgeBaseRequest knowledgeBase = jsonToInParameter("parameters/knowledgeBase.json", "delete", KnowledgeBaseRequest.class);
+ BaseResult result = kbService.deleteKnowledge(knowledgeBase);
Assert.assertTrue(result.getMessage(), result.getStatus() == 200);
}
}
diff --git a/src/test/java/com/mesalab/qgw/service/MergeFunctionsHelperTest.java b/src/test/java/com/mesalab/qgw/service/MergeFunctionsHelperTest.java
new file mode 100644
index 00000000..02a6ff6a
--- /dev/null
+++ b/src/test/java/com/mesalab/qgw/service/MergeFunctionsHelperTest.java
@@ -0,0 +1,36 @@
+package com.mesalab.qgw.service;
+
+import cn.hutool.log.Log;
+import cn.hutool.log.LogFactory;
+import com.mesalab.GalaxyQGWApplicationTests;
+import com.mesalab.common.utils.sqlparser.FunctionsMergeHelper;
+import net.sf.jsqlparser.JSQLParserException;
+import org.junit.Assert;
+import org.junit.Test;
+import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
+
+/**
+ * TODO
+ *
+ * @Classname MergeFunctionsHelperTest
+ * @Date 2024/2/24 14:22
+ * @Author wWei
+ */
+@EnableAutoConfiguration
+public class MergeFunctionsHelperTest extends GalaxyQGWApplicationTests {
+ private static final Log log = LogFactory.get();
+
+ @Test
+ public void testPlainSelectSQL() throws JSQLParserException {
+ String originalSQL = "SELECT FROM_UNIXTIME(TIME_FLOOR_WITH_FILL(UNIX_TIMESTAMP(column), 'PT5M', 'zero')) as a ,TIME_FLOOR_WITH_FILL(UNIX_TIMESTAMP(column), 'PT5M', 'zero') as b ,TIME_FLOOR_WITH_FILL(long_column, 'PT5M', 'zero') as c, SUM(bytes) AS bytes FROM table GROUP BY FROM_UNIXTIME(TIME_FLOOR_WITH_FILL(UNIX_TIMESTAMP(column), 'PT5M', 'zero')) ,TIME_FLOOR_WITH_FILL(UNIX_TIMESTAMP(column), 'PT5M', 'zero'),TIME_FLOOR_WITH_FILL(long_column, 'PT5M', 'zero') order by FROM_UNIXTIME(TIME_FLOOR_WITH_FILL(UNIX_TIMESTAMP(column), 'PT5M', 'zero')) asc ,TIME_FLOOR_WITH_FILL(UNIX_TIMESTAMP(column), 'PT5M', 'zero') desc ,TIME_FLOOR_WITH_FILL(long_column, 'PT5M', 'zero') desc, bytes asc, a desc limit 1";
+ String build = FunctionsMergeHelper.build(originalSQL);
+ Assert.assertTrue(build.equals("SELECT TIME_FORMAT(TIME_FLOOR(column, 'PT5M'), 'yyyy-MM-dd HH:mm:ss') AS a, TIMESTAMP_TO_MILLIS(TIME_FLOOR(column, 'PT5M')) / 1000 AS b, TIMESTAMP_TO_MILLIS(TIME_FLOOR(MILLIS_TO_TIMESTAMP(long_column * 1000), 'PT5M')) / 1000 AS c, SUM(bytes) AS bytes FROM table GROUP BY TIME_FLOOR(column, 'PT5M'), TIME_FLOOR(column, 'PT5M'), TIME_FLOOR(MILLIS_TO_TIMESTAMP(long_column * 1000), 'PT5M') ORDER BY TIME_FLOOR(column, 'PT5M') ASC, TIME_FLOOR(column, 'PT5M') DESC, TIME_FLOOR(MILLIS_TO_TIMESTAMP(long_column * 1000), 'PT5M') DESC, bytes ASC, TIME_FLOOR(column, 'PT5M') DESC LIMIT 1"));
+ }
+
+ @Test
+ public void testNestedSQL() throws JSQLParserException {
+ String originalSQL = "SELECT FROM_UNIXTIME(TIME_FLOOR_WITH_FILL( stat_time, 'PT5S','zero' )) AS stat_time, SUM(bytes) AS bytes FROM ( SELECT TIME_FLOOR_WITH_FILL(UNIX_TIMESTAMP(__time),'PT1S') AS stat_time, SUM(in_bytes + out_bytes) AS bytes FROM statistics_rule WHERE __time >= '2024-02-01 00:00:00' and __time < '2024-02-02 00:00:00' and rule_id = 301 GROUP BY TIME_FLOOR_WITH_FILL(UNIX_TIMESTAMP(__time),'PT1S') ORDER BY TIME_FLOOR_WITH_FILL(UNIX_TIMESTAMP(__time),'PT1S') ASC ) GROUP BY FROM_UNIXTIME(TIME_FLOOR_WITH_FILL( stat_time,'PT5S','zero' )) ORDER BY stat_time ASC, bytes DESC LIMIT 10";
+ String build = FunctionsMergeHelper.build(originalSQL);
+ Assert.assertTrue(build.equals("SELECT TIME_FORMAT(TIME_FLOOR(MILLIS_TO_TIMESTAMP(stat_time * 1000), 'PT5S'), 'yyyy-MM-dd HH:mm:ss') AS stat_time, SUM(bytes) AS bytes FROM (SELECT TIMESTAMP_TO_MILLIS(TIME_FLOOR(__time, 'PT1S')) / 1000 AS stat_time, SUM(in_bytes + out_bytes) AS bytes FROM statistics_rule WHERE __time >= '2024-02-01 00:00:00' AND __time < '2024-02-02 00:00:00' AND rule_id = 301 GROUP BY TIME_FLOOR(__time, 'PT1S') ORDER BY TIME_FLOOR(__time, 'PT1S') ASC) GROUP BY TIME_FLOOR(MILLIS_TO_TIMESTAMP(stat_time * 1000), 'PT5S') ORDER BY TIME_FLOOR(MILLIS_TO_TIMESTAMP(stat_time * 1000), 'PT5S') ASC, bytes DESC LIMIT 10"));
+ }
+}
diff --git a/src/test/java/com/mesalab/qgw/service/NetworkMonitorTest.java b/src/test/java/com/mesalab/qgw/service/NetworkMonitorTest.java
deleted file mode 100644
index e74a4f9d..00000000
--- a/src/test/java/com/mesalab/qgw/service/NetworkMonitorTest.java
+++ /dev/null
@@ -1,52 +0,0 @@
-package com.mesalab.qgw.service;
-
-import com.mesalab.GalaxyQGWApplicationTests;
-import com.mesalab.common.entity.BaseResult;
-import com.mesalab.network.dsl.DSLObject;
-import com.mesalab.network.service.NetworkMonitorService;
-import org.junit.Assert;
-import org.junit.Test;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
-
-/**
- * TODO
- *
- * @Classname NetworkMonitorTest
- * @Date 2022/12/7 09:45
- * @Author wWei
- */
-@EnableAutoConfiguration
-public class NetworkMonitorTest extends GalaxyQGWApplicationTests {
-
- @Autowired
- NetworkMonitorService networkMonitorService;
-
- @Test
- public void appDataSummary() {
- DSLObject dslObject = jsonToInParameter("parameters/networkMonitorTest.json", "appDataSummary", DSLObject.class);
- BaseResult result = networkMonitorService.buildAppData(dslObject);
- Assert.assertTrue(result.getMessage(), result.getStatus() == 200);
- }
-
- @Test
- public void internalIPDataSummary() {
- DSLObject dslObject = jsonToInParameter("parameters/networkMonitorTest.json", "internalIPDataSummary", DSLObject.class);
- BaseResult result = networkMonitorService.buildInternalIPData(dslObject);
- Assert.assertTrue(result.getMessage(), result.getStatus() == 200);
- }
-
- @Test
- public void appDataRateSummary() {
- DSLObject dslObject = jsonToInParameter("parameters/networkMonitorTest.json", "appDataRateSummary", DSLObject.class);
- BaseResult result = networkMonitorService.buildAppDataRate(dslObject);
- Assert.assertTrue(result.getMessage(), result.getStatus() == 200);
- }
-
- @Test
- public void appTrafficSummary() {
- DSLObject dslObject = jsonToInParameter("parameters/networkMonitorTest.json", "appTrafficSummary", DSLObject.class);
- BaseResult result = networkMonitorService.buildAppTraffic(dslObject);
- Assert.assertTrue(result.getMessage(), result.getStatus() == 200);
- }
-}
diff --git a/src/test/java/com/mesalab/qgw/service/ProtocolTreeTest.java b/src/test/java/com/mesalab/qgw/service/ProtocolTreeTest.java
index 4121e9b5..76ed3ba8 100644
--- a/src/test/java/com/mesalab/qgw/service/ProtocolTreeTest.java
+++ b/src/test/java/com/mesalab/qgw/service/ProtocolTreeTest.java
@@ -6,7 +6,6 @@ import cn.hutool.log.LogFactory;
import com.alibaba.fastjson2.JSON;
import com.google.common.base.Stopwatch;
import com.mesalab.GalaxyQGWApplicationTests;
-import com.mesalab.network.service.NetworkMonitorService;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
@@ -20,7 +19,7 @@ import static java.util.concurrent.TimeUnit.MILLISECONDS;
public class ProtocolTreeTest extends GalaxyQGWApplicationTests {
private static final Log log = LogFactory.get();
@Autowired
- public NetworkMonitorService networkMonitorService;
+ public DSLService dslService;
private List<Map> getLiveChartsData(String fileName) {
File file = new File("benchmark" + File.separator + "live_traffic_chart" + File.separator + fileName);
@@ -32,7 +31,7 @@ public class ProtocolTreeTest extends GalaxyQGWApplicationTests {
@Test
public void test5MinuteTreeOutput() {
Stopwatch stopwatch = Stopwatch.createStarted();
- networkMonitorService.buildHierarchicalStructure(networkMonitorService.buildFlatStructure(getLiveChartsData("livecharts-5min.txt")));
+ dslService.buildHierarchicalStructure(dslService.buildFlatStructure(getLiveChartsData("livecharts-5min.txt")));
stopwatch.stop();
long millis = stopwatch.elapsed(MILLISECONDS);
log.info("time:{} ms", millis);
@@ -41,7 +40,7 @@ public class ProtocolTreeTest extends GalaxyQGWApplicationTests {
@Test
public void test10MinuteTreeOutput() {
Stopwatch stopwatch = Stopwatch.createStarted();
- networkMonitorService.buildHierarchicalStructure(networkMonitorService.buildFlatStructure(getLiveChartsData("livecharts-10min.txt")));
+ dslService.buildHierarchicalStructure(dslService.buildFlatStructure(getLiveChartsData("livecharts-10min.txt")));
stopwatch.stop();
long millis = stopwatch.elapsed(MILLISECONDS);
log.info("time:{} ms", millis);
@@ -49,7 +48,7 @@ public class ProtocolTreeTest extends GalaxyQGWApplicationTests {
@Test
public void test30MinuteTreeOutput() {
Stopwatch stopwatch = Stopwatch.createStarted();
- networkMonitorService.buildHierarchicalStructure(networkMonitorService.buildFlatStructure(getLiveChartsData("livecharts-30min.txt")));
+ dslService.buildHierarchicalStructure(dslService.buildFlatStructure(getLiveChartsData("livecharts-30min.txt")));
stopwatch.stop();
long millis = stopwatch.elapsed(MILLISECONDS);
log.info("time:{} ms", millis);
@@ -57,7 +56,7 @@ public class ProtocolTreeTest extends GalaxyQGWApplicationTests {
@Test
public void test1HourTreeOutput() {
Stopwatch stopwatch = Stopwatch.createStarted();
- networkMonitorService.buildHierarchicalStructure(networkMonitorService.buildFlatStructure(getLiveChartsData("livecharts-1h.txt")));
+ dslService.buildHierarchicalStructure(dslService.buildFlatStructure(getLiveChartsData("livecharts-1h.txt")));
stopwatch.stop();
long millis = stopwatch.elapsed(MILLISECONDS);
log.info("time:{} ms", millis);
diff --git a/src/test/java/com/mesalab/qgw/service/RecommendTest.java b/src/test/java/com/mesalab/qgw/service/RecommendTest.java
new file mode 100644
index 00000000..497e71e2
--- /dev/null
+++ b/src/test/java/com/mesalab/qgw/service/RecommendTest.java
@@ -0,0 +1,48 @@
+package com.mesalab.qgw.service;
+
+import com.mesalab.GalaxyQGWApplicationTests;
+import com.mesalab.common.entity.BaseResult;
+import com.mesalab.qgw.controller.QueryController;
+import com.mesalab.qgw.model.basic.DSLQueryRequestParam;
+import org.junit.Assert;
+import org.junit.Test;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
+import org.springframework.web.context.request.async.DeferredResult;
+
+import java.util.Map;
+
+/**
+ * @Author ww
+ * @Date 2024/1/30
+ */
+@EnableAutoConfiguration
+public class RecommendTest extends GalaxyQGWApplicationTests {
+
+ @Autowired
+ private QueryController queryController;
+
+ @Test
+ public void testIpLearningFqdnRelateIp() {
+ DSLQueryRequestParam body = jsonToInParameter("parameters/recommendTest.json", "ip_learning_fqdn_relate_ip", DSLQueryRequestParam.class);
+ BaseResult commitResult = queryController.createDSLQuery(body);
+ Assert.assertTrue(commitResult.getMessage(), commitResult.getStatus() == 201);
+ String id = ((Map<String, Object>) commitResult.getJob()).get("job_id").toString();
+ DeferredResult<BaseResult> getResult = new DeferredResult<>();
+ while (true) {
+ if (getResult.isSetOrExpired()) {
+ break;
+ }
+ getResult = queryController.getJobStatusById(id, 0);
+ }
+ BaseResult result = (BaseResult) getResult.getResult();
+ Assert.assertTrue(result.getMessage(), result.getStatus() == 200);
+ }
+
+ @Test
+ public void testIpLearningActiveIpOneshot() {
+ DSLQueryRequestParam body = jsonToInParameter("parameters/recommendTest.json", "ip_learning_active_ip", DSLQueryRequestParam.class);
+ BaseResult commitResult = queryController.createDSLQuery(body);
+ Assert.assertTrue(commitResult.getMessage(), commitResult.getStatus() == 200);
+ }
+}
diff --git a/src/test/java/com/mesalab/qgw/service/SQLAdHocTest.java b/src/test/java/com/mesalab/qgw/service/SQLAdHocTest.java
new file mode 100644
index 00000000..5f9083d9
--- /dev/null
+++ b/src/test/java/com/mesalab/qgw/service/SQLAdHocTest.java
@@ -0,0 +1,123 @@
+package com.mesalab.qgw.service;
+
+import com.mesalab.GalaxyQGWApplicationTests;
+import com.mesalab.common.entity.BaseResult;
+import com.mesalab.qgw.controller.QueryController;
+import com.mesalab.qgw.model.basic.SqlQueryRequestParam;
+import org.junit.Assert;
+import org.junit.Test;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
+import org.springframework.web.context.request.async.DeferredResult;
+
+import java.util.Map;
+
+/**
+ * @Author ww
+ * @Date 2024/1/30
+ */
+@EnableAutoConfiguration
+public class SQLAdHocTest extends GalaxyQGWApplicationTests {
+
+ @Autowired
+ private QueryController queryController;
+
+ @Test
+ public void testQuerySqlDefault() {
+ SqlQueryRequestParam body = jsonToInParameter("parameters/sqlAdHocTest.json", "query_sql_default", SqlQueryRequestParam.class);
+ BaseResult commitResult = queryController.createSQLQuery(body);
+ Assert.assertTrue(commitResult.getMessage(), commitResult.getStatus() == 201);
+ String id = ((Map<String, Object>) commitResult.getJob()).get("job_id").toString();
+ DeferredResult<BaseResult> getResult = new DeferredResult<>();
+ while (true) {
+ if (getResult.isSetOrExpired()) {
+ break;
+ }
+ getResult = queryController.getJobStatusById(id, 0);
+ }
+ BaseResult result = (BaseResult) getResult.getResult();
+ Assert.assertTrue(result.getMessage(), result.getStatus() == 200);
+ }
+
+ @Test
+ public void testQuerySqlOneshot() {
+ SqlQueryRequestParam body = jsonToInParameter("parameters/sqlAdHocTest.json", "query_sql_oneshot", SqlQueryRequestParam.class);
+ BaseResult baseResult = queryController.createSQLQuery(body);
+ Assert.assertTrue(baseResult.getMessage(), baseResult.getStatus() == 200);
+ }
+
+ @Test
+ public void testQuerySqlNormal() {
+ SqlQueryRequestParam body = jsonToInParameter("parameters/sqlAdHocTest.json", "query_sql_normal", SqlQueryRequestParam.class);
+ BaseResult commitResult = queryController.createSQLQuery(body);
+ Assert.assertTrue(commitResult.getMessage(), commitResult.getStatus() == 201);
+ String id = ((Map<String, Object>) commitResult.getJob()).get("job_id").toString();
+ DeferredResult<BaseResult> getResult = new DeferredResult<>();
+ while (true) {
+ if (getResult.isSetOrExpired()) {
+ break;
+ }
+ getResult = queryController.getJobStatus(id, 0);
+ }
+ BaseResult result = (BaseResult) getResult.getResult();
+ Assert.assertTrue(result.getMessage(), result.getStatus() == 200);
+ }
+
+ @Test
+ public void testQuerySqlBlocking() {
+ SqlQueryRequestParam body = jsonToInParameter("parameters/sqlAdHocTest.json", "query_sql_blocking", SqlQueryRequestParam.class);
+ BaseResult commitResult = queryController.createSQLQuery(body);
+ Assert.assertTrue(commitResult.getMessage(), commitResult.getStatus() == 201);
+ String id = ((Map<String, Object>) commitResult.getJob()).get("job_id").toString();
+ DeferredResult<BaseResult> getResult = new DeferredResult<>();
+ while (true) {
+ if (getResult.isSetOrExpired()) {
+ break;
+ }
+ getResult = queryController.getJobResultById(id, 0);
+ }
+ BaseResult result = (BaseResult) getResult.getResult();
+ Assert.assertTrue(result.getMessage(), result.getStatus() == 200);
+ }
+
+ @Test
+ public void testQuerySqlJson() {
+ SqlQueryRequestParam body = jsonToInParameter("parameters/sqlAdHocTest.json", "query_sql_json", SqlQueryRequestParam.class);
+ BaseResult commitResult = queryController.createSQLQuery(body);
+ Assert.assertTrue(commitResult.getMessage(), commitResult.getStatus() == 201);
+ String id = ((Map<String, Object>) commitResult.getJob()).get("job_id").toString();
+ DeferredResult<BaseResult> getResult = new DeferredResult<>();
+ while (true) {
+ if (getResult.isSetOrExpired()) {
+ break;
+ }
+ getResult = queryController.getJobResult(id, 0);
+ }
+ BaseResult result = (BaseResult) getResult.getResult();
+ Assert.assertTrue(result.getMessage(), result.getStatus() == 200);
+ }
+
+ @Test
+ public void testQuerySqlCsv() {
+ SqlQueryRequestParam body = jsonToInParameter("parameters/sqlAdHocTest.json", "query_sql_csv", SqlQueryRequestParam.class);
+ BaseResult commitResult = queryController.createSQLQuery(body);
+ Assert.assertTrue(commitResult.getMessage(), commitResult.getStatus() == 201);
+ String id = ((Map<String, Object>) commitResult.getJob()).get("job_id").toString();
+ DeferredResult<BaseResult> getResult = new DeferredResult<>();
+ while (true) {
+ if (getResult.isSetOrExpired()) {
+ break;
+ }
+ getResult = queryController.getJobResultById(id, 0);
+ }
+ BaseResult result = (BaseResult) getResult.getResult();
+ Assert.assertTrue(result.getMessage(), result.getStatus() == 200);
+ }
+
+ @Test
+ public void testQueryNoneSubQuery() {
+ SqlQueryRequestParam body = jsonToInParameter("parameters/sqlAdHocTest.json", "query_sql_oneshot_error_trigger_sub_query", SqlQueryRequestParam.class);
+ BaseResult baseResult = queryController.createSQLQuery(body);
+ Assert.assertTrue(baseResult.getMessage(), baseResult.getStatus() == 200);
+ }
+}
diff --git a/src/test/java/com/mesalab/qgw/service/SQLSavedTest.java b/src/test/java/com/mesalab/qgw/service/SQLSavedTest.java
new file mode 100644
index 00000000..c6048642
--- /dev/null
+++ b/src/test/java/com/mesalab/qgw/service/SQLSavedTest.java
@@ -0,0 +1,41 @@
+package com.mesalab.qgw.service;
+
+import com.mesalab.GalaxyQGWApplicationTests;
+import com.mesalab.common.entity.BaseResult;
+import com.mesalab.qgw.controller.QueryController;
+import com.mesalab.qgw.model.basic.SqlQueryRequestParam;
+import org.junit.Assert;
+import org.junit.Test;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
+
+import java.util.Map;
+
+/**
+ * @Author ww
+ * @Date 2024/1/30
+ */
+@EnableAutoConfiguration
+public class SQLSavedTest extends GalaxyQGWApplicationTests {
+
+ @Autowired
+ private QueryController queryController;
+ @Autowired
+ private QueryJobService queryJobService;
+
+ @Test
+ public void testWholeProcess() {
+ SqlQueryRequestParam body = jsonToInParameter("parameters/sqlSavedTest.json", "default", SqlQueryRequestParam.class);
+
+ BaseResult commitResult = queryController.createSQLQuery(body);
+ Assert.assertTrue(commitResult.getMessage(), commitResult.getStatus() == 201);
+ String id = ((Map<String, Object>) commitResult.getJob()).get("job_id").toString();
+
+ BaseResult statusResult = queryJobService.getSavedQueryStatusById(id);
+ Assert.assertTrue(statusResult.getMessage(), statusResult.getStatus() == 200);
+
+ BaseResult deletedResult = queryController.deleteJobById(id, 1);
+ Assert.assertTrue(deletedResult.getMessage(), deletedResult.getStatus() == 202);
+
+ }
+} \ No newline at end of file
diff --git a/src/test/java/com/mesalab/qgw/service/AvroSchemaDynamicTest.java b/src/test/java/com/mesalab/qgw/service/SchemaDynamicTest.java
index 274e9487..1c7fb876 100644
--- a/src/test/java/com/mesalab/qgw/service/AvroSchemaDynamicTest.java
+++ b/src/test/java/com/mesalab/qgw/service/SchemaDynamicTest.java
@@ -14,14 +14,14 @@ import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import static org.junit.Assert.assertEquals;
@EnableAutoConfiguration
-public class AvroSchemaDynamicTest extends GalaxyQGWApplicationTests {
+public class SchemaDynamicTest extends GalaxyQGWApplicationTests {
private static final Log log = LogFactory.get();
@Autowired
public DiagnosisService diagnosisService;
@Test
public void testValidateMetadata(){
- BaseResult baseResult = diagnosisService.validateMetadata();
- assertEquals("Validate metadata failure.", String.valueOf(baseResult.getStatus()), String.valueOf(HttpStatus.SC_OK));
+ BaseResult baseResult = diagnosisService.validateSchema();
+ assertEquals("Validate schema failure.", String.valueOf(baseResult.getStatus()), String.valueOf(HttpStatus.SC_OK));
}
diff --git a/src/test/java/com/mesalab/qgw/service/TopEntityTest.java b/src/test/java/com/mesalab/qgw/service/TopEntityTest.java
deleted file mode 100644
index 318cf863..00000000
--- a/src/test/java/com/mesalab/qgw/service/TopEntityTest.java
+++ /dev/null
@@ -1,87 +0,0 @@
-package com.mesalab.qgw.service;
-
-import cn.hutool.core.io.FileUtil;
-import cn.hutool.core.util.CharsetUtil;
-import cn.hutool.core.util.NumberUtil;
-import cn.hutool.core.util.StrUtil;
-import cn.hutool.log.Log;
-import cn.hutool.log.LogFactory;
-import com.google.common.base.Stopwatch;
-import com.google.common.collect.Lists;
-import com.mesalab.GalaxyQGWApplicationTests;
-import com.mesalab.services.service.EntityService;
-import org.junit.Test;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
-
-import java.io.File;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import static java.util.concurrent.TimeUnit.MILLISECONDS;
-
-@EnableAutoConfiguration
-public class TopEntityTest extends GalaxyQGWApplicationTests {
- private static final Log log = LogFactory.get();
-
- @Autowired
- EntityService entityService;
-
- private List<Map<String, Object>> getEntityData(String fileName) {
- File file = new File("benchmark" + File.separator + "entity_dataset" + File.separator + fileName);
- List<String> list = FileUtil.readLines(file, CharsetUtil.UTF_8);
- List<Map<String, Object>> data = Lists.newArrayList();
- for (String s : list) {
- String[] split = StrUtil.split(s, "\t");
- Map<String, Object> map = new HashMap<>(16);
- if (split.length >= 2) {
- map.put("sessions", NumberUtil.isLong(split[1]) ? split[1] : "0");
- }
- if (split.length >= 1) {
- map.put("ssl_sni", split[0]);
- data.add(map);
- }
- }
- return data;
- }
-
- @Test
- public void testCUCCSNIOutput() {
- Stopwatch stopwatch = Stopwatch.createStarted();
- entityService.mergeSNI(getEntityData("XJ-CUCC-SNI-Top200w.txt"));
- stopwatch.stop();
- long millis = stopwatch.elapsed(MILLISECONDS);
- log.info("time:{} ms", millis);
- }
-
- @Test
- public void testE21SNIOutput() {
- Stopwatch stopwatch = Stopwatch.createStarted();
- entityService.mergeSNI(getEntityData("E21-SNI-Top200w.txt"));
- stopwatch.stop();
- long millis = stopwatch.elapsed(MILLISECONDS);
- log.info("time:{} ms", millis);
- }
-
- @Test
- public void testE21SNI20221011Output() {
- Stopwatch stopwatch = Stopwatch.createStarted();
- entityService.mergeSNI(getEntityData("E21-SNI-Top3W-20221011.txt"));
- stopwatch.stop();
- long millis = stopwatch.elapsed(MILLISECONDS);
- log.info("time:{} ms", millis);
- }
-
- @Test
- public void testE21SNI20221020Output() {
- Stopwatch stopwatch = Stopwatch.createStarted();
- entityService.mergeSNI(getEntityData("E21-SNI-Top120W-20221020.txt"));
- stopwatch.stop();
- long millis = stopwatch.elapsed(MILLISECONDS);
- log.info("time:{} ms", millis);
- }
-
-
-
-}
diff --git a/src/test/java/com/mesalab/qgw/service/UnstructuredTest.java b/src/test/java/com/mesalab/qgw/service/UnstructuredTest.java
deleted file mode 100644
index 4285bc5a..00000000
--- a/src/test/java/com/mesalab/qgw/service/UnstructuredTest.java
+++ /dev/null
@@ -1,44 +0,0 @@
-package com.mesalab.qgw.service;
-
-import com.mesalab.GalaxyQGWApplicationTests;
-import com.mesalab.common.entity.BaseResult;
-import com.mesalab.services.common.dsl.ComDSLObject;
-import com.mesalab.services.service.UnstructuredService;
-import org.junit.Assert;
-import org.junit.Test;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
-
-@EnableAutoConfiguration
-public class UnstructuredTest extends GalaxyQGWApplicationTests {
- @Autowired
- UnstructuredService unstructuredService;
-
- @Test
- public void allFilePath(){
- ComDSLObject dslObject = jsonToInParameter("parameters/unstructuredTest.json", "all", ComDSLObject.class);
- BaseResult UnstructuredData = unstructuredService.getUnstructuredData("all", dslObject);
- Assert.assertTrue(UnstructuredData.getMessage(), UnstructuredData.getStatus() == 200);
- }
-
- @Test
- public void mailFilePath(){
- ComDSLObject dslObject = jsonToInParameter("parameters/unstructuredTest.json", "mail", ComDSLObject.class);
- BaseResult UnstructuredData = unstructuredService.getUnstructuredData("mail", dslObject);
- Assert.assertTrue(UnstructuredData.getMessage(), UnstructuredData.getStatus() == 200);
- }
-
- @Test
- public void pcapFilePath(){
- ComDSLObject dslObject = jsonToInParameter("parameters/unstructuredTest.json", "pcap", ComDSLObject.class);
- BaseResult UnstructuredData = unstructuredService.getUnstructuredData("pcap", dslObject);
- Assert.assertTrue(UnstructuredData.getMessage(), UnstructuredData.getStatus() == 200);
- }
-
- @Test
- public void httpFilePath(){
- ComDSLObject dslObject = jsonToInParameter("parameters/unstructuredTest.json", "http", ComDSLObject.class);
- BaseResult UnstructuredData = unstructuredService.getUnstructuredData("http", dslObject);
- Assert.assertTrue(UnstructuredData.getMessage(), UnstructuredData.getStatus() == 200);
- }
-}
diff --git a/src/test/resources/examples/invalidDSLRequestTest.json b/src/test/resources/examples/invalidDSLRequestTest.json
new file mode 100644
index 00000000..903f8f53
--- /dev/null
+++ b/src/test/resources/examples/invalidDSLRequestTest.json
@@ -0,0 +1,50 @@
+{
+ "query": {
+ "parameters": {
+ "intervals": [
+ "2024-03-14 00:00:00/2024-03-15 00:00:00"
+ ],
+ "limit": "3",
+ "match": [
+ {
+ "fieldValues": [
+ "SSL",
+ "HTTP",
+ "DNS"
+ ],
+ "type": "exactly"
+ },
+ {
+ "fieldKey": "FQDN_NAME",
+ "fieldValues": [
+ "itunes.apple",
+ "itunes.apple.com"
+ ],
+ "type": "exactly"
+ }
+ ],
+ "range": [
+ {
+ "fieldKey": "VSYS_ID",
+ "type": "eq"
+ },
+ {
+ "fieldKey": "DEPTH",
+ "fieldValues": [
+ 1
+ ],
+ "type": "eq"
+ },
+ {
+ "fieldKey": "UNIQ_CIP",
+ "fieldValues": [
+ 12
+ ],
+ "type": "gt"
+ }
+ ],
+ "sort": []
+ },
+ "queryType": "iplearning"
+ }
+} \ No newline at end of file
diff --git a/src/test/resources/examples/invalidSessionRecordTest.json b/src/test/resources/examples/invalidSessionRecordTest.json
new file mode 100644
index 00000000..632ab4b0
--- /dev/null
+++ b/src/test/resources/examples/invalidSessionRecordTest.json
@@ -0,0 +1,6764 @@
+{
+ "type": "record",
+ "name": "session_record",
+ "namespace": "tsg_galaxy_v3",
+ "doc": {
+ "primary_key": "log_id",
+ "partition_key": "recv_time",
+ "index_key": [
+ "vsys_id",
+ "security_action",
+ "proxy_action",
+ "decoded_as",
+ "data_center",
+ "device_group",
+ "recv_time"
+ ],
+ "ttl": 2592000,
+ "default_ttl": 2592000,
+ "functions": {
+ "aggregation": [
+ {
+ "name": "COUNT",
+ "label": "COUNT",
+ "function": "count(expr)",
+ "metric_type": "counter",
+ "unit": "short"
+ },
+ {
+ "name": "COUNT_DISTINCT",
+ "label": "COUNT_DISTINCT",
+ "function": "COUNT_DISTINCT(expr)",
+ "metric_type": "gauge",
+ "unit": "short"
+ },
+ {
+ "name": "AVG",
+ "label": "AVG",
+ "function": "avg(expr)",
+ "metric_type": "gauge",
+ "unit": "short"
+ },
+ {
+ "name": "SUM",
+ "label": "SUM",
+ "function": "sum(expr)",
+ "metric_type": "counter",
+ "unit": "short"
+ },
+ {
+ "name": "MAX",
+ "label": "MAX",
+ "function": "max(expr)",
+ "metric_type": "gauge",
+ "unit": "short"
+ },
+ {
+ "name": "MIN",
+ "label": "MIN",
+ "function": "min(expr)",
+ "metric_type": "gauge",
+ "unit": "short"
+ },
+ {
+ "name": "MEDIAN",
+ "label": "MEDIAN",
+ "function": "MEDIAN(expr)",
+ "metric_type": "gauge",
+ "unit": "short"
+ },
+ {
+ "name": "MEDIAN_HDR",
+ "label": "MEDIAN_HDR",
+ "function": "MEDIAN_HDR(expr)",
+ "metric_type": "gauge",
+ "unit": "short"
+ },
+ {
+ "name": "QUANTILE",
+ "label": "QUANTILE",
+ "function": "QUANTILE(expr,level)",
+ "metric_type": "gauge",
+ "unit": "short"
+ },
+ {
+ "name": "P95_PERCENTILE",
+ "label": "P95_PERCENTILE",
+ "function": "QUANTILE(expr,0.95)",
+ "metric_type": "gauge",
+ "unit": "short"
+ },
+ {
+ "name": "P99_PERCENTILE",
+ "label": "P99_PERCENTILE",
+ "function": "QUANTILE(expr,0.99)",
+ "metric_type": "gauge",
+ "unit": "short"
+ },
+ {
+ "name": "QUANTILE_HDR",
+ "label": "QUANTILE_HDR",
+ "function": "QUANTILE_HDR(expr,level)",
+ "metric_type": "gauge",
+ "unit": "short"
+ },
+ {
+ "name": "P95_PERCENTILE_HDR",
+ "label": "P95_PERCENTILE_HDR",
+ "function": "QUANTILE_HDR(expr,0.95)",
+ "metric_type": "gauge",
+ "unit": "short"
+ },
+ {
+ "name": "P99_PERCENTILE_HDR",
+ "label": "P99_PERCENTILE_HDR",
+ "function": "QUANTILE_HDR(expr,0.99)",
+ "metric_type": "gauge",
+ "unit": "short"
+ },
+ {
+ "name": "PERCENTILES_HDR",
+ "label": "PERCENTILES_HDR",
+ "function": "PERCENTILES_HDR(expr)",
+ "metric_type": "histogram",
+ "unit": "short"
+ },
+ {
+ "name": "APPROX_COUNT_DISTINCT_HLLD",
+ "label": "COUNT_DISTINCT_HLLD",
+ "function": "APPROX_COUNT_DISTINCT_HLLD(expr)",
+ "metric_type": "gauge",
+ "unit": "short"
+ },
+ {
+ "name": "APPROX_COUNT_DISTINCT_DS_HLL",
+ "label": "COUNT_DISTINCT_DS_HLL",
+ "function": "APPROX_COUNT_DISTINCT_DS_HLL(expr)",
+ "metric_type": "gauge",
+ "unit": "short"
+ },
+ {
+ "name": "RATE",
+ "label": "RATE",
+ "function": "RATE(expr,duration)",
+ "metric_type": "gauge",
+ "unit": "sps"
+ },
+ {
+ "name": "BITRATE",
+ "label": "BITRATE",
+ "function": "RATE(expr,duration)*8",
+ "metric_type": "gauge",
+ "unit": "bps"
+ }
+ ],
+ "date": [
+ {
+ "name": "UNIX_TIMESTAMP",
+ "label": "UNIX_TIMESTAMP",
+ "function": "UNIX_TIMESTAMP(expr)"
+ },
+ {
+ "name": "UNIX_TIMESTAMP_MILLIS",
+ "label": "UNIX_TIMESTAMP_MILLIS",
+ "function": "UNIX_TIMESTAMP_MILLIS(expr)"
+ },
+ {
+ "name": "FROM_UNIXTIME",
+ "label": "FROM_UNIXTIME",
+ "function": "FROM_UNIXTIME(expr)"
+ },
+ {
+ "name": "FROM_UNIXTIME_MILLIS",
+ "label": "FROM_UNIXTIME_MILLIS",
+ "function": "FROM_UNIXTIME_MILLIS(expr)"
+ },
+ {
+ "name": "DATE_FORMAT",
+ "label": "DATE_FORMAT",
+ "function": "DATE_FORMAT(expr,format)"
+ },
+ {
+ "name": "CONVERT_TZ",
+ "label": "CONVERT_TZ",
+ "function": "CONVERT_TZ(expr, from_tz, to_tz)"
+ },
+ {
+ "name": "TIME_FLOOR_WITH_FILL",
+ "label": "TIME_FLOOR_WITH_FILL",
+ "function": "TIME_FLOOR_WITH_FILL(expr,period,fill)"
+ }
+ ],
+ "operator": [
+ {
+ "name": "=",
+ "label": "=",
+ "function": "expr = value"
+ },
+ {
+ "name": "!=",
+ "label": "!=",
+ "function": "expr != value"
+ },
+ {
+ "name": ">",
+ "label": ">",
+ "function": "expr > value"
+ },
+ {
+ "name": "<",
+ "label": "<",
+ "function": "expr < value"
+ },
+ {
+ "name": ">=",
+ "label": ">=",
+ "function": "expr >= value"
+ },
+ {
+ "name": "<=",
+ "label": "<=",
+ "function": "expr <= value"
+ },
+ {
+ "name": "has",
+ "label": "HAS",
+ "function": "has(expr, value)"
+ },
+ {
+ "name": "in",
+ "label": "IN",
+ "function": "expr in (values)"
+ },
+ {
+ "name": "not in",
+ "label": "NOT IN",
+ "function": "expr not in (values)"
+ },
+ {
+ "name": "like",
+ "label": "LIKE",
+ "function": "expr like value"
+ },
+ {
+ "name": "not like",
+ "label": "NOT LIKE",
+ "function": "expr not like value"
+ },
+ {
+ "name": "notEmpty",
+ "label": "NOT EMPTY",
+ "function": "notEmpty(expr)"
+ },
+ {
+ "name": "empty",
+ "label": "EMPTY",
+ "function": "empty(expr)"
+ },
+ {
+ "name": "bitAnd",
+ "label": "Bitwise AND",
+ "function": "bitAnd(expr, value)=value"
+ }
+ ]
+ },
+ "schema_query": {
+ "time": [
+ "recv_time",
+ "test",
+ "start_timestamp_ms",
+ "end_timestamp_ms",
+ "processing_time",
+ "ingestion_time",
+ "insert_time"
+ ],
+ "dimensions": [
+ "session_id",
+ "session_id",
+ "device_id",
+ "out_link_id",
+ "in_link_id",
+ "data_center",
+ "sled_ip",
+ "device_group",
+ "address_type",
+ "decoded_as",
+ "vsys_id",
+ "flags",
+ "flags_identify_info",
+ "security_action",
+ "security_rule_list",
+ "monitor_rule_list",
+ "shaping_rule_list",
+ "sc_rule_list",
+ "statistics_rule_list",
+ "sc_rsp_raw",
+ "sc_rsp_decrypted",
+ "proxy_action",
+ "proxy_rule_list",
+ "client_ip",
+ "client_port",
+ "client_os_desc",
+ "client_geolocation",
+ "client_asn",
+ "subscriber_id",
+ "imei",
+ "imsi",
+ "apn",
+ "phone_number",
+ "packet_capture_file",
+ "in_src_mac",
+ "out_src_mac",
+ "server_ip",
+ "server_port",
+ "server_os_desc",
+ "server_geolocation",
+ "server_asn",
+ "server_fqdn",
+ "server_domain",
+ "in_dest_mac",
+ "out_dest_mac",
+ "app_transition",
+ "app",
+ "app_debug_info",
+ "app_content",
+ "ip_protocol",
+ "decoded_path",
+ "dup_traffic_flag",
+ "tunnel_endpoint_a_desc",
+ "tunnel_endpoint_b_desc",
+ "http_url",
+ "http_host",
+ "http_request_line",
+ "http_response_line",
+ "http_proxy_flag",
+ "http_sequence",
+ "http_cookie",
+ "http_referer",
+ "http_user_agent",
+ "http_request_content_length",
+ "http_request_content_type",
+ "http_response_content_length",
+ "http_response_content_type",
+ "http_set_cookie",
+ "http_version",
+ "http_status_code",
+ "http_response_latency_ms",
+ "http_session_duration_ms",
+ "http_action_file_size",
+ "mail_protocol_type",
+ "mail_account",
+ "mail_from_cmd",
+ "mail_to_cmd",
+ "mail_from",
+ "mail_password",
+ "mail_to",
+ "mail_cc",
+ "mail_bcc",
+ "mail_subject",
+ "mail_subject_charset",
+ "mail_attachment_name",
+ "mail_attachment_name_charset",
+ "mail_eml_file",
+ "dns_message_id",
+ "dns_qr",
+ "dns_opcode",
+ "dns_aa",
+ "dns_tc",
+ "dns_rd",
+ "dns_ra",
+ "dns_rcode",
+ "dns_qname",
+ "dns_qtype",
+ "dns_qclass",
+ "dns_cname",
+ "dns_sub",
+ "ssl_version",
+ "ssl_sni",
+ "ssl_san",
+ "ssl_cn",
+ "ssl_ja3_hash",
+ "ssl_ja3s_hash",
+ "ssl_cert_issuer",
+ "ssl_cert_subject",
+ "ssl_esni_flag",
+ "ssl_ech_flag",
+ "dtls_cookie",
+ "dtls_version",
+ "dtls_sni",
+ "dtls_san",
+ "dtls_cn",
+ "dtls_handshake_latency_ms",
+ "dtls_ja3_fingerprint",
+ "dtls_ja3_hash",
+ "dtls_cert_issuer",
+ "dtls_cert_subject",
+ "quic_sni",
+ "quic_version",
+ "quic_user_agent",
+ "ftp_account",
+ "ftp_url",
+ "ftp_link_type",
+ "sip_call_id",
+ "sip_originator_description",
+ "sip_responder_description",
+ "sip_user_agent",
+ "sip_server",
+ "sip_originator_sdp_connect_ip",
+ "sip_originator_sdp_media_port",
+ "sip_originator_sdp_media_type",
+ "sip_responder_sdp_connect_ip",
+ "sip_responder_sdp_media_port",
+ "sip_responder_sdp_media_type",
+ "sip_responder_sdp_content",
+ "sip_duration_s",
+ "sip_bye",
+ "ssh_version",
+ "ssh_auth_success",
+ "ssh_client_version",
+ "ssh_server_version",
+ "ssh_cipher_alg",
+ "ssh_mac_alg",
+ "ssh_compression_alg",
+ "ssh_kex_alg",
+ "ssh_host_key_alg",
+ "ssh_host_key",
+ "ssh_hassh",
+ "rtp_payload_type_c2s",
+ "rtp_payload_type_s2c",
+ "rtp_originator_dir",
+ "stratum_cryptocurrency",
+ "stratum_mining_pools",
+ "stratum_mining_program",
+ "stratum_mining_subscribe",
+ "rdp_cookie",
+ "rdp_security_protocol",
+ "rdp_client_channels",
+ "rdp_keyboard_layout",
+ "rdp_client_version",
+ "rdp_client_name",
+ "rdp_client_product_id",
+ "rdp_desktop_width",
+ "rdp_desktop_height",
+ "rdp_requested_color_depth",
+ "rdp_certificate_type",
+ "rdp_certificate_count",
+ "rdp_certificate_permanent",
+ "rdp_encryption_level",
+ "rdp_encryption_method",
+ "internal_ip_list",
+ "external_ip_list",
+ "security_rule_id",
+ "monitor_rule_id",
+ "proxy_rule_id",
+ "statistics_rule_id",
+ "shaping_rule_id",
+ "sc_rule_id"
+ ],
+ "metrics": [
+ "client_ip",
+ "client_port",
+ "client_geolocation",
+ "client_asn",
+ "subscriber_id",
+ "imei",
+ "imsi",
+ "apn",
+ "phone_number",
+ "in_src_mac",
+ "out_src_mac",
+ "server_ip",
+ "server_port",
+ "server_geolocation",
+ "server_asn",
+ "server_fqdn",
+ "server_domain",
+ "in_dest_mac",
+ "out_dest_mac",
+ "app",
+ "decoded_path",
+ "sent_pkts",
+ "received_pkts",
+ "sent_bytes",
+ "received_bytes",
+ "duration_ms",
+ "tcp_handshake_latency_ms",
+ "tcp_c2s_ip_fragments",
+ "tcp_s2c_ip_fragments",
+ "tcp_c2s_lost_bytes",
+ "tcp_s2c_lost_bytes",
+ "tcp_c2s_o3_pkts",
+ "tcp_s2c_o3_pkts",
+ "tcp_c2s_rtx_pkts",
+ "tcp_s2c_rtx_pkts",
+ "tcp_c2s_rtx_bytes",
+ "tcp_s2c_rtx_bytes",
+ "tcp_rtt_ms",
+ "http_url",
+ "http_host",
+ "http_user_agent",
+ "http_request_content_length",
+ "http_response_content_length",
+ "http_status_code",
+ "http_response_latency_ms",
+ "http_session_duration_ms",
+ "mail_account",
+ "mail_from_cmd",
+ "mail_to_cmd",
+ "mail_from",
+ "mail_to",
+ "mail_cc",
+ "mail_bcc",
+ "mail_subject",
+ "mail_attachment_name",
+ "dns_message_id",
+ "dns_qr",
+ "dns_opcode",
+ "dns_aa",
+ "dns_rd",
+ "dns_ra",
+ "dns_rcode",
+ "dns_qtype",
+ "dns_qclass",
+ "dns_qname",
+ "dns_cname",
+ "dns_response_latency_ms",
+ "ssl_sni",
+ "ssl_san",
+ "ssl_cn",
+ "ssl_handshake_latency_ms",
+ "ssl_ja3_hash",
+ "ssl_ja3s_hash",
+ "ssl_cert_issuer",
+ "ssl_cert_subject",
+ "dtls_sni",
+ "dtls_san",
+ "dtls_cn",
+ "dtls_handshake_latency_ms",
+ "dtls_ja3_hash",
+ "dtls_cert_issuer",
+ "dtls_cert_subject",
+ "quic_sni",
+ "quic_user_agent",
+ "ftp_account",
+ "ftp_url",
+ "sip_call_id",
+ "sip_server",
+ "ssh_hassh"
+ ],
+ "filters": [
+ "session_id",
+ "start_timestamp_ms",
+ "end_timestamp_ms",
+ "duration_ms",
+ "tcp_handshake_latency_ms",
+ "processing_time",
+ "ingestion_time",
+ "device_id",
+ "data_center",
+ "sled_ip",
+ "device_group",
+ "address_type",
+ "decoded_as",
+ "vsys_id",
+ "flags",
+ "flags_identify_info",
+ "security_action",
+ "security_rule_list",
+ "monitor_rule_list",
+ "shaping_rule_list",
+ "sc_rule_list",
+ "statistics_rule_list",
+ "sc_rsp_raw",
+ "sc_rsp_decrypted",
+ "proxy_rule_list",
+ "proxy_action",
+ "monitor_mirrored_pkts",
+ "monitor_mirrored_bytes",
+ "client_ip",
+ "client_port",
+ "client_os_desc",
+ "client_geolocation",
+ "client_asn",
+ "subscriber_id",
+ "imei",
+ "imsi",
+ "apn",
+ "phone_number",
+ "in_src_mac",
+ "out_src_mac",
+ "server_ip",
+ "server_port",
+ "server_os_desc",
+ "server_geolocation",
+ "server_asn",
+ "server_fqdn",
+ "server_domain",
+ "in_dest_mac",
+ "out_dest_mac",
+ "app_transition",
+ "app_debug_info",
+ "app",
+ "app_content",
+ "dup_traffic_flag",
+ "tunnel_endpoint_a_desc",
+ "tunnel_endpoint_b_desc",
+ "ip_protocol",
+ "decoded_path",
+ "fqdn_category_list",
+ "sent_pkts",
+ "received_pkts",
+ "sent_bytes",
+ "received_bytes",
+ "tcp_c2s_ip_fragments",
+ "tcp_s2c_ip_fragments",
+ "tcp_c2s_lost_bytes",
+ "tcp_s2c_lost_bytes",
+ "tcp_c2s_o3_pkts",
+ "tcp_s2c_o3_pkts",
+ "tcp_c2s_rtx_pkts",
+ "tcp_s2c_rtx_pkts",
+ "tcp_c2s_rtx_bytes",
+ "tcp_s2c_rtx_bytes",
+ "tcp_rtt_ms",
+ "http_url",
+ "http_host",
+ "http_request_line",
+ "http_response_line",
+ "http_proxy_flag",
+ "http_sequence",
+ "http_cookie",
+ "http_referer",
+ "http_user_agent",
+ "http_request_content_length",
+ "http_request_content_type",
+ "http_response_content_length",
+ "http_response_content_type",
+ "http_set_cookie",
+ "http_version",
+ "http_status_code",
+ "http_response_latency_ms",
+ "http_session_duration_ms",
+ "http_action_file_size",
+ "mail_protocol_type",
+ "mail_account",
+ "mail_from_cmd",
+ "mail_to_cmd",
+ "mail_from",
+ "mail_password",
+ "mail_to",
+ "mail_cc",
+ "mail_bcc",
+ "mail_subject",
+ "mail_subject_charset",
+ "mail_attachment_name",
+ "mail_attachment_name_charset",
+ "mail_eml_file",
+ "dns_message_id",
+ "dns_qr",
+ "dns_opcode",
+ "dns_aa",
+ "dns_rd",
+ "dns_ra",
+ "dns_rcode",
+ "dns_qtype",
+ "dns_qclass",
+ "dns_qdcount",
+ "dns_ancount",
+ "dns_nscount",
+ "dns_arcount",
+ "dns_qname",
+ "dns_cname",
+ "dns_sub",
+ "dns_rr",
+ "dns_response_latency_ms",
+ "ssl_version",
+ "ssl_sni",
+ "ssl_san",
+ "ssl_cn",
+ "ssl_handshake_latency_ms",
+ "ssl_ja3_hash",
+ "ssl_ja3s_hash",
+ "ssl_cert_issuer",
+ "ssl_cert_subject",
+ "ssl_esni_flag",
+ "ssl_ech_flag",
+ "dtls_cookie",
+ "dtls_version",
+ "dtls_sni",
+ "dtls_san",
+ "dtls_cn",
+ "dtls_handshake_latency_ms",
+ "dtls_ja3_fingerprint",
+ "dtls_ja3_hash",
+ "dtls_cert_issuer",
+ "dtls_cert_subject",
+ "quic_sni",
+ "quic_version",
+ "quic_user_agent",
+ "ftp_account",
+ "ftp_url",
+ "ftp_link_type",
+ "sip_call_id",
+ "sip_originator_description",
+ "sip_responder_description",
+ "sip_user_agent",
+ "sip_server",
+ "sip_originator_sdp_connect_ip",
+ "sip_originator_sdp_media_port",
+ "sip_originator_sdp_media_type",
+ "sip_originator_sdp_content",
+ "sip_responder_sdp_connect_ip",
+ "sip_responder_sdp_media_port",
+ "sip_responder_sdp_media_type",
+ "sip_responder_sdp_content",
+ "sip_duration_s",
+ "sip_bye",
+ "rtp_payload_type_c2s",
+ "rtp_payload_type_s2c",
+ "rtp_originator_dir",
+ "ssh_version",
+ "ssh_auth_success",
+ "ssh_client_version",
+ "ssh_server_version",
+ "ssh_cipher_alg",
+ "ssh_mac_alg",
+ "ssh_compression_alg",
+ "ssh_kex_alg",
+ "ssh_host_key_alg",
+ "ssh_host_key",
+ "ssh_hassh",
+ "stratum_cryptocurrency",
+ "stratum_mining_pools",
+ "stratum_mining_program",
+ "stratum_mining_subscribe",
+ "rdp_cookie",
+ "rdp_security_protocol",
+ "rdp_client_channels",
+ "rdp_keyboard_layout",
+ "rdp_client_version",
+ "rdp_client_name",
+ "rdp_client_product_id",
+ "rdp_desktop_width",
+ "rdp_desktop_height",
+ "rdp_requested_color_depth",
+ "rdp_certificate_type",
+ "rdp_certificate_count",
+ "rdp_certificate_permanent",
+ "rdp_encryption_level",
+ "rdp_encryption_method",
+ "internal_ip_list",
+ "external_ip_list",
+ "security_rule_id",
+ "monitor_rule_id",
+ "proxy_rule_id",
+ "statistics_rule_id",
+ "shaping_rule_id",
+ "sc_rule_id"
+ ],
+ "references": {
+ "aggregation": [
+ {
+ "type": "int",
+ "functions": "COUNT,COUNT_DISTINCT,AVG,SUM,MAX,MIN,MEDIAN,P95_PERCENTILE,P99_PERCENTILE,RATE"
+ },
+ {
+ "type": "long",
+ "functions": "COUNT,COUNT_DISTINCT,AVG,SUM,MAX,MIN,MEDIAN,P95_PERCENTILE,P99_PERCENTILE,RATE"
+ },
+ {
+ "type": "float",
+ "functions": "COUNT,COUNT_DISTINCT,AVG,SUM,MAX,MIN,MEDIAN,P95_PERCENTILE,P99_PERCENTILE,RATE"
+ },
+ {
+ "type": "double",
+ "functions": "COUNT,COUNT_DISTINCT,AVG,SUM,MAX,MIN,MEDIAN,P95_PERCENTILE,P99_PERCENTILE,RATE"
+ },
+ {
+ "type": "string",
+ "functions": "COUNT,COUNT_DISTINCT"
+ },
+ {
+ "type": "date",
+ "functions": "COUNT,COUNT_DISTINCT,MAX,MIN"
+ },
+ {
+ "type": "datetime",
+ "functions": "COUNT,COUNT_DISTINCT,MAX,MIN"
+ },
+ {
+ "type": "timestamp",
+ "functions": "COUNT,COUNT_DISTINCT,MAX,MIN"
+ },
+ {
+ "type": "unix_timestamp",
+ "functions": "COUNT,COUNT_DISTINCT,MAX,MIN"
+ },
+ {
+ "type": "array",
+ "functions": "COUNT,COUNT_DISTINCT"
+ },
+ {
+ "type": "bit",
+ "functions": "COUNT,COUNT_DISTINCT"
+ }
+ ],
+ "operator": [
+ {
+ "type": "int",
+ "functions": "=,!=,>,<,>=,<=,in,not in"
+ },
+ {
+ "type": "long",
+ "functions": "=,!=,>,<,>=,<=,in,not in"
+ },
+ {
+ "type": "float",
+ "functions": "=,!=,>,<,>=,<="
+ },
+ {
+ "type": "double",
+ "functions": "=,!=,>,<,>=,<="
+ },
+ {
+ "type": "string",
+ "functions": "=,!=,in,not in,like,not like,notEmpty,empty"
+ },
+ {
+ "type": "date",
+ "functions": "=,!=,>,<,>=,<="
+ },
+ {
+ "type": "dateTime",
+ "functions": "=,!=,>,<,>=,<="
+ },
+ {
+ "type": "timestamp",
+ "functions": "=,!=,>,<,>=,<="
+ },
+ {
+ "type": "unix_timestamp",
+ "functions": "=,!=,>,<,>=,<="
+ },
+ {
+ "type": "array",
+ "functions": "has,notEmpty,empty"
+ },
+ {
+ "type": "bit",
+ "functions": "=,!=,bitAnd"
+ }
+ ]
+ },
+ "details": {
+ "general": [
+ "recv_time",
+ "log_id",
+ "decoded_as",
+ "session_id",
+ "start_timestamp_ms",
+ "end_timestamp_ms",
+ "duration_ms",
+ "tcp_handshake_latency_ms",
+ "ingestion_time",
+ "processing_time",
+ "insert_time",
+ "device_id",
+ "out_link_id",
+ "in_link_id",
+ "device_tag",
+ "data_center",
+ "device_group",
+ "sled_ip",
+ "address_type",
+ "vsys_id",
+ "t_vsys_id",
+ "flags",
+ "flags_identify_info"
+ ],
+ "treatment": [
+ "security_rule_list",
+ "security_action",
+ "monitor_rule_list",
+ "shaping_rule_list",
+ "sc_rule_list",
+ "statistics_rule_list",
+ "sc_rsp_raw",
+ "sc_rsp_decrypted",
+ "proxy_rule_list",
+ "proxy_action",
+ "proxy_pinning_status",
+ "proxy_intercept_status",
+ "proxy_passthrough_reason",
+ "proxy_client_side_latency_ms",
+ "proxy_server_side_latency_ms",
+ "proxy_client_side_version",
+ "proxy_server_side_version",
+ "proxy_cert_verify",
+ "proxy_intercept_error",
+ "monitor_mirrored_pkts",
+ "monitor_mirrored_bytes"
+ ],
+ "source": [
+ "client_ip",
+ "client_port",
+ "client_os_desc",
+ "client_geolocation",
+ "client_asn",
+ "subscriber_id",
+ "imei",
+ "imsi",
+ "apn",
+ "phone_number"
+ ],
+ "destination": [
+ "server_ip",
+ "server_port",
+ "server_os_desc",
+ "server_geolocation",
+ "server_asn",
+ "server_fqdn",
+ "server_domain"
+ ],
+ "application": [
+ "app_transition",
+ "app",
+ "app_debug_info",
+ "app_content",
+ "fqdn_category_list"
+ ],
+ "protocol": [
+ "ip_protocol",
+ "decoded_path",
+ "dns_message_id",
+ "dns_qr",
+ "dns_opcode",
+ "dns_aa",
+ "dns_tc",
+ "dns_rd",
+ "dns_ra",
+ "dns_rcode",
+ "dns_qdcount",
+ "dns_ancount",
+ "dns_nscount",
+ "dns_arcount",
+ "dns_qname",
+ "dns_qtype",
+ "dns_qclass",
+ "dns_cname",
+ "dns_sub",
+ "dns_rr",
+ "dns_response_latency_ms",
+ "dtls_cookie",
+ "dtls_version",
+ "dtls_sni",
+ "dtls_san",
+ "dtls_cn",
+ "dtls_handshake_latency_ms",
+ "dtls_ja3_fingerprint",
+ "dtls_ja3_hash",
+ "dtls_cert_issuer",
+ "dtls_cert_subject",
+ "ftp_account",
+ "ftp_url",
+ "ftp_link_type",
+ "http_url",
+ "http_host",
+ "http_request_line",
+ "http_response_line",
+ "http_request_content_length",
+ "http_request_content_type",
+ "http_response_content_length",
+ "http_response_content_type",
+ "http_request_body",
+ "http_response_body",
+ "http_proxy_flag",
+ "http_sequence",
+ "http_cookie",
+ "http_referer",
+ "http_user_agent",
+ "http_set_cookie",
+ "http_version",
+ "http_status_code",
+ "http_response_latency_ms",
+ "http_session_duration_ms",
+ "http_action_file_size",
+ "mail_protocol_type",
+ "mail_account",
+ "mail_from_cmd",
+ "mail_to_cmd",
+ "mail_from",
+ "mail_password",
+ "mail_to",
+ "mail_cc",
+ "mail_bcc",
+ "mail_subject",
+ "mail_subject_charset",
+ "mail_attachment_name",
+ "mail_attachment_name_charset",
+ "mail_eml_file",
+ "quic_version",
+ "quic_sni",
+ "quic_user_agent",
+ "rdp_cookie",
+ "rdp_security_protocol",
+ "rdp_client_channels",
+ "rdp_keyboard_layout",
+ "rdp_client_version",
+ "rdp_client_name",
+ "rdp_client_product_id",
+ "rdp_desktop_width",
+ "rdp_desktop_height",
+ "rdp_requested_color_depth",
+ "rdp_certificate_type",
+ "rdp_certificate_count",
+ "rdp_certificate_permanent",
+ "rdp_encryption_level",
+ "rdp_encryption_method",
+ "ssh_version",
+ "ssh_auth_success",
+ "ssh_client_version",
+ "ssh_server_version",
+ "ssh_cipher_alg",
+ "ssh_mac_alg",
+ "ssh_compression_alg",
+ "ssh_kex_alg",
+ "ssh_host_key_alg",
+ "ssh_host_key",
+ "ssh_hassh",
+ "ssl_version",
+ "ssl_sni",
+ "ssl_san",
+ "ssl_cn",
+ "ssl_handshake_latency_ms",
+ "ssl_ja3_hash",
+ "ssl_ja3s_hash",
+ "ssl_cert_issuer",
+ "ssl_cert_subject",
+ "ssl_esni_flag",
+ "ssl_ech_flag",
+ "sip_call_id",
+ "sip_originator_description",
+ "sip_responder_description",
+ "sip_user_agent",
+ "sip_server",
+ "sip_originator_sdp_connect_ip",
+ "sip_originator_sdp_media_port",
+ "sip_originator_sdp_media_type",
+ "sip_originator_sdp_content",
+ "sip_responder_sdp_connect_ip",
+ "sip_responder_sdp_media_port",
+ "sip_responder_sdp_media_type",
+ "sip_responder_sdp_content",
+ "sip_duration_s",
+ "sip_bye",
+ "rtp_payload_type_c2s",
+ "rtp_payload_type_s2c",
+ "rtp_pcap_path",
+ "rtp_originator_dir",
+ "stratum_cryptocurrency",
+ "stratum_mining_pools",
+ "stratum_mining_program",
+ "stratum_mining_subscribe"
+ ],
+ "transmission": [
+ "sent_pkts",
+ "received_pkts",
+ "sent_bytes",
+ "received_bytes",
+ "tcp_c2s_ip_fragments",
+ "tcp_s2c_ip_fragments",
+ "tcp_c2s_lost_bytes",
+ "tcp_s2c_lost_bytes",
+ "tcp_c2s_o3_pkts",
+ "tcp_s2c_o3_pkts",
+ "tcp_c2s_rtx_pkts",
+ "tcp_s2c_rtx_pkts",
+ "tcp_c2s_rtx_bytes",
+ "tcp_s2c_rtx_bytes",
+ "tcp_rtt_ms",
+ "tcp_client_isn",
+ "tcp_server_isn"
+ ],
+ "other": [
+ "packet_capture_file",
+ "in_src_mac",
+ "out_src_mac",
+ "in_dest_mac",
+ "out_dest_mac",
+ "dup_traffic_flag",
+ "tunnel_endpoint_a_desc",
+ "tunnel_endpoint_b_desc"
+ ]
+ }
+ },
+ "data_view": {
+ "PROXY_INTERCEPT_EVENT_VIEW": {
+ "columns": [
+ "recv_time",
+ "log_id",
+ "decoded_as",
+ "session_id",
+ "start_timestamp_ms",
+ "end_timestamp_ms",
+ "duration_ms",
+ "tcp_handshake_latency_ms",
+ "ingestion_time",
+ "processing_time",
+ "insert_time",
+ "device_id",
+ "out_link_id",
+ "in_link_id",
+ "device_tag",
+ "data_center",
+ "device_group",
+ "sled_ip",
+ "address_type",
+ "vsys_id",
+ "t_vsys_id",
+ "flags",
+ "flags_identify_info",
+ "proxy_rule_list",
+ "proxy_action",
+ "proxy_pinning_status",
+ "proxy_intercept_status",
+ "proxy_passthrough_reason",
+ "proxy_client_side_latency_ms",
+ "proxy_server_side_latency_ms",
+ "proxy_client_side_version",
+ "proxy_server_side_version",
+ "proxy_cert_verify",
+ "proxy_intercept_error",
+ "client_ip",
+ "client_port",
+ "client_os_desc",
+ "client_geolocation",
+ "client_asn",
+ "subscriber_id",
+ "imei",
+ "imsi",
+ "apn",
+ "phone_number",
+ "server_ip",
+ "server_port",
+ "server_os_desc",
+ "server_geolocation",
+ "server_asn",
+ "server_fqdn",
+ "server_domain",
+ "app_transition",
+ "app",
+ "app_debug_info",
+ "app_content",
+ "ip_protocol",
+ "decoded_path",
+ "fqdn_category_list",
+ "sent_pkts",
+ "received_pkts",
+ "sent_bytes",
+ "received_bytes",
+ "tcp_c2s_ip_fragments",
+ "tcp_s2c_ip_fragments",
+ "tcp_c2s_lost_bytes",
+ "tcp_s2c_lost_bytes",
+ "tcp_c2s_o3_pkts",
+ "tcp_s2c_o3_pkts",
+ "tcp_c2s_rtx_pkts",
+ "tcp_s2c_rtx_pkts",
+ "tcp_c2s_rtx_bytes",
+ "tcp_s2c_rtx_bytes",
+ "tcp_rtt_ms",
+ "tcp_client_isn",
+ "tcp_server_isn",
+ "packet_capture_file",
+ "in_src_mac",
+ "out_src_mac",
+ "in_dest_mac",
+ "out_dest_mac",
+ "encapsulation",
+ "dup_traffic_flag",
+ "tunnel_endpoint_a_desc",
+ "tunnel_endpoint_b_desc",
+ "http_url",
+ "http_host",
+ "http_request_line",
+ "http_response_line",
+ "http_request_content_length",
+ "http_request_content_type",
+ "http_response_content_length",
+ "http_response_content_type",
+ "http_request_body",
+ "http_response_body",
+ "http_proxy_flag",
+ "http_sequence",
+ "http_cookie",
+ "http_referer",
+ "http_user_agent",
+ "http_set_cookie",
+ "http_version",
+ "http_status_code",
+ "http_response_latency_ms",
+ "http_session_duration_ms",
+ "http_action_file_size",
+ "ssl_version",
+ "ssl_sni",
+ "ssl_san",
+ "ssl_cn",
+ "ssl_handshake_latency_ms",
+ "ssl_ja3_hash",
+ "ssl_ja3s_hash",
+ "ssl_cert_issuer",
+ "ssl_cert_subject",
+ "ssl_esni_flag",
+ "ssl_ech_flag"
+ ],
+ "default_columns": [
+ "recv_time",
+ "subscriber_id",
+ "security_rule_list",
+ "security_action",
+ "client_ip",
+ "client_port",
+ "server_fqdn",
+ "app",
+ "server_ip",
+ "server_port"
+ ]
+ }
+ },
+ "decoded_as": {
+ "BASE": {
+ "columns": [
+ "recv_time",
+ "log_id",
+ "decoded_as",
+ "session_id",
+ "start_timestamp_ms",
+ "end_timestamp_ms",
+ "duration_ms",
+ "tcp_handshake_latency_ms",
+ "ingestion_time",
+ "processing_time",
+ "insert_time",
+ "device_id",
+ "out_link_id",
+ "in_link_id",
+ "device_tag",
+ "data_center",
+ "device_group",
+ "sled_ip",
+ "address_type",
+ "vsys_id",
+ "t_vsys_id",
+ "flags",
+ "flags_identify_info",
+ "security_rule_list",
+ "security_action",
+ "monitor_rule_list",
+ "shaping_rule_list",
+ "sc_rule_list",
+ "statistics_rule_list",
+ "sc_rsp_raw",
+ "sc_rsp_decrypted",
+ "proxy_rule_list",
+ "proxy_action",
+ "proxy_pinning_status",
+ "proxy_intercept_status",
+ "proxy_passthrough_reason",
+ "proxy_client_side_latency_ms",
+ "proxy_server_side_latency_ms",
+ "proxy_client_side_version",
+ "proxy_server_side_version",
+ "proxy_cert_verify",
+ "proxy_intercept_error",
+ "monitor_mirrored_pkts",
+ "monitor_mirrored_bytes",
+ "client_ip",
+ "client_port",
+ "client_os_desc",
+ "client_geolocation",
+ "client_asn",
+ "subscriber_id",
+ "imei",
+ "imsi",
+ "apn",
+ "phone_number",
+ "server_ip",
+ "server_port",
+ "server_os_desc",
+ "server_geolocation",
+ "server_asn",
+ "server_fqdn",
+ "server_domain",
+ "app_transition",
+ "app",
+ "app_debug_info",
+ "app_content",
+ "ip_protocol",
+ "decoded_path",
+ "fqdn_category_list",
+ "sent_pkts",
+ "received_pkts",
+ "sent_bytes",
+ "received_bytes",
+ "tcp_c2s_ip_fragments",
+ "tcp_s2c_ip_fragments",
+ "tcp_c2s_lost_bytes",
+ "tcp_s2c_lost_bytes",
+ "tcp_c2s_o3_pkts",
+ "tcp_s2c_o3_pkts",
+ "tcp_c2s_rtx_pkts",
+ "tcp_s2c_rtx_pkts",
+ "tcp_c2s_rtx_bytes",
+ "tcp_s2c_rtx_bytes",
+ "tcp_rtt_ms",
+ "tcp_client_isn",
+ "tcp_server_isn",
+ "packet_capture_file",
+ "in_src_mac",
+ "out_src_mac",
+ "in_dest_mac",
+ "out_dest_mac",
+ "encapsulation",
+ "dup_traffic_flag",
+ "tunnel_endpoint_a_desc",
+ "tunnel_endpoint_b_desc"
+ ],
+ "default_columns": [
+ "recv_time",
+ "subscriber_id",
+ "client_ip",
+ "client_port",
+ "server_ip",
+ "server_port"
+ ]
+ },
+ "HTTP": {
+ "columns": [
+ "recv_time",
+ "log_id",
+ "decoded_as",
+ "session_id",
+ "start_timestamp_ms",
+ "end_timestamp_ms",
+ "duration_ms",
+ "tcp_handshake_latency_ms",
+ "ingestion_time",
+ "processing_time",
+ "insert_time",
+ "device_id",
+ "out_link_id",
+ "in_link_id",
+ "device_tag",
+ "data_center",
+ "device_group",
+ "sled_ip",
+ "address_type",
+ "vsys_id",
+ "t_vsys_id",
+ "flags",
+ "flags_identify_info",
+ "security_rule_list",
+ "security_action",
+ "monitor_rule_list",
+ "shaping_rule_list",
+ "sc_rule_list",
+ "statistics_rule_list",
+ "sc_rsp_raw",
+ "sc_rsp_decrypted",
+ "proxy_rule_list",
+ "proxy_action",
+ "proxy_pinning_status",
+ "proxy_intercept_status",
+ "proxy_passthrough_reason",
+ "proxy_client_side_latency_ms",
+ "proxy_server_side_latency_ms",
+ "proxy_client_side_version",
+ "proxy_server_side_version",
+ "proxy_cert_verify",
+ "proxy_intercept_error",
+ "monitor_mirrored_pkts",
+ "monitor_mirrored_bytes",
+ "client_ip",
+ "client_port",
+ "client_os_desc",
+ "client_geolocation",
+ "client_asn",
+ "subscriber_id",
+ "imei",
+ "imsi",
+ "apn",
+ "phone_number",
+ "server_ip",
+ "server_port",
+ "server_os_desc",
+ "server_geolocation",
+ "server_asn",
+ "server_fqdn",
+ "server_domain",
+ "app_transition",
+ "app",
+ "app_debug_info",
+ "app_content",
+ "ip_protocol",
+ "decoded_path",
+ "fqdn_category_list",
+ "sent_pkts",
+ "received_pkts",
+ "sent_bytes",
+ "received_bytes",
+ "tcp_c2s_ip_fragments",
+ "tcp_s2c_ip_fragments",
+ "tcp_c2s_lost_bytes",
+ "tcp_s2c_lost_bytes",
+ "tcp_c2s_o3_pkts",
+ "tcp_s2c_o3_pkts",
+ "tcp_c2s_rtx_pkts",
+ "tcp_s2c_rtx_pkts",
+ "tcp_c2s_rtx_bytes",
+ "tcp_s2c_rtx_bytes",
+ "tcp_rtt_ms",
+ "tcp_client_isn",
+ "tcp_server_isn",
+ "packet_capture_file",
+ "in_src_mac",
+ "out_src_mac",
+ "in_dest_mac",
+ "out_dest_mac",
+ "encapsulation",
+ "dup_traffic_flag",
+ "tunnel_endpoint_a_desc",
+ "tunnel_endpoint_b_desc",
+ "http_url",
+ "http_host",
+ "http_request_line",
+ "http_response_line",
+ "http_request_content_length",
+ "http_request_content_type",
+ "http_response_content_length",
+ "http_response_content_type",
+ "http_request_body",
+ "http_response_body",
+ "http_proxy_flag",
+ "http_sequence",
+ "http_cookie",
+ "http_referer",
+ "http_user_agent",
+ "http_set_cookie",
+ "http_version",
+ "http_status_code",
+ "http_response_latency_ms",
+ "http_session_duration_ms",
+ "http_action_file_size"
+ ],
+ "default_columns": [
+ "recv_time",
+ "subscriber_id",
+ "client_ip",
+ "client_port",
+ "http_url",
+ "server_ip",
+ "server_port"
+ ]
+ },
+ "MAIL": {
+ "columns": [
+ "recv_time",
+ "log_id",
+ "decoded_as",
+ "session_id",
+ "start_timestamp_ms",
+ "end_timestamp_ms",
+ "duration_ms",
+ "tcp_handshake_latency_ms",
+ "ingestion_time",
+ "processing_time",
+ "insert_time",
+ "device_id",
+ "out_link_id",
+ "in_link_id",
+ "device_tag",
+ "data_center",
+ "device_group",
+ "sled_ip",
+ "address_type",
+ "vsys_id",
+ "t_vsys_id",
+ "flags",
+ "flags_identify_info",
+ "security_rule_list",
+ "security_action",
+ "monitor_rule_list",
+ "shaping_rule_list",
+ "sc_rule_list",
+ "statistics_rule_list",
+ "sc_rsp_raw",
+ "sc_rsp_decrypted",
+ "proxy_rule_list",
+ "proxy_action",
+ "proxy_pinning_status",
+ "proxy_intercept_status",
+ "proxy_passthrough_reason",
+ "proxy_client_side_latency_ms",
+ "proxy_server_side_latency_ms",
+ "proxy_client_side_version",
+ "proxy_server_side_version",
+ "proxy_cert_verify",
+ "proxy_intercept_error",
+ "monitor_mirrored_pkts",
+ "monitor_mirrored_bytes",
+ "client_ip",
+ "client_port",
+ "client_os_desc",
+ "client_geolocation",
+ "client_asn",
+ "subscriber_id",
+ "imei",
+ "imsi",
+ "apn",
+ "phone_number",
+ "server_ip",
+ "server_port",
+ "server_os_desc",
+ "server_geolocation",
+ "server_asn",
+ "server_fqdn",
+ "server_domain",
+ "app_transition",
+ "app",
+ "app_debug_info",
+ "app_content",
+ "ip_protocol",
+ "decoded_path",
+ "fqdn_category_list",
+ "sent_pkts",
+ "received_pkts",
+ "sent_bytes",
+ "received_bytes",
+ "tcp_c2s_ip_fragments",
+ "tcp_s2c_ip_fragments",
+ "tcp_c2s_lost_bytes",
+ "tcp_s2c_lost_bytes",
+ "tcp_c2s_o3_pkts",
+ "tcp_s2c_o3_pkts",
+ "tcp_c2s_rtx_pkts",
+ "tcp_s2c_rtx_pkts",
+ "tcp_c2s_rtx_bytes",
+ "tcp_s2c_rtx_bytes",
+ "tcp_rtt_ms",
+ "tcp_client_isn",
+ "tcp_server_isn",
+ "packet_capture_file",
+ "in_src_mac",
+ "out_src_mac",
+ "in_dest_mac",
+ "out_dest_mac",
+ "encapsulation",
+ "dup_traffic_flag",
+ "tunnel_endpoint_a_desc",
+ "tunnel_endpoint_b_desc",
+ "mail_protocol_type",
+ "mail_account",
+ "mail_from_cmd",
+ "mail_to_cmd",
+ "mail_from",
+ "mail_password",
+ "mail_to",
+ "mail_cc",
+ "mail_bcc",
+ "mail_subject",
+ "mail_subject_charset",
+ "mail_attachment_name",
+ "mail_attachment_name_charset",
+ "mail_eml_file"
+ ],
+ "default_columns": [
+ "recv_time",
+ "subscriber_id",
+ "client_ip",
+ "client_port",
+ "mail_from",
+ "mail_to",
+ "mail_subject",
+ "server_ip",
+ "server_port"
+ ]
+ },
+ "DNS": {
+ "columns": [
+ "recv_time",
+ "log_id",
+ "decoded_as",
+ "session_id",
+ "start_timestamp_ms",
+ "end_timestamp_ms",
+ "duration_ms",
+ "tcp_handshake_latency_ms",
+ "ingestion_time",
+ "processing_time",
+ "insert_time",
+ "device_id",
+ "out_link_id",
+ "in_link_id",
+ "device_tag",
+ "data_center",
+ "device_group",
+ "sled_ip",
+ "address_type",
+ "vsys_id",
+ "t_vsys_id",
+ "flags",
+ "flags_identify_info",
+ "security_rule_list",
+ "security_action",
+ "monitor_rule_list",
+ "shaping_rule_list",
+ "sc_rule_list",
+ "statistics_rule_list",
+ "sc_rsp_raw",
+ "sc_rsp_decrypted",
+ "proxy_rule_list",
+ "proxy_action",
+ "proxy_pinning_status",
+ "proxy_intercept_status",
+ "proxy_passthrough_reason",
+ "proxy_client_side_latency_ms",
+ "proxy_server_side_latency_ms",
+ "proxy_client_side_version",
+ "proxy_server_side_version",
+ "proxy_cert_verify",
+ "proxy_intercept_error",
+ "monitor_mirrored_pkts",
+ "monitor_mirrored_bytes",
+ "client_ip",
+ "client_port",
+ "client_os_desc",
+ "client_geolocation",
+ "client_asn",
+ "subscriber_id",
+ "imei",
+ "imsi",
+ "apn",
+ "phone_number",
+ "server_ip",
+ "server_port",
+ "server_os_desc",
+ "server_geolocation",
+ "server_asn",
+ "server_fqdn",
+ "server_domain",
+ "app_transition",
+ "app",
+ "app_debug_info",
+ "app_content",
+ "ip_protocol",
+ "decoded_path",
+ "fqdn_category_list",
+ "sent_pkts",
+ "received_pkts",
+ "sent_bytes",
+ "received_bytes",
+ "tcp_c2s_ip_fragments",
+ "tcp_s2c_ip_fragments",
+ "tcp_c2s_lost_bytes",
+ "tcp_s2c_lost_bytes",
+ "tcp_c2s_o3_pkts",
+ "tcp_s2c_o3_pkts",
+ "tcp_c2s_rtx_pkts",
+ "tcp_s2c_rtx_pkts",
+ "tcp_c2s_rtx_bytes",
+ "tcp_s2c_rtx_bytes",
+ "tcp_rtt_ms",
+ "tcp_client_isn",
+ "tcp_server_isn",
+ "packet_capture_file",
+ "in_src_mac",
+ "out_src_mac",
+ "in_dest_mac",
+ "out_dest_mac",
+ "encapsulation",
+ "dup_traffic_flag",
+ "tunnel_endpoint_a_desc",
+ "tunnel_endpoint_b_desc",
+ "dns_message_id",
+ "dns_qr",
+ "dns_opcode",
+ "dns_aa",
+ "dns_tc",
+ "dns_rd",
+ "dns_ra",
+ "dns_rcode",
+ "dns_qdcount",
+ "dns_ancount",
+ "dns_nscount",
+ "dns_arcount",
+ "dns_qname",
+ "dns_qtype",
+ "dns_qclass",
+ "dns_cname",
+ "dns_sub",
+ "dns_rr",
+ "dns_response_latency_ms"
+ ],
+ "default_columns": [
+ "recv_time",
+ "client_ip",
+ "client_port",
+ "dns_qr",
+ "dns_qname",
+ "dns_qtype",
+ "server_ip",
+ "server_port"
+ ]
+ },
+ "SSL": {
+ "columns": [
+ "recv_time",
+ "log_id",
+ "decoded_as",
+ "session_id",
+ "start_timestamp_ms",
+ "end_timestamp_ms",
+ "duration_ms",
+ "tcp_handshake_latency_ms",
+ "ingestion_time",
+ "processing_time",
+ "insert_time",
+ "device_id",
+ "out_link_id",
+ "in_link_id",
+ "device_tag",
+ "data_center",
+ "device_group",
+ "sled_ip",
+ "address_type",
+ "vsys_id",
+ "t_vsys_id",
+ "flags",
+ "flags_identify_info",
+ "security_rule_list",
+ "security_action",
+ "monitor_rule_list",
+ "shaping_rule_list",
+ "sc_rule_list",
+ "statistics_rule_list",
+ "sc_rsp_raw",
+ "sc_rsp_decrypted",
+ "proxy_rule_list",
+ "proxy_action",
+ "proxy_pinning_status",
+ "proxy_intercept_status",
+ "proxy_passthrough_reason",
+ "proxy_client_side_latency_ms",
+ "proxy_server_side_latency_ms",
+ "proxy_client_side_version",
+ "proxy_server_side_version",
+ "proxy_cert_verify",
+ "proxy_intercept_error",
+ "monitor_mirrored_pkts",
+ "monitor_mirrored_bytes",
+ "client_ip",
+ "client_port",
+ "client_os_desc",
+ "client_geolocation",
+ "client_asn",
+ "subscriber_id",
+ "imei",
+ "imsi",
+ "apn",
+ "phone_number",
+ "server_ip",
+ "server_port",
+ "server_os_desc",
+ "server_geolocation",
+ "server_asn",
+ "server_fqdn",
+ "server_domain",
+ "app_transition",
+ "app",
+ "app_debug_info",
+ "app_content",
+ "ip_protocol",
+ "decoded_path",
+ "fqdn_category_list",
+ "sent_pkts",
+ "received_pkts",
+ "sent_bytes",
+ "received_bytes",
+ "tcp_c2s_ip_fragments",
+ "tcp_s2c_ip_fragments",
+ "tcp_c2s_lost_bytes",
+ "tcp_s2c_lost_bytes",
+ "tcp_c2s_o3_pkts",
+ "tcp_s2c_o3_pkts",
+ "tcp_c2s_rtx_pkts",
+ "tcp_s2c_rtx_pkts",
+ "tcp_c2s_rtx_bytes",
+ "tcp_s2c_rtx_bytes",
+ "tcp_rtt_ms",
+ "tcp_client_isn",
+ "tcp_server_isn",
+ "packet_capture_file",
+ "in_src_mac",
+ "out_src_mac",
+ "in_dest_mac",
+ "out_dest_mac",
+ "encapsulation",
+ "dup_traffic_flag",
+ "tunnel_endpoint_a_desc",
+ "tunnel_endpoint_b_desc",
+ "ssl_version",
+ "ssl_sni",
+ "ssl_san",
+ "ssl_cn",
+ "ssl_handshake_latency_ms",
+ "ssl_ja3_hash",
+ "ssl_ja3s_hash",
+ "ssl_cert_issuer",
+ "ssl_cert_subject",
+ "ssl_esni_flag",
+ "ssl_ech_flag"
+ ],
+ "default_columns": [
+ "recv_time",
+ "subscriber_id",
+ "client_ip",
+ "client_port",
+ "ssl_sni",
+ "server_ip",
+ "server_port"
+ ]
+ },
+ "DTLS": {
+ "columns": [
+ "recv_time",
+ "log_id",
+ "decoded_as",
+ "session_id",
+ "start_timestamp_ms",
+ "end_timestamp_ms",
+ "duration_ms",
+ "tcp_handshake_latency_ms",
+ "ingestion_time",
+ "processing_time",
+ "insert_time",
+ "device_id",
+ "out_link_id",
+ "in_link_id",
+ "device_tag",
+ "data_center",
+ "device_group",
+ "sled_ip",
+ "address_type",
+ "vsys_id",
+ "t_vsys_id",
+ "flags",
+ "flags_identify_info",
+ "security_rule_list",
+ "security_action",
+ "monitor_rule_list",
+ "shaping_rule_list",
+ "sc_rule_list",
+ "statistics_rule_list",
+ "sc_rsp_raw",
+ "sc_rsp_decrypted",
+ "proxy_rule_list",
+ "proxy_action",
+ "proxy_pinning_status",
+ "proxy_intercept_status",
+ "proxy_passthrough_reason",
+ "proxy_client_side_latency_ms",
+ "proxy_server_side_latency_ms",
+ "proxy_client_side_version",
+ "proxy_server_side_version",
+ "proxy_cert_verify",
+ "proxy_intercept_error",
+ "monitor_mirrored_pkts",
+ "monitor_mirrored_bytes",
+ "client_ip",
+ "client_port",
+ "client_os_desc",
+ "client_geolocation",
+ "client_asn",
+ "subscriber_id",
+ "imei",
+ "imsi",
+ "apn",
+ "phone_number",
+ "server_ip",
+ "server_port",
+ "server_os_desc",
+ "server_geolocation",
+ "server_asn",
+ "server_fqdn",
+ "server_domain",
+ "app_transition",
+ "app",
+ "app_debug_info",
+ "app_content",
+ "ip_protocol",
+ "decoded_path",
+ "fqdn_category_list",
+ "sent_pkts",
+ "received_pkts",
+ "sent_bytes",
+ "received_bytes",
+ "tcp_c2s_ip_fragments",
+ "tcp_s2c_ip_fragments",
+ "tcp_c2s_lost_bytes",
+ "tcp_s2c_lost_bytes",
+ "tcp_c2s_o3_pkts",
+ "tcp_s2c_o3_pkts",
+ "tcp_c2s_rtx_pkts",
+ "tcp_s2c_rtx_pkts",
+ "tcp_c2s_rtx_bytes",
+ "tcp_s2c_rtx_bytes",
+ "tcp_rtt_ms",
+ "tcp_client_isn",
+ "tcp_server_isn",
+ "packet_capture_file",
+ "in_src_mac",
+ "out_src_mac",
+ "in_dest_mac",
+ "out_dest_mac",
+ "encapsulation",
+ "dup_traffic_flag",
+ "tunnel_endpoint_a_desc",
+ "tunnel_endpoint_b_desc",
+ "dtls_cookie",
+ "dtls_version",
+ "dtls_sni",
+ "dtls_san",
+ "dtls_cn",
+ "dtls_handshake_latency_ms",
+ "dtls_ja3_fingerprint",
+ "dtls_ja3_hash",
+ "dtls_cert_issuer",
+ "dtls_cert_subject"
+ ],
+ "default_columns": [
+ "recv_time",
+ "subscriber_id",
+ "client_ip",
+ "client_port",
+ "dtls_sni",
+ "server_ip",
+ "server_port"
+ ]
+ },
+ "QUIC": {
+ "columns": [
+ "recv_time",
+ "log_id",
+ "decoded_as",
+ "session_id",
+ "start_timestamp_ms",
+ "end_timestamp_ms",
+ "duration_ms",
+ "tcp_handshake_latency_ms",
+ "ingestion_time",
+ "processing_time",
+ "insert_time",
+ "device_id",
+ "out_link_id",
+ "in_link_id",
+ "device_tag",
+ "data_center",
+ "device_group",
+ "sled_ip",
+ "address_type",
+ "vsys_id",
+ "t_vsys_id",
+ "flags",
+ "flags_identify_info",
+ "security_rule_list",
+ "security_action",
+ "monitor_rule_list",
+ "shaping_rule_list",
+ "sc_rule_list",
+ "statistics_rule_list",
+ "sc_rsp_raw",
+ "sc_rsp_decrypted",
+ "proxy_rule_list",
+ "proxy_action",
+ "proxy_pinning_status",
+ "proxy_intercept_status",
+ "proxy_passthrough_reason",
+ "proxy_client_side_latency_ms",
+ "proxy_server_side_latency_ms",
+ "proxy_client_side_version",
+ "proxy_server_side_version",
+ "proxy_cert_verify",
+ "proxy_intercept_error",
+ "monitor_mirrored_pkts",
+ "monitor_mirrored_bytes",
+ "client_ip",
+ "client_port",
+ "client_os_desc",
+ "client_geolocation",
+ "client_asn",
+ "subscriber_id",
+ "imei",
+ "imsi",
+ "apn",
+ "phone_number",
+ "server_ip",
+ "server_port",
+ "server_os_desc",
+ "server_geolocation",
+ "server_asn",
+ "server_fqdn",
+ "server_domain",
+ "app_transition",
+ "app",
+ "app_debug_info",
+ "app_content",
+ "ip_protocol",
+ "decoded_path",
+ "fqdn_category_list",
+ "sent_pkts",
+ "received_pkts",
+ "sent_bytes",
+ "received_bytes",
+ "tcp_c2s_ip_fragments",
+ "tcp_s2c_ip_fragments",
+ "tcp_c2s_lost_bytes",
+ "tcp_s2c_lost_bytes",
+ "tcp_c2s_o3_pkts",
+ "tcp_s2c_o3_pkts",
+ "tcp_c2s_rtx_pkts",
+ "tcp_s2c_rtx_pkts",
+ "tcp_c2s_rtx_bytes",
+ "tcp_s2c_rtx_bytes",
+ "tcp_rtt_ms",
+ "tcp_client_isn",
+ "tcp_server_isn",
+ "packet_capture_file",
+ "in_src_mac",
+ "out_src_mac",
+ "in_dest_mac",
+ "out_dest_mac",
+ "encapsulation",
+ "dup_traffic_flag",
+ "tunnel_endpoint_a_desc",
+ "tunnel_endpoint_b_desc",
+ "quic_version",
+ "quic_sni",
+ "quic_user_agent"
+ ],
+ "default_columns": [
+ "recv_time",
+ "subscriber_id",
+ "client_ip",
+ "client_port",
+ "quic_sni",
+ "server_ip",
+ "server_port"
+ ]
+ },
+ "FTP": {
+ "columns": [
+ "recv_time",
+ "log_id",
+ "decoded_as",
+ "session_id",
+ "start_timestamp_ms",
+ "end_timestamp_ms",
+ "duration_ms",
+ "tcp_handshake_latency_ms",
+ "ingestion_time",
+ "processing_time",
+ "insert_time",
+ "device_id",
+ "out_link_id",
+ "in_link_id",
+ "device_tag",
+ "data_center",
+ "device_group",
+ "sled_ip",
+ "address_type",
+ "vsys_id",
+ "t_vsys_id",
+ "flags",
+ "flags_identify_info",
+ "security_rule_list",
+ "security_action",
+ "monitor_rule_list",
+ "shaping_rule_list",
+ "sc_rule_list",
+ "statistics_rule_list",
+ "sc_rsp_raw",
+ "sc_rsp_decrypted",
+ "proxy_rule_list",
+ "proxy_action",
+ "proxy_pinning_status",
+ "proxy_intercept_status",
+ "proxy_passthrough_reason",
+ "proxy_client_side_latency_ms",
+ "proxy_server_side_latency_ms",
+ "proxy_client_side_version",
+ "proxy_server_side_version",
+ "proxy_cert_verify",
+ "proxy_intercept_error",
+ "monitor_mirrored_pkts",
+ "monitor_mirrored_bytes",
+ "client_ip",
+ "client_port",
+ "client_os_desc",
+ "client_geolocation",
+ "client_asn",
+ "subscriber_id",
+ "imei",
+ "imsi",
+ "apn",
+ "phone_number",
+ "server_ip",
+ "server_port",
+ "server_os_desc",
+ "server_geolocation",
+ "server_asn",
+ "server_fqdn",
+ "server_domain",
+ "app_transition",
+ "app",
+ "app_debug_info",
+ "app_content",
+ "ip_protocol",
+ "decoded_path",
+ "fqdn_category_list",
+ "sent_pkts",
+ "received_pkts",
+ "sent_bytes",
+ "received_bytes",
+ "tcp_c2s_ip_fragments",
+ "tcp_s2c_ip_fragments",
+ "tcp_c2s_lost_bytes",
+ "tcp_s2c_lost_bytes",
+ "tcp_c2s_o3_pkts",
+ "tcp_s2c_o3_pkts",
+ "tcp_c2s_rtx_pkts",
+ "tcp_s2c_rtx_pkts",
+ "tcp_c2s_rtx_bytes",
+ "tcp_s2c_rtx_bytes",
+ "tcp_rtt_ms",
+ "tcp_client_isn",
+ "tcp_server_isn",
+ "packet_capture_file",
+ "in_src_mac",
+ "out_src_mac",
+ "in_dest_mac",
+ "out_dest_mac",
+ "encapsulation",
+ "dup_traffic_flag",
+ "tunnel_endpoint_a_desc",
+ "tunnel_endpoint_b_desc",
+ "ftp_account",
+ "ftp_url",
+ "ftp_link_type"
+ ],
+ "default_columns": [
+ "recv_time",
+ "subscriber_id",
+ "client_ip",
+ "client_port",
+ "ftp_url",
+ "server_ip",
+ "server_port"
+ ]
+ },
+ "SIP": {
+ "columns": [
+ "recv_time",
+ "log_id",
+ "decoded_as",
+ "session_id",
+ "start_timestamp_ms",
+ "end_timestamp_ms",
+ "duration_ms",
+ "tcp_handshake_latency_ms",
+ "ingestion_time",
+ "processing_time",
+ "insert_time",
+ "device_id",
+ "out_link_id",
+ "in_link_id",
+ "device_tag",
+ "data_center",
+ "device_group",
+ "sled_ip",
+ "address_type",
+ "vsys_id",
+ "t_vsys_id",
+ "flags",
+ "flags_identify_info",
+ "security_rule_list",
+ "security_action",
+ "monitor_rule_list",
+ "shaping_rule_list",
+ "sc_rule_list",
+ "statistics_rule_list",
+ "sc_rsp_raw",
+ "sc_rsp_decrypted",
+ "proxy_rule_list",
+ "proxy_action",
+ "proxy_pinning_status",
+ "proxy_intercept_status",
+ "proxy_passthrough_reason",
+ "proxy_client_side_latency_ms",
+ "proxy_server_side_latency_ms",
+ "proxy_client_side_version",
+ "proxy_server_side_version",
+ "proxy_cert_verify",
+ "proxy_intercept_error",
+ "monitor_mirrored_pkts",
+ "monitor_mirrored_bytes",
+ "client_ip",
+ "client_port",
+ "client_os_desc",
+ "client_geolocation",
+ "client_asn",
+ "subscriber_id",
+ "imei",
+ "imsi",
+ "apn",
+ "phone_number",
+ "server_ip",
+ "server_port",
+ "server_os_desc",
+ "server_geolocation",
+ "server_asn",
+ "server_fqdn",
+ "server_domain",
+ "app_transition",
+ "app",
+ "app_debug_info",
+ "app_content",
+ "ip_protocol",
+ "decoded_path",
+ "fqdn_category_list",
+ "sent_pkts",
+ "received_pkts",
+ "sent_bytes",
+ "received_bytes",
+ "tcp_c2s_ip_fragments",
+ "tcp_s2c_ip_fragments",
+ "tcp_c2s_lost_bytes",
+ "tcp_s2c_lost_bytes",
+ "tcp_c2s_o3_pkts",
+ "tcp_s2c_o3_pkts",
+ "tcp_c2s_rtx_pkts",
+ "tcp_s2c_rtx_pkts",
+ "tcp_c2s_rtx_bytes",
+ "tcp_s2c_rtx_bytes",
+ "tcp_rtt_ms",
+ "tcp_client_isn",
+ "tcp_server_isn",
+ "packet_capture_file",
+ "in_src_mac",
+ "out_src_mac",
+ "in_dest_mac",
+ "out_dest_mac",
+ "encapsulation",
+ "dup_traffic_flag",
+ "tunnel_endpoint_a_desc",
+ "tunnel_endpoint_b_desc",
+ "sip_call_id",
+ "sip_originator_description",
+ "sip_responder_description",
+ "sip_user_agent",
+ "sip_server",
+ "sip_originator_sdp_connect_ip",
+ "sip_originator_sdp_media_port",
+ "sip_originator_sdp_media_type",
+ "sip_originator_sdp_content",
+ "sip_responder_sdp_connect_ip",
+ "sip_responder_sdp_media_port",
+ "sip_responder_sdp_media_type",
+ "sip_responder_sdp_content",
+ "sip_duration_s",
+ "sip_bye"
+ ],
+ "default_columns": [
+ "recv_time",
+ "client_ip",
+ "client_port",
+ "sip_originator_description",
+ "sip_responder_description",
+ "sip_call_id",
+ "server_ip",
+ "server_port"
+ ]
+ },
+ "RTP": {
+ "columns": [
+ "recv_time",
+ "log_id",
+ "decoded_as",
+ "session_id",
+ "start_timestamp_ms",
+ "end_timestamp_ms",
+ "duration_ms",
+ "tcp_handshake_latency_ms",
+ "ingestion_time",
+ "processing_time",
+ "insert_time",
+ "device_id",
+ "out_link_id",
+ "in_link_id",
+ "device_tag",
+ "data_center",
+ "device_group",
+ "sled_ip",
+ "address_type",
+ "vsys_id",
+ "t_vsys_id",
+ "flags",
+ "flags_identify_info",
+ "security_rule_list",
+ "security_action",
+ "monitor_rule_list",
+ "shaping_rule_list",
+ "sc_rule_list",
+ "statistics_rule_list",
+ "sc_rsp_raw",
+ "sc_rsp_decrypted",
+ "proxy_rule_list",
+ "proxy_action",
+ "proxy_pinning_status",
+ "proxy_intercept_status",
+ "proxy_passthrough_reason",
+ "proxy_client_side_latency_ms",
+ "proxy_server_side_latency_ms",
+ "proxy_client_side_version",
+ "proxy_server_side_version",
+ "proxy_cert_verify",
+ "proxy_intercept_error",
+ "monitor_mirrored_pkts",
+ "monitor_mirrored_bytes",
+ "client_ip",
+ "client_port",
+ "client_os_desc",
+ "client_geolocation",
+ "client_asn",
+ "subscriber_id",
+ "imei",
+ "imsi",
+ "apn",
+ "phone_number",
+ "server_ip",
+ "server_port",
+ "server_os_desc",
+ "server_geolocation",
+ "server_asn",
+ "server_fqdn",
+ "server_domain",
+ "app_transition",
+ "app",
+ "app_debug_info",
+ "app_content",
+ "ip_protocol",
+ "decoded_path",
+ "fqdn_category_list",
+ "sent_pkts",
+ "received_pkts",
+ "sent_bytes",
+ "received_bytes",
+ "tcp_c2s_ip_fragments",
+ "tcp_s2c_ip_fragments",
+ "tcp_c2s_lost_bytes",
+ "tcp_s2c_lost_bytes",
+ "tcp_c2s_o3_pkts",
+ "tcp_s2c_o3_pkts",
+ "tcp_c2s_rtx_pkts",
+ "tcp_s2c_rtx_pkts",
+ "tcp_c2s_rtx_bytes",
+ "tcp_s2c_rtx_bytes",
+ "tcp_rtt_ms",
+ "tcp_client_isn",
+ "tcp_server_isn",
+ "packet_capture_file",
+ "in_src_mac",
+ "out_src_mac",
+ "in_dest_mac",
+ "out_dest_mac",
+ "encapsulation",
+ "dup_traffic_flag",
+ "tunnel_endpoint_a_desc",
+ "tunnel_endpoint_b_desc",
+ "rtp_payload_type_c2s",
+ "rtp_payload_type_s2c",
+ "rtp_pcap_path",
+ "rtp_originator_dir"
+ ],
+ "default_columns": [
+ "recv_time",
+ "subscriber_id",
+ "client_ip",
+ "client_port",
+ "server_ip",
+ "server_port",
+ "rtp_pcap_path",
+ "rtp_originator_dir"
+ ]
+ },
+ "RDP": {
+ "columns": [
+ "recv_time",
+ "log_id",
+ "decoded_as",
+ "session_id",
+ "start_timestamp_ms",
+ "end_timestamp_ms",
+ "duration_ms",
+ "tcp_handshake_latency_ms",
+ "ingestion_time",
+ "processing_time",
+ "insert_time",
+ "device_id",
+ "out_link_id",
+ "in_link_id",
+ "device_tag",
+ "data_center",
+ "device_group",
+ "sled_ip",
+ "address_type",
+ "vsys_id",
+ "t_vsys_id",
+ "flags",
+ "flags_identify_info",
+ "security_rule_list",
+ "security_action",
+ "monitor_rule_list",
+ "shaping_rule_list",
+ "sc_rule_list",
+ "statistics_rule_list",
+ "sc_rsp_raw",
+ "sc_rsp_decrypted",
+ "proxy_rule_list",
+ "proxy_action",
+ "proxy_pinning_status",
+ "proxy_intercept_status",
+ "proxy_passthrough_reason",
+ "proxy_client_side_latency_ms",
+ "proxy_server_side_latency_ms",
+ "proxy_client_side_version",
+ "proxy_server_side_version",
+ "proxy_cert_verify",
+ "proxy_intercept_error",
+ "monitor_mirrored_pkts",
+ "monitor_mirrored_bytes",
+ "client_ip",
+ "client_port",
+ "client_os_desc",
+ "client_geolocation",
+ "client_asn",
+ "subscriber_id",
+ "imei",
+ "imsi",
+ "apn",
+ "phone_number",
+ "server_ip",
+ "server_port",
+ "server_os_desc",
+ "server_geolocation",
+ "server_asn",
+ "server_fqdn",
+ "server_domain",
+ "app_transition",
+ "app",
+ "app_debug_info",
+ "app_content",
+ "ip_protocol",
+ "decoded_path",
+ "fqdn_category_list",
+ "sent_pkts",
+ "received_pkts",
+ "sent_bytes",
+ "received_bytes",
+ "tcp_c2s_ip_fragments",
+ "tcp_s2c_ip_fragments",
+ "tcp_c2s_lost_bytes",
+ "tcp_s2c_lost_bytes",
+ "tcp_c2s_o3_pkts",
+ "tcp_s2c_o3_pkts",
+ "tcp_c2s_rtx_pkts",
+ "tcp_s2c_rtx_pkts",
+ "tcp_c2s_rtx_bytes",
+ "tcp_s2c_rtx_bytes",
+ "tcp_rtt_ms",
+ "tcp_client_isn",
+ "tcp_server_isn",
+ "packet_capture_file",
+ "in_src_mac",
+ "out_src_mac",
+ "in_dest_mac",
+ "out_dest_mac",
+ "encapsulation",
+ "dup_traffic_flag",
+ "tunnel_endpoint_a_desc",
+ "tunnel_endpoint_b_desc",
+ "rdp_cookie",
+ "rdp_security_protocol",
+ "rdp_client_channels",
+ "rdp_keyboard_layout",
+ "rdp_client_version",
+ "rdp_client_name",
+ "rdp_client_product_id",
+ "rdp_desktop_width",
+ "rdp_desktop_height",
+ "rdp_requested_color_depth",
+ "rdp_certificate_type",
+ "rdp_certificate_count",
+ "rdp_certificate_permanent",
+ "rdp_encryption_level",
+ "rdp_encryption_method"
+ ],
+ "default_columns": [
+ "recv_time",
+ "subscriber_id",
+ "rdp_client_version",
+ "rdp_client_name"
+ ]
+ },
+ "SSH": {
+ "columns": [
+ "recv_time",
+ "log_id",
+ "decoded_as",
+ "session_id",
+ "start_timestamp_ms",
+ "end_timestamp_ms",
+ "duration_ms",
+ "tcp_handshake_latency_ms",
+ "ingestion_time",
+ "processing_time",
+ "insert_time",
+ "device_id",
+ "out_link_id",
+ "in_link_id",
+ "device_tag",
+ "data_center",
+ "device_group",
+ "sled_ip",
+ "address_type",
+ "vsys_id",
+ "t_vsys_id",
+ "flags",
+ "flags_identify_info",
+ "security_rule_list",
+ "security_action",
+ "monitor_rule_list",
+ "shaping_rule_list",
+ "sc_rule_list",
+ "statistics_rule_list",
+ "sc_rsp_raw",
+ "sc_rsp_decrypted",
+ "proxy_rule_list",
+ "proxy_action",
+ "proxy_pinning_status",
+ "proxy_intercept_status",
+ "proxy_passthrough_reason",
+ "proxy_client_side_latency_ms",
+ "proxy_server_side_latency_ms",
+ "proxy_client_side_version",
+ "proxy_server_side_version",
+ "proxy_cert_verify",
+ "proxy_intercept_error",
+ "monitor_mirrored_pkts",
+ "monitor_mirrored_bytes",
+ "client_ip",
+ "client_port",
+ "client_os_desc",
+ "client_geolocation",
+ "client_asn",
+ "subscriber_id",
+ "imei",
+ "imsi",
+ "apn",
+ "phone_number",
+ "server_ip",
+ "server_port",
+ "server_os_desc",
+ "server_geolocation",
+ "server_asn",
+ "server_fqdn",
+ "server_domain",
+ "app_transition",
+ "app",
+ "app_debug_info",
+ "app_content",
+ "ip_protocol",
+ "decoded_path",
+ "fqdn_category_list",
+ "sent_pkts",
+ "received_pkts",
+ "sent_bytes",
+ "received_bytes",
+ "tcp_c2s_ip_fragments",
+ "tcp_s2c_ip_fragments",
+ "tcp_c2s_lost_bytes",
+ "tcp_s2c_lost_bytes",
+ "tcp_c2s_o3_pkts",
+ "tcp_s2c_o3_pkts",
+ "tcp_c2s_rtx_pkts",
+ "tcp_s2c_rtx_pkts",
+ "tcp_c2s_rtx_bytes",
+ "tcp_s2c_rtx_bytes",
+ "tcp_rtt_ms",
+ "tcp_client_isn",
+ "tcp_server_isn",
+ "packet_capture_file",
+ "in_src_mac",
+ "out_src_mac",
+ "in_dest_mac",
+ "out_dest_mac",
+ "encapsulation",
+ "dup_traffic_flag",
+ "tunnel_endpoint_a_desc",
+ "tunnel_endpoint_b_desc",
+ "ssh_version",
+ "ssh_auth_success",
+ "ssh_client_version",
+ "ssh_server_version",
+ "ssh_cipher_alg",
+ "ssh_mac_alg",
+ "ssh_compression_alg",
+ "ssh_kex_alg",
+ "ssh_host_key_alg",
+ "ssh_host_key",
+ "ssh_hassh"
+ ],
+ "default_columns": [
+ "recv_time",
+ "subscriber_id",
+ "client_ip",
+ "client_port",
+ "server_ip",
+ "server_port",
+ "ssh_auth_success"
+ ]
+ },
+ "Stratum": {
+ "columns": [
+ "recv_time",
+ "log_id",
+ "decoded_as",
+ "session_id",
+ "start_timestamp_ms",
+ "end_timestamp_ms",
+ "duration_ms",
+ "tcp_handshake_latency_ms",
+ "ingestion_time",
+ "processing_time",
+ "insert_time",
+ "device_id",
+ "out_link_id",
+ "in_link_id",
+ "device_tag",
+ "data_center",
+ "device_group",
+ "sled_ip",
+ "address_type",
+ "vsys_id",
+ "t_vsys_id",
+ "flags",
+ "flags_identify_info",
+ "security_rule_list",
+ "security_action",
+ "monitor_rule_list",
+ "shaping_rule_list",
+ "sc_rule_list",
+ "statistics_rule_list",
+ "sc_rsp_raw",
+ "sc_rsp_decrypted",
+ "proxy_rule_list",
+ "proxy_action",
+ "proxy_pinning_status",
+ "proxy_intercept_status",
+ "proxy_passthrough_reason",
+ "proxy_client_side_latency_ms",
+ "proxy_server_side_latency_ms",
+ "proxy_client_side_version",
+ "proxy_server_side_version",
+ "proxy_cert_verify",
+ "proxy_intercept_error",
+ "monitor_mirrored_pkts",
+ "monitor_mirrored_bytes",
+ "client_ip",
+ "client_port",
+ "client_os_desc",
+ "client_geolocation",
+ "client_asn",
+ "subscriber_id",
+ "imei",
+ "imsi",
+ "apn",
+ "phone_number",
+ "server_ip",
+ "server_port",
+ "server_os_desc",
+ "server_geolocation",
+ "server_asn",
+ "server_fqdn",
+ "server_domain",
+ "app_transition",
+ "app",
+ "app_debug_info",
+ "app_content",
+ "ip_protocol",
+ "decoded_path",
+ "fqdn_category_list",
+ "sent_pkts",
+ "received_pkts",
+ "sent_bytes",
+ "received_bytes",
+ "tcp_c2s_ip_fragments",
+ "tcp_s2c_ip_fragments",
+ "tcp_c2s_lost_bytes",
+ "tcp_s2c_lost_bytes",
+ "tcp_c2s_o3_pkts",
+ "tcp_s2c_o3_pkts",
+ "tcp_c2s_rtx_pkts",
+ "tcp_s2c_rtx_pkts",
+ "tcp_c2s_rtx_bytes",
+ "tcp_s2c_rtx_bytes",
+ "tcp_rtt_ms",
+ "tcp_client_isn",
+ "tcp_server_isn",
+ "packet_capture_file",
+ "in_src_mac",
+ "out_src_mac",
+ "in_dest_mac",
+ "out_dest_mac",
+ "encapsulation",
+ "dup_traffic_flag",
+ "tunnel_endpoint_a_desc",
+ "tunnel_endpoint_b_desc",
+ "stratum_cryptocurrency",
+ "stratum_mining_pools",
+ "stratum_mining_program",
+ "stratum_mining_subscribe"
+ ],
+ "default_columns": [
+ "recv_time",
+ "subscriber_id",
+ "client_port",
+ "client_ip",
+ "server_ip",
+ "server_port",
+ "stratum_cryptocurrency",
+ "stratum_mining_pools",
+ "stratum_mining_program"
+ ]
+ }
+ },
+ "default_columns": [
+ "recv_time",
+ "subscriber_id",
+ "client_ip",
+ "client_port",
+ "server_ip",
+ "server_port",
+ "decoded_as",
+ "server_fqdn"
+ ],
+ "internal_columns": [
+ "recv_time",
+ "log_id",
+ "flags_identify_info",
+ "encapsulation",
+ "app_debug_info",
+ "app_content",
+ "packet_capture_file",
+ "tunnel_endpoint_a_desc",
+ "tunnel_endpoint_b_desc"
+ ],
+ "tunnel_type": {
+ "GTP": [
+ {
+ "name": "gtp_endpoint_a_ip",
+ "label": "Endpoint A IP",
+ "type": "string"
+ },
+ {
+ "name": "gtp_endpoint_b_ip",
+ "label": "Endpoint B IP",
+ "type": "string"
+ },
+ {
+ "name": "gtp_endpoint_a_port",
+ "label": "Endpoint A Port",
+ "type": "int"
+ },
+ {
+ "name": "gtp_endpoint_b_port",
+ "label": "Endpoint B Port",
+ "type": "int"
+ },
+ {
+ "name": "gtp_endpoint_a2b_teid",
+ "label": "Endpoint A2B TEID",
+ "type": "long"
+ },
+ {
+ "name": "gtp_endpoint_b2a_teid",
+ "label": "Endpoint B2A TEID",
+ "type": "long"
+ }
+ ],
+ "MPLS": [
+ {
+ "name": "mpls_c2s_direction_label",
+ "label": "Multiprotocol Label (c2s)",
+ "type": {
+ "type": "array",
+ "items": "int",
+ "logicalType": "array"
+ }
+ },
+ {
+ "name": "mpls_s2c_direction_label",
+ "label": "Multiprotocol Label (s2c)",
+ "type": {
+ "type": "array",
+ "items": "int",
+ "logicalType": "array"
+ }
+ }
+ ],
+ "VLAN": [
+ {
+ "name": "vlan_c2s_direction_id",
+ "label": "VLAN Direction (c2s)",
+ "type": {
+ "type": "array",
+ "items": "int",
+ "logicalType": "array"
+ }
+ },
+ {
+ "name": "vlan_s2c_direction_id",
+ "label": "VLAN Direction (s2c)",
+ "type": {
+ "type": "array",
+ "items": "int",
+ "logicalType": "array"
+ }
+ }
+ ],
+ "ETHERNET": [
+ {
+ "name": "source_mac",
+ "label": "Source MAC",
+ "type": "string"
+ },
+ {
+ "name": "destination_mac",
+ "label": "Destination MAC",
+ "type": "string"
+ }
+ ],
+ "MULTIPATH_ETHERNET": [
+ {
+ "name": "c2s_source_mac",
+ "label": "Source MAC (c2s)",
+ "type": "string"
+ },
+ {
+ "name": "c2s_destination_mac",
+ "label": "Destination MAC (c2s)",
+ "type": "string"
+ },
+ {
+ "name": "s2c_source_mac",
+ "label": "Source MAC (s2c)",
+ "type": "string"
+ },
+ {
+ "name": "s2c_destination_mac",
+ "label": "Destination MAC (s2c)",
+ "type": "string"
+ }
+ ],
+ "L2TP": [
+ {
+ "name": "l2tp_version",
+ "label": "Version",
+ "type": "string"
+ },
+ {
+ "name": "l2tp_lac2lns_tunnel_id",
+ "label": "LAC2LNS Tunnel ID",
+ "type": "int"
+ },
+ {
+ "name": "l2tp_lns2lac_tunnel_id",
+ "label": "LNS2LAC Tunnel ID",
+ "type": "int"
+ },
+ {
+ "name": "l2tp_lac2lns_session_id",
+ "label": "LAC2LNS Session ID",
+ "type": "int"
+ },
+ {
+ "name": "l2tp_lns2lac_session_id",
+ "label": "LNS2LAC Session ID",
+ "type": "int"
+ },
+ {
+ "name": "l2tp_access_concentrator_ip",
+ "label": "Access Concentrator IP",
+ "type": "string"
+ },
+ {
+ "name": "l2tp_access_concentrator_port",
+ "label": "Access Concentrator Port",
+ "type": "int"
+ },
+ {
+ "name": "l2tp_network_server_ip",
+ "label": "Network Server IP",
+ "type": "string"
+ },
+ {
+ "name": "l2tp_network_server_port",
+ "label": "Network Server Port",
+ "type": "int"
+ }
+ ],
+ "PPTP": [
+ {
+ "name": "pptp_uplink_tunnel_id",
+ "label": "UpLink Tunnel ID",
+ "type": "int"
+ },
+ {
+ "name": "pptp_downlink_tunnel_id",
+ "label": "DownLink Tunnel ID",
+ "type": "int"
+ }
+ ],
+ "IPv4": [
+ {
+ "name": "client_ip",
+ "label": "Client IP",
+ "type": "string"
+ },
+ {
+ "name": "server_ip",
+ "label": "Server IP",
+ "type": "string"
+ }
+ ],
+ "IPv6": [
+ {
+ "name": "client_ip",
+ "label": "Client IP",
+ "type": "string"
+ },
+ {
+ "name": "server_ip",
+ "label": "Server IP",
+ "type": "string"
+ }
+ ]
+ },
+ "measurements": {
+ "aggregated_metric_unit": {
+ "SUM": {
+ "sent_pkts": "packets",
+ "received_pkts": "packets",
+ "sent_bytes": "bytes",
+ "received_bytes": "bytes",
+ "tcp_c2s_lost_bytes": "bytes",
+ "tcp_s2c_lost_bytes": "bytes",
+ "tcp_c2s_o3_pkts": "packets",
+ "tcp_s2c_o3_pkts": "packets",
+ "tcp_c2s_rtx_pkts": "packets",
+ "tcp_s2c_rtx_pkts": "packets",
+ "tcp_c2s_rtx_bytes": "bytes",
+ "tcp_s2c_rtx_bytes": "bytes",
+ "http_request_content_length": "bytes",
+ "http_response_content_length": "bytes"
+ },
+ "RATE": {
+ "sent_pkts": "pps",
+ "received_pkts": "pps",
+ "sent_bytes": "Bps",
+ "received_bytes": "Bps",
+ "tcp_c2s_lost_bytes": "Bps",
+ "tcp_s2c_lost_bytes": "Bps",
+ "tcp_c2s_o3_pkts": "pps",
+ "tcp_s2c_o3_pkts": "pps",
+ "tcp_c2s_rtx_pkts": "pps",
+ "tcp_s2c_rtx_pkts": "pps",
+ "tcp_c2s_rtx_bytes": "Bps",
+ "tcp_s2c_rtx_bytes": "Bps",
+ "http_request_content_length": "Bps",
+ "http_response_content_length": "Bps"
+ },
+ "BITRATE": {
+ "sent_bytes": "bps",
+ "received_bytes": "bps",
+ "tcp_c2s_lost_bytes": "bps",
+ "tcp_s2c_lost_bytes": "bps",
+ "tcp_c2s_rtx_bytes": "bps",
+ "tcp_s2c_rtx_bytes": "bps",
+ "http_request_content_length": "bps",
+ "http_response_content_length": "bps"
+ },
+ "MAX": {
+ "sent_pkts": "packets",
+ "received_pkts": "packets",
+ "sent_bytes": "bytes",
+ "received_bytes": "bytes",
+ "duration_ms": "ms",
+ "tcp_handshake_latency_ms": "ms",
+ "tcp_c2s_lost_bytes": "bytes",
+ "tcp_s2c_lost_bytes": "bytes",
+ "tcp_c2s_o3_pkts": "packets",
+ "tcp_s2c_o3_pkts": "packets",
+ "tcp_c2s_rtx_pkts": "packets",
+ "tcp_s2c_rtx_pkts": "packets",
+ "tcp_c2s_rtx_bytes": "bytes",
+ "tcp_s2c_rtx_bytes": "bytes",
+ "tcp_rtt_ms": "ms",
+ "http_request_content_length": "bytes",
+ "http_response_content_length": "bytes",
+ "http_response_latency_ms": "ms",
+ "http_session_duration_ms": "ms",
+ "dtls_handshake_latency_ms": "ms",
+ "dns_response_latency_ms": "ms",
+ "ssl_handshake_latency_ms": "ms"
+ },
+ "MIN": {
+ "sent_pkts": "packets",
+ "received_pkts": "packets",
+ "sent_bytes": "bytes",
+ "received_bytes": "bytes",
+ "duration_ms": "ms",
+ "tcp_handshake_latency_ms": "ms",
+ "tcp_c2s_lost_bytes": "bytes",
+ "tcp_s2c_lost_bytes": "bytes",
+ "tcp_c2s_o3_pkts": "packets",
+ "tcp_s2c_o3_pkts": "packets",
+ "tcp_c2s_rtx_pkts": "packets",
+ "tcp_s2c_rtx_pkts": "packets",
+ "tcp_c2s_rtx_bytes": "bytes",
+ "tcp_s2c_rtx_bytes": "bytes",
+ "tcp_rtt_ms": "ms",
+ "http_request_content_length": "bytes",
+ "http_response_content_length": "bytes",
+ "http_response_latency_ms": "ms",
+ "http_session_duration_ms": "ms",
+ "dtls_handshake_latency_ms": "ms",
+ "dns_response_latency_ms": "ms",
+ "ssl_handshake_latency_ms": "ms"
+ },
+ "AVG": {
+ "sent_pkts": "packets",
+ "received_pkts": "packets",
+ "sent_bytes": "bytes",
+ "received_bytes": "bytes",
+ "duration_ms": "ms",
+ "tcp_handshake_latency_ms": "ms",
+ "tcp_c2s_lost_bytes": "bytes",
+ "tcp_s2c_lost_bytes": "bytes",
+ "tcp_c2s_o3_pkts": "packets",
+ "tcp_s2c_o3_pkts": "packets",
+ "tcp_c2s_rtx_pkts": "packets",
+ "tcp_s2c_rtx_pkts": "packets",
+ "tcp_c2s_rtx_bytes": "bytes",
+ "tcp_s2c_rtx_bytes": "bytes",
+ "tcp_rtt_ms": "ms",
+ "http_request_content_length": "bytes",
+ "http_response_content_length": "bytes",
+ "http_response_latency_ms": "ms",
+ "http_session_duration_ms": "ms",
+ "dtls_handshake_latency_ms": "ms",
+ "dns_response_latency_ms": "ms",
+ "ssl_handshake_latency_ms": "ms"
+ },
+ "MEDIAN": {
+ "sent_pkts": "packets",
+ "received_pkts": "packets",
+ "sent_bytes": "bytes",
+ "received_bytes": "bytes",
+ "duration_ms": "ms",
+ "tcp_handshake_latency_ms": "ms",
+ "tcp_c2s_lost_bytes": "bytes",
+ "tcp_s2c_lost_bytes": "bytes",
+ "tcp_c2s_o3_pkts": "packets",
+ "tcp_s2c_o3_pkts": "packets",
+ "tcp_c2s_rtx_pkts": "packets",
+ "tcp_s2c_rtx_pkts": "packets",
+ "tcp_c2s_rtx_bytes": "bytes",
+ "tcp_s2c_rtx_bytes": "bytes",
+ "tcp_rtt_ms": "ms",
+ "http_request_content_length": "bytes",
+ "http_response_content_length": "bytes",
+ "http_response_latency_ms": "ms",
+ "http_session_duration_ms": "ms",
+ "dtls_handshake_latency_ms": "ms",
+ "dns_response_latency_ms": "ms",
+ "ssl_handshake_latency_ms": "ms"
+ },
+ "P95_PERCENTILE": {
+ "sent_pkts": "packets",
+ "received_pkts": "packets",
+ "sent_bytes": "bytes",
+ "received_bytes": "bytes",
+ "duration_ms": "ms",
+ "tcp_handshake_latency_ms": "ms",
+ "tcp_c2s_lost_bytes": "bytes",
+ "tcp_s2c_lost_bytes": "bytes",
+ "tcp_c2s_o3_pkts": "packets",
+ "tcp_s2c_o3_pkts": "packets",
+ "tcp_c2s_rtx_pkts": "packets",
+ "tcp_s2c_rtx_pkts": "packets",
+ "tcp_c2s_rtx_bytes": "bytes",
+ "tcp_s2c_rtx_bytes": "bytes",
+ "tcp_rtt_ms": "ms",
+ "http_request_content_length": "bytes",
+ "http_response_content_length": "bytes",
+ "http_response_latency_ms": "ms",
+ "http_session_duration_ms": "ms",
+ "dtls_handshake_latency_ms": "ms",
+ "dns_response_latency_ms": "ms",
+ "ssl_handshake_latency_ms": "ms"
+ },
+ "P99_PERCENTILE": {
+ "sent_pkts": "packets",
+ "received_pkts": "packets",
+ "sent_bytes": "bytes",
+ "received_bytes": "bytes",
+ "duration_ms": "ms",
+ "tcp_handshake_latency_ms": "ms",
+ "tcp_c2s_lost_bytes": "bytes",
+ "tcp_s2c_lost_bytes": "bytes",
+ "tcp_c2s_o3_pkts": "packets",
+ "tcp_s2c_o3_pkts": "packets",
+ "tcp_c2s_rtx_pkts": "packets",
+ "tcp_s2c_rtx_pkts": "packets",
+ "tcp_c2s_rtx_bytes": "bytes",
+ "tcp_s2c_rtx_bytes": "bytes",
+ "tcp_rtt_ms": "ms",
+ "http_request_content_length": "bytes",
+ "http_response_content_length": "bytes",
+ "http_response_latency_ms": "ms",
+ "http_session_duration_ms": "ms",
+ "dtls_handshake_latency_ms": "ms",
+ "dns_response_latency_ms": "ms",
+ "ssl_handshake_latency_ms": "ms"
+ }
+ },
+ "field_discovery_metric": {
+ "sessions": [
+ {
+ "fn": "count",
+ "column": "log_id",
+ "value": "sessions",
+ "label": "Sessions",
+ "unit": "sessions"
+ }
+ ],
+ "bytes": [
+ {
+ "fn": "sum",
+ "column": "sent_bytes + received_bytes",
+ "value": "bytes",
+ "label": "Bytes",
+ "unit": "bytes"
+ }
+ ],
+ "incoming_bytes": [
+ {
+ "fn": "sum",
+ "column": "if(bitAnd(flags, 8) = 8, received_bytes, sent_bytes)",
+ "value": "incoming_bytes",
+ "label": "Incoming Bytes",
+ "unit": "bytes"
+ }
+ ],
+ "outgoing_bytes": [
+ {
+ "fn": "sum",
+ "column": "if(bitAnd(flags, 8) = 8, sent_bytes, received_bytes)",
+ "value": "outgoing_bytes",
+ "label": "Outgoing Bytes",
+ "unit": "bytes"
+ }
+ ]
+ }
+ },
+ "expression_fields": [
+ {
+ "name": "internal_ip_list",
+ "label": "Internal IP List",
+ "type": {
+ "type": "array",
+ "items": "string",
+ "logicalType": "array"
+ },
+ "doc": {
+ "constraints": {
+ "operator_functions": "has,notEmpty,empty"
+ },
+ "expression": "array(if(bitAnd(flags, 8)=8, client_ip,''), if(bitAnd(flags, 16)=16, server_ip,''))"
+ }
+ },
+ {
+ "name": "external_ip_list",
+ "label": "External IP List",
+ "type": {
+ "type": "array",
+ "items": "string",
+ "logicalType": "array"
+ },
+ "doc": {
+ "constraints": {
+ "operator_functions": "has,notEmpty,empty"
+ },
+ "expression": "array(if(bitAnd(flags, 8)!=8, client_ip,''), if(bitAnd(flags, 16)!=16, server_ip,''))"
+ }
+ },
+ {
+ "name": "security_rule_id",
+ "label": "Security Rule ID",
+ "type": "long",
+ "doc": {
+ "expression": "arrayJoin(IF(empty(security_rule_list), arrayPushFront(security_rule_list, null),security_rule_list))"
+ }
+ },
+ {
+ "name": "proxy_rule_id",
+ "label": "Proxy Rule ID",
+ "type": "long",
+ "doc": {
+ "expression": "arrayJoin(IF(empty(proxy_rule_list), arrayPushFront(proxy_rule_list, null),proxy_rule_list))"
+ }
+ },
+ {
+ "name": "monitor_rule_id",
+ "label": "Monitor Rule ID",
+ "type": "long",
+ "doc": {
+ "expression": "arrayJoin(IF(empty(monitor_rule_list), arrayPushFront(monitor_rule_list, null),monitor_rule_list))"
+ }
+ },
+ {
+ "name": "shaping_rule_id",
+ "label": "Shaping Rule ID",
+ "type": "long",
+ "doc": {
+ "expression": "arrayJoin(IF(empty(shaping_rule_list), arrayPushFront(shaping_rule_list, null),shaping_rule_list))"
+ }
+ },
+ {
+ "name": "sc_rule_id",
+ "label": "Service Chaining Rule ID",
+ "type": "long",
+ "doc": {
+ "expression": "arrayJoin(IF(empty(sc_rule_list), arrayPushFront(sc_rule_list, null),sc_rule_list))"
+ }
+ },
+ {
+ "name": "statistics_rule_id",
+ "label": "Statistics Rule ID",
+ "type": "long",
+ "doc": {
+ "expression": "arrayJoin(IF(empty(statistics_rule_list), arrayPushFront(statistics_rule_list, null),statistics_rule_list))"
+ }
+ }
+ ],
+ "size": 0
+ },
+ "fields": [
+ {
+ "name": "recv_time",
+ "type": {
+ "type": "long",
+ "logicalType": "unix_timestamp"
+ },
+ "doc": {
+ "constraints": {
+ "type": "unix_timestamp"
+ },
+ "visibility": "enabled",
+ "ttl": null,
+ "size": 0
+ },
+ "label": "Receive Time"
+ },
+ {
+ "name": "log_id",
+ "type": "long",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Log ID"
+ },
+ {
+ "name": "decoded_as",
+ "type": "string",
+ "doc": {
+ "constraints": {
+ "operator_functions": "=,!=,in,not in"
+ },
+ "data": [
+ {
+ "code": "BASE",
+ "value": "BASE"
+ },
+ {
+ "code": "MAIL",
+ "value": "MAIL"
+ },
+ {
+ "code": "DNS",
+ "value": "DNS"
+ },
+ {
+ "code": "HTTP",
+ "value": "HTTP"
+ },
+ {
+ "code": "SSL",
+ "value": "SSL"
+ },
+ {
+ "code": "DTLS",
+ "value": "DTLS"
+ },
+ {
+ "code": "QUIC",
+ "value": "QUIC"
+ },
+ {
+ "code": "FTP",
+ "value": "FTP"
+ },
+ {
+ "code": "SSH",
+ "value": "SSH"
+ },
+ {
+ "code": "Stratum",
+ "value": "Stratum"
+ },
+ {
+ "code": "RDP",
+ "value": "RDP"
+ },
+ {
+ "code": "SIP",
+ "value": "SIP"
+ },
+ {
+ "code": "RTP",
+ "value": "RTP"
+ }
+ ],
+ "visibility": "enabled",
+ "ttl": null,
+ "size": 0
+ },
+ "label": "Decoded As"
+ },
+ {
+ "name": "session_id",
+ "type": "long",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Session ID"
+ },
+ {
+ "name": "start_timestamp_ms",
+ "type": {
+ "type": "string",
+ "logicalType": "datetime64"
+ },
+ "doc": {
+ "constraints": {
+ "operator_functions": "=,!=,>,<,>=,<=",
+ "type": "datetime64"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Start Time"
+ },
+ {
+ "name": "end_timestamp_ms",
+ "type": {
+ "type": "string",
+ "logicalType": "datetime64"
+ },
+ "doc": {
+ "constraints": {
+ "operator_functions": "=,!=,>,<,>=,<=",
+ "type": "datetime64"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "End Time"
+ },
+ {
+ "name": "duration_ms",
+ "type": "int",
+ "doc": {
+ "constraints": {
+ "type": "decimal",
+ "aggregation_functions": "AVG, MAX, MIN, MEDIAN, P95_PERCENTILE, P99_PERCENTILE"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Duration (ms)"
+ },
+ {
+ "name": "tcp_handshake_latency_ms",
+ "type": "int",
+ "doc": {
+ "constraints": {
+ "type": "decimal",
+ "aggregation_functions": "AVG, MAX, MIN, MEDIAN, P95_PERCENTILE, P99_PERCENTILE"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "TCP Handshake Latency (ms)"
+ },
+ {
+ "name": "processing_time",
+ "type": {
+ "type": "long",
+ "logicalType": "unix_timestamp"
+ },
+ "doc": {
+ "constraints": {
+ "type": "unix_timestamp"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Processing Time"
+ },
+ {
+ "name": "ingestion_time",
+ "type": {
+ "type": "long",
+ "logicalType": "unix_timestamp"
+ },
+ "doc": {
+ "constraints": {
+ "type": "unix_timestamp"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Ingestion Time"
+ },
+ {
+ "name": "insert_time",
+ "type": {
+ "type": "long",
+ "logicalType": "unix_timestamp"
+ },
+ "doc": {
+ "constraints": {
+ "type": "unix_timestamp"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Insert Time"
+ },
+ {
+ "name": "device_id",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Device ID"
+ },
+ {
+ "name": "out_link_id",
+ "type": "int",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Outgoing Link ID"
+ },
+ {
+ "name": "in_link_id",
+ "type": "int",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Incoming Link ID"
+ },
+ {
+ "name": "device_tag",
+ "type": "string",
+ "doc": {
+ "visibility": "hidden",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Device Tag"
+ },
+ {
+ "name": "data_center",
+ "type": "string",
+ "doc": {
+ "constraints": {
+ "operator_functions": "=,!=,in,not in"
+ },
+ "data": [],
+ "visibility": "enabled",
+ "ttl": null,
+ "size": 0
+ },
+ "label": "Data Center"
+ },
+ {
+ "name": "device_group",
+ "type": "string",
+ "doc": {
+ "constraints": {
+ "operator_functions": "=,!=,in,not in"
+ },
+ "data": [
+ {
+ "code": "City A",
+ "value": "City A"
+ },
+ {
+ "code": "City B",
+ "value": "City B"
+ },
+ {
+ "code": "City C",
+ "value": "City C"
+ },
+ {
+ "code": "City D",
+ "value": "City D"
+ },
+ {
+ "code": "City E",
+ "value": "City E"
+ },
+ {
+ "code": "City F",
+ "value": "City F"
+ },
+ {
+ "code": "City G",
+ "value": "City G"
+ },
+ {
+ "code": "City H",
+ "value": "City H"
+ },
+ {
+ "code": "City I",
+ "value": "City I"
+ },
+ {
+ "code": "City J",
+ "value": "City J"
+ },
+ {
+ "code": "City K",
+ "value": "City K"
+ },
+ {
+ "code": "City L",
+ "value": "City L"
+ },
+ {
+ "code": "City M",
+ "value": "City M"
+ },
+ {
+ "code": "City N",
+ "value": "City N"
+ }
+ ],
+ "visibility": "enabled",
+ "ttl": null,
+ "size": 0
+ },
+ "label": "Device Group"
+ },
+ {
+ "name": "sled_ip",
+ "type": "string",
+ "doc": {
+ "constraints": {
+ "type": "ip"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Sled IP"
+ },
+ {
+ "name": "address_type",
+ "type": "int",
+ "doc": {
+ "constraints": {
+ "operator_functions": "=,!=,in,not in"
+ },
+ "data": [
+ {
+ "code": "4",
+ "value": "ipv4"
+ },
+ {
+ "code": "6",
+ "value": "ipv6"
+ }
+ ],
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Address Type"
+ },
+ {
+ "name": "vsys_id",
+ "type": "int",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": null,
+ "size": 0
+ },
+ "default": 1,
+ "label": "Vsys ID"
+ },
+ {
+ "name": "t_vsys_id",
+ "type": "int",
+ "doc": {
+ "allow_query": "false",
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Traffic Vsys ID"
+ },
+ {
+ "name": "flags",
+ "type": {
+ "type": "long",
+ "logicalType": "bit"
+ },
+ "doc": {
+ "constraints": {
+ "type": "bit",
+ "operator_functions": "=,!=,bitAnd"
+ },
+ "data": [
+ {
+ "code": "1",
+ "value": "Asymmetric"
+ },
+ {
+ "code": "2",
+ "value": "Bulky"
+ },
+ {
+ "code": "4",
+ "value": "CBR Streaming"
+ },
+ {
+ "code": "8",
+ "value": "Client is Local"
+ },
+ {
+ "code": "16",
+ "value": "Server is Local"
+ },
+ {
+ "code": "32",
+ "value": "Download"
+ },
+ {
+ "code": "64",
+ "value": "Interactive"
+ },
+ {
+ "code": "128",
+ "value": "Inbound"
+ },
+ {
+ "code": "256",
+ "value": "Outbound"
+ },
+ {
+ "code": "512",
+ "value": "Pseudo Unidirectional"
+ },
+ {
+ "code": "1024",
+ "value": "Streaming"
+ },
+ {
+ "code": "2048",
+ "value": "Unidirectional"
+ },
+ {
+ "code": "4096",
+ "value": "Random looking"
+ },
+ {
+ "code": "8192",
+ "value": "C2S"
+ },
+ {
+ "code": "16384",
+ "value": "S2C"
+ },
+ {
+ "code": "32768",
+ "value": "Bidirectional"
+ }
+ ],
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Flags"
+ },
+ {
+ "name": "flags_identify_info",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Flags Identify Info"
+ },
+ {
+ "name": "security_rule_list",
+ "type": {
+ "type": "array",
+ "items": "long",
+ "logicalType": "array"
+ },
+ "doc": {
+ "constraints": {
+ "operator_functions": "has,notEmpty,empty"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Security Rule List"
+ },
+ {
+ "name": "security_action",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "constraints": {
+ "operator_functions": "=,!=,in,not in"
+ },
+ "data": [
+ {
+ "code": "Deny",
+ "value": "Deny"
+ },
+ {
+ "code": "Allow",
+ "value": "Allow"
+ }
+ ],
+ "ttl": null,
+ "size": 0
+ },
+ "label": "Security Action"
+ },
+ {
+ "name": "monitor_rule_list",
+ "type": {
+ "type": "array",
+ "items": "long",
+ "logicalType": "array"
+ },
+ "doc": {
+ "constraints": {
+ "operator_functions": "has,notEmpty,empty"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Monitor Rule List"
+ },
+ {
+ "name": "sc_rule_list",
+ "type": {
+ "type": "array",
+ "items": "long",
+ "logicalType": "array"
+ },
+ "doc": {
+ "constraints": {
+ "operator_functions": "has,notEmpty,empty"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Service Chaining Rule List"
+ },
+ {
+ "name": "statistics_rule_list",
+ "type": {
+ "type": "array",
+ "items": "long",
+ "logicalType": "array"
+ },
+ "doc": {
+ "constraints": {
+ "operator_functions": "has,notEmpty,empty"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Statistics Rule List"
+ },
+ {
+ "name": "sc_rsp_raw",
+ "type": {
+ "type": "array",
+ "items": "long",
+ "logicalType": "array"
+ },
+ "doc": {
+ "constraints": {
+ "operator_functions": "has,notEmpty,empty"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Service Chaining Rendered Service Path (Raw)"
+ },
+ {
+ "name": "sc_rsp_decrypted",
+ "type": {
+ "type": "array",
+ "items": "long",
+ "logicalType": "array"
+ },
+ "doc": {
+ "constraints": {
+ "operator_functions": "has,notEmpty,empty"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Service Chaining Rendered Service Path (Decrypted)"
+ },
+ {
+ "name": "shaping_rule_list",
+ "type": {
+ "type": "array",
+ "items": "long",
+ "logicalType": "array"
+ },
+ "doc": {
+ "constraints": {
+ "operator_functions": "has,notEmpty,empty"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Shaping Rule List"
+ },
+ {
+ "name": "proxy_rule_list",
+ "type": {
+ "type": "array",
+ "items": "long",
+ "logicalType": "array"
+ },
+ "doc": {
+ "constraints": {
+ "operator_functions": "has,notEmpty,empty"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Proxy Rule List"
+ },
+ {
+ "name": "proxy_action",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "constraints": {
+ "operator_functions": "=,!=,in,not in"
+ },
+ "data": [
+ {
+ "code": "Intercept",
+ "value": "Intercept"
+ },
+ {
+ "code": "No Intercept",
+ "value": "No Intercept"
+ }
+ ],
+ "ttl": null,
+ "size": 0
+ },
+ "label": "Proxy Action"
+ },
+ {
+ "name": "proxy_pinning_status",
+ "type": "int",
+ "doc": {
+ "data": [
+ {
+ "code": "0",
+ "value": "not pinning"
+ },
+ {
+ "code": "1",
+ "value": "pinning"
+ },
+ {
+ "code": "2",
+ "value": "maybe pinning"
+ }
+ ],
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Proxy Pinning Status"
+ },
+ {
+ "name": "proxy_intercept_status",
+ "type": "int",
+ "doc": {
+ "data": [
+ {
+ "code": "0",
+ "value": "passthrough"
+ },
+ {
+ "code": "1",
+ "value": "intercept"
+ },
+ {
+ "code": "2",
+ "value": "shutdown"
+ }
+ ],
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Proxy Intercept Status"
+ },
+ {
+ "name": "proxy_passthrough_reason",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Proxy Passthrough Reason"
+ },
+ {
+ "name": "proxy_server_side_latency_ms",
+ "type": "int",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Proxy Server-Side Latency (ms)"
+ },
+ {
+ "name": "proxy_client_side_latency_ms",
+ "type": "int",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Proxy Client-Side Latency (ms)"
+ },
+ {
+ "name": "proxy_client_side_version",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Proxy Client-Side Version"
+ },
+ {
+ "name": "proxy_server_side_version",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Proxy Server-Side Version"
+ },
+ {
+ "name": "proxy_cert_verify",
+ "type": "int",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Proxy Certificate Verify"
+ },
+ {
+ "name": "proxy_intercept_error",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Proxy Intercept Error"
+ },
+ {
+ "name": "monitor_mirrored_pkts",
+ "type": "int",
+ "doc": {
+ "constraints": {
+ "type": "decimal"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Monitor Mirrored Packets"
+ },
+ {
+ "name": "monitor_mirrored_bytes",
+ "type": "int",
+ "doc": {
+ "constraints": {
+ "type": "bytes"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Monitor Mirrored Bytes"
+ },
+ {
+ "name": "client_ip",
+ "type": "string",
+ "doc": {
+ "constraints": {
+ "type": "ip"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Client IP"
+ },
+ {
+ "name": "client_port",
+ "type": "int",
+ "doc": {
+ "constraints": {
+ "aggregation_functions": "COUNT, COUNT_DISTINCT"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Client Port"
+ },
+ {
+ "name": "client_os_desc",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Client OS Description"
+ },
+ {
+ "name": "client_geolocation",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Client Geolocation"
+ },
+ {
+ "name": "client_asn",
+ "type": "long",
+ "doc": {
+ "constraints": {
+ "aggregation_functions": "COUNT, COUNT_DISTINCT"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Client ASN"
+ },
+ {
+ "name": "subscriber_id",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Subscriber ID"
+ },
+ {
+ "name": "imei",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "IMEI"
+ },
+ {
+ "name": "imsi",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "IMSI"
+ },
+ {
+ "name": "apn",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "APN"
+ },
+ {
+ "name": "phone_number",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Phone Number"
+ },
+ {
+ "name": "server_ip",
+ "type": "string",
+ "doc": {
+ "constraints": {
+ "type": "ip"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Server IP"
+ },
+ {
+ "name": "server_port",
+ "type": "int",
+ "doc": {
+ "constraints": {
+ "aggregation_functions": "COUNT, COUNT_DISTINCT"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Server Port"
+ },
+ {
+ "name": "server_os_desc",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Server OS Description"
+ },
+ {
+ "name": "server_geolocation",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Server Geolocation"
+ },
+ {
+ "name": "server_asn",
+ "type": "long",
+ "doc": {
+ "constraints": {
+ "aggregation_functions": "COUNT, COUNT_DISTINCT"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Server ASN"
+ },
+ {
+ "name": "server_fqdn",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Server FQDN"
+ },
+ {
+ "name": "server_domain",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Server Domain"
+ },
+ {
+ "name": "app_transition",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Application Transition"
+ },
+ {
+ "name": "app",
+ "type": "string",
+ "doc": {
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Application"
+ },
+ {
+ "name": "app_debug_info",
+ "type": "string",
+ "doc": {
+ "visibility": "hidden",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Application Debug Info"
+ },
+ {
+ "name": "app_content",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Application Content"
+ },
+ {
+ "name": "ip_protocol",
+ "type": "string",
+ "doc": {
+ "data": [
+ {
+ "code": "tcp",
+ "value": "tcp"
+ },
+ {
+ "code": "udp",
+ "value": "udp"
+ }
+ ],
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "IP Protocol"
+ },
+ {
+ "name": "decoded_path",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Decoded Path"
+ },
+ {
+ "name": "fqdn_category_list",
+ "type": {
+ "type": "array",
+ "items": "long",
+ "logicalType": "array"
+ },
+ "doc": {
+ "constraints": {
+ "operator_functions": "has,notEmpty,empty"
+ },
+ "dict_location": {
+ "path": "/v1/policy/object?type=fqdn_category",
+ "key": "category_id",
+ "value": "category_name"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "FQDN Category List"
+ },
+ {
+ "name": "sent_pkts",
+ "type": "long",
+ "doc": {
+ "constraints": {
+ "type": "decimal"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Packets Sent"
+ },
+ {
+ "name": "received_pkts",
+ "type": "long",
+ "doc": {
+ "constraints": {
+ "type": "decimal"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Packets Received"
+ },
+ {
+ "name": "sent_bytes",
+ "type": "long",
+ "doc": {
+ "constraints": {
+ "type": "bytes"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Bytes Sent"
+ },
+ {
+ "name": "received_bytes",
+ "type": "long",
+ "doc": {
+ "constraints": {
+ "type": "bytes"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Bytes Received"
+ },
+ {
+ "name": "tcp_c2s_ip_fragments",
+ "type": "long",
+ "doc": {
+ "constraints": {
+ "type": "decimal"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Client-to-Server IP Fragments"
+ },
+ {
+ "name": "tcp_s2c_ip_fragments",
+ "type": "long",
+ "doc": {
+ "constraints": {
+ "type": "decimal"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Server-to-Client IP Fragments"
+ },
+ {
+ "name": "tcp_c2s_lost_bytes",
+ "type": "long",
+ "doc": {
+ "constraints": {
+ "type": "bytes"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Client-to-Server Lost Bytes"
+ },
+ {
+ "name": "tcp_s2c_lost_bytes",
+ "type": "long",
+ "doc": {
+ "constraints": {
+ "type": "bytes"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Server-to-Client Lost Bytes"
+ },
+ {
+ "name": "tcp_c2s_o3_pkts",
+ "type": "long",
+ "doc": {
+ "constraints": {
+ "type": "decimal"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Client-to-Server Out-of-Order Packets"
+ },
+ {
+ "name": "tcp_s2c_o3_pkts",
+ "type": "long",
+ "doc": {
+ "constraints": {
+ "type": "decimal"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Server-to-Client Out-of-Order Packets"
+ },
+ {
+ "name": "tcp_c2s_rtx_pkts",
+ "type": "long",
+ "doc": {
+ "constraints": {
+ "type": "decimal"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Client-to-Server Retransmission Packets"
+ },
+ {
+ "name": "tcp_s2c_rtx_pkts",
+ "type": "long",
+ "doc": {
+ "constraints": {
+ "type": "decimal"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Server-to-Client Retransmission Packets"
+ },
+ {
+ "name": "tcp_c2s_rtx_bytes",
+ "type": "long",
+ "doc": {
+ "constraints": {
+ "type": "bytes"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Client-to-Server Retransmission Bytes"
+ },
+ {
+ "name": "tcp_s2c_rtx_bytes",
+ "type": "long",
+ "doc": {
+ "constraints": {
+ "type": "bytes"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Server-to-Client Retransmission Bytes"
+ },
+ {
+ "name": "tcp_rtt_ms",
+ "type": "int",
+ "doc": {
+ "constraints": {
+ "type": "decimal",
+ "aggregation_functions": "AVG, MAX, MIN, MEDIAN, P95_PERCENTILE, P99_PERCENTILE"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Round-trip Time (ms)"
+ },
+ {
+ "name": "tcp_client_isn",
+ "type": "long",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Client ISN"
+ },
+ {
+ "name": "tcp_server_isn",
+ "type": "long",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Server ISN"
+ },
+ {
+ "name": "packet_capture_file",
+ "type": "string",
+ "doc": {
+ "allow_query": "false",
+ "visibility": "enabled",
+ "constraints": {
+ "type": "file"
+ },
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Packet Capture File"
+ },
+ {
+ "name": "in_src_mac",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Incoming Source MAC"
+ },
+ {
+ "name": "out_src_mac",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Outgoing Source MAC"
+ },
+ {
+ "name": "in_dest_mac",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Incoming Destination MAC"
+ },
+ {
+ "name": "out_dest_mac",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Outgoing Destination MAC"
+ },
+ {
+ "name": "encapsulation",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Encapsulation"
+ },
+ {
+ "name": "dup_traffic_flag",
+ "type": "int",
+ "doc": {
+ "constraints": {
+ "operator_functions": "=,!=,in,not in"
+ },
+ "data": [
+ {
+ "code": "0",
+ "value": "No"
+ },
+ {
+ "code": "1",
+ "value": "Yes"
+ }
+ ],
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Duplicate Traffic Flag"
+ },
+ {
+ "name": "tunnel_endpoint_a_desc",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Tunnel Endpoint A Description"
+ },
+ {
+ "name": "tunnel_endpoint_b_desc",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Tunnel Endpoint B Description"
+ },
+ {
+ "name": "http_url",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "HTTP.URL"
+ },
+ {
+ "name": "http_host",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "HTTP.Host"
+ },
+ {
+ "name": "http_request_line",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "HTTP.Request Line"
+ },
+ {
+ "name": "http_response_line",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "HTTP.Response Line"
+ },
+ {
+ "name": "http_request_content_length",
+ "type": "long",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "HTTP.Request Content-Length"
+ },
+ {
+ "name": "http_request_content_type",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "HTTP.Request Content-Type"
+ },
+ {
+ "name": "http_response_content_length",
+ "type": "long",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "HTTP.Response Content-Length"
+ },
+ {
+ "name": "http_response_content_type",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "HTTP.Response Content-Type"
+ },
+ {
+ "name": "http_request_body",
+ "type": "string",
+ "doc": {
+ "allow_query": "false",
+ "constraints": {
+ "type": "file"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "HTTP.Request Body"
+ },
+ {
+ "name": "http_response_body",
+ "type": "string",
+ "doc": {
+ "allow_query": "false",
+ "constraints": {
+ "type": "file"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "HTTP.Response Body"
+ },
+ {
+ "name": "http_proxy_flag",
+ "type": "int",
+ "doc": {
+ "visibility": "hidden",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "HTTP.Proxy Flag"
+ },
+ {
+ "name": "http_sequence",
+ "type": "int",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "HTTP.Sequence"
+ },
+ {
+ "name": "http_cookie",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "HTTP.Cookie"
+ },
+ {
+ "name": "http_referer",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "HTTP.Referer"
+ },
+ {
+ "name": "http_user_agent",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "HTTP.User-Agent"
+ },
+ {
+ "name": "http_set_cookie",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "HTTP.Set-Cookie"
+ },
+ {
+ "name": "http_version",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "HTTP.Version"
+ },
+ {
+ "name": "http_status_code",
+ "type": "int",
+ "doc": {
+ "visibility": "enabled",
+ "constraints": {
+ "aggregation_functions": "COUNT, COUNT_DISTINCT"
+ },
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "HTTP.Status Code"
+ },
+ {
+ "name": "http_response_latency_ms",
+ "type": "int",
+ "doc": {
+ "constraints": {
+ "type": "decimal",
+ "aggregation_functions": "AVG, MAX, MIN, MEDIAN, P95_PERCENTILE, P99_PERCENTILE"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "HTTP.Response Latency (ms)"
+ },
+ {
+ "name": "http_action_file_size",
+ "type": "long",
+ "doc": {
+ "constraints": {
+ "type": "bytes"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "HTTP.Action File Size"
+ },
+ {
+ "name": "http_session_duration_ms",
+ "type": "int",
+ "doc": {
+ "constraints": {
+ "type": "decimal",
+ "aggregation_functions": "AVG, MAX, MIN, MEDIAN, P95_PERCENTILE, P99_PERCENTILE"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "HTTP.Session Duration (ms)"
+ },
+ {
+ "name": "mail_protocol_type",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "MAIL.Protocol Type"
+ },
+ {
+ "name": "mail_account",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "MAIL.Account"
+ },
+ {
+ "name": "mail_from_cmd",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "MAIL.From CMD"
+ },
+ {
+ "name": "mail_to_cmd",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "MAIL.To CMD"
+ },
+ {
+ "name": "mail_from",
+ "type": "string",
+ "doc": {
+ "constraints": {
+ "type": "email"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "MAIL.From"
+ },
+ {
+ "name": "mail_password",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "MAIL.Password"
+ },
+ {
+ "name": "mail_to",
+ "type": "string",
+ "doc": {
+ "constraints": {
+ "type": "email"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "MAIL.To"
+ },
+ {
+ "name": "mail_cc",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "MAIL.CC"
+ },
+ {
+ "name": "mail_bcc",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "MAIL.BCC"
+ },
+ {
+ "name": "mail_subject",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "MAIL.Subject"
+ },
+ {
+ "name": "mail_subject_charset",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "MAIL.Subject Charset"
+ },
+ {
+ "name": "mail_attachment_name",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "MAIL.Attachment Name"
+ },
+ {
+ "name": "mail_attachment_name_charset",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "MAIL.Attachment Name Charset"
+ },
+ {
+ "name": "mail_eml_file",
+ "type": "string",
+ "doc": {
+ "constraints": {
+ "type": "file"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "MAIL.EML File"
+ },
+ {
+ "name": "dns_message_id",
+ "type": "int",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "DNS.Message ID"
+ },
+ {
+ "name": "dns_qr",
+ "type": "int",
+ "doc": {
+ "constraints": {
+ "operator_functions": "=,!=,in,not in"
+ },
+ "data": [
+ {
+ "code": "0",
+ "value": "QUERY"
+ },
+ {
+ "code": "1",
+ "value": "RESPONSE"
+ }
+ ],
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "DNS.QR"
+ },
+ {
+ "name": "dns_opcode",
+ "type": "int",
+ "doc": {
+ "constraints": {
+ "operator_functions": "=,!=,in,not in",
+ "aggregation_functions": "COUNT, COUNT_DISTINCT"
+ },
+ "data": [
+ {
+ "code": "0",
+ "value": "QUERY"
+ },
+ {
+ "code": "1",
+ "value": "IQUERY"
+ },
+ {
+ "code": "2",
+ "value": "STATUS"
+ },
+ {
+ "code": "5",
+ "value": "UPDATE"
+ }
+ ],
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "DNS.OPCODE"
+ },
+ {
+ "name": "dns_aa",
+ "type": "int",
+ "doc": {
+ "visibility": "enabled",
+ "constraints": {
+ "aggregation_functions": "COUNT, COUNT_DISTINCT"
+ },
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "DNS.AA"
+ },
+ {
+ "name": "dns_tc",
+ "type": "int",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "DNS.TC"
+ },
+ {
+ "name": "dns_rd",
+ "type": "int",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "DNS.RD"
+ },
+ {
+ "name": "dns_ra",
+ "type": "int",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "DNS.RA"
+ },
+ {
+ "name": "dns_rcode",
+ "type": "int",
+ "doc": {
+ "data": [
+ {
+ "code": 0,
+ "value": "NoError"
+ },
+ {
+ "code": 1,
+ "value": "FormErr"
+ },
+ {
+ "code": 2,
+ "value": "ServFail"
+ },
+ {
+ "code": 3,
+ "value": "NXDomain"
+ },
+ {
+ "code": 4,
+ "value": "NotImp"
+ },
+ {
+ "code": 5,
+ "value": "Refused"
+ },
+ {
+ "code": 6,
+ "value": "YXDomain"
+ },
+ {
+ "code": 7,
+ "value": "YXRRSet"
+ },
+ {
+ "code": 8,
+ "value": "NXRRSet"
+ },
+ {
+ "code": 9,
+ "value": "NotAuth"
+ },
+ {
+ "code": 10,
+ "value": "NotZone"
+ },
+ {
+ "code": 16,
+ "value": "BADSIG"
+ },
+ {
+ "code": 17,
+ "value": "BADKEY"
+ },
+ {
+ "code": 18,
+ "value": "BADTIME"
+ },
+ {
+ "code": 19,
+ "value": "BADMODE"
+ },
+ {
+ "code": 20,
+ "value": "BADNAME"
+ },
+ {
+ "code": 21,
+ "value": "BADALG"
+ }
+ ],
+ "visibility": "enabled",
+ "constraints": {
+ "operator_functions": "=,!=,in,not in",
+ "aggregation_functions": "COUNT, COUNT_DISTINCT"
+ },
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "DNS.RCODE"
+ },
+ {
+ "name": "dns_qdcount",
+ "type": "int",
+ "doc": {
+ "constraints": {
+ "type": "decimal"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "DNS.QDCOUNT"
+ },
+ {
+ "name": "dns_ancount",
+ "type": "int",
+ "doc": {
+ "constraints": {
+ "type": "decimal"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "DNS.ANCOUNT"
+ },
+ {
+ "name": "dns_nscount",
+ "type": "int",
+ "doc": {
+ "constraints": {
+ "type": "decimal"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "DNS.NSCOUNT"
+ },
+ {
+ "name": "dns_arcount",
+ "type": "int",
+ "doc": {
+ "constraints": {
+ "type": "decimal"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "DNS.ARCOUNT"
+ },
+ {
+ "name": "dns_qname",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "DNS.QNAME"
+ },
+ {
+ "name": "dns_qtype",
+ "type": "int",
+ "doc": {
+ "constraints": {
+ "operator_functions": "=,!=,in,not in",
+ "aggregation_functions": "COUNT, COUNT_DISTINCT"
+ },
+ "data": [
+ {
+ "code": "1",
+ "value": "A"
+ },
+ {
+ "code": "2",
+ "value": "NS"
+ },
+ {
+ "code": "3",
+ "value": "MD"
+ },
+ {
+ "code": "4",
+ "value": "MF"
+ },
+ {
+ "code": "5",
+ "value": "CNAME"
+ },
+ {
+ "code": "6",
+ "value": "SOA"
+ },
+ {
+ "code": "7",
+ "value": "MB"
+ },
+ {
+ "code": "8",
+ "value": "MG"
+ },
+ {
+ "code": "9",
+ "value": "MR"
+ },
+ {
+ "code": "10",
+ "value": "NULL"
+ },
+ {
+ "code": "11",
+ "value": "WKS"
+ },
+ {
+ "code": "12",
+ "value": "PTR"
+ },
+ {
+ "code": "13",
+ "value": "HINFO"
+ },
+ {
+ "code": "14",
+ "value": "MINFO"
+ },
+ {
+ "code": "15",
+ "value": "MX"
+ },
+ {
+ "code": "16",
+ "value": "TXT"
+ },
+ {
+ "code": "17",
+ "value": "RP"
+ },
+ {
+ "code": "18",
+ "value": "AFSDB"
+ },
+ {
+ "code": "19",
+ "value": "X25"
+ },
+ {
+ "code": "20",
+ "value": "ISDN"
+ },
+ {
+ "code": "21",
+ "value": "RT"
+ },
+ {
+ "code": "22",
+ "value": "NSAP"
+ },
+ {
+ "code": "23",
+            "value": "NSAP-PTR"
+ },
+ {
+ "code": "24",
+ "value": "SIG"
+ },
+ {
+ "code": "25",
+ "value": "KEY"
+ },
+ {
+ "code": "26",
+ "value": "PX"
+ },
+ {
+ "code": "27",
+ "value": "GPOS"
+ },
+ {
+ "code": "28",
+ "value": "AAAA"
+ },
+ {
+ "code": "29",
+ "value": "LOC"
+ },
+ {
+            "code": "30",
+            "value": "NXT"
+          },
+          {
+            "code": "31",
+            "value": "EID"
+          },
+          {
+            "code": "32",
+            "value": "NIMLOC"
+ },
+ {
+ "code": "33",
+ "value": "SRV"
+ },
+ {
+ "code": "34",
+ "value": "ATMA"
+ },
+ {
+ "code": "35",
+ "value": "NAPTR"
+ },
+ {
+ "code": "36",
+ "value": "KX"
+ },
+ {
+ "code": "37",
+ "value": "CERT"
+ },
+ {
+ "code": "38",
+ "value": "A6"
+ },
+ {
+ "code": "39",
+ "value": "DNAME"
+ },
+ {
+ "code": "40",
+ "value": "SINK"
+ },
+ {
+ "code": "41",
+ "value": "OPT"
+ },
+ {
+ "code": "42",
+ "value": "APL"
+ },
+ {
+ "code": "43",
+ "value": "DS"
+ },
+ {
+ "code": "44",
+ "value": "SSHFP"
+ },
+ {
+ "code": "45",
+ "value": "IPSECKEY"
+ },
+ {
+ "code": "46",
+ "value": "RRSIG"
+ },
+ {
+ "code": "47",
+ "value": "NSEC"
+ },
+ {
+ "code": "48",
+ "value": "DNSKEY"
+ },
+ {
+ "code": "49",
+ "value": "DHCID"
+ },
+ {
+ "code": "50",
+ "value": "NSEC3"
+ },
+ {
+ "code": "51",
+ "value": "NSEC3PARAM"
+ },
+ {
+ "code": "52",
+ "value": "TLSA"
+ },
+ {
+ "code": "53",
+ "value": "SMIMEA"
+ },
+ {
+ "code": "55",
+ "value": "HIP"
+ },
+ {
+ "code": "59",
+ "value": "CDS"
+ },
+ {
+ "code": "60",
+ "value": "CDNSKEY"
+ },
+ {
+ "code": "61",
+ "value": "OPENPGPKEY"
+ },
+ {
+ "code": "62",
+ "value": "CSYNC"
+ },
+ {
+ "code": "63",
+ "value": "ZONEMD"
+ },
+ {
+ "code": "64",
+ "value": "SVCB"
+ },
+ {
+ "code": "65",
+ "value": "HTTPS"
+ },
+ {
+ "code": "99",
+ "value": "SPF"
+ },
+ {
+ "code": "100",
+ "value": "UINFO"
+ },
+ {
+ "code": "101",
+ "value": "UID"
+ },
+ {
+ "code": "102",
+ "value": "GID"
+ },
+ {
+ "code": "103",
+ "value": "UNSPEC"
+ },
+ {
+ "code": "108",
+ "value": "EUI48"
+ },
+ {
+ "code": "109",
+ "value": "EUI64"
+ },
+ {
+ "code": "249",
+ "value": "TKEY"
+ },
+ {
+ "code": "250",
+ "value": "TSIG"
+ },
+ {
+ "code": "251",
+ "value": "IXFR"
+ },
+ {
+ "code": "252",
+ "value": "AXFR"
+ },
+ {
+ "code": "253",
+ "value": "MAILB"
+ },
+ {
+ "code": "254",
+ "value": "MAILA"
+ },
+ {
+ "code": "255",
+ "value": "*"
+ },
+ {
+ "code": "256",
+ "value": "URI"
+ },
+ {
+ "code": "257",
+ "value": "CAA"
+ },
+ {
+ "code": "32768",
+ "value": "TA"
+ },
+ {
+ "code": "32769",
+ "value": "DLV"
+ },
+ {
+ "code": "65521",
+ "value": "INTEGRITY"
+ }
+ ],
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "DNS.QTYPE"
+ },
+ {
+ "name": "dns_qclass",
+ "type": "int",
+ "doc": {
+ "visibility": "enabled",
+ "constraints": {
+ "operator_functions": "=,!=,in,not in",
+ "aggregation_functions": "COUNT, COUNT_DISTINCT"
+ },
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "DNS.QCLASS"
+ },
+ {
+ "name": "dns_cname",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "DNS.CNAME"
+ },
+ {
+ "name": "dns_sub",
+ "type": "int",
+ "doc": {
+ "constraints": {
+ "operator_functions": "=,!=,in,not in"
+ },
+ "data": [
+ {
+ "code": "1",
+ "value": "DNS"
+ },
+ {
+ "code": "2",
+ "value": "DNSSEC"
+ }
+ ],
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "DNS.SUB"
+ },
+ {
+ "name": "dns_rr",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "DNS.RR"
+ },
+ {
+ "name": "dns_response_latency_ms",
+ "type": "int",
+ "doc": {
+ "constraints": {
+ "type": "decimal",
+ "aggregation_functions": "AVG, MAX, MIN, MEDIAN, P95_PERCENTILE, P99_PERCENTILE"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "DNS.Response Latency (ms)"
+ },
+ {
+ "name": "ssl_version",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "SSL.Version"
+ },
+ {
+ "name": "ssl_sni",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "SSL.SNI"
+ },
+ {
+ "name": "ssl_san",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "SSL.SAN"
+ },
+ {
+ "name": "ssl_cn",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "SSL.CN"
+ },
+ {
+ "name": "ssl_handshake_latency_ms",
+ "type": "int",
+ "doc": {
+ "constraints": {
+ "type": "decimal",
+ "aggregation_functions": "AVG, MAX, MIN, MEDIAN, P95_PERCENTILE, P99_PERCENTILE"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "SSL.Handshake Latency (ms)"
+ },
+ {
+ "name": "ssl_ja3_hash",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "SSL.JA3 Hash"
+ },
+ {
+ "name": "ssl_ja3s_hash",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "SSL.JA3S Hash"
+ },
+ {
+ "name": "ssl_cert_issuer",
+ "type": "string",
+ "doc": {
+ "constraints": {
+ "type": "items"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "SSL.Issuer"
+ },
+ {
+ "name": "ssl_cert_subject",
+ "type": "string",
+ "doc": {
+ "constraints": {
+ "type": "items"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "SSL.Subject"
+ },
+ {
+ "name": "ssl_esni_flag",
+ "type": "int",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "SSL.ESNI Flag"
+ },
+ {
+ "name": "ssl_ech_flag",
+ "type": "int",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+        "label": "SSL.ECH Flag"
+ },
+ {
+ "name": "dtls_cookie",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "DTLS.Cookie"
+ },
+ {
+ "name": "dtls_version",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "DTLS.Version"
+ },
+ {
+ "name": "dtls_sni",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "DTLS.SNI"
+ },
+ {
+ "name": "dtls_san",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "DTLS.SAN"
+ },
+ {
+ "name": "dtls_cn",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "DTLS.CN"
+ },
+ {
+ "name": "dtls_handshake_latency_ms",
+ "type": "int",
+ "doc": {
+ "constraints": {
+ "type": "decimal",
+ "aggregation_functions": "AVG, MAX, MIN, MEDIAN, P95_PERCENTILE, P99_PERCENTILE"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "DTLS.Handshake Latency (ms)"
+ },
+ {
+ "name": "dtls_ja3_fingerprint",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "DTLS.JA3 Fingerprint"
+ },
+ {
+ "name": "dtls_ja3_hash",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "DTLS.JA3 Hash"
+ },
+ {
+ "name": "dtls_cert_issuer",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "DTLS.Certificate Issuer"
+ },
+ {
+ "name": "dtls_cert_subject",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "DTLS.Certificate Subject"
+ },
+ {
+ "name": "quic_version",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "QUIC.Version"
+ },
+ {
+ "name": "quic_sni",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "QUIC.SNI"
+ },
+ {
+ "name": "quic_user_agent",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "QUIC.User-Agent"
+ },
+ {
+ "name": "ftp_account",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "FTP.Account"
+ },
+ {
+ "name": "ftp_url",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "FTP.URL"
+ },
+ {
+ "name": "ftp_link_type",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "FTP.Link Type"
+ },
+ {
+ "name": "sip_call_id",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "SIP.Call-ID"
+ },
+ {
+ "name": "sip_originator_description",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "SIP.Originator"
+ },
+ {
+ "name": "sip_responder_description",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "SIP.Responder"
+ },
+ {
+ "name": "sip_user_agent",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "SIP.User-Agent"
+ },
+ {
+ "name": "sip_server",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "SIP.Server"
+ },
+ {
+ "name": "sip_originator_sdp_connect_ip",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "SIP.Originator IP"
+ },
+ {
+ "name": "sip_originator_sdp_media_port",
+ "type": "int",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "SIP.Originator Port"
+ },
+ {
+ "name": "sip_originator_sdp_media_type",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "SIP.Originator Media Type"
+ },
+ {
+ "name": "sip_originator_sdp_content",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "SIP.Originator Content"
+ },
+ {
+ "name": "sip_responder_sdp_connect_ip",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "SIP.Responder IP"
+ },
+ {
+ "name": "sip_responder_sdp_media_port",
+ "type": "int",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "SIP.Responder Port"
+ },
+ {
+ "name": "sip_responder_sdp_media_type",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "SIP.Responder Media Type"
+ },
+ {
+ "name": "sip_responder_sdp_content",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "SIP.Responder Content"
+ },
+ {
+ "name": "sip_duration_s",
+ "type": "int",
+ "doc": {
+ "constraints": {
+ "type": "decimal"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "SIP.Duration (s)"
+ },
+ {
+ "name": "sip_bye",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "SIP.Bye"
+ },
+ {
+ "name": "rtp_payload_type_c2s",
+ "type": "int",
+ "doc": {
+ "constraints": {
+ "operator_functions": "=,!=,in,not in"
+ },
+ "data": [
+ {
+ "code": "0",
+ "value": "PCMU"
+ },
+ {
+ "code": "1",
+ "value": "1016"
+ },
+ {
+ "code": "2",
+ "value": "G721"
+ },
+ {
+ "code": "3",
+ "value": "GSM"
+ },
+ {
+ "code": "4",
+ "value": "G723"
+ },
+ {
+ "code": "5",
+ "value": "DVI4_8000"
+ },
+ {
+ "code": "6",
+ "value": "DVI4_16000"
+ },
+ {
+ "code": "7",
+ "value": "LPC"
+ },
+ {
+ "code": "8",
+ "value": "PCMA"
+ },
+ {
+ "code": "9",
+ "value": "G722"
+ },
+ {
+ "code": "10",
+ "value": "L16_STEREO"
+ },
+ {
+ "code": "11",
+ "value": "L16_MONO"
+ },
+ {
+ "code": "12",
+ "value": "QCELP"
+ },
+ {
+ "code": "13",
+ "value": "CN"
+ },
+ {
+ "code": "14",
+ "value": "MPA"
+ },
+ {
+ "code": "15",
+ "value": "G728"
+ },
+ {
+ "code": "16",
+ "value": "DVI4_11025"
+ },
+ {
+ "code": "17",
+ "value": "DVI4_22050"
+ },
+ {
+ "code": "18",
+ "value": "G729"
+ },
+ {
+ "code": "19",
+ "value": "CN_OLD"
+ },
+ {
+ "code": "25",
+ "value": "CELB"
+ },
+ {
+ "code": "26",
+ "value": "JPEG"
+ },
+ {
+ "code": "28",
+ "value": "NV"
+ },
+ {
+ "code": "31",
+ "value": "H261"
+ },
+ {
+ "code": "32",
+ "value": "MPV"
+ },
+ {
+ "code": "33",
+ "value": "MP2T"
+ },
+ {
+ "code": "34",
+ "value": "H263"
+ }
+ ],
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "RTP.Payload Type (C2S)"
+ },
+ {
+ "name": "rtp_payload_type_s2c",
+ "type": "int",
+ "doc": {
+ "constraints": {
+ "operator_functions": "=,!=,in,not in"
+ },
+ "data": [
+ {
+ "code": "0",
+ "value": "PCMU"
+ },
+ {
+ "code": "1",
+ "value": "1016"
+ },
+ {
+ "code": "2",
+ "value": "G721"
+ },
+ {
+ "code": "3",
+ "value": "GSM"
+ },
+ {
+ "code": "4",
+ "value": "G723"
+ },
+ {
+ "code": "5",
+ "value": "DVI4_8000"
+ },
+ {
+ "code": "6",
+ "value": "DVI4_16000"
+ },
+ {
+ "code": "7",
+ "value": "LPC"
+ },
+ {
+ "code": "8",
+ "value": "PCMA"
+ },
+ {
+ "code": "9",
+ "value": "G722"
+ },
+ {
+ "code": "10",
+ "value": "L16_STEREO"
+ },
+ {
+ "code": "11",
+ "value": "L16_MONO"
+ },
+ {
+ "code": "12",
+ "value": "QCELP"
+ },
+ {
+ "code": "13",
+ "value": "CN"
+ },
+ {
+ "code": "14",
+ "value": "MPA"
+ },
+ {
+ "code": "15",
+ "value": "G728"
+ },
+ {
+ "code": "16",
+ "value": "DVI4_11025"
+ },
+ {
+ "code": "17",
+ "value": "DVI4_22050"
+ },
+ {
+ "code": "18",
+ "value": "G729"
+ },
+ {
+ "code": "19",
+ "value": "CN_OLD"
+ },
+ {
+ "code": "25",
+ "value": "CELB"
+ },
+ {
+ "code": "26",
+ "value": "JPEG"
+ },
+ {
+ "code": "28",
+ "value": "NV"
+ },
+ {
+ "code": "31",
+ "value": "H261"
+ },
+ {
+ "code": "32",
+ "value": "MPV"
+ },
+ {
+ "code": "33",
+ "value": "MP2T"
+ },
+ {
+ "code": "34",
+ "value": "H263"
+ }
+ ],
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "RTP.Payload Type (S2C)"
+ },
+ {
+ "name": "rtp_pcap_path",
+ "type": "string",
+ "doc": {
+ "allow_query": "false",
+ "constraints": {
+ "type": "file"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "RTP.PCAP"
+ },
+ {
+ "name": "rtp_originator_dir",
+ "type": "int",
+ "doc": {
+ "constraints": {
+ "operator_functions": "=,!=,in,not in"
+ },
+ "data": [
+ {
+ "code": "0",
+ "value": "unknown"
+ },
+ {
+ "code": "1",
+ "value": "c2s"
+ },
+ {
+ "code": "2",
+ "value": "s2c"
+ }
+ ],
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "RTP.Direction"
+ },
+ {
+ "name": "ssh_version",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "SSH.Version"
+ },
+ {
+ "name": "ssh_auth_success",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "SSH.Authentication Result"
+ },
+ {
+ "name": "ssh_client_version",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "SSH.Client Version"
+ },
+ {
+ "name": "ssh_server_version",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "SSH.Server Version"
+ },
+ {
+ "name": "ssh_cipher_alg",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "SSH.Encryption Algorithm"
+ },
+ {
+ "name": "ssh_mac_alg",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "SSH.Signing Algorithm"
+ },
+ {
+ "name": "ssh_compression_alg",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "SSH.Compression Algorithm"
+ },
+ {
+ "name": "ssh_kex_alg",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "SSH.Key Exchange Algorithm"
+ },
+ {
+ "name": "ssh_host_key_alg",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "SSH.Server Host Key Algorithm"
+ },
+ {
+ "name": "ssh_host_key",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "SSH.Server Key Fingerprint"
+ },
+ {
+ "name": "ssh_hassh",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "SSH.HASSH"
+ },
+ {
+ "name": "stratum_cryptocurrency",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Stratum.Cryptocurrency"
+ },
+ {
+ "name": "stratum_mining_pools",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Stratum.Mining Pools"
+ },
+ {
+ "name": "stratum_mining_program",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Stratum.Mining Program"
+ },
+ {
+ "name": "stratum_mining_subscribe",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "Stratum.Mining Subscribe"
+ },
+ {
+ "name": "rdp_cookie",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "RDP.Cookie"
+ },
+ {
+ "name": "rdp_security_protocol",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "RDP.Security Protocol"
+ },
+ {
+ "name": "rdp_client_channels",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "RDP.Client Channels"
+ },
+ {
+ "name": "rdp_keyboard_layout",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "RDP.Keyboard Layout"
+ },
+ {
+ "name": "rdp_client_version",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "RDP.Client Version"
+ },
+ {
+ "name": "rdp_client_name",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "RDP.Client Name"
+ },
+ {
+ "name": "rdp_client_product_id",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "RDP.Client Product ID"
+ },
+ {
+ "name": "rdp_desktop_width",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "RDP.Desktop Width"
+ },
+ {
+ "name": "rdp_desktop_height",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "RDP.Desktop Height"
+ },
+ {
+ "name": "rdp_requested_color_depth",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "RDP.Requested Color Depth"
+ },
+ {
+ "name": "rdp_certificate_type",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "RDP.Certificate Type"
+ },
+ {
+ "name": "rdp_certificate_count",
+ "type": "int",
+ "doc": {
+ "constraints": {
+ "type": "decimal"
+ },
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "RDP.Certificate Count"
+ },
+ {
+ "name": "rdp_certificate_permanent",
+ "type": "int",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "RDP.Certificate Permanent"
+ },
+ {
+ "name": "rdp_encryption_level",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "RDP.Encryption Level"
+ },
+ {
+ "name": "rdp_encryption_method",
+ "type": "string",
+ "doc": {
+ "visibility": "enabled",
+ "ttl": 2592000,
+ "size": 0
+ },
+ "label": "RDP.Encryption Method"
+ }
+ ]
+}
diff --git a/src/test/resources/examples/validDSLRequestTest.json b/src/test/resources/examples/validDSLRequestTest.json
new file mode 100644
index 00000000..c7e2225c
--- /dev/null
+++ b/src/test/resources/examples/validDSLRequestTest.json
@@ -0,0 +1,55 @@
+{
+ "query": {
+ "dataSource": "IP_LEARNING_VIEW",
+ "parameters": {
+ "intervals": [
+ "2024-03-14 00:00:00/2024-03-15 00:00:00"
+ ],
+ "limit": "3",
+ "match": [
+ {
+ "fieldKey": "PROTOCOL",
+ "fieldValues": [
+ "SSL",
+ "HTTP",
+ "DNS"
+ ],
+ "type": "exactly"
+ },
+ {
+ "fieldKey": "FQDN_NAME",
+ "fieldValues": [
+ "itunes.apple",
+ "itunes.apple.com"
+ ],
+ "type": "exactly"
+ }
+ ],
+ "range": [
+ {
+ "fieldKey": "VSYS_ID",
+ "fieldValues": [
+ 1
+ ],
+ "type": "eq"
+ },
+ {
+ "fieldKey": "DEPTH",
+ "fieldValues": [
+ 1
+ ],
+ "type": "eq"
+ },
+ {
+ "fieldKey": "UNIQ_CIP",
+ "fieldValues": [
+ 12
+ ],
+ "type": "gt"
+ }
+ ],
+ "sort": []
+ },
+ "queryType": "iplearning"
+ }
+} \ No newline at end of file
diff --git a/src/test/resources/parameters/applicationAndProtocolTest.json b/src/test/resources/parameters/applicationAndProtocolTest.json
new file mode 100644
index 00000000..2a8c043d
--- /dev/null
+++ b/src/test/resources/parameters/applicationAndProtocolTest.json
@@ -0,0 +1,60 @@
+{
+ "application_and_protocol_summary": {
+ "name": "application-and-protocol-summary",
+ "granularity": "PT5S",
+ "filter": "vsys_id = 1",
+ "intervals": [
+ "2024-01-30 00:00:00/2024-01-31 00:00:00"
+ ]
+ },
+ "application_and_protocol_tree_composition": {
+ "name": "application-and-protocol-tree-composition",
+ "filter": "vsys_id = 1",
+ "intervals": [
+ "2024-01-30T00:00:00+08:00/2024-01-31T00:00:00+08:00"
+ ]
+ },
+ "application_and_protocol_tree_throughput": {
+ "name": "application-and-protocol-tree-throughput",
+ "granularity": "PT1H",
+ "filter": " (vsys_id = 1) AND (protocol_stack_id = 'ETHERNET.IPv4' OR ( protocol_stack_id LIKE 'ETHERNET.IPv4.%' AND NOT CONTAINS_STRING(REPLACE(protocol_stack_id, 'ETHERNET.IPv4.', ''), '.')))",
+ "intervals": [
+ "2024-01-30T00:00:00.000+01:00/2024-01-31T00:00:00.000+01:00"
+ ]
+ },
+ "application_and_protocol_top_apps": {
+ "name": "application-and-protocol-top-apps",
+ "filter": "vsys_id = 1",
+ "intervals": [
+ "2024-01-30 00:00:00/2024-01-31 00:00:00"
+ ],
+ "limit": 10
+ },
+ "application_and_protocol_app_summary": {
+ "name": "application-and-protocol-app-summary",
+ "execution_mode":"oneshot",
+ "filter": " vsys_id = 1 AND app_name IN ('ftp', 'http')",
+ "intervals": [
+ "2024-01-30 00:00:00/2024-01-31 00:00:00"
+ ]
+ },
+ "application_and_protocol_app_related_internal_ips": {
+ "name": "application-and-protocol-app-related-internal-ips",
+ "execution_mode":"oneshot",
+ "filter": "vsys_id = 1",
+ "intervals": [
+ "2024-01-30 00:00:00/2024-01-31 00:00:00"
+ ],
+ "limit": 10
+ },
+ "application_and_protocol_app_throughput": {
+ "name": "application-and-protocol-app-throughput",
+ "execution_mode":"oneshot",
+ "granularity": "PT15S",
+ "filter": "vsys_id = 1",
+ "intervals": [
+ "2024-01-30 00:00:00/2024-01-31 00:00:00"
+ ],
+ "limit": 10
+ }
+} \ No newline at end of file
diff --git a/src/test/resources/parameters/dslAutoGranularityTest.json b/src/test/resources/parameters/dslAutoGranularityTest.json
new file mode 100644
index 00000000..2f8a9d4f
--- /dev/null
+++ b/src/test/resources/parameters/dslAutoGranularityTest.json
@@ -0,0 +1,27 @@
+{
+ "application_and_protocol_summary_auto": {
+ "name": "application-and-protocol-summary",
+ "filter": "vsys_id = 1"
+ },
+ "application_and_protocol_summary_const": {
+ "name": "application-and-protocol-summary",
+ "granularity": "PT1S",
+ "filter": "vsys_id = 1",
+    "intervals": [
+ "2019-01-01 00:00:00/2019-10-01 00:00:10"
+ ]
+ },
+ "application_and_protocol_summary_auto_const_range": {
+ "name": "application-and-protocol-summary",
+ "granularity": "CHART_GRANULARITY('2019-01-01 00:00:00', '2019-10-01 00:00:10')",
+ "filter": "vsys_id = 1",
+    "intervals": [
+ "2019-01-01 00:00:00/2019-10-01 00:00:10"
+ ]
+ },
+ "traffic_spectrum_network_throughput_trend_auto": {
+ "name": "traffic-spectrum-network-throughput-trend",
+ "filter": "vsys_id in (1) ",
+ "execution_mode": "oneshot"
+ }
+} \ No newline at end of file
diff --git a/src/test/resources/parameters/entityTest.json b/src/test/resources/parameters/entityTest.json
deleted file mode 100644
index 5a94461e..00000000
--- a/src/test/resources/parameters/entityTest.json
+++ /dev/null
@@ -1,132 +0,0 @@
-{
- "activeClientIp": {
- "clientId": null,
- "query": {
- "dataEngine": "BusinessEngine",
- "dataSource": "session_record",
- "limit": "10000",
- "parameters": {
- "match": [
- {
- "type": "exactly",
- "fieldKey": "app",
- "fieldValues": [
- "Freegate"
- ]
- }
- ],
- "range": [
- {
- "type": "eq",
- "fieldKey": "vsys_id",
- "fieldValues": [
- 1
- ]
- }
- ],
- "intervals": [
- "2020-08-15T00:00:00.865Z/2022-08-15T00:30:00.865Z"
- ]
- }
- }
- },
- "topServerIp": {
- "clientId": null,
- "query": {
- "dataEngine": "BusinessEngine",
- "dataSource": "session_record",
- "limit": "10000",
- "parameters": {
- "range": [
- {
- "type": "eq",
- "fieldKey": "vsys_id",
- "fieldValues": [
- 1
- ]
- }
- ],
- "intervals": [
- "2020-08-15T00:00:00Z/2022-08-16T00:00:00Z"
- ]
- }
- }
- },
- "topSni": {
- "clientId": null,
- "query": {
- "dataEngine": "BusinessEngine",
- "dataSource": "session_record",
- "limit": "10000",
- "parameters": {
- "range": [
- {
- "type": "eq",
- "fieldKey": "vsys_id",
- "fieldValues": [
- 1
- ]
- }
- ],
- "intervals": [
- "2020-08-15T00:00:00.865+08:00/2022-08-16T00:00:00.865+08:00"
- ]
- }
- }
- },
- "subScriberidPool": {
- "clientId":null,
- "query":{
- "dataEngine":"AnalysisEngine",
- "dataSource":"SUBSCRIBER_ID_VIEW",
- "parameters":{
- "match":[
- {
- "type":"exactly",
- "fieldKey":"SUBSCRIBER_ID",
- "fieldValues":[
- "test01",
- "test02"
- ]
- }
- ],
- "range":[
- {
- "type":"eq",
- "fieldKey":"vsys_id",
- "fieldValues":[
- 1
- ]
- }
- ]
- }
- }
- },
- "gtpc": {
- "clientId":null,
- "query":{
- "dataEngine":"AnalysisEngine",
- "dataSource":"gtpc_knowledge_base",
- "parameters":{
- "match":[
- {
- "type":"prefix",
- "fieldKey":"phone_number",
- "fieldValues":[
- "1761041"
- ]
- }
- ],
- "range":[
- {
- "type":"eq",
- "fieldKey":"vsys_id",
- "fieldValues":[
- 1
- ]
- }
- ]
- }
- }
- }
-}
diff --git a/src/test/resources/parameters/fieldDiscoveryTest.json b/src/test/resources/parameters/fieldDiscoveryTest.json
new file mode 100644
index 00000000..e6ed275b
--- /dev/null
+++ b/src/test/resources/parameters/fieldDiscoveryTest.json
@@ -0,0 +1,35 @@
+{
+ "field_discovery_default": {
+ "name": "field_discovery",
+ "data_source": "session_record",
+ "filter": "recv_time >= UNIX_TIMESTAMP(now()) - 500 AND recv_time <= UNIX_TIMESTAMP(now()) AND vsys_id = 1"
+ },
+ "field_discovery_sessions": {
+ "name": "field_discovery",
+ "data_source": "session_record",
+ "custom.field_discovery.metric": "sessions",
+ "custom.field_discovery.metric.fn": "count",
+ "filter": "recv_time >= UNIX_TIMESTAMP(now()) - 500 AND recv_time <= UNIX_TIMESTAMP(now()) AND vsys_id = 1"
+ },
+ "field_discovery_bytes": {
+ "name": "field_discovery",
+ "data_source": "session_record",
+ "custom.field_discovery.metric": "bytes",
+ "custom.field_discovery.metric.fn": "sum",
+ "filter": "recv_time >= UNIX_TIMESTAMP(now()) - 500 AND recv_time <= UNIX_TIMESTAMP(now()) AND vsys_id = 1"
+ },
+ "field_discovery_incoming_bytes": {
+ "name": "field_discovery",
+ "data_source": "session_record",
+ "custom.field_discovery.metric": "incoming_bytes",
+ "custom.field_discovery.metric.fn": "sum",
+ "filter": "recv_time >= UNIX_TIMESTAMP(now()) - 500 AND recv_time <= UNIX_TIMESTAMP(now()) AND vsys_id = 1"
+ },
+ "field_discovery_outgoing_bytes": {
+ "name": "field_discovery",
+ "data_source": "session_record",
+ "custom.field_discovery.metric": "outgoing_bytes",
+ "custom.field_discovery.metric.fn": "sum",
+ "filter": "recv_time >= UNIX_TIMESTAMP(now()) - 500 AND recv_time <= UNIX_TIMESTAMP(now()) AND vsys_id = 1"
+ }
+} \ No newline at end of file
diff --git a/src/test/resources/parameters/jobTest.json b/src/test/resources/parameters/jobTest.json
deleted file mode 100644
index 777418ff..00000000
--- a/src/test/resources/parameters/jobTest.json
+++ /dev/null
@@ -1,37 +0,0 @@
-{
- "field_discovery_default": {
- "query.type": "field_discovery",
- "query.data_source": "session_record",
- "custom.field_discovery.fields": [
- "log_id",
- "security_action"
- ],
- "custom.field_discovery.filter": "vsys_id in (1,2) and client_ip='192.168.0.1' AND server_port = 80"
- },
- "field_discovery_bytes": {
- "query.type": "field_discovery",
- "query.data_source": "session_record",
- "custom.field_discovery.metric": "bytes",
- "custom.field_discovery.metric.fn": "sum",
- "custom.field_discovery.fields": [
- "security_action",
- "proxy_action"
- ],
- "custom.field_discovery.filter": "vsys_id in (1,2) and client_ip='192.168.0.1' AND server_port = 80"
- },
- "long_term": {
- "query.type": "long_term",
- "query.data_source": "session_record",
- "custom.long_term.sql": "select client_ip, count(*) as count from session_record where vsys_id in (1,2) and client_ip='192.168.0.1' AND server_port = 80 group by client_ip order by count asc limit 10"
- },
- "report": {
- "query.type": "report",
- "query.data_source": "session_record",
- "custom.report.sql": "SELECT log_id, recv_time FROM session_record LIMIT 12 "
- },
- "statistics_top": {
- "query.type": "statistics",
- "query.data_source": "session_record",
- "custom.statistics.sql": "select client_ip, count(*) as count from session_record where vsys_id in (1,2) and client_ip='192.168.0.1' AND server_port = 80 group by client_ip order by count desc limit 10"
- }
-} \ No newline at end of file
diff --git a/src/test/resources/parameters/knowledgeBase.json b/src/test/resources/parameters/knowledgeBase.json
index c5eadaaa..8c2bd42e 100644
--- a/src/test/resources/parameters/knowledgeBase.json
+++ b/src/test/resources/parameters/knowledgeBase.json
@@ -1,7 +1,21 @@
{
- "publishTest": {
+ "publish": {
+ "kb_id": "test",
"name": "test",
- "format": "test",
- "type": "test"
+ "format": "format",
+ "category": "category",
+ "is_valid": 1
+ },
+ "update": {
+ "kb_id": "test",
+ "version": "latest"
+ },
+ "update_status": {
+ "kb_id": "test",
+ "version": "latest",
+ "is_valid": 0
+ },
+ "delete": {
+ "kb_id": "test"
}
} \ No newline at end of file
diff --git a/src/test/resources/parameters/recommendTest.json b/src/test/resources/parameters/recommendTest.json
new file mode 100644
index 00000000..34ad5030
--- /dev/null
+++ b/src/test/resources/parameters/recommendTest.json
@@ -0,0 +1,20 @@
+{
+ "ip_learning_fqdn_relate_ip": {
+ "name": "ip-learning-fqdn-relate-ip",
+ "filter": "VSYS_ID in (1,2,3,4,5) AND PROTOCOL in ('SSL', 'HTTP', 'DNS') AND DEPTH = 1 and UNIQ_NAME > 12 AND FQDN_NAME in ('google.com', 'itunes.apple.com')",
+ "intervals": [
+ "2024-01-30 00:00:00/2024-01-31 00:00:00"
+ ],
+ "limit": 100
+ },
+ "ip_learning_active_ip": {
+ "name": "ip-learning-active-ip",
+ "execution_mode": "oneshot",
+ "filter": "vsys_id in (1) AND 1=1",
+ "intervals": [
+ "2024-01-30 00:00:00/2024-01-31 00:00:00"
+ ],
+ "order_by": "BYTES_TOTAL desc, LAST_FOUND_TIME desc",
+ "limit": 1
+ }
+} \ No newline at end of file
diff --git a/src/test/resources/parameters/sqlAdHocTest.json b/src/test/resources/parameters/sqlAdHocTest.json
new file mode 100644
index 00000000..efc526fd
--- /dev/null
+++ b/src/test/resources/parameters/sqlAdHocTest.json
@@ -0,0 +1,29 @@
+{
+ "query_sql_default": {
+ "statement": "select * from session_record limit 1"
+ },
+ "query_sql_oneshot": {
+ "statement": "select * from session_record limit 1",
+ "execution_mode": "oneshot"
+ },
+ "query_sql_normal": {
+ "statement": "select * from session_record limit 1",
+ "execution_mode": "normal"
+ },
+ "query_sql_blocking": {
+ "statement": "select * from session_record limit 1",
+ "execution_mode": "blocking"
+ },
+ "query_sql_json": {
+ "statement": "select * from session_record limit 1",
+ "output_mode": "json"
+ },
+ "query_sql_csv": {
+ "statement": "select * from session_record limit 1",
+ "output_mode": "csv"
+ },
+ "query_sql_oneshot_error_trigger_sub_query": {
+ "statement": "SELECT COUNT_DISTINCT(client_ip) AS \"Client IP\" FROM security_event WHERE ((security_action = 'Deny')) AND recv_time >= UNIX_TIMESTAMP('2024-03-12T00:00:00+08:00') AND recv_time < UNIX_TIMESTAMP('2024-03-12T23:59:59+08:00') AND security_event.vsys_id IN (1) ORDER BY \"Client IP\" DESC LIMIT 20",
+ "execution_mode": "oneshot"
+ }
+} \ No newline at end of file
diff --git a/src/test/resources/parameters/sqlSavedTest.json b/src/test/resources/parameters/sqlSavedTest.json
new file mode 100644
index 00000000..dd128a67
--- /dev/null
+++ b/src/test/resources/parameters/sqlSavedTest.json
@@ -0,0 +1,6 @@
+{
+ "default": {
+ "statement": "select * from session_record limit 1",
+ "is_saved_query": 1
+ }
+} \ No newline at end of file
diff --git a/src/test/resources/parameters/unstructuredTest.json b/src/test/resources/parameters/unstructuredTest.json
deleted file mode 100644
index 64412dd7..00000000
--- a/src/test/resources/parameters/unstructuredTest.json
+++ /dev/null
@@ -1,54 +0,0 @@
-{
- "all": {
- "clientId": null,
- "query": {
- "dataEngine": "BusinessEngine",
- "dataSource": "tsg_galaxy_v3",
- "limit": "1000",
- "parameters": {
- "intervals": [
- "2023-03-01T00:00:00+08:00/2023-04-06T00:00:00+08:00"
- ]
- }
- }
- },
- "mail": {
- "clientId": null,
- "query": {
- "dataEngine": "BusinessEngine",
- "dataSource": "session_record",
- "limit": "1000",
- "parameters": {
- "intervals": [
- "2023-03-01T00:00:00+08:00/2023-04-06T00:00:00+08:00"
- ]
- }
- }
- },
- "http": {
- "clientId": null,
- "query": {
- "dataEngine": "BusinessEngine",
- "dataSource": "security_event",
- "limit": "1000",
- "parameters": {
- "intervals": [
- "2023-03-01T00:00:00+08:00/2023-04-06T00:00:00+08:00"
- ]
- }
- }
- },
- "pcap": {
- "clientId": null,
- "query": {
- "dataEngine": "BusinessEngine",
- "dataSource": "voip_record",
- "limit": "1000",
- "parameters": {
- "intervals": [
- "2023-03-01T00:00:00+08:00/2023-04-06T00:00:00+08:00"
- ]
- }
- }
- }
-} \ No newline at end of file