summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authordoufenghu <[email protected]>2022-03-24 14:32:35 +0800
committerdoufenghu <[email protected]>2022-03-24 14:32:35 +0800
commit21672fd043605ecd4718c8647f8543d90841f8f7 (patch)
tree5ebb3c160fed13492c47446ad342c9bc2451213d
parent9d85bb2dd4374176d4b5ada09afc6401bfd34b21 (diff)
* 修改原Syntax-validation接口,为其提供尽可能随机的样例数据
* 增加故障诊断SQL测试集parse选项,用于SQL语法解析校验
* 更改故障诊断SQL测试集默认选项,由validation改为parse
* 暂注释掉原解析工具类,调用验证接口功能
* 故障诊断业务类属于查询网关的一部分,引擎为统一查询入口,可内部引用业务类ApiService
-rw-r--r--config/nacos/config/fixed-127.0.0.1_8848-dev_nacos/data/config-data-tenant/dev/Galaxy/ck-queries-template.sql24
-rw-r--r--config/nacos/config/fixed-127.0.0.1_8848-dev_nacos/data/config-data-tenant/dev/Galaxy/galaxy-qgw-service.yml2
-rw-r--r--docs/release/release-354.md5
-rw-r--r--src/main/java/com/mesalab/common/base/BaseResult.java1
-rw-r--r--src/main/java/com/mesalab/common/enums/DiagnosisOptionEnum.java7
-rw-r--r--src/main/java/com/mesalab/common/utils/sqlparser/SQLSyntaxParserUtil.java2
-rw-r--r--src/main/java/com/mesalab/qgw/controller/DiagnosisController.java2
-rw-r--r--src/main/java/com/mesalab/qgw/dialect/ClickHouseDialect.java54
-rw-r--r--src/main/java/com/mesalab/qgw/model/api/udf/TIME_FLOOR_WITH_FILL.java2
-rw-r--r--src/main/java/com/mesalab/qgw/service/impl/DiagnosisServiceImpl.java176
-rw-r--r--src/test/java/com/mesalab/qgw/service/EngineTest.java4
11 files changed, 146 insertions, 133 deletions
diff --git a/config/nacos/config/fixed-127.0.0.1_8848-dev_nacos/data/config-data-tenant/dev/Galaxy/ck-queries-template.sql b/config/nacos/config/fixed-127.0.0.1_8848-dev_nacos/data/config-data-tenant/dev/Galaxy/ck-queries-template.sql
index 56b64c9d..51a37d1b 100644
--- a/config/nacos/config/fixed-127.0.0.1_8848-dev_nacos/data/config-data-tenant/dev/Galaxy/ck-queries-template.sql
+++ b/config/nacos/config/fixed-127.0.0.1_8848-dev_nacos/data/config-data-tenant/dev/Galaxy/ck-queries-template.sql
@@ -33,21 +33,21 @@ SELECT * FROM tsg_galaxy_v3.session_record AS session_record WHERE common_log_id
--Q17.Session Logs Sent to Database Trend(Time Grain 5 minute)
SELECT toUnixTimestamp(toDateTime(toStartOfInterval(toDateTime(common_recv_time),INTERVAL 5 MINUTE))) AS "Receive Time", count(common_log_id) AS "logs" FROM tsg_galaxy_v3.session_record AS session_record WHERE ( ( common_recv_time >= toUnixTimestamp(@start) AND common_recv_time < toUnixTimestamp(@end) ) ) GROUP BY "Receive Time" LIMIT 10000
--Q18.Traffic Bandwidth Trend(Time Grain 30 second)
-SELECT toDateTime(toUnixTimestamp(toDateTime(toStartOfInterval(toDateTime(common_recv_time),INTERVAL 30 SECOND)))) AS stat_time, sum(common_c2s_byte_num) AS bytes_sent, sum(common_s2c_byte_num) AS bytes_received, sum(common_c2s_byte_num + common_s2c_byte_num) AS bytes, sum(common_c2s_pkt_num + common_s2c_pkt_num) AS packets, sum(common_sessions) AS sessions FROM tsg_galaxy_v3.session_record AS session_record WHERE common_recv_time >= toDateTime(@start) AND common_recv_time < toDateTime(@end) GROUP BY stat_time ORDER BY stat_time ASC LIMIT 10000
+SELECT toDateTime(toUnixTimestamp(toDateTime(toStartOfInterval(toDateTime(common_recv_time),INTERVAL 30 SECOND)))) AS stat_time, sum(common_c2s_byte_num) AS bytes_sent, sum(common_s2c_byte_num) AS bytes_received, sum(common_c2s_byte_num + common_s2c_byte_num) AS bytes, sum(common_c2s_pkt_num + common_s2c_pkt_num) AS packets, sum(common_sessions) AS sessions FROM tsg_galaxy_v3.session_record AS session_record WHERE common_recv_time >= toDateTime(@start) AND common_recv_time < toDateTime(@end) GROUP BY stat_time ORDER BY stat_time ASC LIMIT 100
--Q19.Log Tend by Type (Time Grain 5 minute)
-SELECT toDateTime(toUnixTimestamp(toDateTime(toStartOfInterval(toDateTime(common_recv_time),INTERVAL 5 MINUTE)))) AS stat_time, common_schema_type AS type, sum(common_sessions) AS sessions, sum(common_c2s_byte_num + common_s2c_byte_num) AS bytes, sum(common_c2s_pkt_num + common_s2c_pkt_num) AS packets FROM tsg_galaxy_v3.session_record AS session_record WHERE common_recv_time >= toUnixTimestamp(@start) AND common_recv_time < toUnixTimestamp(@end) GROUP BY stat_time, common_schema_type ORDER BY stat_time ASC LIMIT 10000
+SELECT toDateTime(toUnixTimestamp(toDateTime(toStartOfInterval(toDateTime(common_recv_time),INTERVAL 5 MINUTE)))) AS stat_time, common_schema_type AS type, sum(common_sessions) AS sessions, sum(common_c2s_byte_num + common_s2c_byte_num) AS bytes, sum(common_c2s_pkt_num + common_s2c_pkt_num) AS packets FROM tsg_galaxy_v3.session_record AS session_record WHERE common_recv_time >= toUnixTimestamp(@start) AND common_recv_time < toUnixTimestamp(@end) GROUP BY stat_time, common_schema_type ORDER BY stat_time ASC LIMIT 100
--Q20.Traffic Metrics Analytic
SELECT round(sum(common_s2c_byte_num) * 8 / 300,2) AS trafficInBits, round(sum(common_c2s_byte_num) * 8 / 300,2) AS trafficOutBits, round(sum(common_s2c_byte_num + common_c2s_byte_num) * 8 / 300,2) AS trafficTotalBits, round(sum(common_s2c_pkt_num) / 300,2) AS trafficInPackets, round(sum(common_c2s_pkt_num) / 300,2) AS trafficOutPackets, round(sum(common_s2c_pkt_num + common_c2s_pkt_num) / 300,2) AS trafficTotalPackets, round(sum(common_sessions) / 300,2) AS sessions FROM tsg_galaxy_v3.session_record AS session_record WHERE common_recv_time >= toUnixTimestamp(@start) AND common_recv_time < toUnixTimestamp(@end)
--Q21.Traffic Endpoints Metrics Trend(Time Grain 5 minute)
-SELECT toUnixTimestamp(toDateTime(toStartOfInterval(toDateTime(common_recv_time),INTERVAL 5 MINUTE))) AS "Receive Time", uniq(common_internal_ip) AS "Unique Internal IP", uniq(common_external_ip) AS "Unique External IP", uniq(common_subscriber_id) AS "Unique Subscriber ID", sum(coalesce(common_c2s_byte_num + common_s2c_byte_num, 0)) AS "Bytes", sum(coalesce(common_sessions, 0)) AS "Sessions" FROM tsg_galaxy_v3.session_record AS session_record WHERE ( ( common_recv_time >= toDateTime(@start) AND common_recv_time < toDateTime(@end) ) ) GROUP BY "Receive Time" LIMIT 10000
+SELECT toUnixTimestamp(toDateTime(toStartOfInterval(toDateTime(common_recv_time),INTERVAL 5 MINUTE))) AS "Receive Time", uniq(common_internal_ip) AS "Unique Internal IP", uniq(common_external_ip) AS "Unique External IP", uniq(common_subscriber_id) AS "Unique Subscriber ID", sum(coalesce(common_c2s_byte_num + common_s2c_byte_num, 0)) AS "Bytes", sum(coalesce(common_sessions, 0)) AS "Sessions" FROM tsg_galaxy_v3.session_record AS session_record WHERE ( ( common_recv_time >= toDateTime(@start) AND common_recv_time < toDateTime(@end) ) ) GROUP BY "Receive Time" LIMIT 100
--Q22.Endpoint Unique Num by L4 Protocol
SELECT 'all' AS type, uniq(common_client_ip) AS client_ips, uniq(common_internal_ip) AS internal_ips, uniq(common_server_ip) AS server_ips, uniq(common_external_ip) AS external_ips, uniq(common_subscriber_id) as subscriber_ids FROM tsg_galaxy_v3.session_record AS session_record WHERE common_recv_time >= toDateTime(@start) AND common_recv_time < toDateTime(@end) UNION ALL SELECT 'tcp' AS type, uniq(common_client_ip) AS client_ips, uniq(common_internal_ip) AS internal_ips, uniq(common_server_ip) AS server_ips, uniq(common_external_ip) AS external_ips, uniq(common_subscriber_id) as subscriber_ids FROM tsg_galaxy_v3.session_record AS session_record WHERE common_recv_time >= toDateTime(@start) AND common_recv_time < toDateTime(@end) AND common_l4_protocol IN ( 'IPv4_TCP', 'IPv6_TCP' ) UNION ALL SELECT 'UDP' AS type, uniq(common_client_ip) AS client_ips, uniq(common_internal_ip) AS internal_ips, uniq(common_server_ip) AS server_ips, uniq(common_external_ip) AS external_ips, uniq(common_subscriber_id) as subscriber_ids FROM tsg_galaxy_v3.session_record AS session_record WHERE common_recv_time >= toDateTime(@start) AND common_recv_time < toDateTime(@end) AND common_l4_protocol IN ( 'IPv4_UDP', 'IPv6_UDP' )
--Q23.One-sided Connection Trend(Time Grain 5 minute)
-SELECT toDateTime(toUnixTimestamp(toDateTime(toStartOfInterval(toDateTime(common_recv_time),INTERVAL 5 MINUTE)))) AS stat_time, (CASE WHEN common_stream_dir = 1 THEN 'c2s' WHEN common_stream_dir = 2 THEN 's2c' WHEN common_stream_dir = 3 THEN 'double' ELSE 'None' END) AS type, sum(common_sessions) AS sessions FROM tsg_galaxy_v3.session_record AS session_record WHERE common_recv_time >= toDateTime(@start) AND common_recv_time < toDateTime(@end) GROUP BY stat_time, common_stream_dir ORDER BY stat_time ASC LIMIT 10000
+SELECT toDateTime(toUnixTimestamp(toDateTime(toStartOfInterval(toDateTime(common_recv_time),INTERVAL 5 MINUTE)))) AS stat_time, (CASE WHEN common_stream_dir = 1 THEN 'c2s' WHEN common_stream_dir = 2 THEN 's2c' WHEN common_stream_dir = 3 THEN 'double' ELSE 'None' END) AS type, sum(common_sessions) AS sessions FROM tsg_galaxy_v3.session_record AS session_record WHERE common_recv_time >= toDateTime(@start) AND common_recv_time < toDateTime(@end) GROUP BY stat_time, common_stream_dir ORDER BY stat_time ASC LIMIT 100
--Q24. Estimated One-sided Sessions with Bandwidth
-SELECT toUnixTimestamp(toDateTime(toStartOfInterval(toDateTime(common_recv_time),INTERVAL 5 MINUTE))) AS "Receive Time", sum(common_sessions) AS "sessions", sum(if(common_stream_dir <> 3, common_sessions, 0)) AS "one_side_sessions", sum(coalesce(common_c2s_byte_num + common_s2c_byte_num, 0)) AS "Bytes", round(one_side_sessions / sessions, 2) AS one_side_percent FROM tsg_galaxy_v3.session_record AS session_record WHERE ( ( common_recv_time >= toDateTime(@start) AND common_recv_time < toDateTime(@end) ) ) GROUP BY "Receive Time" LIMIT 10000
+SELECT toUnixTimestamp(toDateTime(toStartOfInterval(toDateTime(common_recv_time),INTERVAL 5 MINUTE))) AS "Receive Time", sum(common_sessions) AS "sessions", sum(if(common_stream_dir <> 3, common_sessions, 0)) AS "one_side_sessions", sum(coalesce(common_c2s_byte_num + common_s2c_byte_num, 0)) AS "Bytes", round(one_side_sessions / sessions, 2) AS one_side_percent FROM tsg_galaxy_v3.session_record AS session_record WHERE ( ( common_recv_time >= toDateTime(@start) AND common_recv_time < toDateTime(@end) ) ) GROUP BY "Receive Time" LIMIT 100
--Q25.Estimated TCP Sequence Gap Loss
-SELECT toUnixTimestamp(toDateTime(toStartOfInterval(toDateTime(common_recv_time),INTERVAL 5 MINUTE))) AS "Receive Time", sum(common_c2s_byte_num + common_s2c_byte_num) AS "bytes", sum(common_c2s_tcp_lostlen + common_s2c_tcp_lostlen) AS "gap_loss_bytes", round(gap_loss_bytes / bytes, 2) AS gap_loss_percent FROM tsg_galaxy_v3.session_record AS session_record WHERE ( ( common_recv_time >= toDateTime(@start) AND common_recv_time < toDateTime(@end) ) ) AND ( common_l4_protocol IN ( 'IPv4_TCP', 'IPv6_TCP' ) ) GROUP BY "Receive Time" LIMIT 10000
+SELECT toUnixTimestamp(toDateTime(toStartOfInterval(toDateTime(common_recv_time),INTERVAL 5 MINUTE))) AS "Receive Time", sum(common_c2s_byte_num + common_s2c_byte_num) AS "bytes", sum(common_c2s_tcp_lostlen + common_s2c_tcp_lostlen) AS "gap_loss_bytes", round(gap_loss_bytes / bytes, 2) AS gap_loss_percent FROM tsg_galaxy_v3.session_record AS session_record WHERE ( ( common_recv_time >= toDateTime(@start) AND common_recv_time < toDateTime(@end) ) ) AND ( common_l4_protocol IN ( 'IPv4_TCP', 'IPv6_TCP' ) ) GROUP BY "Receive Time" LIMIT 100
--Q26.Top30 Server IP by Bytes
SELECT "server_ip" AS "server_ip" , SUM(coalesce("bytes",0)) AS "bytes" , SUM(coalesce("bytes_sent",0)) AS "Sent" , SUM(coalesce("bytes_received",0)) AS "Received" , SUM(coalesce("sessions",0)) AS "sessions" FROM ( SELECT SUM(coalesce(common_c2s_byte_num,0)) AS "bytes_sent" , SUM(coalesce(common_s2c_byte_num,0)) AS "bytes_received" , SUM(common_c2s_byte_num+common_s2c_byte_num) AS "bytes" , SUM(coalesce(common_sessions,0)) AS "sessions" , common_server_ip AS "server_ip" FROM tsg_galaxy_v3.session_record WHERE ( ( common_recv_time >= toDateTime(@start) AND common_recv_time < toDateTime(@end) ) ) AND ( notEmpty( common_server_ip) ) GROUP BY "server_ip" ORDER BY "bytes" desc ) GROUP BY "server_ip" ORDER BY "bytes" desc LIMIT 30
--Q27.Top30 Client IP by Sessions
@@ -73,15 +73,15 @@ SELECT "Http URL" AS "Http URL", sum(coalesce("Sessions", 0)) AS "Sessions" FROM
--Q37.Top30 Destination Transmission APP by Bandwidth
SELECT "server_ip" AS "server_ip", groupUniqArray(coalesce("trans_app", 0)) AS "trans_app", sum(coalesce("bytes", 0)) AS "bytes", sum(coalesce("bytes_sent", 0)) AS "Sent", sum(coalesce("bytes_received", 0)) AS "Received" FROM (SELECT sum(coalesce(common_c2s_byte_num, 0)) AS "bytes_sent", sum(coalesce(common_s2c_byte_num, 0)) AS "bytes_received", sum(common_c2s_byte_num + common_s2c_byte_num) AS "bytes", groupUniqArray(concat(common_l4_protocol, '/', toString(common_server_port))) AS "trans_app", common_server_ip AS "server_ip" FROM tsg_galaxy_v3.session_record AS session_record WHERE ( ( common_recv_time >= toDateTime(@start) AND common_recv_time < toDateTime(@end) ) ) AND ( notEmpty(common_server_ip) ) GROUP BY "server_ip" ORDER BY "bytes" DESC LIMIT 1048576) GROUP BY "server_ip" ORDER BY "bytes" DESC LIMIT 30
--Q38.Browsing Users by Website domains and Sessions
-SELECT "Subscriber ID" AS "Subscriber ID", "Http.Domain" AS "Http.Domain", sum(coalesce("sessions", 0)) AS "sessions" FROM (SELECT http_domain AS "Http.Domain", common_subscriber_id AS "Subscriber ID", sum(coalesce(common_sessions, 0)) AS "sessions" FROM tsg_galaxy_v3.session_record AS session_record WHERE ( ( common_recv_time >= toDateTime(@start) AND common_recv_time < toDateTime(@end) ) ) AND ( notEmpty(http_domain) AND notEmpty(common_subscriber_id) ) GROUP BY "Http.Domain", "Subscriber ID" ORDER BY "sessions" DESC LIMIT 1048576) GROUP BY "Subscriber ID", "Http.Domain" ORDER BY "sessions" DESC LIMIT 10000
+SELECT "Subscriber ID" AS "Subscriber ID", "Http.Domain" AS "Http.Domain", sum(coalesce("sessions", 0)) AS "sessions" FROM (SELECT http_domain AS "Http.Domain", common_subscriber_id AS "Subscriber ID", sum(coalesce(common_sessions, 0)) AS "sessions" FROM tsg_galaxy_v3.session_record AS session_record WHERE ( ( common_recv_time >= toDateTime(@start) AND common_recv_time < toDateTime(@end) ) ) AND ( notEmpty(http_domain) AND notEmpty(common_subscriber_id) ) GROUP BY "Http.Domain", "Subscriber ID" ORDER BY "sessions" DESC LIMIT 1048576) GROUP BY "Subscriber ID", "Http.Domain" ORDER BY "sessions" DESC LIMIT 100
--Q39.Top Domain and Server IP by Bytes Sent
-SELECT "Http.Domain" AS "Http.Domain" , "Server IP" AS "Server IP" , SUM(coalesce("Bytes Sent",0)) AS "Bytes Sent" FROM ( SELECT common_server_ip AS "Server IP" , http_domain AS "Http.Domain" , SUM(coalesce(common_c2s_byte_num+common_s2c_byte_num,0)) AS "Bytes" , SUM(coalesce(common_c2s_byte_num,0)) AS "Bytes Sent" , SUM(coalesce(common_s2c_byte_num,0)) AS "Bytes Received" FROM tsg_galaxy_v3.session_record WHERE ( ( common_recv_time >= toDateTime(@start) AND common_recv_time < toDateTime(@end) ) ) AND ( notEmpty( http_domain) ) GROUP BY "Server IP" , "Http.Domain" ORDER BY "Bytes" desc LIMIT 1048576 ) GROUP BY "Http.Domain" , "Server IP" ORDER BY "Bytes Sent" desc LIMIT 10000
+SELECT "Http.Domain" AS "Http.Domain" , "Server IP" AS "Server IP" , SUM(coalesce("Bytes Sent",0)) AS "Bytes Sent" FROM ( SELECT common_server_ip AS "Server IP" , http_domain AS "Http.Domain" , SUM(coalesce(common_c2s_byte_num+common_s2c_byte_num,0)) AS "Bytes" , SUM(coalesce(common_c2s_byte_num,0)) AS "Bytes Sent" , SUM(coalesce(common_s2c_byte_num,0)) AS "Bytes Received" FROM tsg_galaxy_v3.session_record WHERE ( ( common_recv_time >= toDateTime(@start) AND common_recv_time < toDateTime(@end) ) ) AND ( notEmpty( http_domain) ) GROUP BY "Server IP" , "Http.Domain" ORDER BY "Bytes" desc LIMIT 1048576 ) GROUP BY "Http.Domain" , "Server IP" ORDER BY "Bytes Sent" desc LIMIT 100
--Q40.Top30 Website Domains by Client IP and Sessions
-SELECT "Http.Domain" AS "Http.Domain", "Client IP" AS "Client IP", sum(coalesce("sessions", 0)) AS "sessions" FROM (SELECT common_client_ip AS "Client IP", http_domain AS "Http.Domain", sum(coalesce(common_sessions, 0)) AS "sessions" FROM tsg_galaxy_v3.session_record AS session_record WHERE ( ( common_recv_time >= toDateTime(@start) AND common_recv_time < toDateTime(@end) ) ) AND ( notEmpty(http_domain) ) GROUP BY "Client IP", "Http.Domain" ORDER BY "sessions" DESC LIMIT 1048576) GROUP BY "Http.Domain", "Client IP" ORDER BY "sessions" DESC LIMIT 10000
+SELECT "Http.Domain" AS "Http.Domain", "Client IP" AS "Client IP", sum(coalesce("sessions", 0)) AS "sessions" FROM (SELECT common_client_ip AS "Client IP", http_domain AS "Http.Domain", sum(coalesce(common_sessions, 0)) AS "sessions" FROM tsg_galaxy_v3.session_record AS session_record WHERE ( ( common_recv_time >= toDateTime(@start) AND common_recv_time < toDateTime(@end) ) ) AND ( notEmpty(http_domain) ) GROUP BY "Client IP", "Http.Domain" ORDER BY "sessions" DESC LIMIT 1048576) GROUP BY "Http.Domain", "Client IP" ORDER BY "sessions" DESC LIMIT 100
--Q41.Domain is Accessed by Unique Client IP Trend(bytes Time Grain 5 minute)
-SELECT toDateTime(intDiv(toUInt32(toDateTime(toDateTime(common_recv_time))),300)*300) AS _time , http_domain AS Domain, COUNT(DISTINCT(common_client_ip)) AS nums FROM tsg_galaxy_v3.session_record AS session_record WHERE common_recv_time >= toDateTime(@start) AND common_recv_time < toDateTime(@end) AND notEmpty(http_domain) AND http_domain IN ( SELECT http_domain FROM tsg_galaxy_v3.session_record AS session_record WHERE common_recv_time >= toDateTime(@start) AND common_recv_time < toDateTime(@end) AND notEmpty(http_domain) GROUP BY http_domain ORDER BY SUM(common_s2c_byte_num+common_c2s_byte_num) DESC LIMIT 5 ) GROUP BY toDateTime(intDiv(toUInt32(toDateTime(toDateTime(common_recv_time))),300)*300) , http_domain ORDER BY toDateTime(intDiv(toUInt32(toDateTime(toDateTime(common_recv_time))),300)*300) DESC LIMIT 10000
+SELECT toDateTime(intDiv(toUInt32(toDateTime(toDateTime(common_recv_time))),300)*300) AS _time , http_domain AS Domain, COUNT(DISTINCT(common_client_ip)) AS nums FROM tsg_galaxy_v3.session_record AS session_record WHERE common_recv_time >= toDateTime(@start) AND common_recv_time < toDateTime(@end) AND notEmpty(http_domain) AND http_domain IN ( SELECT http_domain FROM tsg_galaxy_v3.session_record AS session_record WHERE common_recv_time >= toDateTime(@start) AND common_recv_time < toDateTime(@end) AND notEmpty(http_domain) GROUP BY http_domain ORDER BY SUM(common_s2c_byte_num+common_c2s_byte_num) DESC LIMIT 5 ) GROUP BY toDateTime(intDiv(toUInt32(toDateTime(toDateTime(common_recv_time))),300)*300) , http_domain ORDER BY toDateTime(intDiv(toUInt32(toDateTime(toDateTime(common_recv_time))),300)*300) DESC LIMIT 100
--Q42. Domain is Accessed by Unique Client IP Trend(sessions,Time Grain 5 minute)
-SELECT toDateTime(intDiv(toUInt32(toDateTime(toDateTime(common_recv_time))),3600)*3600) AS stat_time , http_domain , uniq (common_client_ip) AS nums FROM tsg_galaxy_v3.session_record AS session_record WHERE common_recv_time >= toDateTime(@start)-604800 AND common_recv_time < toDateTime(@end) AND http_domain IN ( SELECT http_domain FROM tsg_galaxy_v3.session_record AS session_record WHERE common_recv_time >= toDateTime(@start) AND common_recv_time < toDateTime(@end) AND notEmpty(http_domain) GROUP BY http_domain ORDER BY COUNT(*) desc LIMIT 5 ) group by toDateTime(intDiv(toUInt32(toDateTime(toDateTime(common_recv_time))), 3600)*3600), http_domain ORDER BY stat_time desc LIMIT 10000
+SELECT toDateTime(intDiv(toUInt32(toDateTime(toDateTime(common_recv_time))),3600)*3600) AS stat_time , http_domain , uniq (common_client_ip) AS nums FROM tsg_galaxy_v3.session_record AS session_record WHERE common_recv_time >= toDateTime(@start)-604800 AND common_recv_time < toDateTime(@end) AND http_domain IN ( SELECT http_domain FROM tsg_galaxy_v3.session_record AS session_record WHERE common_recv_time >= toDateTime(@start) AND common_recv_time < toDateTime(@end) AND notEmpty(http_domain) GROUP BY http_domain ORDER BY COUNT(*) desc LIMIT 5 ) group by toDateTime(intDiv(toUInt32(toDateTime(toDateTime(common_recv_time))), 3600)*3600), http_domain ORDER BY stat_time desc LIMIT 100
--Q43.Bandwidth Trend with Device ID(Time Grain 5 minute)
SELECT toUnixTimestamp(toDateTime(toStartOfInterval(toDateTime(common_recv_time),INTERVAL 5 MINUTE))) AS "Receive Time", common_device_id AS "Device ID", sum(coalesce(common_c2s_byte_num + common_s2c_byte_num, 0)) AS "Bytes" FROM tsg_galaxy_v3.session_record AS session_record WHERE ( ( common_recv_time >= toDateTime(@start) AND common_recv_time < toDateTime(@end) ) ) GROUP BY "Receive Time", "Device ID" LIMIT 10000
--Q44.Internal IP by Sled IP and Sessions
@@ -115,4 +115,4 @@ select common_client_ip as "Client IP" , count(1) as Sessions,sum(common_c2s_byt
--Q58.L7 Protocol SIP Drill down Server IP
select common_server_ip as "Server IP" , count(1) as Sessions,sum(common_c2s_byte_num) as "Bytes Out", sum(common_s2c_byte_num) as "Bytes In",any(common_server_location) as Location FROM tsg_galaxy_v3.session_record where common_recv_time >= toDateTime(@start) AND common_recv_time < toDateTime(@end) and common_l7_protocol='SIP' group by "Server IP" order by Sessions desc limit 100
--Q59.Top5 Server IP keys with Unique Client IPs Trend (Grain 5 minute)
-SELECT toDateTime(intDiv(toUInt32(toDateTime(toDateTime(common_recv_time))),300)*300) AS _time , common_server_ip AS server_ip, COUNT(DISTINCT(common_client_ip)) AS nums FROM tsg_galaxy_v3.session_record AS session_record WHERE common_recv_time >= toDateTime(@start) AND common_recv_time < toDateTime(@end) AND common_server_ip IN ( SELECT common_server_ip FROM tsg_galaxy_v3.session_record AS session_record WHERE common_recv_time >= toDateTime(@start) AND common_recv_time < toDateTime(@end) GROUP BY common_server_ip ORDER BY count(*) DESC LIMIT 5 ) GROUP BY toDateTime(intDiv(toUInt32(toDateTime(toDateTime(common_recv_time))),300)*300) , server_ip ORDER BY toDateTime(intDiv(toUInt32(toDateTime(toDateTime(common_recv_time))),300)*300) DESC LIMIT 10000
+SELECT toDateTime(intDiv(toUInt32(toDateTime(toDateTime(common_recv_time))),300)*300) AS _time , common_server_ip AS server_ip, COUNT(DISTINCT(common_client_ip)) AS nums FROM tsg_galaxy_v3.session_record AS session_record WHERE common_recv_time >= toDateTime(@start) AND common_recv_time < toDateTime(@end) AND common_server_ip IN ( SELECT common_server_ip FROM tsg_galaxy_v3.session_record AS session_record WHERE common_recv_time >= toDateTime(@start) AND common_recv_time < toDateTime(@end) GROUP BY common_server_ip ORDER BY count(*) DESC LIMIT 5 ) GROUP BY toDateTime(intDiv(toUInt32(toDateTime(toDateTime(common_recv_time))),300)*300) , server_ip ORDER BY toDateTime(intDiv(toUInt32(toDateTime(toDateTime(common_recv_time))),300)*300) DESC LIMIT 100
diff --git a/config/nacos/config/fixed-127.0.0.1_8848-dev_nacos/data/config-data-tenant/dev/Galaxy/galaxy-qgw-service.yml b/config/nacos/config/fixed-127.0.0.1_8848-dev_nacos/data/config-data-tenant/dev/Galaxy/galaxy-qgw-service.yml
index bce0422d..d5468d34 100644
--- a/config/nacos/config/fixed-127.0.0.1_8848-dev_nacos/data/config-data-tenant/dev/Galaxy/galaxy-qgw-service.yml
+++ b/config/nacos/config/fixed-127.0.0.1_8848-dev_nacos/data/config-data-tenant/dev/Galaxy/galaxy-qgw-service.yml
@@ -63,7 +63,7 @@ clickhouse:
realTimeAccount:
username: ENC(Ao/e/87KQ8vOabP81PV/i4f/wvgztdKW)
pin: ENC(KtRdLd84bYGGgk/Uv3+lUQFSxBfQ9ctJ)
- socketTimeOut: 60000
+ socketTimeOut: 120000
longTermAccount:
username: ENC(lgnlVXRIN8qNh+B0scCBKp9QMID22Nc7)
pin: ENC(AkmSSo5VBw6TOz+AXhorb+tmuLn3WANm)
diff --git a/docs/release/release-354.md b/docs/release/release-354.md
index b5113608..f9966f13 100644
--- a/docs/release/release-354.md
+++ b/docs/release/release-354.md
@@ -1 +1,4 @@
- Release 354 (TSG-22.04) \ No newline at end of file
+ Release 354 (TSG-22.04)
+* 修改原Syntax-validation接口,为其提供尽可能随机的样例数据
+* 增加故障诊断SQL测试集parse选项,用于SQL语法解析校验
+* 更改故障诊断SQL测试集默认选项,由validation改为parse \ No newline at end of file
diff --git a/src/main/java/com/mesalab/common/base/BaseResult.java b/src/main/java/com/mesalab/common/base/BaseResult.java
index 99cb6a38..0a20289f 100644
--- a/src/main/java/com/mesalab/common/base/BaseResult.java
+++ b/src/main/java/com/mesalab/common/base/BaseResult.java
@@ -53,7 +53,6 @@ public class BaseResult<T> implements Serializable {
this.meta = meta;
this.data = data;
}
-
/**
* 判断是否是成功结果
* JsonIgnore使之不在json序列化结果当中
diff --git a/src/main/java/com/mesalab/common/enums/DiagnosisOptionEnum.java b/src/main/java/com/mesalab/common/enums/DiagnosisOptionEnum.java
index eb5c35c9..2bf22760 100644
--- a/src/main/java/com/mesalab/common/enums/DiagnosisOptionEnum.java
+++ b/src/main/java/com/mesalab/common/enums/DiagnosisOptionEnum.java
@@ -11,9 +11,12 @@ import lombok.Getter;
*/
@Getter
public enum DiagnosisOptionEnum {
-
/**
- * 验证SQL测试集,对数据源进行功能诊断
+ * 解析SQL测试集,用于SQL语法解析
+ */
+ PARSE("parse"),
+ /**
+ * 验证SQL测试集,用户各个数据库引擎SQL校验
*/
VALIDATION("validation"),
/**
diff --git a/src/main/java/com/mesalab/common/utils/sqlparser/SQLSyntaxParserUtil.java b/src/main/java/com/mesalab/common/utils/sqlparser/SQLSyntaxParserUtil.java
index 5a181f66..c008a89a 100644
--- a/src/main/java/com/mesalab/common/utils/sqlparser/SQLSyntaxParserUtil.java
+++ b/src/main/java/com/mesalab/common/utils/sqlparser/SQLSyntaxParserUtil.java
@@ -48,7 +48,7 @@ public class SQLSyntaxParserUtil {
List resultList = Lists.newArrayList();
try {
parseSQL(sql, resultList);
- addTypeForSelectItems(sql, resultList);
+ //addTypeForSelectItems(sql, resultList);
} catch (RuntimeException | JSQLParserException e) {
log.error("syntax-parse: sql-" + sql + ", error: ", e);
throw new BusinessException(ResultStatusEnum.SERVER_ERROR.getCode(), ResultCodeEnum.ENGINE_PARSE_ERROR.getCode(), "SQL syntax-parse Error:", e);
diff --git a/src/main/java/com/mesalab/qgw/controller/DiagnosisController.java b/src/main/java/com/mesalab/qgw/controller/DiagnosisController.java
index 8dacd3ca..e4e0417d 100644
--- a/src/main/java/com/mesalab/qgw/controller/DiagnosisController.java
+++ b/src/main/java/com/mesalab/qgw/controller/DiagnosisController.java
@@ -37,7 +37,7 @@ public class DiagnosisController {
}
@RequestMapping(value = "/runSql")
- public BaseResult executePocSql(String isSaved, @RequestParam(defaultValue = "validation") String option) throws BusinessException {
+ public BaseResult executePocSql(String isSaved, @RequestParam(defaultValue = "parse") String option) throws BusinessException {
if (Arrays.stream(DiagnosisOptionEnum.values()).noneMatch(o -> o.getValue().equalsIgnoreCase(option))) {
throw new BusinessException(HttpStatus.HTTP_BAD_REQUEST, ResultCodeEnum.PARAM_SYNTAX_ERROR.getCode(),
"Param Syntax Error: The option must be either validation or execution");
diff --git a/src/main/java/com/mesalab/qgw/dialect/ClickHouseDialect.java b/src/main/java/com/mesalab/qgw/dialect/ClickHouseDialect.java
index b6be63a4..43e2d09d 100644
--- a/src/main/java/com/mesalab/qgw/dialect/ClickHouseDialect.java
+++ b/src/main/java/com/mesalab/qgw/dialect/ClickHouseDialect.java
@@ -139,7 +139,7 @@ public class ClickHouseDialect extends AbstractDataSourceDialect {
public String getSyntaxCheckQuery() {
String sql = param.getDbQuerySource().getSqlBody();
try {
- sql = String.valueOf(parserAndUpdateSql(sql));
+ sql = String.valueOf(convertSampleSql(sql));
} catch (JSQLParserException e) {
log.error("syntax-check sql error {}, execute original sql: {}, error is: {} ",
sql, sql = param.getDbQuerySource().getSqlBody(), e.getMessage() == null ? e.getCause() : e.getMessage());
@@ -1041,6 +1041,7 @@ public class ClickHouseDialect extends AbstractDataSourceDialect {
/**
+ * 通过递归方式将原SQL转换为样例SQL
* 做数据集限制:
* 1. 将最内层sql的表名, 替换成(select * from tableName limit XXX) as alias
* 2. 对where添加:OR 1=1
@@ -1050,7 +1051,7 @@ public class ClickHouseDialect extends AbstractDataSourceDialect {
* @return
* @throws JSQLParserException
*/
- private SelectBody parserAndUpdateSql(String sql) throws JSQLParserException {
+ private SelectBody convertSampleSql(String sql) throws JSQLParserException {
Statement parse = CCJSqlParserUtil.parse(sql);
if (parse instanceof Select) {
Select select = (Select) parse;
@@ -1060,21 +1061,21 @@ public class ClickHouseDialect extends AbstractDataSourceDialect {
FromItem fromItem = plainSelect.getFromItem();
if (fromItem instanceof SubSelect) {
SubSelect subSelect = (SubSelect) plainSelect.getFromItem();
- subSelect.setSelectBody(parserAndUpdateSql(subSelect.getSelectBody().toString()));
+ subSelect.setSelectBody(convertSampleSql(subSelect.getSelectBody().toString()));
} else if (fromItem instanceof Table) {
Table table = (Table) fromItem;
- plainSelect.setFromItem(generateDataSetSql(table.getName(), table.getAlias() != null ? table.getAlias().getName() : table.getName()));
+ plainSelect.setFromItem(generateSampleDatasetSql(table.getName(), table.getAlias() != null ? table.getAlias().getName() : table.getName()));
}
if (StringUtil.isNotEmpty(plainSelect.getWhere())) {
- plainSelect.setWhere(generateDateSetWhere(plainSelect.getWhere().toString()));
+ plainSelect.setWhere(generateDatasetWhereSql(plainSelect.getWhere().toString()));
}
- parseWhereAndUpdate(plainSelect.getWhere());
+ convertWhereForSampleSql(plainSelect.getWhere());
return plainSelect;
} else if (selectBody instanceof SetOperationList) {
SetOperationList setOperationList = (SetOperationList) selectBody;
List<SelectBody> selects = setOperationList.getSelects();
for (int i = 0; i < selects.size(); i++) {
- selects.set(i, parserAndUpdateSql(selects.get(i).toString()));
+ selects.set(i, convertSampleSql(selects.get(i).toString()));
}
return setOperationList;
} else {
@@ -1090,48 +1091,48 @@ public class ClickHouseDialect extends AbstractDataSourceDialect {
* @param whereExpression
* @throws JSQLParserException
*/
- private void parseWhereAndUpdate(Expression whereExpression) throws JSQLParserException {
+ private void convertWhereForSampleSql(Expression whereExpression) throws JSQLParserException {
if (whereExpression instanceof ComparisonOperator) {
ComparisonOperator whereOperator = (ComparisonOperator) whereExpression;
Expression left = whereOperator.getLeftExpression();
- setSelectBody(left);
+ setSampleSelectBody(left);
Expression right = whereOperator.getRightExpression();
- setSelectBody(right);
+ setSampleSelectBody(right);
} else if (whereExpression instanceof InExpression) {
InExpression inExpression = (InExpression) whereExpression;
Expression leftExpression = inExpression.getLeftExpression();
- setSelectBody(leftExpression);
+ setSampleSelectBody(leftExpression);
ItemsList rightItemsList = inExpression.getRightItemsList();
if (rightItemsList instanceof SubSelect) {
SubSelect subSelect = (SubSelect) rightItemsList;
- setSelectBody(subSelect);
+ setSampleSelectBody(subSelect);
}
} else if (whereExpression instanceof Between) {
Between whereBetween = (Between) whereExpression;
Expression betweenExpressionStart = whereBetween.getBetweenExpressionStart();
- setSelectBody(betweenExpressionStart);
+ setSampleSelectBody(betweenExpressionStart);
Expression betweenExpressionEnd = whereBetween.getBetweenExpressionEnd();
- setSelectBody(betweenExpressionEnd);
+ setSampleSelectBody(betweenExpressionEnd);
} else if (whereExpression instanceof AndExpression) {
AndExpression whereAnd = (AndExpression) whereExpression;
- parseWhereAndUpdate(whereAnd.getLeftExpression());
- parseWhereAndUpdate(whereAnd.getRightExpression());
+ convertWhereForSampleSql(whereAnd.getLeftExpression());
+ convertWhereForSampleSql(whereAnd.getRightExpression());
} else if (whereExpression instanceof OrExpression) {
OrExpression whereOr = (OrExpression) whereExpression;
- parseWhereAndUpdate(whereOr.getLeftExpression());
- parseWhereAndUpdate(whereOr.getRightExpression());
+ convertWhereForSampleSql(whereOr.getLeftExpression());
+ convertWhereForSampleSql(whereOr.getRightExpression());
} else if (whereExpression instanceof Parenthesis) {
Parenthesis whereExpressionParenthesis = (Parenthesis) whereExpression;
Expression expression = whereExpressionParenthesis.getExpression();
- parseWhereAndUpdate(expression);
+ convertWhereForSampleSql(expression);
}
}
- private void setSelectBody(Expression expression) throws JSQLParserException {
+ private void setSampleSelectBody(Expression expression) throws JSQLParserException {
if (expression instanceof SubSelect) {
SubSelect subSelect = (SubSelect) expression;
SelectBody selectBody = subSelect.getSelectBody();
- subSelect.setSelectBody(parserAndUpdateSql(selectBody.toString()));
+ subSelect.setSelectBody(convertSampleSql(selectBody.toString()));
}
}
@@ -1143,18 +1144,17 @@ public class ClickHouseDialect extends AbstractDataSourceDialect {
* @return
* @throws JSQLParserException
*/
- private FromItem generateDataSetSql(String tableName, String alias) throws JSQLParserException {
- String sql = String.format("SELECT 1 FROM (SELECT * FROM %s LIMIT %s ) as %s ",
- tableName, 100, alias);
+ private FromItem generateSampleDatasetSql(String tableName, String alias) throws JSQLParserException {
+ String sql = StrUtil.format("SELECT 1 FROM (SELECT * FROM {} where rand() % 100 = 0 LIMIT {} ) as {} ",
+ tableName, 1000, alias);
Statement parse = CCJSqlParserUtil.parse(sql);
Select select = (Select) parse;
PlainSelect plainSelect = (PlainSelect) select.getSelectBody();
return plainSelect.getFromItem();
}
- private Expression generateDateSetWhere(String where) throws JSQLParserException {
- String sql = String.format("SELECT * FROM tableName WHERE %s %s ",
- where, "OR 1=1");
+ private Expression generateDatasetWhereSql(String where) throws JSQLParserException {
+ String sql = String.format("SELECT * FROM tableName WHERE %s %s ", where, "OR 1=1");
Statement parse = CCJSqlParserUtil.parse(sql);
Select select = (Select) parse;
PlainSelect selectBody = (PlainSelect) select.getSelectBody();
diff --git a/src/main/java/com/mesalab/qgw/model/api/udf/TIME_FLOOR_WITH_FILL.java b/src/main/java/com/mesalab/qgw/model/api/udf/TIME_FLOOR_WITH_FILL.java
index 1b41a70a..2743793f 100644
--- a/src/main/java/com/mesalab/qgw/model/api/udf/TIME_FLOOR_WITH_FILL.java
+++ b/src/main/java/com/mesalab/qgw/model/api/udf/TIME_FLOOR_WITH_FILL.java
@@ -129,7 +129,7 @@ public class TIME_FLOOR_WITH_FILL implements UDF{
dateRangeList = Lists.newArrayList(Iterables.concat(leftDateRange, rightDateRange));
} else {
- log.warn("SQL中时间条件不完整[startTime:{} - endTime:{}],补全功能无效!", whereTimeRange.getStart(), whereTimeRange.getEnd());
+ log.warn("The time range is empty [startTime:{},endTime:{}], so the time series data cannot be filled.", whereTimeRange.getStart(), whereTimeRange.getEnd());
return results;
}
diff --git a/src/main/java/com/mesalab/qgw/service/impl/DiagnosisServiceImpl.java b/src/main/java/com/mesalab/qgw/service/impl/DiagnosisServiceImpl.java
index d4bf101c..0fb65683 100644
--- a/src/main/java/com/mesalab/qgw/service/impl/DiagnosisServiceImpl.java
+++ b/src/main/java/com/mesalab/qgw/service/impl/DiagnosisServiceImpl.java
@@ -3,10 +3,7 @@ package com.mesalab.qgw.service.impl;
import cn.hutool.core.io.FileUtil;
import cn.hutool.core.io.file.FileReader;
import cn.hutool.core.io.file.FileWriter;
-import cn.hutool.core.net.NetUtil;
-import cn.hutool.core.util.CharsetUtil;
import cn.hutool.core.util.IdUtil;
-import cn.hutool.core.util.StrUtil;
import cn.hutool.core.util.URLUtil;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
@@ -30,6 +27,7 @@ import com.mesalab.qgw.model.api.ApiParam;
import com.mesalab.qgw.model.api.ClickHouseHttpQuery;
import com.mesalab.qgw.model.api.ClickHouseHttpSource;
import com.mesalab.qgw.model.api.DruidIoHttpSource;
+import com.mesalab.qgw.service.ApiService;
import com.mesalab.qgw.service.DiagnosisService;
import com.mesalab.qgw.service.MetadataService;
import com.zdjizhi.utils.CommonUtil;
@@ -49,11 +47,8 @@ import org.apache.http.client.utils.URLEncodedUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.env.Environment;
import org.springframework.stereotype.Service;
-
import java.io.File;
import java.io.IOException;
-import java.io.UnsupportedEncodingException;
-import java.net.URLEncoder;
import java.nio.charset.Charset;
import java.util.*;
import java.util.concurrent.TimeUnit;
@@ -74,6 +69,10 @@ public class DiagnosisServiceImpl implements DiagnosisService {
private HttpClientService httpClientService;
@Autowired
private HttpConfig httpConfig;
+
+ @Autowired
+ private ApiService apiService;
+
@Autowired
private ClickhouseWriter clickhouseWriter;
@Autowired
@@ -88,12 +87,6 @@ public class DiagnosisServiceImpl implements DiagnosisService {
private DruidIoHttpSource druidIoHttpSource;
@Autowired
private MetadataService metadataService;
- @Autowired
- private Environment environment;
- private static final String localHostAddress = NetUtil.getLocalhostStr();
- public String getPort(){
- return environment.getProperty("local.server.port");
- }
/**
* 验证Schema格式是否正确
*
@@ -160,18 +153,30 @@ public class DiagnosisServiceImpl implements DiagnosisService {
Map<String, Map<String, String>> hbaseResult;
try {
Stopwatch watch = Stopwatch.createStarted();
-
Optional<File> druidTestFile = Optional.of(new File(druidWriter.buildPocSQL()));
- druidResult = executePocSqlByDatasource(druidTestFile, isSaved, option);
-
Optional<File> ckTestFile = Optional.of(new File(clickhouseWriter.buildPocSQL()));
- clickhouseResult = executePocSqlByDatasource(ckTestFile, isSaved, option);
-
Optional<File> hbaseTestFile = Optional.of(new File(hbaseWriter.buildPocSQL()));
- hbaseResult = executePocSqlByDatasource(hbaseTestFile, isSaved, option);
-
Optional<File> engineTestFile = Optional.of(new File(engineWriter.buildPocSQL()));
- engineResult = executePocSqlByDatasource(engineTestFile, isSaved, option);
+
+ if (DiagnosisOptionEnum.PARSE.getValue().equalsIgnoreCase(option)) {
+ druidResult = executeSyntaxCheckByDatasource(druidTestFile, QueryOptionEnum.SYNTAX_PARSE.getValue());
+ clickhouseResult = executeSyntaxCheckByDatasource(ckTestFile, QueryOptionEnum.SYNTAX_PARSE.getValue());
+ hbaseResult = executeSyntaxCheckByDatasource(hbaseTestFile, QueryOptionEnum.SYNTAX_PARSE.getValue());
+ engineResult = executeSyntaxCheckByDatasource(engineTestFile, QueryOptionEnum.SYNTAX_PARSE.getValue());
+ } else if (DiagnosisOptionEnum.VALIDATION.getValue().equalsIgnoreCase(option)) {
+ druidResult = executeSyntaxCheckByDatasource(druidTestFile, QueryOptionEnum.SYNTAX_VALIDATION.getValue());
+ clickhouseResult = executeSyntaxCheckByDatasource(ckTestFile, QueryOptionEnum.SYNTAX_VALIDATION.getValue());
+ hbaseResult = executeSyntaxCheckByDatasource(hbaseTestFile, QueryOptionEnum.SYNTAX_VALIDATION.getValue());
+ engineResult = executeSyntaxCheckByDatasource(engineTestFile, QueryOptionEnum.SYNTAX_VALIDATION.getValue());
+ } else if (DiagnosisOptionEnum.EXECUTION.getValue().equalsIgnoreCase(option)) {
+ druidResult = executePocSqlByDatasource(druidTestFile, isSaved, QueryOptionEnum.REAL_TIME.getValue());
+ clickhouseResult = executePocSqlByDatasource(ckTestFile, isSaved, QueryOptionEnum.REAL_TIME.getValue());
+ hbaseResult = executePocSqlByDatasource(hbaseTestFile, isSaved, QueryOptionEnum.REAL_TIME.getValue());
+ engineResult = executePocSqlByDatasource(engineTestFile, isSaved, QueryOptionEnum.REAL_TIME.getValue());
+ } else {
+ throw new BusinessException(HttpStatus.SC_BAD_REQUEST,
+ ResultCodeEnum.PARAM_SYNTAX_ERROR.getCode(), "The option is not supported.");
+ }
mergeResult = Stream
.concat(clickhouseResult.entrySet().stream(), druidResult.entrySet().stream())
@@ -212,8 +217,6 @@ public class DiagnosisServiceImpl implements DiagnosisService {
try {
Stopwatch watch = Stopwatch.createStarted();
-
-
if (testFile.isPresent()) {
if (!testFile.get().isFile()) {
log.warn("UnKnow File " + testFile.get());
@@ -232,25 +235,51 @@ public class DiagnosisServiceImpl implements DiagnosisService {
return result;
}
+    /**
+     * Runs a syntax check (parse or validation) over the test-set SQL file of one
+     * database engine, submitting each non-empty line through the unified query engine.
+     *
+     * @param file        optional test-set file containing one SQL statement per line
+     * @param queryOption syntax-check mode passed to the engine (e.g. syntax parse
+     *                    or syntax validation) — see QueryOptionEnum
+     * @return map keyed by the file name, whose value holds per-query elapsed-time
+     *         entries plus the aggregated metrics added by buildQueryTimeMetric
+     */
+ private Map<String, Map<String, String>> executeSyntaxCheckByDatasource(Optional<File> file, String queryOption) {
+ Map<String, Map<String, String>> resultMap = Maps.newLinkedHashMap();
+ Map<String, String> infoMap = Maps.newLinkedHashMap();
+ if (file.isPresent() && file.get().isFile()) {
+ List<String> rawLines = new FileReader(file.get()).readLines();
+ int index = 0;
+ for (String line : rawLines) {
+ index++;
+ if (Strings.isNullOrEmpty(line)) {
+ continue;
+ }
+ Stopwatch watch = Stopwatch.createStarted();
+ BaseResult queryResult = getBaseResultByEngine(ApiParam.builder().option(queryOption).query(line).build());
+ if (StringUtil.isEmpty(queryResult.getStatistics())) {
+ Map<String, Object> statistics = Maps.newLinkedHashMap();
+ statistics.put("elapsed", watch.elapsed(TimeUnit.MILLISECONDS));
+ queryResult.setStatistics(statistics);
+ }
+ processQueryResult(file, infoMap, null, index, queryResult);
+ }
+ resultMap.put(file.get().getName(), buildQueryTimeMetric(infoMap));
+ } else {
+ log.error("File is not exist or is a directory.");
+ }
+ return resultMap;
+
+ }
/**
* 执行POC SQL测试集,并返回执行结果
*
- * @param file sql 查询语句列表
+ * @param file sql 查询语句列表
* @param isSaved 是否保存查询结果
- * @param option SQL诊断方式
+ * @param queryOption SQL诊断方式
* @return
*/
- private Map<String, Map<String, String>> executePocSqlByDatasource(Optional<File> file, boolean isSaved, String option) {
+ private Map<String, Map<String, String>> executePocSqlByDatasource(Optional<File> file, boolean isSaved, String queryOption) {
Map<String, Map<String, String>> resultMap = Maps.newLinkedHashMap();
Map<String, String> infoMap = Maps.newLinkedHashMap();
- Map<String, Object> paramMap = Maps.newHashMap();
- if (DiagnosisOptionEnum.VALIDATION.getValue().equalsIgnoreCase(option)) {
- paramMap.put("option", QueryOptionEnum.SYNTAX_VALIDATION.getValue());
- } else if (DiagnosisOptionEnum.EXECUTION.getValue().equalsIgnoreCase(option)) {
- paramMap.put("option", QueryOptionEnum.REAL_TIME.getValue());
- }
- paramMap.put("format", QueryFormatEnum.CSV.getValue());
FileWriter writer = null;
int index = 0;
if (file.isPresent() && file.get().isFile()) {
@@ -259,6 +288,7 @@ public class DiagnosisServiceImpl implements DiagnosisService {
if (isSaved) {
writer = new FileWriter(file.get().getAbsolutePath() + "." + IdUtil.simpleUUID() + ".dat");
}
+
if (StringUtil.lowerCase(benchFileName).startsWith(DRUID_PREFIX)) {
for (String line : rawLines) {
index++;
@@ -266,7 +296,7 @@ public class DiagnosisServiceImpl implements DiagnosisService {
continue;
}
BaseResult queryResult = generateDruidBaseResult(line);
- resultProcess(file, infoMap, writer, index, queryResult);
+ processQueryResult(file, infoMap, writer, index, queryResult);
}
} else if (StringUtil.lowerCase(benchFileName).startsWith(CK_PREFIX)) {
for (String line : rawLines) {
@@ -275,7 +305,7 @@ public class DiagnosisServiceImpl implements DiagnosisService {
continue;
}
BaseResult queryResult = generateClickHouseBaseResult(line);
- resultProcess(file, infoMap, writer, index, queryResult);
+ processQueryResult(file, infoMap, writer, index, queryResult);
}
} else if (StringUtil.lowerCase(benchFileName).startsWith(HBASE_PREFIX)) {
for (String line : rawLines) {
@@ -284,7 +314,7 @@ public class DiagnosisServiceImpl implements DiagnosisService {
continue;
}
BaseResult queryResult = generateHbaseBaseResult(line);
- resultProcess(file, infoMap, writer, index, queryResult);
+ processQueryResult(file, infoMap, writer, index, queryResult);
}
} else if (StringUtil.lowerCase(benchFileName).startsWith(ENGINE_PREFIX)) {
for (String line : rawLines) {
@@ -292,41 +322,33 @@ public class DiagnosisServiceImpl implements DiagnosisService {
if (Strings.isNullOrEmpty(line)) {
continue;
}
- Map<String, String> queryResult = executeQuery(line, paramMap);
- int statusCode = Integer.parseInt(queryResult.get("status").trim());
- if (statusCode == HttpStatus.SC_OK) {
- if (writer != null) {
- writer.append("--Query" + index + "\t" + "Elapsed(ms):" + JsonPath.read(queryResult.get("result"), "$.statistics.elapsed") + "\n");
- writer.append(JsonPath.read(queryResult.get("result"), "$.meta") + "\n");
- writer.append(JsonPath.read(queryResult.get("result"), "$.data") + "\n");
- }
- infoMap.put("Query" + index, JsonPath.read(queryResult.get("result"), "$.statistics.elapsed") + "");
- } else if (statusCode == HttpStatus.SC_GATEWAY_TIMEOUT || statusCode == HttpStatus.SC_BAD_GATEWAY) {
- infoMap.put("Query" + index, "status:" + queryResult.get("status") + ",message:" + queryResult.get("message"));
-
- } else {
- throw new BusinessException(statusCode, ResultCodeEnum.SQL_EXECUTION_ERROR.getCode(), file.get().getName()
- + ",Query" + index + "," + queryResult.get("status") + "," + queryResult.get("message"), null);
- }
+ BaseResult queryResult = getBaseResultByEngine(
+ ApiParam.builder().option(queryOption).format(QueryFormatEnum.CSV.getValue()).query(line).build());
+ processQueryResult(file, infoMap, writer, index, queryResult);
}
}
- if (StringUtil.isNotEmpty(infoMap)) {
- double[] values = infoMap.values().stream().mapToDouble(value -> Double.valueOf(value)).sorted().toArray();
- infoMap.put("Min", String.valueOf(CommonUtil.round(StatUtils.min(values),2)));
- infoMap.put("Mean", String.valueOf(CommonUtil.round(StatUtils.mean(values),2)));
- infoMap.put("Median", String.valueOf(CommonUtil.round(StatUtils.percentile(values, 50),2)));
- infoMap.put("P95", String.valueOf(CommonUtil.round(StatUtils.percentile(values, 95),2)));
- infoMap.put("P99", String.valueOf(CommonUtil.round(StatUtils.percentile(values, 99),2)));
- infoMap.put("MAX", String.valueOf(CommonUtil.round(StatUtils.max(values),2)));
- }
- resultMap.put(file.get().getName(), infoMap);
+
+ resultMap.put(file.get().getName(), buildQueryTimeMetric(infoMap));
} else {
- log.error("File is not exist or is directory.");
+ log.error("File is not exist or is a directory.");
}
return resultMap;
}
- private void resultProcess(Optional<File> file, Map<String, String> infoMap, FileWriter writer, int index, BaseResult queryResult) {
+ private Map<String, String> buildQueryTimeMetric(Map<String, String> infoMap) {
+ if (StringUtil.isNotEmpty(infoMap)) {
+ double[] values = infoMap.values().stream().mapToDouble(value -> Double.valueOf(value)).sorted().toArray();
+ infoMap.put("Min", String.valueOf(CommonUtil.round(StatUtils.min(values),2)));
+ infoMap.put("Mean", String.valueOf(CommonUtil.round(StatUtils.mean(values),2)));
+ infoMap.put("Median", String.valueOf(CommonUtil.round(StatUtils.percentile(values, 50),2)));
+ infoMap.put("P95", String.valueOf(CommonUtil.round(StatUtils.percentile(values, 95),2)));
+ infoMap.put("P99", String.valueOf(CommonUtil.round(StatUtils.percentile(values, 99),2)));
+ infoMap.put("MAX", String.valueOf(CommonUtil.round(StatUtils.max(values),2)));
+ }
+ return infoMap;
+ }
+
+ private void processQueryResult(Optional<File> file, Map<String, String> infoMap, FileWriter writer, int index, BaseResult queryResult) {
int statusCode = Integer.parseInt(queryResult.getStatus().toString());
if (statusCode == HttpStatus.SC_OK) {
if (writer != null) {
@@ -429,6 +451,7 @@ public class DiagnosisServiceImpl implements DiagnosisService {
.append("password=").append(clickHouseHttpSource.getRealTimeAccountPin()).append("&")
.append("database=").append(clickHouseHttpSource.getDbName());
ClickHouseHttpQuery clickHouseHttpQuery = new ClickHouseHttpQuery();
+ clickHouseHttpQuery.setSocketTimeOut(httpConfig.getCkRealTimeAccountSocketTimeOut());
clickHouseHttpQuery.setQueryParameter(queryParamBuilder.toString());
log.info("DB Engine is :{}, Execute Http Query is: {}", DBTypeEnum.CLICKHOUSE.getValue(), com.mesalab.common.utils.JsonMapper.toJsonString(clickHouseHttpQuery));
List<NameValuePair> values = URLEncodedUtils.parse(clickHouseHttpQuery.getQueryParameter(), Charset.forName("UTF-8"));
@@ -497,15 +520,14 @@ public class DiagnosisServiceImpl implements DiagnosisService {
for (String tableName : tables) {
Schema schema = metadataService.getSchemaByName(tableName);
if (!dbName.equals(schema.getNamespace())) continue;
- Map<String, String> result = executeQuery("describe " + tableName, null);
- if (HttpStatus.SC_OK != Integer.parseInt(result.get("status"))
- || Integer.parseInt(JsonPath.read(result.get("result"), "$.status").toString()) != HttpStatus.SC_OK) {
- log.error("查询表结构失败:" + result.get("result"));
+ BaseResult baseResult = getBaseResultByEngine(ApiParam.builder().option(QueryOptionEnum.REAL_TIME.getValue()).query("describe " + tableName).build());
+ if (HttpStatus.SC_OK != baseResult.getStatus()) {
+ log.error("查询表结构失败:" + baseResult.getMessage());
throw new BusinessException(HttpStatus.SC_INTERNAL_SERVER_ERROR,
ResultCodeEnum.SQL_EXECUTION_ERROR.getCode(),
" The Table[" + tableName + "] Structure query failed", null);
}
- List<Map<String, Object>> dbFields = JsonPath.read(result.get("result"), "$.data.fields.*");
+ List<Map<String, Object>> dbFields = JsonPath.read(JsonMapper.toJsonString(baseResult.getData()), "$.fields.*");
Map<String, Object> dbFieldMap = Maps.newHashMap();
dbFields.stream().forEach(objectMap -> {
@@ -535,27 +557,13 @@ public class DiagnosisServiceImpl implements DiagnosisService {
}
}
-
-
resultMap.put(METADATA_PREFIX + dbName, infoMap);
-
return resultMap;
}
- private Map<String, String> executeQuery(String sql, Map<String, Object> urlParam) {
- Map<String, String> resultMap = Maps.newHashMap();
- try {
- String param = URLUtil.buildQuery(urlParam, CharsetUtil.CHARSET_UTF_8);
- sql = URLEncoder.encode(sql, "utf-8").replaceAll("\\+", "%20");
- String queryURL = URLUtil.normalize(localHostAddress + ":" + getPort() + "/?query=");
- int socketTimeOut = httpConfig.getServerResponseTimeOut();
- resultMap = httpClientService.httpGet(queryURL + sql + (StrUtil.isBlank(param) ? "" : "&" + param), socketTimeOut);
- } catch (UnsupportedEncodingException e) {
- log.error("Unsupported Encoding error: ", e);
- }
-
- return resultMap;
+ private BaseResult getBaseResultByEngine(ApiParam apiParam) {
+ return apiService.executeQuery(apiParam);
}
diff --git a/src/test/java/com/mesalab/qgw/service/EngineTest.java b/src/test/java/com/mesalab/qgw/service/EngineTest.java
index 6e5d65e8..ef480bbf 100644
--- a/src/test/java/com/mesalab/qgw/service/EngineTest.java
+++ b/src/test/java/com/mesalab/qgw/service/EngineTest.java
@@ -32,8 +32,8 @@ public class EngineTest extends GalaxyQGWApplicationTests {
@Test
public void testPocSql() {
- BaseResult baseResult = diagnosisService.runPocSQL(false, DiagnosisOptionEnum.VALIDATION.getValue());
- assertEquals("POC SQL validate failure.", String.valueOf(baseResult.getStatus()), String.valueOf(HttpStatus.SC_OK));
+ BaseResult baseResult = diagnosisService.runPocSQL(false, DiagnosisOptionEnum.PARSE.getValue());
+ assertEquals("POC sql parse failure.", String.valueOf(baseResult.getStatus()), String.valueOf(HttpStatus.SC_OK));
}
}