author    handingkang <[email protected]>  2024-01-17 21:36:05 +0800
committer handingkang <[email protected]>  2024-01-17 21:36:05 +0800
commit    f7e4c3a2357851a8f6d835bf66c8505cb80bd20a (patch)
tree      42d139e1afe05b36c7d5b8fe57b6126fffef75d5
parent    e0eabcd767df2c408527c382dad3d00fc17a9c4e (diff)
Complete development of the data statistics API
-rw-r--r--  app.py                                             |  2
-rw-r--r--  apps/stats.py                                      | 52
-rw-r--r--  script/neo4jcommand.py                             | 57
-rw-r--r--  script/sched.py (renamed from script/schedule.py)  | 71
-rw-r--r--  settings.py                                        |  1
5 files changed, 144 insertions(+), 39 deletions(-)
diff --git a/app.py b/app.py
index 6f794c3..34e852c 100644
--- a/app.py
+++ b/app.py
@@ -2,7 +2,7 @@ from apiflask import APIFlask
from apps.apiv1 import Response
from apps.apiv1 import bp as apiv1_bp
-from script.schedule import run
+from script.sched import run
from util import log
# Register blueprints
diff --git a/apps/stats.py b/apps/stats.py
index 6ebe3b9..b056806 100644
--- a/apps/stats.py
+++ b/apps/stats.py
@@ -1,17 +1,47 @@
# Display statistics
-from apiflask import APIBlueprint
-
-bp = APIBlueprint("stats", __name__, url_prefix="/stats")
-
-# Return top 10 ISPs
-
-
-# Return high-risk node information
+import json
+from apiflask import APIBlueprint
-# Return IP links
+from .sysinfo import da
-# Return AS links
+bp = APIBlueprint("stats", __name__, url_prefix="/stats")
-# Return ISP-to-ISP links
+# Return country distribution
+def stats_cou():
+ dist = da.getcommondata("coudist")[0][0]
+ data = json.loads(dist)
+    # Reshape into the format expected by the ECharts dataset option
+ dimensions = ["国家", "IPv6 DNS数量"]
+ source = []
+ for v in data.values():
+ source.append({dimensions[0]: v['cou'], dimensions[1]: v['c']})
+ return {"code": 0, "data": {"dimensions": dimensions, "source": source}, "msg": "success"}
+
+
+# Return AS distribution
+def stats_asn():
+ dist = da.getcommondata("asndist")[0][0]
+ data = json.loads(dist)
+    # Reshape into the format expected by the ECharts dataset option
+ dimensions = ["自治域", "IPv6 DNS数量"]
+ source = []
+ for v in data.values():
+ source.append({dimensions[0]: "ASN" + str(v['asn']), dimensions[1]: v['c']})
+ return {"code": 0, "data": {"dimensions": dimensions, "source": source}, "msg": "success"}
+
+
+# Return ISP distribution
+def stats_isp():
+ dist = da.getcommondata("ispdist")[0][0]
+ data = json.loads(dist)
+    # Reshape into the format expected by the ECharts dataset option
+ dimensions = ["服务商", "IPv6 DNS数量"]
+ source = []
+ for v in data.values():
+ source.append({dimensions[0]: v['isp'], dimensions[1]: v['c']})
+ return {"code": 0, "data": {"dimensions": dimensions, "source": source}, "msg": "success"}
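Note: the hunk above defines the stats_cou / stats_asn / stats_isp helpers; their route registration on the blueprint is not part of this diff. A minimal sketch of the reshaping they perform, assuming the cached row holds a pandas to_json(orient="index") string with "cou" and "c" columns (the sample value and helper name below are hypothetical):

    import json

    # Hypothetical cached value, shaped like DataFrame.to_json(orient="index") output.
    cached = '{"0": {"cou": "CN", "c": 1234}, "1": {"cou": "US", "c": 987}}'

    def to_echarts_dataset(raw, label, value_name="IPv6 DNS count"):
        # Build the ECharts dataset option: a dimensions list plus a list of
        # row dicts keyed by those dimensions.
        data = json.loads(raw)
        dimensions = [label, value_name]
        source = [{label: row["cou"], value_name: row["c"]} for row in data.values()]
        return {"dimensions": dimensions, "source": source}

    print(to_echarts_dataset(cached, "Country"))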
diff --git a/script/neo4jcommand.py b/script/neo4jcommand.py
index 0aa8cdd..b12d509 100644
--- a/script/neo4jcommand.py
+++ b/script/neo4jcommand.py
@@ -36,42 +36,49 @@ WHERE n.IPType contains "6"
RETURN n.IP as ip, n.ISP as isp,n.CCODE as ccode,n.COU as cou,n.PROV as prov, score
ORDER BY score DESC, ip ASC,isp ASC,ccode ASC,cou ASC,prov ASC limit 30'''
-
# Node neighbors
-node_neighbors=''''''
-
-
+node_neighbors = '''MATCH(n:NodeResolver53{IP:'%s'})-[]->(m:NodeResolver53) RETURN m'''
-# IP link data
-IP_relate=''''''
+# Full node information
+# IP link data, represented by geographic coordinates
+IP_relate = '''MATCH (n:NodeResolver53)-[]->(m:NodeResolver53)
+with n.LAT as flat,n.LNG as flng,m.LAT as tlat,m.LNG as tlng ,count(*) as c
+RETURN flat,flng,tlat,tlng,c
+order by c DESC limit 10000
+'''
# AS link data
-AS_relate=''''''
-
+AS_relate = '''
+MATCH(n:NodeResolver53)-[l:IP_LINK]->(d:NodeResolver53)
+WITH n.AS AS fasn, d.AS AS tasn, count(*) AS c
+RETURN fasn,tasn,c
+order by c DESC limit 10000
+'''
# ISP link data
-ISP_relate=''''''
-
-
-# All node information
-all_node=''''''
-
+ISP_relate = '''MATCH(n:NodeResolver53)-[l:IP_LINK]->(d:NodeResolver53)
+WITH n.ISP AS fisp, d.ISP AS tisp, count(*) AS c
+RETURN fisp,tisp,c
+order by c DESC limit 10000'''
# AS distribution
-AS_dist=''''''
-
+AS_dist = '''MATCH(n:NodeResolver53{IPType:"v6"})
+ with n.AS as asn,count(*) as c
+ RETURN asn,c
+ order by c DESC limit 10000
+ '''
# ISP distribution statistics
-ISP_dist=''''''
+ISP_dist = '''MATCH(n:NodeResolver53{IPType:"v6"})
+with n.ISP as isp,count(*) as c
+RETURN isp,c
+order by c DESC limit 10000
+'''
# Country distribution statistics
-cou_dist=''''''
-
-
-
-
-
+cou_dist = '''MATCH(n:NodeResolver53{IPType:"v6"}) with n.COU as cou,count(*) as c RETURN cou,c'''
+ccode_dist = '''MATCH(n:NodeResolver53{IPType:"v6"}) with n.CCODE as ccode,count(*) as c RETURN ccode,c'''
# Dual-stack service count statistics
dualcountcypher = '''
@@ -114,7 +121,7 @@ v6count = '''
match (n:NodeResolver53) where n.IPType contains "6" return count(n)'''
delQuery = '''
-CALL apoc.periodic.commit("MATCH (n:NodeResolverQuery) WITH n LIMIT $limit DETACH DELETE n RETURN count(*)",{limit: 10000})
+CALL apoc.periodic.commit("MATCH (n:NodeResolverQuery)
+WITH n LIMIT $limit DETACH DELETE n RETURN count(*)",{limit: 10000})
YIELD updates, executions, runtime, batches
RETURN updates, executions, runtime, batches;'''
-
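Note: the query strings above are only defined here; script/sched.py (next diff) executes them through neomodel. A minimal sketch of running one of them directly, assuming the Neo4j connection for neomodel is already configured elsewhere in the project:

    import pandas as pd
    from neomodel import db

    from script.neo4jcommand import ISP_dist  # query string defined above

    # db.cypher_query returns (rows, column_names); the columns follow the RETURN clause.
    rows, meta = db.cypher_query(ISP_dist, retry_on_session_expire=True)
    df = pd.DataFrame(rows, columns=meta)  # columns: isp, c
    print(df.head())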
diff --git a/script/schedule.py b/script/sched.py
index 54b67c5..44b3622 100644
--- a/script/schedule.py
+++ b/script/sched.py
@@ -97,9 +97,9 @@ def refresh_neo4j():
log.error(e)
# v6 DNS count
- result = len(NodeResolver53.nodes.filter(IPType="v6"))
+ v6nodecountresult = len(NodeResolver53.nodes.filter(IPType="v6"))
sql = "INSERT INTO %s(date,data) VALUES (str_to_date('%s','%%Y-%%m-%%d'),%s) ON DUPLICATE KEY UPDATE data=%s" % (
- da.v6dnstabname, str(datetime.date.today()), result, result)
+ da.v6dnstabname, str(datetime.date.today()), v6nodecountresult, v6nodecountresult)
try:
da.cursor.execute(sql)
da.conn.commit()
@@ -107,6 +107,73 @@ def refresh_neo4j():
except Exception as e:
log.error(e)
+
+    # Geographic data for node links
+ all_geodataresult = to_dataframe(db.cypher_query(IP_relate, retry_on_session_expire=True))
+ res = all_geodataresult.to_json(orient="index")
+ sql = "REPLACE INTO %s(id,name,data) VALUES (3,'%s','%s')" % (da.tabname, "iprelate", escape_string(res))
+ try:
+ da.cursor.execute(sql)
+ da.conn.commit()
+ log.info("完成IP关联信息数据刷新")
+ except Exception as e:
+ log.error(e)
+
+    # AS link data
+ ASN_relateresult = to_dataframe(db.cypher_query(AS_relate, retry_on_session_expire=True))
+ res = ASN_relateresult.to_json(orient="index")
+ sql = "REPLACE INTO %s(id,name,data) VALUES (4,'%s','%s')" % (da.tabname, "asnrelate", escape_string(res))
+ try:
+ da.cursor.execute(sql)
+ da.conn.commit()
+ log.info("完成ASN关联信息数据刷新")
+ except Exception as e:
+ log.error(e)
+
+    # ISP link data
+ ISP_relateresult = to_dataframe(db.cypher_query(ISP_relate, retry_on_session_expire=True))
+ res = ISP_relateresult.to_json(orient="index")
+ sql = "REPLACE INTO %s(id,name,data) VALUES (5,'%s','%s')" % (da.tabname, "isprelate", escape_string(res))
+ try:
+ da.cursor.execute(sql)
+ da.conn.commit()
+ log.info("完成ISP关联信息数据刷新")
+ except Exception as e:
+ log.error(e)
+
+    # AS distribution data
+ AS_distresult = to_dataframe(db.cypher_query(AS_dist, retry_on_session_expire=True))
+ res = AS_distresult.to_json(orient="index")
+ sql = "REPLACE INTO %s(id,name,data) VALUES (6,'%s','%s')" % (da.tabname, "asndist", escape_string(res))
+ try:
+ da.cursor.execute(sql)
+ da.conn.commit()
+ log.info("完成AS分布数据刷新")
+ except Exception as e:
+ log.error(e)
+
+    # ISP distribution data
+ ISP_distresult = to_dataframe(db.cypher_query(ISP_dist, retry_on_session_expire=True))
+ res = ISP_distresult.to_json(orient="index")
+ sql = "REPLACE INTO %s(id,name,data) VALUES (7,'%s','%s')" % (da.tabname, "ispdist", escape_string(res))
+ try:
+ da.cursor.execute(sql)
+ da.conn.commit()
+ log.info("完成ISP分布数据刷新")
+ except Exception as e:
+ log.error(e)
+
+    # Country distribution data
+ COU_distresult = to_dataframe(db.cypher_query(cou_dist, retry_on_session_expire=True))
+ res = COU_distresult.to_json(orient="index")
+ sql = "REPLACE INTO %s(id,name,data) VALUES (8,'%s','%s')" % (da.tabname, "coudist", escape_string(res))
+ try:
+ da.cursor.execute(sql)
+ da.conn.commit()
+ log.info("完成国家分布数据刷新")
+ except Exception as e:
+ log.error(e)
+
log.info("完成数据一轮刷新,下一次刷新开始于: " + str(int(idle_seconds())) + "秒后")
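Note: the read path used by apps/stats.py (da.getcommondata) lives in apps/sysinfo.py and is not shown in this diff. A hedged sketch of that round trip, assuming a pymysql connection and the "data" table written by the REPLACE INTO statements above (connection parameters and the function body are placeholders):

    import json
    import pymysql

    conn = pymysql.connect(host="127.0.0.1", user="root", password="secret",
                           database="v6dnsminerdata")  # placeholder credentials

    def getcommondata(name):
        # Fetch the cached JSON blob that the scheduler wrote under this name.
        with conn.cursor() as cursor:
            cursor.execute("SELECT data FROM data WHERE name=%s", (name,))
            return cursor.fetchall()

    # Mirrors stats.py: first row, first column holds the JSON string.
    coudist = json.loads(getcommondata("coudist")[0][0])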
diff --git a/settings.py b/settings.py
index 673b70e..f77c639 100644
--- a/settings.py
+++ b/settings.py
@@ -7,6 +7,7 @@ MYSQL_DATADB_CONFIGTAB = "config"
MYSQL_DATADB = "v6dnsminerdata"
MYSQL_DATADB_COMDATATAB = "data"
+MYSQL_DATADB_NODETAB = "neo4jdnsnode"
MYSQL_DATADB_DATETAB = "v6data"
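Note: the new MYSQL_DATADB_NODETAB constant is added here but no consumer appears in this diff. A purely illustrative sketch of how such a setting is typically read (the query below is hypothetical):

    import settings

    nodetab = settings.MYSQL_DATADB_NODETAB  # "neo4jdnsnode"
    sql = "SELECT COUNT(*) FROM %s" % nodetab  # hypothetical consumer query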