author    handingkang <[email protected]>    2024-01-16 16:27:02 +0800
committer handingkang <[email protected]>    2024-01-16 16:27:02 +0800
commit    610d8c593fd00c7708ff829afc30673b9864d3e7 (patch)
tree      f551c7cd6e530bd2ccc8842e7ea07ed0e4b963c2
parent    260b2827e847d0e2af9e4801c2787396a4022db7 (diff)
Fix multiple issues
-rw-r--r--  .gitignore               1
-rw-r--r--  app.py                   7
-rw-r--r--  apps/apiv1.py            6
-rw-r--r--  apps/data.py             2
-rw-r--r--  apps/stats.py            2
-rw-r--r--  apps/sysinfo.py          4
-rw-r--r--  script/neo4jcommand.py   8
-rw-r--r--  script/schedule.py      42
-rw-r--r--  settings.py              2
-rw-r--r--  util.py                 14
10 files changed, 51 insertions, 37 deletions
diff --git a/.gitignore b/.gitignore
index f2cc8c5..238003d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,5 @@
.idea
*.out
*.pyc
+*.log
./app/__pycache__
diff --git a/app.py b/app.py
index a9e58dc..82e372f 100644
--- a/app.py
+++ b/app.py
@@ -7,7 +7,6 @@ from util import log
# Register blueprints
app = APIFlask(__name__, template_folder='./static/templates')
app.register_blueprint(apiv1_bp)
-app.config.from_pyfile("settings.py")
@app.get('/')
@@ -17,7 +16,7 @@ def hello():
if __name__ == '__main__':
- log.debug("Data cache service started")
+ log.info("Data cache service started")
run()
- log.debug("Flask main service started")
- app.run(host="0.0.0.0", port=8484)
+ log.info("Flask main service started")
+ app.run(host="0.0.0.0", debug=False, port=8484)
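Since app.config.from_pyfile("settings.py") is removed above, Flask's config mapping no longer carries the values from settings.py; the other modules touched by this commit read that module directly instead. A minimal, hedged sketch of the two styles (the MYSQL_* names come from settings.py further down; everything else is illustrative):

# Style removed in this commit: copy settings.py into Flask's config mapping.
#   app.config.from_pyfile("settings.py")
#   host = app.config["MYSQL_HOST"]

# Style used elsewhere in this repo (apps/sysinfo.py, script/schedule.py):
from settings import MYSQL_HOST, MYSQL_PORT  # plain module-level constants
print("connecting to", MYSQL_HOST, MYSQL_PORT)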
diff --git a/apps/apiv1.py b/apps/apiv1.py
index d9dd783..178902a 100644
--- a/apps/apiv1.py
+++ b/apps/apiv1.py
@@ -1,8 +1,8 @@
from apiflask import APIBlueprint
-from data import bp as databp
-from stats import bp as statsbp
-from sysinfo import bp as sysbp
+from .data import bp as databp
+from .stats import bp as statsbp
+from .sysinfo import bp as sysbp
bp = APIBlueprint('apiv1', __name__, url_prefix="/api")
bp.register_blueprint(statsbp)
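The leading dot matters here because apps/ is imported as a package: a bare "from data import bp" only resolves if the apps/ directory itself is on sys.path, while the relative form always resolves against the package. A comment-only sketch of the assumed layout (the __init__.py is presumed; the other files are taken from the diffstat above):

# Assumed package layout:
#   apps/__init__.py
#   apps/apiv1.py     <- this file; "from .data import bp as databp" resolves to apps/data.py
#   apps/data.py      <- defines bp
#   apps/stats.py
#   apps/sysinfo.py
#
# The old absolute form "from data import bp" only works when apps/ itself is
# on sys.path, which is not the case when this module is imported as apps.apiv1.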
diff --git a/apps/data.py b/apps/data.py
index 0944ed1..1c0ac6b 100644
--- a/apps/data.py
+++ b/apps/data.py
@@ -1,4 +1,4 @@
# Handles data retrieval queries
from apiflask import APIBlueprint
-bp = APIBlueprint('data', __name__, url_prefix='/data', description='Data retrieval endpoints')
+bp = APIBlueprint('data', __name__, url_prefix='/data')
diff --git a/apps/stats.py b/apps/stats.py
index b17e277..729291d 100644
--- a/apps/stats.py
+++ b/apps/stats.py
@@ -1,4 +1,4 @@
# Displays statistics
from apiflask import APIBlueprint
-bp = APIBlueprint("stats", __name__, url_prefix="/stats", description="获取统计信息")
+bp = APIBlueprint("stats", __name__, url_prefix="/stats")
diff --git a/apps/sysinfo.py b/apps/sysinfo.py
index 0dc1cab..6e53e87 100644
--- a/apps/sysinfo.py
+++ b/apps/sysinfo.py
@@ -1,4 +1,4 @@
-# Handles system info, including status and uptime
+# Fetches system uptime, v6 & dual-stack data totals, and daily growth
import datetime
import pymysql
@@ -6,7 +6,7 @@ from apiflask import APIBlueprint
from settings import *
-bp = APIBlueprint('sys', __name__, url_prefix='/sys', description='System uptime, v6 & dual-stack data totals, daily growth')
+bp = APIBlueprint('sys', __name__, url_prefix='/sys')
class DataAccess():
diff --git a/script/neo4jcommand.py b/script/neo4jcommand.py
index e825d26..c46e750 100644
--- a/script/neo4jcommand.py
+++ b/script/neo4jcommand.py
@@ -41,7 +41,7 @@ dualcountcypher = '''
CALL gds.wcc.stream('myGraph')
YIELD nodeId,componentId
with componentId as cid,count(*) as c
- where 1<c<5
+ where 1<c<4
with collect(cid) as cc
CALL gds.wcc.stream('myGraph')
YIELD nodeId,componentId
@@ -55,7 +55,7 @@ dualdatacypher = '''
CALL gds.wcc.stream('myGraph')
YIELD nodeId,componentId
with componentId as cid,count(*) as c
- where 1<c<5
+ where 1<c<4
with collect(cid) as cc
CALL gds.wcc.stream('myGraph')
YIELD nodeId,componentId
@@ -67,8 +67,8 @@ dualdatacypher = '''
YIELD nodeId,componentId
where componentId in ccl
with gds.util.asNode(nodeId) as n,componentId
- return n.IP,n.ISP,n.COU,n.CCODE,n.PROV,n.LAT,n.LNG,componentId limit 30
- order by componentId
+ return n.IP,n.ISP,n.COU,n.CCODE,n.PROV,n.LAT,n.LNG,componentId
+ order by componentId limit 30
'''
# v6dns count
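The LIMIT/ORDER BY swap above fixes clause order: in Cypher both are sub-clauses of RETURN and must appear as RETURN ... ORDER BY ... LIMIT, so the previous "limit 30" followed by "order by" is rejected by the grammar. A hedged sketch of running such a query through neomodel, the driver this project already uses (the MATCH pattern and connection URL are placeholders; the property names come from the diff):

from neomodel import config, db

config.DATABASE_URL = "neo4j://user:password@localhost:7687"  # placeholder URL

# ORDER BY must precede LIMIT; the reverse order is a Cypher syntax error.
query = """
MATCH (n)
RETURN n.IP, n.ISP, n.COU, n.CCODE, n.PROV, n.LAT, n.LNG
ORDER BY n.IP
LIMIT 30
"""
rows, columns = db.cypher_query(query)  # returns (result rows, column names)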
diff --git a/script/schedule.py b/script/schedule.py
index 8b4ed0d..50bc9e9 100644
--- a/script/schedule.py
+++ b/script/schedule.py
@@ -1,14 +1,16 @@
import datetime
+import threading
+import time
import pymysql
from neomodel import config, db
from neomodel.integration.pandas import to_dataframe
-from schedule import repeat, every
+from schedule import repeat, every, run_pending, run_all, idle_seconds
from apps.model import NodeResolver53
-from neo4jcommand import *
from settings import *
from util import log
+from .neo4jcommand import *
class DataSaver():
@@ -27,13 +29,13 @@ class DataSaver():
CREATE TABLE IF NOT EXISTS %s (
id INT auto_increment PRIMARY KEY ,
name VARCHAR(50) NOT NULL UNIQUE,
- data JSON NOT NULL ,
+ data JSON NOT NULL
)ENGINE=innodb DEFAULT CHARSET=utf8; """ % self.tabname
v6dnstablesql = """
CREATE TABLE IF NOT EXISTS %s (
id INT auto_increment PRIMARY KEY ,
date DATE NOT NULL UNIQUE,
- data INT NOT NULL ,
+ data INT NOT NULL
)ENGINE=innodb DEFAULT CHARSET=utf8; """ % self.v6dnstabname
# Execute the SQL statements
@@ -44,7 +46,7 @@ class DataSaver():
self.cursor.execute(tablesql)
self.cursor.execute(v6dnstablesql)
self.conn.commit()
- log.debug("Data Saver created")
+ log.info("Data Saver created")
except Exception as e:
log.error(e)
@@ -54,50 +56,52 @@ da = DataSaver()
@repeat(every().day)
def refresh_neo4j():
- log.debug("Starting data refresh from neo4j")
- url = "neo4j://neo4j:[email protected]:7678"
- config.DATABASE_URL = url
- db.set_connection(url)
+ log.info("Starting data refresh from neo4j")
+ config.DATABASE_URL = NEO4J_URL
+ db.set_connection(NEO4J_URL)
# Deduplicate
db.cypher_query(distinct)
- log.debug("Deduplication done")
+ log.info("Deduplication done")
# Build the computation graph
db.cypher_query(gds_delgraph)
db.cypher_query(gds_newgraph)
- log.debug("Computation graph refreshed")
+ log.info("Computation graph refreshed")
# Dual-stack count
dual_countresult = db.cypher_query(dualcountcypher, retry_on_session_expire=True)[0][0]
- log.debug("Dual-stack count result: %s", str(dual_countresult))
- sql = "REPLACE INTO %s(name,data) VALUES (%s,'{%s:%s}')" % (
- da.tabname, "dualcount", "count", str(dual_countresult))
+ sql = "REPLACE INTO %s(id,name,data) VALUES (1,'%s','{\"%s\":\"%s\"}')" % (
+ da.tabname, "dualcount", "count", str(dual_countresult[0]))
try:
da.cursor.execute(sql)
da.conn.commit()
+ log.info("Dual-stack count data refreshed")
except Exception as e:
log.error(e)
# Dual-stack details
dual_dataresult = to_dataframe(db.cypher_query(dualdatacypher, retry_on_session_expire=True))
res = dual_dataresult.to_json(orient="index")
- log.debug("Dual-stack data result (first 20 chars): %s", res[:20])
- sql = "REPLACE INTO %s(name,data) VALUES (%s,%s)" % (da.tabname, "dualdata", res)
+ sql = "REPLACE INTO %s(id,name,data) VALUES (2,'%s','%s')" % (da.tabname, "dualdata", res)
try:
da.cursor.execute(sql)
da.conn.commit()
+ log.info("Dual-stack detail data refreshed")
except Exception as e:
log.error(e)
# v6dns count
result = len(NodeResolver53.nodes.filter(IPType="v6"))
- log.debug("v6dns count result: %s", str(result))
- sql = "INSERT INTO %s(date,data) VALUES (%s,%s)" % (da.v6dnstabname, str(datetime.date.today()), result)
+ sql = "INSERT INTO %s(date,data) VALUES (str_to_date('%s','%%Y-%%m-%%d'),%s) ON DUPLICATE KEY UPDATE data=%s" % (
+ da.v6dnstabname, str(datetime.date.today()), result, result)
try:
da.cursor.execute(sql)
da.conn.commit()
+ log.info("v6dns count data refreshed")
+
except Exception as e:
log.error(e)
+ log.info("Refresh round complete; next refresh starts in " + str(int(idle_seconds())) + " seconds")
def run_continuously(interval=300):
@@ -116,6 +120,8 @@ def run_continuously(interval=300):
class ScheduleThread(threading.Thread):
@classmethod
def run(cls):
+ log.info("Run started")
+ run_all()
while not cease_continuous_run.is_set():
try:
run_pending()
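The two hunks above show only fragments of the background runner; for orientation, here is a hedged sketch of the "run in the background" pattern from the schedule library's documentation, which this code appears to follow (the interval and the initial run_all() call mirror the diff; everything else is illustrative):

import threading
import time

import schedule  # same library as "from schedule import repeat, every, ..." above


def run_continuously(interval=300):
    """Run pending schedule jobs on a separate thread every `interval` seconds."""
    cease_continuous_run = threading.Event()

    class ScheduleThread(threading.Thread):
        @classmethod
        def run(cls):
            schedule.run_all()  # fire every registered job once at startup
            while not cease_continuous_run.is_set():
                schedule.run_pending()
                time.sleep(interval)

    continuous_thread = ScheduleThread()
    continuous_thread.start()
    return cease_continuous_run  # call .set() on this Event to stop the loop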
diff --git a/settings.py b/settings.py
index 6a33e71..74abf6b 100644
--- a/settings.py
+++ b/settings.py
@@ -1,5 +1,5 @@
DEBUG = True
-MYSQL_HOST = ''
+MYSQL_HOST = '124.221.228.62'
MYSQL_PORT = 6033
MYSQL_PAWD = 'Diamondv77'
diff --git a/util.py b/util.py
index 38aeb8e..f8fd630 100644
--- a/util.py
+++ b/util.py
@@ -1,7 +1,15 @@
import logging
+from settings import DEBUG
+
log = logging.getLogger("mylogger")
-logging.basicConfig(filename='run.log',
- format='%(asctime)s - %(name)s - %(levelname)s - %(message)s-%(funcName)s',
- level=logging.DEBUG)
+if DEBUG:
+ # logging.basicConfig(filename='debug.log',
+ logging.basicConfig(
+ format='%(asctime)s - %(name)s - %(levelname)s - %(message)s-%(funcName)s',
+ level=logging.INFO)
+else:
+ logging.basicConfig(filename="server.log",
+ format='%(asctime)s - %(name)s - %(levelname)s - %(message)s-%(funcName)s',
+ level=logging.ERROR)
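A closing note on the logging switch: logging.basicConfig only takes effect the first time it is called, and the "mylogger" logger propagates to the root handler it installs, so with level=logging.INFO the log.debug calls replaced earlier in this commit would have been filtered out anyway. A minimal usage sketch (module names mirror this repository):

# In any other module of this project:
from util import log  # importing util runs basicConfig once

log.info("data cache service started")  # emitted when DEBUG is True (level=INFO)
log.debug("verbose details")            # dropped at both INFO and ERROR levels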