Diffstat (limited to 'script/schedule.py')
-rw-r--r--  script/schedule.py  42
1 file changed, 24 insertions, 18 deletions
diff --git a/script/schedule.py b/script/schedule.py
index 8b4ed0d..50bc9e9 100644
--- a/script/schedule.py
+++ b/script/schedule.py
@@ -1,14 +1,16 @@
import datetime
+import threading
+import time
import pymysql
from neomodel import config, db
from neomodel.integration.pandas import to_dataframe
-from schedule import repeat, every
+from schedule import repeat, every, run_pending, run_all, idle_seconds
from apps.model import NodeResolver53
-from neo4jcommand import *
from settings import *
from util import log
+from .neo4jcommand import *
class DataSaver():
@@ -27,13 +29,13 @@ class DataSaver():
CREATE TABLE IF NOT EXISTS %s (
id INT auto_increment PRIMARY KEY ,
name VARCHAR(50) NOT NULL UNIQUE,
- data JSON NOT NULL ,
+ data JSON NOT NULL
)ENGINE=innodb DEFAULT CHARSET=utf8; """ % self.tabname
v6dnstablesql = """
CREATE TABLE IF NOT EXISTS %s (
id INT auto_increment PRIMARY KEY ,
date DATE NOT NULL UNIQUE,
- data INT NOT NULL ,
+ data INT NOT NULL
)ENGINE=innodb DEFAULT CHARSET=utf8; """ % self.v6dnstabname
# Execute the SQL statements
@@ -44,7 +46,7 @@ class DataSaver():
self.cursor.execute(tablesql)
self.cursor.execute(v6dnstablesql)
self.conn.commit()
- log.debug("Data Saver created")
+ log.info("Data Saver created")
except Exception as e:
log.error(e)
@@ -54,50 +56,52 @@ da = DataSaver()
@repeat(every().day)
def refresh_neo4j():
- log.debug("开始从neo4j刷新数据")
- url = "neo4j://neo4j:[email protected]:7678"
- config.DATABASE_URL = url
- db.set_connection(url)
+ log.info("开始从neo4j刷新数据")
+ config.DATABASE_URL = NEO4J_URL
+ db.set_connection(NEO4J_URL)
# Deduplicate
db.cypher_query(distinct)
- log.debug("完成去重")
+ log.info("完成去重")
# 建立计算图
db.cypher_query(gds_delgraph)
db.cypher_query(gds_newgraph)
- log.debug("完成计算图刷新")
+ log.info("完成计算图刷新")
# 双栈计数
dual_countresult = db.cypher_query(dualcountcypher, retry_on_session_expire=True)[0][0]
- log.debug("双栈计数结果为: %s", str(dual_countresult))
- sql = "REPLACE INTO %s(name,data) VALUES (%s,'{%s:%s}')" % (
- da.tabname, "dualcount", "count", str(dual_countresult))
+ sql = "REPLACE INTO %s(id,name,data) VALUES (1,'%s','{\"%s\":\"%s\"}')" % (
+ da.tabname, "dualcount", "count", str(dual_countresult[0]))
try:
da.cursor.execute(sql)
da.conn.commit()
+ log.info("完成双栈统计数据刷新")
except Exception as e:
log.error(e)
# Dual-stack details
dual_dataresult = to_dataframe(db.cypher_query(dualdatacypher, retry_on_session_expire=True))
res = dual_dataresult.to_json(orient="index")
- log.debug("双栈数据结果{前20个字符}为: %s", res[:20])
- sql = "REPLACE INTO %s(name,data) VALUES (%s,%s)" % (da.tabname, "dualdata", res)
+ sql = "REPLACE INTO %s(id,name,data) VALUES (2,'%s','%s')" % (da.tabname, "dualdata", res)
try:
da.cursor.execute(sql)
da.conn.commit()
+ log.info("完成双栈信息数据刷新")
except Exception as e:
log.error(e)
# v6dns count
result = len(NodeResolver53.nodes.filter(IPType="v6"))
- log.debug("v6dns计数结果为: %s", str(result))
- sql = "INSERT INTO %s(date,data) VALUES (%s,%s)" % (da.v6dnstabname, str(datetime.date.today()), result)
+ sql = "INSERT INTO %s(date,data) VALUES (str_to_date('%s','%%Y-%%m-%%d'),%s) ON DUPLICATE KEY UPDATE data=%s" % (
+ da.v6dnstabname, str(datetime.date.today()), result, result)
try:
da.cursor.execute(sql)
da.conn.commit()
+ log.info("完成v6dns统计数据刷新")
+
except Exception as e:
log.error(e)
+ log.info("完成数据一轮刷新,下一次刷新开始于: " + str(int(idle_seconds())) + "秒后")
def run_continuously(interval=300):
@@ -116,6 +120,8 @@ def run_continuously(interval=300):
class ScheduleThread(threading.Thread):
@classmethod
def run(cls):
+ log.info("开始执行")
+ run_all()
while not cease_continuous_run.is_set():
try:
run_pending()
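
Note: the ScheduleThread hunk above follows the "run in the background" recipe from the schedule library's documentation, with run_all() added so every registered job fires once at start-up instead of waiting for its first scheduled slot. Below is a minimal, self-contained sketch of that pattern; the thread start-up and the daemon flag are not visible in the hunks shown here and are assumptions.

import threading
import time

import schedule


def run_continuously(interval=300):
    """Run pending jobs in a background thread; return an Event that stops it."""
    cease_continuous_run = threading.Event()

    class ScheduleThread(threading.Thread):
        @classmethod
        def run(cls):
            schedule.run_all()          # run every registered job once at start-up
            while not cease_continuous_run.is_set():
                schedule.run_pending()  # run whichever jobs are due
                time.sleep(interval)

    continuous_thread = ScheduleThread(daemon=True)  # daemon flag is an assumption
    continuous_thread.start()
    return cease_continuous_run

# Usage: stopper = run_continuously(300); call stopper.set() to stop the loop.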