This commit is contained in:
DingZQ
2025-04-19 11:15:02 +08:00
parent e23761195e
commit 6eedd0d55a

View File

@@ -41,6 +41,18 @@ def queryallrecordsbydate(querydate: str, redis_client: redis.Redis):
nodes_links: tuple = influxdb_api.query_all_records_by_date(query_date=querydate)
redis_client.set(cache_key, msgpack.packb(nodes_links, default=encode_datetime))
def queryallrecordsbydate_by_url(querydate: str, timeout: float = 30.0):
    """Warm the server-side cache for *querydate* by requesting the
    /queryallrecordsbydate/ endpoint over HTTP.

    Args:
        querydate: date string (e.g. ``2025-04-18``) forwarded as the
            ``querydate`` query parameter.
        timeout: socket timeout in seconds for the HTTP request
            (prevents the cache-warming job from hanging forever).

    Returns:
        The decoded response body on success, ``None`` on failure.
    """
    # Local import: escape the caller-supplied date before embedding it
    # in the URL (previously interpolated raw).
    from urllib.parse import quote

    # FIX: log label previously said 'queryallrecordsbydateproperty',
    # which does not match this function.
    print(f'queryallrecordsbydate_by_url: {querydate}')
    try:
        # `with` closes the HTTP response (the original leaked it), and
        # the explicit timeout bounds a stalled connection.
        with urllib.request.urlopen(
            f"http://localhost/queryallrecordsbydate/?querydate={quote(querydate)}",
            timeout=timeout,
        ) as response:
            return response.read().decode("utf-8")
    except urllib.error.URLError as e:
        # FIX: report what failed instead of a bare "Error".
        print(f"Error fetching queryallrecordsbydate: {e}")
        return None
def queryallscadarecordsbydate(querydate: str, redis_client: redis.Redis):
cache_key = f"queryallscadarecordsbydate_{querydate}"
exists = redis_client.exists(cache_key)
@@ -49,6 +61,19 @@ def queryallscadarecordsbydate(querydate: str, redis_client: redis.Redis):
result_dict = influxdb_api.query_all_SCADA_records_by_date(query_date=querydate)
redis_client.set(cache_key, msgpack.packb(result_dict, default=encode_datetime))
def queryallscadarecordsbydate_by_url(querydate: str, timeout: float = 30.0):
    """Warm the server-side cache for *querydate* by requesting the
    /queryallscadarecordsbydate/ endpoint over HTTP.

    Args:
        querydate: date string (e.g. ``2025-04-18``) forwarded as the
            ``querydate`` query parameter.
        timeout: socket timeout in seconds for the HTTP request
            (prevents the cache-warming job from hanging forever).

    Returns:
        The decoded response body on success, ``None`` on failure.
    """
    # Local import: escape the caller-supplied date before embedding it
    # in the URL (previously interpolated raw).
    from urllib.parse import quote

    print(f'queryallscadarecordsbydate_by_url: {querydate}')
    try:
        # FIX: the original requested /queryallrecordsbydate/ (copy-paste
        # from the sibling function), so the SCADA cache was never warmed.
        # Route name presumed to mirror queryallscadarecordsbydate — TODO
        # confirm against the server's URL configuration.
        with urllib.request.urlopen(
            f"http://localhost/queryallscadarecordsbydate/?querydate={quote(querydate)}",
            timeout=timeout,
        ) as response:
            return response.read().decode("utf-8")
    except urllib.error.URLError as e:
        # FIX: report what failed instead of a bare "Error".
        print(f"Error fetching queryallscadarecordsbydate: {e}")
        return None
def auto_cache_data():
# 初始化 Redis 连接
# 用redis 限制并发访问 (use redis to limit concurrent access)
@@ -66,14 +91,26 @@ def auto_cache_data():
redis_client.close()
def auto_cache_data_by_url():
    """Warm the HTTP-side caches for each of the last 3 days.

    For yesterday, the day before, and three days ago, calls both
    ``queryallrecordsbydate_by_url`` and
    ``queryallscadarecordsbydate_by_url`` with the date formatted as
    ``YYYY-MM-DD``.
    """
    today = datetime.date.today()
    for offset in range(1, 4):
        prev_day = today - datetime.timedelta(days=offset)
        str_prev_day = prev_day.strftime('%Y-%m-%d')
        print(str_prev_day)
        queryallrecordsbydate_by_url(str_prev_day)
        queryallscadarecordsbydate_by_url(str_prev_day)
    # FIX: removed `redis_client.close()` — no `redis_client` is defined
    # in this function (copy-paste residue from auto_cache_data); the
    # call raised NameError after the loop completed.
if __name__ == "__main__":
    # Warm both caches immediately on startup: once directly against
    # InfluxDB/Redis, once via the HTTP endpoints.
    auto_cache_data()
    auto_cache_data_by_url()
    # Then refresh both automatically every night at 03:00.
    schedule.every().day.at("03:00").do(auto_cache_data)
    schedule.every().day.at("03:00").do(auto_cache_data_by_url)
    # Blocking scheduler loop; 1 s poll interval is ample for a
    # once-per-day job.
    while True:
        schedule.run_pending()
        time.sleep(1)