完成分布式查询 (Complete distributed query)
main.py (14 lines changed)
@@ -1,20 +1,18 @@
# encoding=utf8
import sys
from time import sleep

from py12306.helpers.func import *
from py12306.helpers.app import *
from py12306.app import *
from py12306.log.common_log import CommonLog
from py12306.query.query import Query
from py12306.user.user import User


def main():
    if '--test' in sys.argv or '-t' in sys.argv: test()
    App.run()
    CommonLog.print_welcome().print_configs()

    App.run_check()
    User.run()
    App.did_start()
    # App.run_check()
    # User.run()
    Query.run()
    if not Const.IS_TEST:
        while True:
@@ -34,7 +32,7 @@ def test():
    :return:
    """
    Const.IS_TEST = True
    config.OUT_PUT_LOG_TO_FILE_ENABLED = False
    Config.OUT_PUT_LOG_TO_FILE_ENABLED = False
    if '--test-notification' in sys.argv or '-n' in sys.argv:
        Const.IS_TEST_NOTIFICATION = True
    pass

@@ -1,5 +1,8 @@
import signal
import sys

from py12306.helpers.func import *
from py12306.config import *
from py12306.config import Config
from py12306.helpers.notification import Notification
from py12306.log.common_log import CommonLog
from py12306.log.order_log import OrderLog
@@ -17,31 +20,66 @@ def app_available_check():
    return True


@singleton
class App:
    """
    程序主类
    TODO 需要完善
    """

    @classmethod
    def run(cls):
        self = cls()
        self.start()

    def start(self):
        Config().run()
        for sign in [signal.SIGINT, signal.SIGHUP, signal.SIGTERM]: signal.signal(sign, self.handler_exit)
        self.init_class()

    @classmethod
    def did_start(cls):
        self = cls()
        if Config.is_cluster_enabled():
            from py12306.cluster.cluster import Distributed
            Distributed().join_cluster()

    def init_class(self):
        from py12306.cluster.cluster import Distributed
        if Config.is_cluster_enabled(): Distributed()

    def handler_exit(self, *args, **kwargs):
        """
        程序退出
        :param args:
        :param kwargs:
        :return:
        """
        if Config.is_cluster_enabled():
            from py12306.cluster.cluster import Distributed
            Distributed().left_cluster()

        sys.exit()

    @classmethod
    def check_auto_code(cls):
        if not config.AUTO_CODE_ACCOUNT.get('user') or not config.AUTO_CODE_ACCOUNT.get('pwd'):
        if not Config().AUTO_CODE_ACCOUNT.get('user') or not Config().AUTO_CODE_ACCOUNT.get('pwd'):
            return False
        return True

    @classmethod
    def check_user_account_is_empty(cls):
        if config.USER_ACCOUNTS:
            for account in config.USER_ACCOUNTS:
        if Config().USER_ACCOUNTS:
            for account in Config().USER_ACCOUNTS:
                if account:
                    return True
        return False

    @classmethod
    def test_send_notifications(cls):
        if config.NOTIFICATION_BY_VOICE_CODE:  # 语音通知
        if Config().NOTIFICATION_BY_VOICE_CODE:  # 语音通知
            CommonLog.add_quick_log(CommonLog.MESSAGE_TEST_SEND_VOICE_CODE).flush()
            Notification.voice_code(config.NOTIFICATION_VOICE_CODE_PHONE, '张三',
            Notification.voice_code(Config().NOTIFICATION_VOICE_CODE_PHONE, '张三',
                                    OrderLog.MESSAGE_ORDER_SUCCESS_NOTIFICATION_OF_VOICE_CODE_CONTENT.format('北京',
                                                                                                             '深圳'))

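Editor's note: App.start() above routes SIGINT/SIGHUP/SIGTERM through handler_exit so a node deregisters itself from the cluster before the process dies. A minimal hedged sketch of that shutdown pattern, not part of this commit (leave_cluster here is a stand-in for Distributed().left_cluster()):

# Sketch: route termination signals through one handler so cleanup always runs.
import signal
import sys

def leave_cluster():
    # Stand-in for Distributed().left_cluster(): remove this node from the shared registry.
    print('removing this node from the shared node list...')

def handler_exit(signum, frame):
    leave_cluster()
    sys.exit()

for sign in [signal.SIGINT, signal.SIGTERM]:  # SIGHUP is POSIX-only, omitted here
    signal.signal(sign, handler_exit)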
py12306/cluster/__init__.py (new file, 0 lines)
py12306/cluster/cluster.py (new file, 67 lines)
@@ -0,0 +1,67 @@
import redis
from redis.client import PubSub

from py12306.cluster.redis import Redis
from py12306.config import Config
from py12306.helpers.func import *
from py12306.log.cluster_log import ClusterLog


@singleton
class Distributed():
    KEY_QUERY_COUNT = 'query_count'
    KEY_QUERY_LAST_TIME = 'query_last_time'
    KEY_CONFIGS = 'configs'
    KEY_NODES = 'nodes'
    KEY_CHANNEL_LOG = 'channel_log'

    session: Redis = None
    pubsub: PubSub = None
    refresh_channel_time = 0.5
    retry_time = 2

    nodes = {}

    def __init__(self, *args):
        self.session = Redis()
        self.pubsub = self.session.pubsub()
        self.pubsub.subscribe(self.KEY_CHANNEL_LOG)
        create_thread_and_run(self, 'refresh_data', wait=False)
        create_thread_and_run(self, 'subscribe', wait=False)
        return self

    def join_cluster(self):
        node_name = Config().NODE_NAME
        if node_name in self.nodes:
            node_name = node_name + '_' + str(dict_count_key_num(self.nodes, node_name))
            ClusterLog.add_quick_log(ClusterLog.MESSAGE_NODE_ALREADY_IN_CLUSTER.format(node_name)).flush()

        self.session.hset(self.KEY_NODES, node_name, Config().NODE_IS_MASTER)
        message = ClusterLog.MESSAGE_JOIN_CLUSTER_SUCCESS.format(Config().NODE_NAME, list(self.get_nodes()))
        # ClusterLog.add_quick_log(message).flush()
        self.session.publish(self.KEY_CHANNEL_LOG, message)

    def left_cluster(self):
        self.session.hdel(self.KEY_NODES, Config().NODE_NAME)
        message = ClusterLog.MESSAGE_LEFT_CLUSTER.format(Config().NODE_NAME, list(self.get_nodes()))
        # ClusterLog.add_quick_log(message).flush()
        self.session.publish(self.KEY_CHANNEL_LOG, message)

    def get_nodes(self):
        res = self.session.hgetall(self.KEY_NODES)
        res = res if res else {}
        self.nodes = res
        return res

    def refresh_data(self):
        while True:
            self.get_nodes()
            stay_second(self.retry_time)

    def subscribe(self):
        while True:
            message = self.pubsub.get_message()
            if message:
                if message.get('type') == 'message' and message.get('data'):
                    ClusterLog.add_quick_log(message.get('data')).flush()
            stay_second(self.refresh_channel_time)

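Editor's note: the node registration and log broadcasting above boil down to a Redis hash plus one pub/sub channel. A standalone hedged sketch of the same pattern on plain redis-py, assuming a Redis server on localhost:6379 (the key names mirror the commit, everything else here is illustrative and not part of the commit):

# Hypothetical sketch: node registry in a hash, cluster-wide log lines over pub/sub.
import redis

r = redis.Redis(host='localhost', port=6379, db=0, decode_responses=True)

NODES_KEY = 'nodes'          # hash: node name -> is_master flag
LOG_CHANNEL = 'channel_log'  # pub/sub channel for cluster-wide log lines

def join(node_name, is_master=False):
    # Register the node, then tell every subscriber about it.
    r.hset(NODES_KEY, node_name, int(is_master))
    r.publish(LOG_CHANNEL, f'# node {node_name} joined, nodes: {list(r.hgetall(NODES_KEY))} #')

def leave(node_name):
    r.hdel(NODES_KEY, node_name)
    r.publish(LOG_CHANNEL, f'# node {node_name} left, nodes: {list(r.hgetall(NODES_KEY))} #')

# A subscriber loop like Distributed.subscribe() above would poll pubsub.get_message()
# and print the 'data' field of each received message.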
py12306/cluster/redis.py (new file, 59 lines)
@@ -0,0 +1,59 @@
import json
import pickle

import redis

from py12306.config import Config
from py12306.helpers.func import *
from py12306.log.redis_log import RedisLog
from redis import Redis as PyRedis


@singleton
class Redis(PyRedis):
    # session = None

    def __init__(self, *args):
        if Config.is_cluster_enabled():
            args = {
                'host': Config().REDIS_HOST,
                'port': Config().REDIS_PORT,
                'db': 0,
                'password': Config().REDIS_PASSWORD,
                'decode_responses': True
            }
            super().__init__(**args)
            RedisLog.add_quick_log(RedisLog.MESSAGE_REDIS_INIT_SUCCESS)
        else:
            super().__init__(**args)
        return self

    def get(self, name, default=None):
        res = super().get(name)
        # if decode: res = res.decode()
        return res if res else default

    def set(self, name, value, ex=None, px=None, nx=False, xx=False):
        return super().set(name, available_value(value), ex=ex, px=px, nx=nx, xx=xx)

    def set_dict(self, name, value):
        return self.set_pickle(name, value)
        # return self.set(name, json.dumps(value))

    def get_dict(self, name, default={}):
        return self.get_pickle(name, default)
        # res = self.get(name)
        # if res:
        #     return json.loads(res)
        # return default

    def set_pickle(self, name, value):
        return self.set(name, pickle.dumps(value, 0).decode())

    def get_pickle(self, name, default=None):
        res = self.get(name).encode()
        return pickle.loads(res) if res else default

    # def smembers(self, name, default=[]):
    #     res = super().smembers(name)
    #     return [val.decode() for val in list(res)] if res else default

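Editor's note: set_pickle/get_pickle above store Python objects as protocol-0 pickle text so they survive a decode_responses=True connection. A small hedged illustration of just that encoding step, no Redis required and not part of this commit (the sample dict is made up):

# Sketch of the protocol-0 pickle round trip used by set_pickle/get_pickle above.
import pickle

configs = {'QUERY_INTERVAL': 1, 'QUERY_JOBS': [{'left': '北京', 'arrive': '深圳'}]}

# Protocol 0 is ASCII-safe, so it can be stored and returned as a plain str.
as_text = pickle.dumps(configs, 0).decode()
restored = pickle.loads(as_text.encode())

assert restored == configs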
@@ -1,80 +1,144 @@
import json
import re
from os import path

# 12306 账号
USER_ACCOUNTS = []

# 查询任务
QUERY_JOBS = []

# 查询间隔
QUERY_INTERVAL = 1

# 用户心跳检测间隔
USER_HEARTBEAT_INTERVAL = 120

# 多线程查询
QUERY_JOB_THREAD_ENABLED = 0

# 打码平台账号
AUTO_CODE_ACCOUNT = {
    'user': '',
    'pwd': ''
}
# 输出日志到文件
OUT_PUT_LOG_TO_FILE_ENABLED = 0
OUT_PUT_LOG_TO_FILE_PATH = 'runtime/12306.log'
from py12306.helpers.func import *


SEAT_TYPES = {
    '特等座': 25,
    '商务座': 32,
    '一等座': 31,
    '二等座': 30,
    '软卧': 23,
    '硬卧': 28,
    '硬座': 29,
    '无座': 26,
}
@singleton
class Config:
    USER_ACCOUNTS = []
    # 查询任务
    QUERY_JOBS = []
    # 查询间隔
    QUERY_INTERVAL = 1
    # 用户心跳检测间隔
    USER_HEARTBEAT_INTERVAL = 120
    # 多线程查询
    QUERY_JOB_THREAD_ENABLED = 0
    # 打码平台账号
    AUTO_CODE_ACCOUNT = {'user': '', 'pwd': ''}
    # 输出日志到文件
    OUT_PUT_LOG_TO_FILE_ENABLED = 0
    OUT_PUT_LOG_TO_FILE_PATH = 'runtime/12306.log'

ORDER_SEAT_TYPES = {
    '特等座': 'P',
    '商务座': 9,
    '一等座': 'M',
    '二等座': 'O',
    '软卧': 4,
    '硬卧': 3,
    '硬座': 1,
    '无座': 1,
}
    SEAT_TYPES = {'特等座': 25, '商务座': 32, '一等座': 31, '二等座': 30, '软卧': 23, '硬卧': 28, '硬座': 29, '无座': 26, }

PROJECT_DIR = path.dirname(path.dirname(path.abspath(__file__))) + '/'
    ORDER_SEAT_TYPES = {'特等座': 'P', '商务座': 9, '一等座': 'M', '二等座': 'O', '软卧': 4, '硬卧': 3, '硬座': 1, '无座': 1}

# Query
RUNTIME_DIR = PROJECT_DIR + 'runtime/'
QUERY_DATA_DIR = RUNTIME_DIR + 'query/'
USER_DATA_DIR = RUNTIME_DIR + 'user/'
    PROJECT_DIR = path.dirname(path.dirname(path.abspath(__file__))) + '/'

STATION_FILE = PROJECT_DIR + 'data/stations.txt'
CONFIG_FILE = PROJECT_DIR + 'env.py'
    # Query
    RUNTIME_DIR = PROJECT_DIR + 'runtime/'
    QUERY_DATA_DIR = RUNTIME_DIR + 'query/'
    USER_DATA_DIR = RUNTIME_DIR + 'user/'

# 语音验证码
NOTIFICATION_BY_VOICE_CODE = 0
NOTIFICATION_VOICE_CODE_PHONE = ''
NOTIFICATION_API_APP_CODE = ''
    STATION_FILE = PROJECT_DIR + 'data/stations.txt'
    CONFIG_FILE = PROJECT_DIR + 'env.py'

if path.exists(CONFIG_FILE):
    exec(open(CONFIG_FILE, encoding='utf8').read())
    # 语音验证码
    NOTIFICATION_BY_VOICE_CODE = 0
    NOTIFICATION_VOICE_CODE_PHONE = ''
    NOTIFICATION_API_APP_CODE = ''

    # 集群配置
    CLUSTER_ENABLED = 1
    NODE_IS_MASTER = 1
    NODE_NAME = ''
    REDIS_HOST = ''
    REDIS_PORT = '6379'
    REDIS_PASSWORD = ''

    envs = []
    retry_time = 5

    @classmethod
    def run(cls):
        self = cls()
        self.start()

    # @classmethod
    # def keep_work(cls):
    #     self = cls()

    def start(self):
        self.init_envs()
        self.save_to_remote()
        # self.refresh_configs()
        create_thread_and_run(self, 'refresh_configs', wait=False)

    def refresh_configs(self):
        if not self.is_cluster_enabled(): return
        while True:
            remote_configs = self.get_remote_config()
            self.update_configs_from_remote(remote_configs)
            stay_second(self.retry_time)

    def get_remote_config(self):
        if not self.is_cluster_enabled(): return
        from py12306.cluster.cluster import Distributed
        return Distributed().session.get_pickle(Distributed().KEY_CONFIGS, {})

    def save_to_remote(self):
        if not self.is_master(): return
        from py12306.cluster.cluster import Distributed
        Distributed().session.set_pickle(Distributed().KEY_CONFIGS, self.envs)

    def init_envs(self):
        self.envs = EnvLoader.load_with_file(self.CONFIG_FILE)
        self.update_configs(self.envs)

    def update_configs(self, envs):
        for key, value in envs:
            setattr(self, key, value)

    def update_configs_from_remote(self, envs):
        if envs == self.envs: return
        from py12306.query.query import Query
        for key, value in envs:
            if key == 'USER_ACCOUNTS' and value != self.USER_ACCOUNTS:  # 用户修改
                setattr(self, key, value)
                print('用户修改了')  # TODO
            elif key == 'QUERY_JOBS' and value != self.QUERY_JOBS:  # 任务修改
                setattr(self, key, value) and Query().update_query_jobs(auto=True)
            elif key == 'QUERY_INTERVAL' and value != self.QUERY_INTERVAL:  # 任务修改
                setattr(self, key, value) and Query().update_query_interval(auto=True)
            if value != -1:
                setattr(self, key, value)

    @staticmethod
    def is_master():  # 是不是 主
        return Config.CLUSTER_ENABLED and Config.NODE_IS_MASTER

    @staticmethod
    def is_slave():  # 是不是 从
        return Config.CLUSTER_ENABLED and not Config.NODE_IS_MASTER

    @staticmethod
    def is_cluster_enabled():
        return Config.CLUSTER_ENABLED

    # @staticmethod
    # def get_members():
    #     members = []
    #     for name, value in vars(Config).items():
    #         if name.isupper():
    #             members.append(([name, value]))
    #     return members


class UserType:
    ADULT = 1
    CHILD = 2
    STUDENT = 3
    SOLDIER = 4
class EnvLoader():
    envs = []

    dicts = {
        '成人': ADULT,
        '儿童': CHILD,
        '学生': STUDENT,
        '残疾军人、伤残人民警察': SOLDIER,
    }
    @classmethod
    def load_with_file(cls, file):
        self = cls()
        if path.exists(file):
            env_content = open(file, encoding='utf8').read()
            content = re.sub(r'^([A-Z]+)_', r'self.\1_', env_content, flags=re.M)
            exec(content)
        return self.envs

    def __setattr__(self, key, value):
        self.envs.append(([key, value]))

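Editor's note: EnvLoader above rewrites every top-level UPPER_CASE assignment in env.py into an attribute assignment on a capture object, so exec() routes each setting through __setattr__ and it is recorded as a (key, value) pair that can be pushed to Redis. A minimal hedged sketch of the same idea, not part of this commit; EnvCapture and env_text are illustrative names and an inline string stands in for env.py:

# Sketch of the EnvLoader capture mechanism, using inline text instead of env.py.
import re

class EnvCapture:
    def __init__(self):
        # Bypass our own __setattr__ for the backing list itself.
        object.__setattr__(self, 'envs', [])

    def __setattr__(self, key, value):
        self.envs.append([key, value])

env_text = "QUERY_INTERVAL = 2\nCLUSTER_ENABLED = 1\n"
self = EnvCapture()
# Turn 'QUERY_INTERVAL = 2' into 'self.QUERY_INTERVAL = 2' so exec() hits __setattr__.
exec(re.sub(r'^([A-Z]+)_', r'self.\1_', env_text, flags=re.M))

print(self.envs)  # [['QUERY_INTERVAL', 2], ['CLUSTER_ENABLED', 1]]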
@@ -16,10 +16,11 @@ class AuthCode:
    验证码类
    """
    session = None
    data_path = config.RUNTIME_DIR
    data_path = None
    retry_time = 5

    def __init__(self, session):
        self.data_path = config.RUNTIME_DIR
        self.session = session

    @classmethod

@@ -6,7 +6,8 @@ import functools
from time import sleep
from types import MethodType

from py12306 import config

# from py12306 import config


def singleton(cls):
@@ -34,13 +35,13 @@ def singleton(cls):
    return cls


# 座位
def get_seat_number_by_name(name):
    return config.SEAT_TYPES[name]
# 座位 # TODO
# def get_number_by_name(name):
#     return config.SEAT_TYPES[name]


def get_seat_name_by_number(number):
    return [k for k, v in config.SEAT_TYPES.items() if v == number].pop()
# def get_seat_name_by_number(number):  # TODO remove config
#     return [k for k, v in config.SEAT_TYPES.items() if v == number].pop()


# 初始化间隔
@@ -106,6 +107,16 @@ def dict_find_key_by_value(data, value, default=None):
    return result.pop() if len(result) else default


def dict_count_key_num(data: dict, key, like=False):
    count = 0
    for k in data.keys():
        if like:
            if k.find(key) >= 0: count += 1
        elif k == key:
            count += 1
    return count


def array_dict_find_by_key_value(data, key, value, default=None):
    result = [v for k, v in enumerate(data) if key in v and v[key] == value]
    return result.pop() if len(result) else default
@@ -127,6 +138,12 @@ def expand_class(cls, key, value, keep_old=True):
    return cls


def available_value(value):
    if isinstance(value, str) or isinstance(value, bytes):
        return value
    return str(value)


@singleton
class Const:
    IS_TEST = False

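Editor's note: dict_count_key_num above is what join_cluster uses to derive an alternative node name when the configured name is already registered. A quick hedged usage example, assuming the py12306 package is importable; the node names are made up:

# Hypothetical usage of dict_count_key_num for node-name deduplication.
from py12306.helpers.func import dict_count_key_num

nodes = {'master': '1', 'master_1': '0', 'worker': '0'}

assert dict_count_key_num(nodes, 'master') == 1              # exact key match only
assert dict_count_key_num(nodes, 'master', like=True) == 2   # substring match: 'master' and 'master_1'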
@@ -1,4 +1,3 @@
from py12306.helpers.app import *
from py12306.helpers.func import *
from requests_html import HTMLSession, HTMLResponse

@@ -38,7 +37,7 @@ class Request(HTMLSession):
    重写 json 方法,拦截错误
    :return:
    """
    from py12306.helpers.app import Dict
    from py12306.app import Dict
    try:
        result = self.old_json()
        return Dict(result)

@@ -1,4 +1,6 @@
from os import path

from py12306.config import Config
from py12306.helpers.func import *


@@ -7,8 +9,8 @@ class Station:
    stations = []

    def __init__(self):
        if path.exists(config.STATION_FILE):
            result = open(config.STATION_FILE, encoding='utf-8').read()
        if path.exists(Config().STATION_FILE):
            result = open(Config().STATION_FILE, encoding='utf-8').read()
            result = result.lstrip('@').split('@')
            for i in result:
                tmp_info = i.split('|')

py12306/helpers/type.py (new file, 46 lines)
@@ -0,0 +1,46 @@
from py12306.helpers.func import *


@singleton
class UserType:
    ADULT = 1
    CHILD = 2
    STUDENT = 3
    SOLDIER = 4

    dicts = {
        '成人': ADULT,
        '儿童': CHILD,
        '学生': STUDENT,
        '残疾军人、伤残人民警察': SOLDIER,
    }


@singleton
class OrderSeatType:
    dicts = {
        '特等座': 'P',
        '商务座': 9,
        '一等座': 'M',
        '二等座': 'O',
        '软卧': 4,
        '硬卧': 3,
        '硬座': 1,
        '无座': 1,
    }


@singleton
class SeatType:
    dicts = {
        '特等座': 25,
        '商务座': 32,
        '一等座': 31,
        '二等座': 30,
        '软卧': 23,
        '硬卧': 28,
        '硬座': 29,
        '无座': 26,
    }

@@ -1,6 +1,7 @@
import os
import sys

from py12306.config import Config
from py12306.helpers.func import *


@@ -26,8 +27,8 @@ class BaseLog:
        self = cls()
        logs = self.get_logs()
        # 输出到文件
        if file == None and config.OUT_PUT_LOG_TO_FILE_ENABLED:  # TODO 文件无法写入友好提示
            file = open(config.OUT_PUT_LOG_TO_FILE_PATH, 'a')
        if file == None and Config().OUT_PUT_LOG_TO_FILE_ENABLED:  # TODO 文件无法写入友好提示
            file = open(Config().OUT_PUT_LOG_TO_FILE_PATH, 'a')
        if not file: file = None
        print(*logs, sep=sep, end=end, file=file)
        self.empty_logs(logs)

py12306/log/cluster_log.py (new file, 15 lines)
@@ -0,0 +1,15 @@
from py12306.log.base import BaseLog
from py12306.helpers.func import *


@singleton
class ClusterLog(BaseLog):
    # 这里如果不声明,会出现重复打印,目前不知道什么原因
    logs = []
    thread_logs = {}
    quick_log = []

    MESSAGE_JOIN_CLUSTER_SUCCESS = '# 节点 {} 成功加入到集群,当前节点列表 {} #'
    MESSAGE_LEFT_CLUSTER = '# 节点 {} 已离开集群,当前节点列表 {} #'

    MESSAGE_NODE_ALREADY_IN_CLUSTER = '# 当前节点已存在于集群中,自动分配新的节点名称 {} #'

@@ -37,9 +37,9 @@ class CommonLog(BaseLog):
        if Const.IS_TEST:
            self.add_quick_log()
            self.add_quick_log('当前为测试模式,程序运行完成后自动结束')
        if not Const.IS_TEST and config.OUT_PUT_LOG_TO_FILE_ENABLED:
        if not Const.IS_TEST and Config().OUT_PUT_LOG_TO_FILE_ENABLED:
            self.add_quick_log()
            self.add_quick_log('日志已输出到文件中: {}'.format(config.OUT_PUT_LOG_TO_FILE_PATH))
            self.add_quick_log('日志已输出到文件中: {}'.format(Config().OUT_PUT_LOG_TO_FILE_PATH))

        self.add_quick_log()
        self.flush(file=False)
@@ -52,10 +52,10 @@ class CommonLog(BaseLog):
        enable = '已开启'
        disable = '未开启'
        self.add_quick_log('**** 当前配置 ****')
        self.add_quick_log('多线程查询: {}'.format(get_true_false_text(config.QUERY_JOB_THREAD_ENABLED, enable, disable)))
        self.add_quick_log('语音验证码: {}'.format(get_true_false_text(config.NOTIFICATION_BY_VOICE_CODE, enable, disable)))
        self.add_quick_log('查询间隔: {} 秒'.format(config.QUERY_INTERVAL))
        self.add_quick_log('用户心跳检测间隔: {} 秒'.format(config.USER_HEARTBEAT_INTERVAL))
        self.add_quick_log('多线程查询: {}'.format(get_true_false_text(Config().QUERY_JOB_THREAD_ENABLED, enable, disable)))
        self.add_quick_log('语音验证码: {}'.format(get_true_false_text(Config().NOTIFICATION_BY_VOICE_CODE, enable, disable)))
        self.add_quick_log('查询间隔: {} 秒'.format(Config().QUERY_INTERVAL))
        self.add_quick_log('用户心跳检测间隔: {} 秒'.format(Config().USER_HEARTBEAT_INTERVAL))
        self.add_quick_log()
        self.flush()
        return self

@@ -2,6 +2,9 @@ import datetime
import json
import sys
from os import path

from py12306.config import Config
from py12306.cluster.cluster import Distributed
from py12306.log.base import BaseLog
from py12306.helpers.func import *

@@ -17,7 +20,7 @@ class QueryLog(BaseLog):
        'query_count': 1,
        'last_time': '',
    }
    data_path = config.QUERY_DATA_DIR + 'status.json'
    data_path = None

    LOG_INIT_JOBS = ''

@@ -26,20 +29,45 @@ class QueryLog(BaseLog):
    MESSAGE_QUERY_LOG_OF_TRAIN_INFO = '{} {}'
    MESSAGE_QUERY_START_BY_DATE = '出发日期 {}: {} - {}'

    MESSAGE_JOBS_DID_CHANGED = '\n任务已更新,正在重新加载...'

    cluster = None

    def __init__(self):
        super().__init__()
        self.data_path = Config().QUERY_DATA_DIR + 'status.json'
        self.cluster = Distributed()
        self.init_data()

    def init_data(self):
        # 获取上次记录
        if Const.IS_TEST: return
        if path.exists(self.data_path):
        result = False
        if not Config.is_cluster_enabled() and path.exists(self.data_path):
            with open(self.data_path, encoding='utf-8') as f:
                result = f.read()
            if result:
                result = json.loads(result)
                self.data = {**self.data, **result}
                self.print_data_restored()

        if Config.is_cluster_enabled():
            result = self.get_data_from_cluster()

        if result:
            self.data = {**self.data, **result}
            self.print_data_restored()

    def get_data_from_cluster(self):
        query_count = self.cluster.session.get(Distributed.KEY_QUERY_COUNT, 0)
        last_time = self.cluster.session.get(Distributed.KEY_QUERY_LAST_TIME, '')
        if query_count and last_time:
            return {'query_count': query_count, 'last_time': last_time}
        return False

    def refresh_data_of_cluster(self):
        return {
            'query_count': self.cluster.session.incr(Distributed.KEY_QUERY_COUNT),
            'last_time': self.cluster.session.set(Distributed.KEY_QUERY_LAST_TIME, time_now()),
        }

    @classmethod
    def print_init_jobs(cls, jobs):
@@ -112,9 +140,9 @@ class QueryLog(BaseLog):
    @classmethod
    def print_job_start(cls):
        self = cls()
        self.refresh_data()
        self.add_log('=== 正在进行第 {query_count} 次查询 === {time}'.format(query_count=self.data.get('query_count'),
                                                                     time=datetime.datetime.now()))
        self.refresh_data()
        if is_main_thread():
            self.flush()
        return self
@@ -134,9 +162,12 @@ class QueryLog(BaseLog):
        return self

    def refresh_data(self):
        self.data['query_count'] += 1
        self.data['last_time'] = str(datetime.datetime.now())
        self.save_data()
        if Config.is_cluster_enabled():
            self.data = {**self.data, **self.refresh_data_of_cluster()}
        else:
            self.data['query_count'] += 1
            self.data['last_time'] = str(datetime.datetime.now())
            self.save_data()

    def save_data(self):
        with open(self.data_path, 'w') as file:

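Editor's note: in cluster mode the query counter above is no longer a local JSON file; every node bumps one shared Redis counter (refresh_data_of_cluster uses INCR, which is atomic, so concurrent nodes cannot lose updates). A standalone hedged sketch of that shared-counter idea, assuming redis-py and a local server, not part of this commit:

# Sketch of the shared query counter: INCR is atomic, so nodes never double-count.
import datetime
import redis

r = redis.Redis(host='localhost', port=6379, db=0, decode_responses=True)

def record_query():
    count = r.incr('query_count')                  # atomic increment shared by all nodes
    r.set('query_last_time', str(datetime.datetime.now()))
    return count

print('=== 正在进行第 {} 次查询 ==='.format(record_query()))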
py12306/log/redis_log.py (new file, 12 lines)
@@ -0,0 +1,12 @@
from py12306.log.base import BaseLog
from py12306.helpers.func import *


@singleton
class RedisLog(BaseLog):
    # 这里如果不声明,会出现重复打印,目前不知道什么原因
    logs = []
    thread_logs = {}
    quick_log = []

    MESSAGE_REDIS_INIT_SUCCESS = 'Redis 初始化成功'

@@ -1,13 +1,10 @@
import urllib
import random

from py12306.config import UserType
# from py12306.config import UserType
from py12306.helpers.api import *
from py12306.helpers.app import *
from py12306.helpers.func import *
from py12306.helpers.notification import Notification
from py12306.log.order_log import OrderLog
from py12306.log.user_log import UserLog


# from py12306.query.job import Job

@@ -1,5 +1,7 @@
from py12306.config import Config
from py12306.helpers.api import LEFT_TICKETS
from py12306.helpers.station import Station
from py12306.helpers.type import OrderSeatType, SeatType
from py12306.log.query_log import QueryLog
from py12306.helpers.func import *
from py12306.log.user_log import UserLog
@@ -97,10 +99,9 @@ class Job:
        通过日期进行查询
        :return:
        """
        QueryLog.add_log(
            ('\n' if not is_main_thread() else '') + QueryLog.MESSAGE_QUERY_START_BY_DATE.format(date,
                                                                                                 self.left_station,
                                                                                                 self.arrive_station))
        QueryLog.add_log(('\n' if not is_main_thread() else '') + QueryLog.MESSAGE_QUERY_START_BY_DATE.format(date,
                                                                                                              self.left_station,
                                                                                                              self.arrive_station))
        url = LEFT_TICKETS.get('url').format(left_date=date, left_station=self.left_station_code,
                                             arrive_station=self.arrive_station_code, type='leftTicket/queryZ')

@@ -128,7 +129,7 @@ class Job:
            if not self.is_has_ticket(ticket_info):
                continue
            allow_seats = self.allow_seats if self.allow_seats else list(
                config.SEAT_TYPES.values())  # 未设置 则所有可用 TODO 合法检测
                Config.SEAT_TYPES.values())  # 未设置 则所有可用 TODO 合法检测
            self.handle_seats(allow_seats, ticket_info)

    def handle_seats(self, allow_seats, ticket_info):
@@ -192,8 +193,8 @@ class Job:
        self.passengers = passengers

    def set_seat(self, seat):
        self.current_seat = get_seat_number_by_name(seat)
        self.current_order_seat = config.ORDER_SEAT_TYPES[seat]
        self.current_seat = SeatType.dicts.get(seat)
        self.current_order_seat = OrderSeatType.dicts.get(seat)

    def get_user(self):
        user = User.get_user(self.account_key)

@@ -1,27 +1,41 @@
import threading


from py12306.helpers.app import app_available_check
from py12306.config import Config
from py12306.cluster.cluster import Distributed
from py12306.app import app_available_check
from py12306.helpers.func import *
from py12306.helpers.request import Request
from py12306.log.query_log import QueryLog
from py12306.query.job import Job


@singleton
class Query:
    """
    余票查询

    """
    jobs = []
    query_jobs = []
    session = {}

    # 查询间隔
    interval = {}
    cluster = None

    def __init__(self):
        self.interval = init_interval_by_number(config.QUERY_INTERVAL)
        self.session = Request()
        self.cluster = Distributed()
        self.update_query_interval()
        self.update_query_jobs()

    def update_query_interval(self, auto=False):
        self.interval = init_interval_by_number(Config().QUERY_INTERVAL)

    def update_query_jobs(self, auto=False):
        self.query_jobs = Config().QUERY_JOBS
        if auto:
            self.jobs = []
            QueryLog.add_quick_log(QueryLog.MESSAGE_JOBS_DID_CHANGED).flush()
            self.init_jobs()

    @classmethod
    def run(cls):
@@ -33,20 +47,35 @@ class Query:
    def start(self):
        # return # DEBUG
        self.init_jobs()
        QueryLog.print_init_jobs(jobs=self.jobs)
        stay_second(1)

        while True:
            app_available_check()
            if config.QUERY_JOB_THREAD_ENABLED:  # 多线程
            if Config().QUERY_JOB_THREAD_ENABLED:  # 多线程
                create_thread_and_run(jobs=self.jobs, callback_name='run')
            else:
                for job in self.jobs:
                    job.run()
                for job in self.jobs: job.run()
            if Const.IS_TEST: return
            # self.refresh_jobs() # 刷新任务

    def init_jobs(self):
        jobs = config.QUERY_JOBS
        for job in jobs:
        for job in self.query_jobs:
            job = Job(info=job, query=self)
            self.jobs.append(job)
        QueryLog.print_init_jobs(jobs=self.jobs)

    # def get_jobs_from_cluster(self):
    #     jobs = self.cluster.session.get_dict(Distributed.KEY_JOBS)
    #     return jobs
    #
    # def update_jobs_of_cluster(self):
    #     if config.CLUSTER_ENABLED and config.NODE_IS_MASTER:
    #         return self.cluster.session.set_dict(Distributed.KEY_JOBS, self.query_jobs)
    #
    # def refresh_jobs(self):
    #     if not config.CLUSTER_ENABLED: return
    #     jobs = self.get_jobs_from_cluster()
    #     if jobs != self.query_jobs:
    #         self.jobs = []
    #         self.query_jobs = jobs
    #         QueryLog.add_quick_log(QueryLog.MESSAGE_JOBS_DID_CHANGED).flush()
    #         self.init_jobs()

@@ -1,11 +1,7 @@
import json
import pickle
import re
from os import path

from py12306.config import *
from py12306.helpers.api import *
from py12306.helpers.app import *
from py12306.app import *
from py12306.helpers.auth_code import AuthCode
from py12306.helpers.func import *
from py12306.helpers.request import Request

@@ -1,4 +1,4 @@
from py12306.helpers.app import *
from py12306.app import *
from py12306.helpers.func import *
from py12306.log.user_log import UserLog
from py12306.user.job import UserJob