Merge branch 'config_dev'

This commit is contained in:
lzybetter
2023-09-09 13:39:39 +08:00
5 changed files with 219 additions and 86 deletions

5
.idea/copyright/lzybetter.xml generated Normal file
View File

@@ -0,0 +1,5 @@
<!-- JetBrains IDE copyright profile: sets the copyright author name to "lzybetter". -->
<component name="CopyrightManager">
  <copyright>
    <option name="myName" value="lzybetter" />
  </copyright>
</component>

63
util/config.py Normal file
View File

@@ -0,0 +1,63 @@
import yaml
import os
class Config:
    """Singleton wrapper around the YAML application configuration.

    All paths are resolved relative to the working directory captured at
    import time (``<cwd>/config``), so the application must be launched
    from the project root.
    """

    __BASE_PATH = os.getcwd()
    __CONFIG_PATH = os.path.join(__BASE_PATH, 'config')
    __CONFIG_NAME = 'test.yaml'
    __SCHEDULER_DB_FILE_NAME = 'schedule_db.db'
    __LOG_FILE_NAME = 'myAssistant.log'
    # Cached parsed YAML config, shared by every Config() call (singleton).
    __CONFIG_DICT = {}

    def __new__(cls, *args, **kwargs):
        # Classic singleton: every Config() returns the same instance.
        if not hasattr(Config, "_instance"):
            Config._instance = object.__new__(cls)
        return Config._instance

    def __init__(self):
        # Load the YAML file only once; later constructions reuse the cache.
        if self.__CONFIG_DICT == {}:
            with open(os.path.join(self.__CONFIG_PATH, self.__CONFIG_NAME)) as f:
                self.__CONFIG_DICT = yaml.safe_load(f)

    def __save_config(self):
        # Persist the in-memory config dict back to the YAML file
        # (previously duplicated in every setter).
        with open(os.path.join(self.__CONFIG_PATH, self.__CONFIG_NAME), 'w') as f:
            f.write(yaml.safe_dump(self.__CONFIG_DICT, sort_keys=False))

    @property
    def CONFIG_PATH(self):
        """Absolute path of the config directory."""
        return self.__CONFIG_PATH

    @property
    def BASE_PATH(self):
        """Working directory the application was started from."""
        return self.__BASE_PATH

    def get_scheduler_db_file_path(self):
        """Return the scheduler DB file path.

        Falls back to ``config/schedule_db`` when SCHEDULER_DB_PATH is absent.
        """
        try:
            return os.path.join(self.__CONFIG_PATH, self.__CONFIG_DICT['SCHEDULER_DB_PATH'],
                                self.__SCHEDULER_DB_FILE_NAME)
        except (KeyError, TypeError):  # key missing, or config file was empty
            return os.path.join(self.__CONFIG_PATH, 'schedule_db', self.__SCHEDULER_DB_FILE_NAME)

    def set_scheduler_db_path(self, new_path):
        """Update SCHEDULER_DB_PATH and persist the config file."""
        self.__CONFIG_DICT['SCHEDULER_DB_PATH'] = new_path
        self.__save_config()

    def get_log_file_path(self):
        """Return the log file path, defaulting to ``config/log``."""
        try:
            return os.path.join(self.__CONFIG_PATH, self.__CONFIG_DICT['LOG_PATH'], self.__LOG_FILE_NAME)
        except (KeyError, TypeError):
            return os.path.join(self.__CONFIG_PATH, 'log', self.__LOG_FILE_NAME)

    def set_log_path(self, new_path):
        """Update LOG_PATH and persist the config file."""
        self.__CONFIG_DICT['LOG_PATH'] = new_path
        self.__save_config()

    def get_proxy(self):
        """Return the PROXY config section, or None when unset."""
        try:
            return self.__CONFIG_DICT['PROXY']
        except (KeyError, TypeError):
            return None

    def set_proxy(self, new_proxy):
        """Update PROXY and persist the config file."""
        self.__CONFIG_DICT['PROXY'] = new_proxy
        self.__save_config()

View File

@@ -0,0 +1,46 @@
import requests
import os
from util import config
import json
class RequestWithProxy:
    """HTTP client wrapper that routes requests through proxies taken from
    the application's PROXY config section.

    Recognised PROXY keys (assumed from the parsing below — confirm against
    the YAML): 'http'/'https' proxy URLs, 'proxy_web' (sites that must use
    the proxy) and 'no_proxy_web' (sites that bypass it).
    """

    def __init__(self):
        # Instance-level state instead of the original shared, mutable
        # class attributes, so each instance reflects the current config.
        self.__proxies = {}
        self.__no_proxy = []
        c = config.Config()
        proxies_tmp = c.get_proxy()
        if not proxies_tmp:
            return
        # sorted() guarantees 'http'/'https' are processed before
        # 'proxy_web', which depends on them being present.
        for key in sorted(proxies_tmp):
            if key in ('http', 'https'):
                value = proxies_tmp[key]
                # Normalise bare host:port entries to a full URL.
                if 'http://' in value or "https://" in value:
                    self.__proxies[key] = value
                else:
                    self.__proxies[key] = "http://" + value
            elif key == 'proxy_web':
                http_proxy = self.__proxies.get('http')
                https_proxy = self.__proxies.get('https')
                if not (http_proxy or https_proxy):
                    continue
                for site in proxies_tmp[key]:
                    # BUG FIX: fall back to the other scheme's proxy instead
                    # of raising KeyError when only one of http/https is set.
                    if 'http://' in site:
                        self.__proxies[site] = http_proxy or https_proxy
                    elif 'https://' in site:
                        self.__proxies[site] = https_proxy or http_proxy
                    else:
                        self.__proxies["https://" + site] = https_proxy or http_proxy
            elif key == 'no_proxy_web':
                for site in proxies_tmp[key]:
                    self.__no_proxy.append(site.replace("https://", "").replace("http://", ""))
        if self.__no_proxy:
            # requests honours the NO_PROXY environment variable.
            os.environ['NO_PROXY'] = ','.join(self.__no_proxy)

    def get(self, url, headers=None):
        """GET *url*, bypassing the proxy for hosts listed in no_proxy_web.

        Returns the requests.Response, or None on any request error
        (best-effort, matching the original contract; now catches only
        requests' own exceptions instead of everything).
        """
        try:
            bare_host = url.replace("https://", "").replace("http://", "")
            if bare_host in self.__no_proxy:
                return requests.get(url=url, headers=headers)
            return requests.get(url=url, headers=headers, proxies=self.__proxies)
        except requests.RequestException:
            return None

    def post(self, url, data=None, json_d=None, headers=None):
        # TODO: not implemented yet.
        pass

View File

@@ -1,86 +0,0 @@
import sqlite3
from apscheduler.util import undefined
import logging
import uuid
# Module-level logger recording scheduler activity to ``mylog.log``.
logger = logging.getLogger("scheduler_logger")
logger.setLevel(logging.DEBUG)

# Attach a DEBUG-level file handler with a timestamped record format.
fh = logging.FileHandler('mylog.log')
fh.setLevel(logging.DEBUG)
fh.setFormatter(
    logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
logger.addHandler(fh)
def add_interval_schedule(schedudler, func, **kwargs):
    """Register *func* on *schedudler* as an interval job with a random UUID id.

    Unsupported keyword arguments are logged and ignored; supported ones
    override the defaults below.  Returns a Chinese status message
    describing success or failure (details go to the log file).
    """
    params = {
        'args': None,
        'kwargs': None,
        'name': None,
        'misfire_grace_time': undefined,
        'coalesce': undefined,
        'max_instances': undefined,
        'next_run_time': undefined,
        'jobstore': 'default',
        'executor': 'default',
        'trigger': 'interval',
        'replace_existing': False}
    # BUG FIX: the original tuple was missing a comma after 'coalesce',
    # silently fusing it with 'max_instances' into one string and rejecting
    # both keywords; 'trigger' was also listed twice.
    allowed = ('trigger', 'args', 'kwargs', 'id', 'name', 'misfire_grace_time',
               'coalesce', 'max_instances', 'next_run_time', 'jobstore',
               'executor', 'replace_existing', 'days', 'weeks', 'hours',
               'minutes', 'seconds', 'start_date', 'end_date')
    for k, v in kwargs.items():
        if k not in allowed:
            logger.error('%s is not supported' % k)
        else:
            params[k] = v
    if params['name']:
        logger.info('定时器\'%s\'的参数为%s' % (params['name'], params))
    else:
        logger.info('定时器\'%s\'的参数为%s' % (func.__name__, params))
    id = str(uuid.uuid1())
    try:
        schedudler.add_job(id=id, func=func, **params)
        return "定时器添加成功"
    except Exception as e:
        logger.error("定时器添加失败,错误信息:%s" % e)
        return "定时器添加失败详细信息请见log文件"
def del_scheduler(scheduler, id):
    """Remove job *id* from *scheduler* if it exists in the job store.

    Returns a Chinese status message; failures are also written to the log.
    """
    if query_scheduler(id):
        try:
            scheduler.remove_job(id)
            return "已删除:%s" % id
        except Exception as e:
            logger.error("定时器删除失败,错误信息:%s" % e)
            return "定时器删除失败详细信息请见log文件"
    # BUG FIX: this log call was originally placed AFTER the return and
    # therefore unreachable; log the miss before reporting it.
    logger.info("无该id的job: %s" % id)
    return "定时器删除失败无该id的job: %s" % id
def query_scheduler(id=None):
    """Return True when the APScheduler job store contains job *id*
    (or any job at all when *id* is None), False otherwise.

    Always closes the SQLite connection, and returns False instead of
    raising when the apscheduler_jobs table does not exist yet.
    """
    db_path = 'config/scheduler/jobstores.db'
    conn = sqlite3.connect(db_path)
    try:
        cur = conn.cursor()
        cur.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='apscheduler_jobs';")
        # BUG FIX: the original ran the fallback SELECT even when the table
        # was missing, raising sqlite3.OperationalError.
        if not cur.fetchall():
            return False
        if id:
            # Parameterized query instead of %-formatting (SQL injection).
            cur.execute("select * from apscheduler_jobs where id=?", (id,))
        else:
            cur.execute("select * from apscheduler_jobs")
        return bool(cur.fetchall())
    finally:
        conn.close()
def get_scheduler_list(scheduler):
    """Return a list of ``{'id', 'name'}`` dicts, one per scheduled job."""
    return [{'id': job.id, 'name': job.name} for job in scheduler.get_jobs()]

105
util/scheduler.py Normal file
View File

@@ -0,0 +1,105 @@
import sqlite3
from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
from apscheduler.util import undefined
import logging
import uuid
from flask_apscheduler import APScheduler
class Scheduler_DB_Config(object):
    """Flask-APScheduler configuration object.

    Declares a single SQLAlchemy-backed SQLite job store; the 'default'
    entry is replaced at runtime with the path taken from the application
    config before the scheduler starts.
    """
    # SCHEDULER_API_ENABLED = True
    SCHEDULER_JOBSTORES = dict(
        default=SQLAlchemyJobStore('sqlite:///config/schedule_db/schedule_db.db'),
    )
class Scheduler():
    """Wrapper around flask_apscheduler.APScheduler with a SQLite job store.

    Owns a dedicated file logger and exposes add/delete/query helpers that
    return Chinese status messages for the caller/UI layer.
    """

    __logger = None
    __scheduler = None

    def __init__(self, app, config):
        # File logger, path taken from the app config.
        # NOTE(review): every Scheduler() adds another handler to the same
        # named logger — confirm this class is only instantiated once.
        self.__logger = logging.getLogger("scheduler_logger")
        self.__logger.setLevel(logging.INFO)
        fh = logging.FileHandler(config.get_log_file_path())
        fh.setLevel(logging.INFO)
        formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        fh.setFormatter(formatter)
        self.__logger.addHandler(fh)
        # Point the job store at the DB file chosen by the config, then
        # start the Flask-APScheduler instance.
        self.__scheduler = APScheduler()
        scheduler_db_path = config.get_scheduler_db_file_path()
        scheduler_db_config = Scheduler_DB_Config()
        scheduler_db_config.SCHEDULER_JOBSTORES['default'] = SQLAlchemyJobStore('sqlite:///%s' % scheduler_db_path)
        app.config.from_object(scheduler_db_config)
        self.__scheduler.init_app(app)
        self.__scheduler.start()

    def add_interval_schedule(self, func, **kwargs):
        """Register *func* as an interval job with a random UUID id.

        Unsupported keyword arguments are logged and ignored; supported
        ones override the defaults below.  Returns a Chinese status message.
        """
        params = {
            'args': None,
            'kwargs': None,
            'name': None,
            'misfire_grace_time': undefined,
            'coalesce': undefined,
            'max_instances': undefined,
            'next_run_time': undefined,
            'jobstore': 'default',
            'executor': 'default',
            'trigger': 'interval',
            'replace_existing': False}
        # BUG FIX: the original tuple was missing a comma after 'coalesce',
        # fusing it with 'max_instances' and rejecting both keywords;
        # 'trigger' was also listed twice.
        allowed = ('trigger', 'args', 'kwargs', 'id', 'name', 'misfire_grace_time',
                   'coalesce', 'max_instances', 'next_run_time', 'jobstore',
                   'executor', 'replace_existing', 'days', 'weeks', 'hours',
                   'minutes', 'seconds', 'start_date', 'end_date')
        for k, v in kwargs.items():
            if k not in allowed:
                self.__logger.error('%s is not supported' % k)
            else:
                params[k] = v
        if params['name']:
            self.__logger.info('定时器\'%s\'的参数为%s' % (params['name'], params))
        else:
            self.__logger.info('定时器\'%s\'的参数为%s' % (func.__name__, params))
        id = str(uuid.uuid1())
        try:
            self.__scheduler.add_job(id=id, func=func, **params)
            return "定时器添加成功"
        except Exception as e:
            self.__logger.error("定时器添加失败,错误信息:%s" % e)
            return "定时器添加失败详细信息请见log文件"

    def del_scheduler(self, id):
        """Remove job *id* if it exists; returns a Chinese status message."""
        if self.query_scheduler(id):
            try:
                self.__scheduler.remove_job(id)
                return "已删除:%s" % id
            except Exception as e:
                self.__logger.error("定时器删除失败,错误信息:%s" % e)
                return "定时器删除失败详细信息请见log文件"
        # BUG FIX: this log line was unreachable (placed after the return)
        # and referenced an undefined module-level name `logger`.
        self.__logger.info("无该id的job: %s" % id)
        return "定时器删除失败无该id的job: %s" % id

    def query_scheduler(self, id=None):
        """Return True when the job store contains *id* (or any job when
        *id* is None), False otherwise.

        Always closes the SQLite connection; returns False instead of
        raising when the apscheduler_jobs table does not exist yet.
        """
        # NOTE(review): the configured store file is schedule_db.db, but
        # this hard-coded path says jobstores.db — confirm which file the
        # running scheduler actually writes.
        db_path = 'config/schedule_db/jobstores.db'
        conn = sqlite3.connect(db_path)
        try:
            cur = conn.cursor()
            cur.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='apscheduler_jobs';")
            # BUG FIX: the original ran the fallback SELECT even when the
            # table was missing, raising sqlite3.OperationalError.
            if not cur.fetchall():
                return False
            if id:
                # Parameterized query instead of %-formatting (SQL injection).
                cur.execute("select * from apscheduler_jobs where id=?", (id,))
            else:
                cur.execute("select * from apscheduler_jobs")
            return bool(cur.fetchall())
        finally:
            conn.close()

    def get_scheduler_list(self):
        """Return a list of ``{'id', 'name'}`` dicts, one per scheduled job."""
        return [{'id': j.id, 'name': j.name} for j in self.__scheduler.get_jobs()]