import os
import logging
from logging.handlers import RotatingFileHandler
from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
from datetime import timedelta

# Flask debug mode; must be disabled in production.
DEBUG = True

# Session-signing key. Read from the environment when available so the
# hard-coded development value never has to ship to production; the
# default below preserves the original value, so behavior is unchanged
# when nothing is exported.
SECRET_KEY = os.environ.get('SECRET_KEY', '@d1Gl%L62ICX7!7OkEKfj9qtJ*NH9OhF')

# MySQL connection settings. Environment variables override the
# development defaults (defaults match the original literals exactly).
HOSTNAME = os.environ.get('DB_HOSTNAME', '192.168.0.11')
PORT = os.environ.get('DB_PORT', '3307')
DATABASE = os.environ.get('DB_DATABASE', 'zmly_mall')
USERNAME = os.environ.get('DB_USERNAME', 'root')
PASSWORD = os.environ.get('DB_PASSWORD', '123456')

SQLALCHEMY_DATABASE_URI = 'mysql+pymysql://{}:{}@{}:{}/{}?charset=utf8'.format(
    USERNAME, PASSWORD, HOSTNAME, PORT, DATABASE)

# Recycle pooled connections after this many seconds so MySQL's
# wait_timeout never closes a connection the pool still considers live.
SQLALCHEMY_POOL_RECYCLE = 1200

# Disable the SQLAlchemy event system (saves memory; not needed here).
SQLALCHEMY_TRACK_MODIFICATIONS = False
SCHEDULER_API_ENABLED = True  # enable Flask-APScheduler's REST API

# Keep JSON response keys in insertion order instead of sorting them.
JSON_SORT_KEYS = False

# Cache lifetime for files served via send_file (e.g. exported Excel):
# near-zero so clients always re-fetch fresh content.
SEND_FILE_MAX_AGE_DEFAULT = timedelta(seconds=1)

# APScheduler job persistence: keep scheduled jobs in the same MySQL
# database the app uses, so jobs survive process restarts.
SCHEDULER_JOBSTORES = {
    'default': SQLAlchemyJobStore(url=SQLALCHEMY_DATABASE_URI),
}


# Opaque constant used as the CMS user-id key (a shared-secret-style
# token — presumably a session/cookie key; verify against callers).
# Overridable via the environment; the default preserves the original
# value, so behavior is unchanged when nothing is exported.
CMS_USER_ID = os.environ.get('CMS_USER_ID', 'jEWk*9L2LpIwWg9I@gUrjX$d1Ge9ms!p')


# 设置日志的记录等级
logging.basicConfig(level=logging.DEBUG, filemode='a')  # 调试debug级别
# 创建日志记录器，指明日志保存路径，每个日志文件的最大大小，保存日志文件个数上限
file_log_handler = RotatingFileHandler("logs/log", maxBytes=1024 * 1024 * 100, backupCount=20)
file_log_handler.setLevel(logging.DEBUG)
# 创建日志的记录的格式
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(filename)s - %(funcName)s - %(lineno)s - %(message)s')
# 为刚创建的日志记录器设置日志记录格式
file_log_handler.setFormatter(formatter)
# 为全局的日志工具对象(flask app使用的)，添加记录器
logging.getLogger().addHandler(file_log_handler)
