# Configuration management file

import os
import json
import logging
import logging.handlers as handlers

from datetime import timedelta
from celery import platforms
from kombu import Exchange, Queue

from .constants import (CELERY_EXCHANGE_TASKTEST, CELERY_ROUTING_KEY_TASKTEST)

# Project root (two levels above this file).
basedir = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
# Log directory root; overridable via the BASE_LOG_DIR environment variable.
base_log_dir = os.environ.get("BASE_LOG_DIR", "/app/log")
project_log_dir = os.path.join(base_log_dir, "flaskz")

# exist_ok avoids the race between an exists() check and makedirs() when
# several processes (web app + celery workers) import this module concurrently.
os.makedirs(project_log_dir, exist_ok=True)

class Config(object):
    """Base configuration shared by all environments.

    Subclasses override individual values and extend ``init_app`` to load
    environment-specific settings from the JSON config file.
    """

    # --- Flask core ---
    SECRET_KEY = 'hard string to guess'
    DEBUG = True
    CSRF_ENABLED = True
    FLASK_LOG_LEVEL = 'DEBUG'

    # --- SQLAlchemy ---
    SQLALCHEMY_COMMIT_ON_TEARDOWN = True
    SQLALCHEMY_TRACK_MODIFICATIONS = True

    # --- Application secrets ---
    MD5_SALT = 'md5 salt'

    # Path of the JSON file holding per-environment overrides.
    CONFIG_PATH = os.path.join(basedir, 'config.json')

    # --- Redis / Sentinel defaults ---
    REDIS_SENTINEL_ENABLE = True
    REDIS_SENTINEL_NODES = [("172.16.1.150", 26378)]
    REDIS_SENTINEL_MASTER_NAME = 'mymaster'
    REDIS_SENTINEL_SOCKET_TIMEOUT = 1
    REDIS_SENTINEL_PASSWD = 'pyredis'
    REDIS_SENTINEL_CACHE_DB = 5
    REDIS_HOST = 'localhost'
    REDIS_PORT = 6379

    # --- Celery defaults (populated by subclasses / runtime config) ---
    CELERY_BROKER_URL = ""
    CELERY_RESULT_BACKEND = ""
    CELERY_TIMEZONE = 'Asia/Shanghai'
    CELERY_RESULT_BACKEND_TRANSPORT_OPTIONS = ""

    # Addresses that receive administrative notification emails.
    ADMIN_EMAILS = []

    @classmethod
    def init_app(cls, app, cfg_path=None):
        """Initialization hook; the base class performs no work."""
        pass

class DevelopmentConfig(Config):
    """Development configuration: debug enabled, local host, Celery
    queue/exchange wiring for the task-test queue.
    """

    DEBUG = True
    HOST = 'http://127.0.0.1:5000'

    CELERY_TIMEZONE = 'Asia/Shanghai'
    CELERY_DEFAULT_QUEUE = 'default'
    CELERY_DEFAULT_EXCHANGE_TYPE = 'direct'
    CELERY_DEFAULT_ROUTING_KEY = 'default'

    default_exchange = Exchange('default', type='direct')
    tasktest_exchange = Exchange(CELERY_EXCHANGE_TASKTEST, type='direct')

    CELERY_QUEUES = (
        Queue('default', default_exchange, routing_key='default'),
        Queue(CELERY_EXCHANGE_TASKTEST, tasktest_exchange, routing_key=CELERY_ROUTING_KEY_TASKTEST),
    )

    @classmethod
    def init_app(cls, app, cfg_path=Config.CONFIG_PATH):
        """Load the 'development' section of the JSON config onto the class
        and attach a daily-rotating error-log handler to the app logger.

        :param app: the Flask application whose logger gets the file handler
        :param cfg_path: path to the JSON config file (defaults to Config.CONFIG_PATH)
        :raises KeyError: if the config file lacks a 'development' section
                          or a 'DATABASE_URL' entry
        """
        Config.init_app(app, cfg_path=cfg_path)
        cls.CONFIG_PATH = cfg_path
        with open(cls.CONFIG_PATH) as f:
            config_dict = json.load(f)['development']
        for k, v in config_dict.items():
            setattr(cls, k, v)
        # Flask-SQLAlchemy reads SQLALCHEMY_DATABASE_URI; the original code set
        # SQLALCHEMY_DATABASE_URL (a typo — CeleryConfig uses URI). Set both so
        # any existing reader of the old name keeps working.
        cls.SQLALCHEMY_DATABASE_URI = config_dict['DATABASE_URL']
        cls.SQLALCHEMY_DATABASE_URL = cls.SQLALCHEMY_DATABASE_URI

        # Rotate the error log daily, keeping 30 days of history.
        log_file = os.path.join(project_log_dir, "centralstock_error.log")
        file_handler = handlers.TimedRotatingFileHandler(
            log_file, when="D", interval=1, backupCount=30, encoding='utf-8')
        file_handler.setLevel(logging.DEBUG)
        file_handler.setFormatter(logging.Formatter('\n%s\n%s' % (
            '-' * 30, '%(asctime)s %(levelname)s: %(message)s')))
        app.logger.addHandler(file_handler)


class CeleryConfig(Config):
    """Celery worker configuration: queue/exchange wiring, the beat schedule,
    and worker tuning knobs. Runtime values are loaded from the 'celery'
    section of the JSON config file by ``init_app``.
    """

    DEBUG = False
    CELERY_TIMEZONE = 'Asia/Shanghai'
    # Messages that match no explicit route end up in the default queue.
    CELERY_DEFAULT_QUEUE = 'default'
    CELERY_DEFAULT_EXCHANGE_TYPE = 'direct'
    CELERY_DEFAULT_ROUTING_KEY = 'default'
    CELERY_RESULT_BACKEND_TRANSPORT_OPTIONS = None

    REDIS_SENTINEL_ENABLE = True
    REDIS_SENTINEL_SOCKET_TIMEOUT = 1
    REDIS_SENTINEL_CACHE_DB = 5
    REDIS_DB = 7

    # Results expire quickly: these tasks only need to execute, not return data.
    CELERY_TASK_RESULT_EXPIRES = 300
    # Number of messages each worker prefetches from the broker at a time.
    CELERYD_PREFETCH_MULTIPLIER = 200
    # Important: can prevent deadlocks in some situations.
    CELERYD_FORCE_EXECV = True
    CELERY_DISABLE_RATE_LIMITS = True

    default_exchange = Exchange('default', type='direct')
    tasktest_exchange = Exchange(CELERY_EXCHANGE_TASKTEST, type='direct')

    CELERY_QUEUES = (
        Queue('default', default_exchange, routing_key='default'),
        Queue(CELERY_EXCHANGE_TASKTEST, tasktest_exchange, routing_key=CELERY_ROUTING_KEY_TASKTEST),
    )

    # Periodic tasks executed by celery beat.
    CELERYBEAT_SCHEDULE = {
        'sync-sale-stock': {
            'task': 'stock.tasks.sale_stock_sync.sync_changed_sale_stock',
            'schedule': timedelta(seconds=15)
        },
    }

    @classmethod
    def init_app(cls, app, cfg_path=Config.CONFIG_PATH):
        """Load the 'celery' section of the JSON config onto the class and
        attach a daily-rotating error-log handler to the app logger.

        :param app: the Flask application whose logger gets the file handler
        :param cfg_path: path to the JSON config file (defaults to Config.CONFIG_PATH)
        :raises KeyError: if the config file lacks a 'celery' section or one of
                          SECRET_KEY / MD5_SALT / DATABASE_URL
        """
        cls.CONFIG_PATH = cfg_path
        Config.init_app(app, cfg_path=cfg_path)
        with open(cls.CONFIG_PATH) as f:
            config_dict = json.load(f)['celery']
        for k, v in config_dict.items():
            # Was a bare print(); log at debug level so worker stdout stays clean.
            app.logger.debug("celery config: %s=%s", k, v)
            setattr(cls, k, v)
        cls.SECRET_KEY = config_dict['SECRET_KEY']
        cls.MD5_SALT = config_dict['MD5_SALT']
        cls.SQLALCHEMY_DATABASE_URI = config_dict['DATABASE_URL']

        # Rotate the error log daily, keeping 30 days of history.
        log_file = os.path.join(project_log_dir, "centralstock_error.log")
        file_handler = handlers.TimedRotatingFileHandler(
            log_file, when="D", interval=1, backupCount=30, encoding='utf-8')
        file_handler.setLevel(logging.INFO)
        file_handler.setFormatter(logging.Formatter('\n%s\n%s' % (
            '-' * 30, '%(asctime)s %(levelname)s: %(message)s')))
        app.logger.addHandler(file_handler)


# Registry mapping an environment name to its configuration class.
config = {
    'development': DevelopmentConfig,
    'celery': CeleryConfig,
    'default': DevelopmentConfig,
}