# -*- coding:utf-8 -*-
from __future__ import absolute_import, unicode_literals

import arrow
import requests
from celery import Celery, platforms
from raven import Client
from raven.contrib.celery import register_logger_signal, register_signal
import time
from . import conf
from .connection import InitConnectionTask, pg_insert, pg_select, pg_update
from .sync_flow import  redis_flow_pay_check_up,redis_flow_refund_check_up

'''
The sync_flow script synchronizes data from redis into the pg (PostgreSQL)
database. A problem arose: redis accumulated a large backlog, and re-running
the program processes every key and inserts it into pg. Because so many keys
are involved, the async tasks pushed to MQ can pile up into millions of
pending messages. This script therefore runs the producer side of sync_flow
as an independent task, so that when too many messages accumulate in the
queue this task can be temporarily stopped.
'''

app = Celery('sync_flow_add_task', broker=conf.MQ_BROKER)

class SyncFlowTask(InitConnectionTask):
    """Abstract base task exposing the connections used by the dispatcher tasks.

    Wraps the parent's ``prdb``/``pgdb`` connection accessors as read-only
    properties so task bodies can write ``self.prdb_1`` instead of calling
    the accessor with an index every time.
    """

    abstract = True
    # Not referenced in this module — TODO confirm it is still consumed by
    # other sync_flow code before removing.
    threshold_timestamp = None
    # camelCase field names (as stored upstream) mapped to the snake_case
    # column names used on the postgres side. Not referenced in this module.
    map_key = {
        'appCode': 'app_code',
        'chargeId': 'charge_id',
        'createdAt': 'created_at',
        'id': 'order_id',
        'orderType': 'order_type',
        'platformCode': 'platform_code',
        'refundSource': 'refund_source',
        'refundedAt': 'refunded_at',
        'serialNum': 'flow_id',
        'userId': 'user_id',
        'userPhone': 'user_phone',
        'additionalPrice': 'additional_price',
        'orderPrice': 'order_price',
        'paidAt': 'paid_at',
        'payPrice': 'pay_price'
    }

    def _parent_prdb(self, index):
        # Single delegation point to the parent's redis accessor.
        return super().prdb(index)

    @property
    def prdb_0(self):
        return self._parent_prdb(0)

    @property
    def prdb_1(self):
        return self._parent_prdb(1)

    @property
    def prdb_2(self):
        return self._parent_prdb(2)

    @property
    def pgdb(self):
        # Postgres connection bound to the 'account' database.
        return super().pgdb('account')





@app.task(base=SyncFlowTask, bind=True, ignore_result=True, max_retries=3)
def add_task(self, app_code=None, flow_key=None):
    """Dispatch sync-flow pay-check tasks, throttled to protect the MQ queue.

    Called with no arguments: fan-out phase — enqueue one ``add_task`` and
    one ``add_task_refund`` per distinct non-empty appCode found in mongo.
    Called with ``app_code``: scan redis db 1 for that app's pay-flow keys
    ("p:<app_code>:*") and enqueue one ``redis_flow_pay_check_up`` task per
    key, sleeping 60 seconds after every 10000 keys so the consumer queue
    cannot grow unbounded. ``flow_key`` is accepted for signature
    compatibility; when it is provided the task is a no-op (returns None).
    """
    if app_code is None:
        # Fan-out phase: one producer task (pay + refund) per appCode.
        for app_code in self.mdb.order_lite_list.distinct('appCode'):
            if not app_code:
                continue
            add_task.apply_async(args=[app_code], queue=conf.MQ_SYNC_FLOW_ADD_TASK)
            add_task_refund.apply_async(args=[app_code], queue=conf.MQ_SYNC_FLOW_ADD_TASK)
        return 'appCode distribution completed'

    if flow_key is not None:
        # Nothing to do when a specific flow_key is supplied.
        return None

    count = 0
    for flow_key in self.prdb_1.scan_iter("p:{}:*".format(app_code)):
        # Pause 60s after every 10000 dispatched tasks. The original
        # comment says "every 10000"; the old `count > 10000` check
        # throttled one task late, so use >=.
        if count >= 10000:
            time.sleep(60)
            count = 0
        redis_flow_pay_check_up.apply_async(args=[app_code, flow_key], queue=conf.MQ_SYNC_FLOW)
        count += 1
    return app_code, 'flowKey distribution completed'

@app.task(base=SyncFlowTask, bind=True, ignore_result=True, max_retries=3)
def add_task_refund(self, app_code=None, flow_key=None):
    """Dispatch sync-flow refund-check tasks for one appCode, throttled.

    Scans redis db 2 for the app's refund-flow keys ("r:<app_code>:*") and
    enqueues one ``redis_flow_refund_check_up`` task per key, sleeping 60
    seconds after every 10000 keys. Unlike ``add_task`` there is no fan-out
    branch here — ``add_task`` schedules this task per appCode.
    ``flow_key`` is accepted for signature compatibility; when it is
    provided the task is a no-op (returns None).
    """
    if flow_key is not None:
        return None

    count = 0
    for flow_key in self.prdb_2.scan_iter("r:{}:*".format(app_code)):
        # Pause 60s after every 10000 dispatched tasks; >= matches the
        # stated batch size (the old `> 10000` throttled one task late),
        # consistent with add_task.
        if count >= 10000:
            time.sleep(60)
            count = 0
        redis_flow_refund_check_up.apply_async(args=[app_code, flow_key], queue=conf.MQ_SYNC_FLOW)
        count += 1
    return app_code, 'flowKey distribution completed'