# -*- coding: utf-8 -*-
import json
from collections import Counter
from datetime import datetime
from time import time

import redis

from plugin.configs import configs
from plugin.db.mongo_base import MongoBase
from plugin.utils.common_utils import load_resault, dumps_resault
from pyspider.libs.pprint import pprint

# Shared Redis connection plus the queue names used throughout this module:
# `consumer_queue` holds pending download tasks, and `failed_queue`
# ("<queue>_fail") parks tasks that could not be processed.
redis_db = redis.StrictRedis(**configs['redis'])
consumer_queue = configs['redis_table']
failed_queue = consumer_queue + '_fail'

# Returns True when the file_url was NOT found in the origin database.
def bad_filter(task):
    """Return True if the task's item.file_url is absent from the callback
    Mongo collection (i.e. the download was never recorded there)."""
    target = task['callback_mongo']
    db_name = target['mongo_db']
    col_name = target['mongo_col']
    collection = MongoBase(
        target['mongo_url'],
        database=db_name,
        collection=col_name,
        appname='pyspider_download_check_{}_{}'.format(db_name, col_name),
    ).col
    return not collection.find_one({'file_url': task['item']['file_url']})


def repush_data(input_data, queue):
    """Strip Mongo bookkeeping fields from a task dict and RPUSH it onto `queue`.

    input_data: task dict; mutated in place (_id/create_time/update_time are
        removed). A value of None is silently ignored.
    queue: name of the Redis list to push onto.
    """
    if input_data is None:
        return
    # Drop the fields Mongo manages so the re-queued task is inserted fresh.
    # pop(key, None) also removes keys holding falsy values (0, '', None),
    # which the original truthy .get() checks failed to strip.
    for stale_key in ('_id', 'create_time', 'update_time'):
        input_data.pop(stale_key, None)
    redis_db.rpush(queue, dumps_resault(input_data))


def reppush_fail_data_list(fail_data_list):
    """Report how many failed tasks are candidates for re-pushing.

    The actual re-push loop is currently disabled (kept below for reference);
    only the size of the candidate list is printed.
    """
    # num = 0
    # for data in fail_data_list:
    #     if data:
    #         data['retry'] = 0
    #         repush_data(data, configs['redis_table'])
    #         num += 1
    # BUG FIX: fail_data_list is a Python list (see the need_push_list caller),
    # but redis_db.llen() expects a Redis key name and raises redis.DataError
    # on a list argument -- use len() to report the in-memory count.
    print('fail_data_list', len(fail_data_list))
    # print('repush ', num)


def show_data_llist_info(data_list):
    """Print a frequency table (via Counter) for selected task fields.

    Fields are dotted paths into each task dict; *time* components are
    bucketed to the hour before counting. Fields that resolve to a falsy
    value anywhere along the path are skipped for that task.
    """
    hour_fmt = '%Y-%m-%d %H'
    fields = [
        'fetch_status_code',
        'extension',
        'save_type',
        'reason',
        'source_identity',
        'callback_mongo.mongo_col',
        'item.publish',
        'item.column',
        'item.source_type',
        # 'item.file_url',
        'crawl_time',
    ]

    def resolve(task, dotted):
        # Walk the dotted path; normalize time-like parts to an hour bucket.
        # Returns None as soon as any level is falsy.
        value = task
        for part in dotted.split('.'):
            value = value.get(part)
            if 'time' in part and value:
                if isinstance(value, datetime):
                    value = value.strftime(hour_fmt)
                elif isinstance(value, int):
                    value = datetime.fromtimestamp(value).strftime(hour_fmt)
            if not value:
                return None
        return value

    buckets = {name: [] for name in fields}
    for task in data_list:
        for name in fields:
            value = resolve(task, name)
            if value is not None:
                buckets[name].append(value)

    for name in fields:
        values = buckets.get(name)
        if values:
            print(name)
            pprint(dict(Counter(values)))
            print()


def repair_fail_task(check_fail=True, distinct_fail_task=False, repair_num=20000):
    """Inspect (and optionally de-duplicate/re-queue) download tasks in Redis.

    check_fail: True -> read from the fail queue; False -> read from the
        consumer queue, in which case the whole queue is scanned and
        repair_num is overwritten with the queue length.
    distinct_fail_task: True -> destructively RPOP up to repair_num entries,
        de-duplicate by item.file_url (newest crawl_time wins) and RPUSH the
        survivors back; False -> non-destructive LRANGE peek only.
    repair_num: maximum number of queue entries to examine (fail-queue mode).
    """
    # distinct_fail_task = True, repair_num = repair_num, check_fail = False
    # data_list = redis_db.lrange(failed_queue, 179159 - redis_db.llen(failed_queue), -1)
    print('{} start----------------'.format(datetime.now()))
    _start_time = time()
    data_list = []
    check_queue = failed_queue if check_fail else consumer_queue
    if not check_fail:
        # Consumer-queue mode scans everything currently queued.
        repair_num = redis_db.llen(check_queue)
    if distinct_fail_task:
        # Destructive read: pop entries so duplicates can be dropped for good.
        for _ in range(repair_num):
            pop_task = redis_db.rpop(check_queue)
            if pop_task is None:
                break
            data_list.append(pop_task)
    else:
        # Non-destructive peek at the newest repair_num entries.
        data_list = redis_db.lrange(check_queue, -repair_num, -1)

    # NOTE(review): get_data_time can be ~0.0 on an empty/instant read, which
    # would make the rate prints below raise ZeroDivisionError -- confirm this
    # is acceptable for an operator-run script.
    get_data_time = time() - _start_time
    task_list = []
    bad_data_num = 0

    # Deserialize; count (and drop) payloads that are not valid JSON.
    for data in data_list:
        if data is None:
            continue
        try:
            data = load_resault(data)
        except json.decoder.JSONDecodeError:
            bad_data_num += 1
            continue
        task_list.append(data)
    # De-duplicate by file_url, keeping the task with the newest crawl_time.
    real_list = []
    real_set = set()
    for task in sorted(task_list, key=lambda t: -t['crawl_time']):
        item = task['item']
        file_url = item['file_url']

        if file_url not in real_set:
            real_list.append(task)
            real_set.add(file_url)
    if not check_fail:
        # Consumer-queue mode: keep only tasks whose file is missing from the
        # origin Mongo, report stats, optionally push the de-duplicated set
        # back (oldest crawl_time first), then stop.
        real_list = list(filter(bad_filter, real_list))
        show_data_llist_info(real_list)
        print('data_list', len(data_list))
        print('task_list', len(task_list))
        print('real_list', len(real_list))
        print('bad_data_num', bad_data_num)
        if distinct_fail_task:
            _start_push_time = time()
            for task in sorted(real_list, key=lambda t: t['crawl_time']):
                redis_db.rpush(check_queue, dumps_resault(task))
            push_data_time = time() - _start_push_time
            print('redis push num:{} take:{} rate:{}'.format(len(real_list), push_data_time,
                                                             len(real_list) / push_data_time))
        print('redis get num:{} take:{} rate:{}'.format(len(data_list), get_data_time,
                                                         len(data_list) / get_data_time))
        print('{} finished! take:{}----------------'.format(datetime.now(), time() - _start_time))
        return
    # Fail-queue mode: tasks still missing from Mongo are the real failures.
    fail_task_list = list(filter(bad_filter, real_list))
    # fail_task_list = real_list
    # fail_task_list = list(filter(lambda t: not t['item']['file_url'].startswith('http://www.neeq.com.cnhttp://www.neeq.com.cn/uploads/1/file/public/'), fail_task_list))
    # show_data_llist_info(fail_task_list)
    # The commented filters below are ad-hoc drill-downs kept for reuse.
    show_data_llist_info(list(filter(lambda t: t['save_type'] == 'file', fail_task_list)))
    # show_data_llist_info(list(filter(lambda t: t['save_type'] == 'file' and t.get('fetch_status_code') in [200], fail_task_list)))
    # show_data_llist_info(list(filter(lambda t: t['extension'] in ['.xlsx', '.xls', '.doc', '.docx'], fail_task_list)))
    # show_data_llist_info(list(filter(lambda t: t['source_identity'] == 'notice_bond_ccxi', fail_task_list)))
    # show_data_llist_info(list(filter(lambda t: t.get('reason', '') == 'upload file error#requests download fail', fail_task_list)))
    # show_data_llist_info(list(filter(lambda t: t.get('reason', '') == 'upload file error#check file intergtiry fail', fail_task_list)))
    # show_data_llist_info(list(filter(lambda t: t.get('fetch_status_code', 0) != 200, fail_task_list)))

    print('data_list', len(data_list))
    print('task_list', len(task_list))
    print('real_list', len(real_list))
    print('fail_task_list', len(fail_task_list))
    if distinct_fail_task:
        # Push the de-duplicated failures back, oldest crawl_time first, so
        # queue order roughly matches crawl order again.
        _start_push_time = time()
        for task in sorted(fail_task_list, key=lambda t: t['crawl_time']):
            redis_db.rpush(check_queue, dumps_resault(task))
        push_data_time = time() - _start_push_time
        print('redis push num:{} take:{} rate:{}'.format(len(fail_task_list), push_data_time,
                                                         len(fail_task_list) / push_data_time))

    # Optional immediate re-push of a hand-picked subset (disabled by default).
    need_push_list = []
    # need_push_list =list(filter(lambda t: t['save_type'] == 'file' and t.get('fetch_status_code') in [599], fail_task_list))
    if need_push_list:
        reppush_fail_data_list(need_push_list)
    print('redis get num:{} take:{} rate:{}'.format(len(data_list), get_data_time,
                                                    len(data_list) / get_data_time))
    print('{} finished! take:{}----------------'.format(datetime.now(), time()-_start_time))


def from_mongo_export_fail_to_redis():
    """Re-export unfinished, retry-exhausted tasks from Mongo into the Redis fail queue.

    Tasks whose file_url is still missing from the callback collection are
    re-queued; tasks that turn out to be saved already are marked is_done=1.
    """
    mongo_url = configs['mongo_or']['mongo_uri']
    # BUG FIX: database and collection were swapped -- the code passed
    # database='task' and used the configured DB name as the collection.
    # The original MongoClient reference shows the intended layout:
    # MongoClient(uri, maxIdleTimeMS=10 * 1000)[configs['mongo_or']['mongo_db']]['task']
    mongo_db = configs['mongo_or']['mongo_db']
    mongo_col = 'task'
    save_or_task = MongoBase(mongo_url, database=mongo_db, collection=mongo_col,
                             appname='pyspider_download_check_{}_{}'.format(mongo_db, mongo_col)).col
    gt_timestamp = 1550221210  # only consider tasks crawled after this epoch second
    export_cond = {
        "is_done": 0,
        # "retry": {'$ne': 11},
        "retry": 11,  # retry budget exhausted
        'crawl_time': {'$gt': gt_timestamp},
        # 'source_identity': {'$nin': ['notice_ut_ratings', 'notice_pyrating']}
    }
    # NOTE(review): Collection.count() and update() are removed in PyMongo 4;
    # switch to count_documents()/update_one() if the driver is upgraded.
    total_find = save_or_task.count(export_cond)
    save_num = 0
    for fail_task in save_or_task.find(export_cond):
        if bad_filter(fail_task):
            # Still missing from the origin DB: park it in the fail queue.
            # repush_data(fail_task, consumer_queue)
            repush_data(fail_task, failed_queue)
        else:
            save_num += 1
            save_or_task.update({'_id': fail_task['_id']}, {'$set': {'is_done': 1}})
    print('total_find: {} had_save: {}'.format(total_find, save_num))


def pop_error_task_keyword(**kwargs):
    """Drain the consumer queue, count tasks matching the given key=value
    filters, and push non-matching tasks back onto the queue.

    Payloads that cannot be deserialized are moved to the fail queue.
    NOTE(review): non-matching tasks are put back with LPUSH after an LPOP
    drain, which reverses their relative order in the queue -- confirm that
    ordering does not matter to consumers.
    """
    pop_info = {}
    all_raw_data = []
    # Drain the queue completely first so re-pushed items are not re-read.
    while True:
        raw_data = redis_db.lpop(consumer_queue)
        if raw_data is None:
            break
        all_raw_data.append(raw_data)
    total_num = len(all_raw_data)
    for index, raw_data in enumerate(all_raw_data, 1):
        print(total_num, index)
        try:
            data = load_resault(raw_data)
        except Exception:
            # Fundamentally broken payload, cannot be recovered: park it.
            redis_db.rpush(failed_queue, raw_data)
            continue
        keep = False
        for key, value in kwargs.items():
            uniq_id = '{}={}'.format(key, value)
            pop_info.setdefault(uniq_id, 0)
            if data.get(key) == value:
                pop_info[uniq_id] += 1
                print('{}, {}'.format(uniq_id, data))
            else:
                keep = True
        # BUG FIX: the original LPUSHed once per non-matching key, duplicating
        # the task in the queue whenever several kwargs were given. Push the
        # raw payload back at most once.
        if keep:
            redis_db.lpush(consumer_queue, raw_data)

