# -*- coding: utf-8 -*-
# @Time    : 2020/6/4 10:39
# @Author  : ZSQ
# @Email   : zsq199170918@163.com
# @FileName: case_id_dups.py
# @Software: PyCharm
import logging
from redis import StrictRedis
from pymongo import MongoClient
from FDC_spider.settings import MONGODB
from FDC_spider.utils.redis_pool import POOL

# Optional settings: fall back to safe defaults when they are not configured.
# Catch ImportError specifically — a bare `except:` would also hide real
# problems such as a SyntaxError in settings.py or KeyboardInterrupt.
try:
    from FDC_spider.settings import SFPM_PROVINCE
except ImportError:
    # No province filter configured; get_province_and_city() then defaults to ['全国'].
    SFPM_PROVINCE = None
try:
    from FDC_spider.settings import SFPM_CITY
except ImportError:
    # No per-province city filter configured.
    SFPM_CITY = dict()

logger = logging.getLogger(__name__)


def get_province_and_city():
    """Validate and return the configured province/city filters.

    Returns:
        tuple[list, dict]: ``(province_li, city_dict)`` where ``province_li``
        is ``SFPM_PROVINCE`` (defaulting to ``['全国']`` when unset) and
        ``city_dict`` maps a province to a non-empty list of its cities.
        On any misconfiguration an error is logged and ``([], {})`` is
        returned so the caller can bail out cleanly.
    """
    province_li = SFPM_PROVINCE if SFPM_PROVINCE is not None else ['全国']
    city_dict = SFPM_CITY
    # Explicit checks instead of `assert`: assertions are stripped under
    # `python -O`, which would silently disable all validation here.
    valid = isinstance(province_li, list) and isinstance(city_dict, dict)
    if valid:
        for province, city_li in city_dict.items():
            # Every city-filtered province must also appear in the province
            # list, and its city list must be a non-empty list.
            if not (province in province_li and city_li and isinstance(city_li, list)):
                valid = False
                break
    if not valid:
        logger.error('SFPM_PROVINCE、SFPM_CITY 参数配置出错，请重新配置')
        return list(), dict()
    return province_li, city_dict


def _build_dup_pipeline(province_li, city_dict):
    """Build the aggregation pipeline selecting cases in the configured regions.

    Provinces listed in ``city_dict`` are restricted to their city list; the
    remaining provinces in ``province_li`` match all their cities.  The
    pipeline groups by (caseId, province, city) and collects the distinct
    caseIds into a single document: ``{'_id': None, 'dups': [...]}``.
    """
    match_li = []
    # Work on a copy: the original implementation mutated the caller's list.
    plain_provinces = list(province_li)
    for province, city_li in city_dict.items():
        plain_provinces.remove(province)
        match_li.append({'province': province, 'city': {'$in': city_li}})
    for province in plain_provinces:
        match_li.append({'province': province})
    return [
        {'$match': {'$or': match_li}},
        {'$group': {
            # BUG FIX: the original used the literal string 'city' instead of
            # the field reference '$city', so grouping ignored the city field.
            '_id': {'caseId': '$caseId', 'province': '$province', 'city': '$city'},
        }},
        {'$group': {'_id': None, 'dups': {'$addToSet': '$_id.caseId'}}},
    ]


def get_dup_case_id_li():
    """Collect the distinct case IDs for the configured regions into Redis.

    Aggregates the ``sfpm.case`` Mongo collection (all cases when the filter
    contains '全国', otherwise only the configured provinces/cities) and
    rewrites the Redis set ``sfpm_case_id`` with the resulting case IDs.

    Returns:
        bool: True on success; False when the filter configuration is invalid
        or any Mongo/Redis operation fails (the failure is logged).
    """
    host = MONGODB.get('host', '127.0.0.1')
    port = int(MONGODB.get('port', 27017))
    mongo_cli = MongoClient(host=host, port=port)
    mongo_col = mongo_cli['sfpm']['case']
    province_li, city_dict = get_province_and_city()
    try:
        if not province_li:
            # Invalid configuration already logged by get_province_and_city().
            return False
        if '全国' in province_li:
            # Nationwide: no $match stage, dedupe over every case.
            query_pipeline = [
                {'$group': {'_id': {'caseId': '$caseId'}, }},
                {'$group': {'_id': None, 'dups': {'$addToSet': '$_id.caseId'}}},
            ]
        else:
            query_pipeline = _build_dup_pipeline(province_li, city_dict)
        case_dups_li = list(mongo_col.aggregate(query_pipeline, allowDiskUse=True))
        # An empty collection yields no result document at all.
        case_id_li = case_dups_li[0]['dups'] if case_dups_li else []
        redis_conn = StrictRedis(connection_pool=POOL)
        redis_conn.delete('sfpm_case_id')
        if case_id_li:
            # Single variadic SADD instead of one round-trip per id.
            redis_conn.sadd('sfpm_case_id', *case_id_li)
        redis_conn.close()
        return True
    except Exception:
        # Was a silent bare `except:`; at least record why the refresh failed.
        logger.exception('刷新 sfpm_case_id 失败')
        return False
    finally:
        mongo_cli.close()


if __name__ == '__main__':
    # Manual entry point for ad-hoc runs: uncomment to rebuild the Redis
    # dedup set from MongoDB.  Intentionally disabled by default.
    # get_dup_case_id_li()
    pass
