# -*- coding:utf-8 -*-
from datetime import datetime, timedelta
from pymongo import MongoClient
from django.core.cache import cache
from alldata.tools.w_usersum import obtain_user_shop
from tools.w_decorator import performance

import threading
import pymysql
import configparser
import re

class chart_userdata(object):
    """Build and cache per-company chart data for week/month/year periods.

    Alarm counts come from a MongoDB ``alarms`` collection and cumulative
    registered-user totals from MySQL; results are stored in the Django cache
    (8-hour timeout) and refreshed by named background threads so repeated
    web requests hit the cache instead of the databases.
    """

    def __init__(self):
        # Windows-style relative path to the ini file holding DB credentials.
        self.config_file = r'conf\config.ini'
        self.config = configparser.ConfigParser()
        self.config.read(self.config_file, encoding="utf-8")
        # (offset, length) pairs: the slice of the stringified createDate used
        # as the aggregation bucket key per period type — day-of-month for
        # week/month, month number for year.
        self.location_step = [(8, 2), (8, 2), (5, 2)]
        # Look-back length in days per period type (kept for compatibility;
        # the actual ranges come from get_datetime()).
        self.date_step = [-7, -31, -366]
        # Cache-key prefixes, one per period type.
        self.date_type = ["week_data", "month_data", "year_data"]
        # DATE_FORMAT tag used when grouping the MySQL data per period type.
        self.date_tag = ["d", "d", "Y%m"]
        # Matches any defenceType containing the product name "臻e盾".
        self.regex = re.compile(".*臻e盾.*", re.IGNORECASE)
        self.tod_obtain_data = obtain_user_shop()

    def mongodb_alarm_con(self):
        """Connect to the alarm MongoDB using the [alarm_mongodb] section.

        Returns the ``security`` database handle, or None (after printing a
        message) when the connection or authentication fails.
        """
        try:
            mac_host = self.config.get('alarm_mongodb', 'host')
            mac_port = self.config.get('alarm_mongodb', 'port')
            mac_dbname = self.config.get('alarm_mongodb', 'dbname')
            mac_user = self.config.get('alarm_mongodb', 'username')
            mac_pwd = self.config.get('alarm_mongodb', 'passwd')
            # Authenticate separately instead of embedding credentials in the
            # URI so passwords containing special characters work.
            # NOTE(review): Database.authenticate() was removed in pymongo 4;
            # this assumes pymongo < 4 — confirm the pinned version.
            mac_mongo_client = MongoClient(mac_host)
            mac_mongo_client[mac_dbname].authenticate(mac_user, mac_pwd, mac_dbname)
            # The chart queries read from the `security` database.
            mac_newdb = mac_mongo_client.security
            return mac_newdb
        except Exception:
            print('alarmserver mongodb数据库连接不上')

    def mysql_con(self):
        """Open a MySQL connection from the [mysql] section and return a cursor.

        Returns None (after printing a message) when the connection fails.
        NOTE(review): only the cursor is returned, so the connection object
        can never be explicitly closed/committed by callers.
        """
        try:
            host = self.config.get('mysql', 'host')
            port = self.config.get('mysql', 'port')
            dbname = self.config.get('mysql', 'dbname')
            user = self.config.get('mysql', 'username')
            pwd = self.config.get('mysql', 'passwd')
            # BUGFIX: positional connect args were removed in PyMySQL >= 1.0,
            # and the configured port was read but never used. Use keyword
            # arguments and honour the port.
            db = pymysql.connect(host=host, port=int(port), user=user,
                                 password=pwd, database=dbname, charset='utf8')
            cursor = db.cursor()
            return cursor
        except Exception:
            print('数据库连接失败')

    # Return True when no live thread carries `threadname`, i.e. it is safe
    # to start a new background refresh under that name.
    def check_Thread(self, threadname):
        thread_list = threading.enumerate()
        for ct_i in thread_list:
            if ct_i.name == threadname:
                print("进程正在运行，请等待")
                return False
        print("进程结束")
        return True

    def get_datetime(self):
        """Return [(start, end)] datetime ranges for week/month/year charts.

        mstartdate: first day of the previous month
        enddate:    last day of the previous month at 23:59:59 (shared by the
                    month and year ranges)
        ystartdate: day 1 of the same month in the previous year
        wstartdate: Monday 00:00:00 of the previous ISO week
        wenddate:   Monday 00:00:00 of the current ISO week
        """
        today = datetime.today()
        year = today.year
        month = today.month
        days = today.strftime('%Y-%m-%d') + " 00:00:00"
        week = today.isoweekday()

        # BUGFIX: datetime(year, month - 1, 1) raised ValueError every
        # January (month 0); roll over to December of the previous year.
        if month == 1:
            mstartdate = datetime(year - 1, 12, 1)
        else:
            mstartdate = datetime(year, month - 1, 1)
        ystartdate = datetime(year - 1, month, 1)
        enddate = datetime(year, month, 1, 23, 59, 59) + timedelta(days=-1)
        wstartdate = datetime.strptime(days, '%Y-%m-%d %H:%M:%S') + timedelta(days=-(week + 6))
        wenddate = datetime.strptime(days, '%Y-%m-%d %H:%M:%S') + timedelta(days=-(week - 1))
        all_date_time = [(wstartdate, wenddate), (mstartdate, enddate), (ystartdate, enddate)]
        return all_date_time

    def getBetweenDM(self, d_start, d_end, d_status):
        """Return the expected x-axis bucket labels for one period.

        d_status 0/1 (week/month): day-of-month of every day in
        [d_start, d_end); d_status >= 2 (year): the 12 month numbers starting
        at d_start's month, wrapping past December.
        """
        d_date_list = []
        if d_status < 2:
            while d_start < d_end:
                d_date_list.append(int(d_start.strftime("%d")))
                d_start += timedelta(days=1)
        else:
            d_month = d_start.month
            while len(d_date_list) != 12:
                if d_month > 12:
                    d_month = d_month - 12
                d_date_list.append(d_month)
                d_month += 1
        return d_date_list

    def to_obtain_data(self, tod_list_id):
        """Ensure the user/shop summary for period `tod_list_id` is cached,
        then refresh the remaining periods in a background thread.
        """
        tod_time = self.get_datetime()
        tod_cache_name = self.date_type[tod_list_id] + '_' + '01'
        tod_cache_data = cache.get(tod_cache_name)
        if tod_cache_data is None:
            # Only the Excel-shaped payload is cached; the other two return
            # values are recomputed on demand elsewhere.
            user_shop_return, user_shop_excel, user_shop_list = self.tod_obtain_data.modify_data(
                tod_time[tod_list_id][0].strftime('%Y%m%d'),
                tod_time[tod_list_id][1].strftime('%Y%m%d'))
            cache.set(tod_cache_name, user_shop_excel, timeout=28800)
            tod_t = threading.Thread(target=self.to_obtain_alldata, args=(tod_list_id,))
            tod_t.start()
        else:
            print("%s 数据已经缓存" % tod_cache_name)
        return

    def to_obtain_alldata(self, tod_list_id):
        """Background refresh of the user/shop summaries for the other two
        period types; the one just refreshed by to_obtain_data is skipped.
        """
        toa_time = self.get_datetime()
        for toa_i in range(len(self.date_type)):
            if toa_i != tod_list_id:
                toa_cache_name = self.date_type[toa_i] + '_' + '01'
                # NOTE(review): cache.ttl() is a django-redis extension, not
                # part of the core Django cache API — confirm the backend.
                if cache.ttl(toa_cache_name) < 28800:
                    user_shop_return, user_shop_excel, user_shop_list = self.tod_obtain_data.modify_data(
                        toa_time[toa_i][0].strftime('%Y%m%d'),
                        toa_time[toa_i][1].strftime('%Y%m%d'))
                    cache.set(toa_cache_name, user_shop_excel, timeout=28800)

    def cloth_removal(self, cr_id, cr_serial, cr_companyid, cr_all_companyid):
        """Cache per-bucket alarm counts (type 2, defenceType matching the
        臻e盾 regex) for one company and period, then kick off background
        refreshes for all companies ("all_cr") and user totals ("edun_s").
        """
        cr_newdb = self.mongodb_alarm_con()
        cr_cache_name = self.date_type[cr_id] + '_' + str(cr_companyid) + '_' + cr_serial
        cr_cache_sum_name = self.date_type[cr_id] + '_' + str(cr_companyid) + '_' + "mgdsum"
        cr_cache_data = cache.get(cr_cache_name)
        cr_time = self.get_datetime()
        cr_all_list = self.getBetweenDM(cr_time[cr_id][0], cr_time[cr_id][1], cr_id)
        cr_sum_data = 0
        if cr_cache_data is None:
            cr_data_json = {}
            # Group alarms by the createDate slice (see self.location_step)
            # and count per bucket.
            for cr_i in cr_newdb.alarms.aggregate([{'$match': {"companyId": cr_companyid, "type": 2, "createDate": {'$gte': cr_time[cr_id][0], '$lte': cr_time[cr_id][1]}, "defenceType": {"$regex": self.regex}}}, {"$project": {"new_time_stamp": {"$substr": ["$createDate", self.location_step[cr_id][0], self.location_step[cr_id][1]]}}}, {'$group': {'_id': '$new_time_stamp', 'count': {'$sum': 1}}}]):
                cr_data_json[int(cr_i['_id'])] = cr_i['count']
                cr_sum_data += cr_i['count']
                cr_all_list.pop(cr_all_list.index(int(cr_i['_id'])))
            # Zero-fill buckets that saw no alarms.
            if len(cr_all_list) > 0:
                for cr_n in cr_all_list:
                    cr_data_json[cr_n] = 0
            cache.set(cr_cache_sum_name, cr_sum_data, timeout=28800)
            cache.set(cr_cache_name, cr_data_json, timeout=28800)
            if self.check_Thread("all_cr"):
                t = threading.Thread(target=self.cloth_removal_all, args=(cr_companyid, cr_all_companyid, cr_serial,), name="all_cr")
                t.start()
        else:
            print('%s 数据已经缓存' % cr_cache_name)
        if self.check_Thread("edun_s"):
            cr_t = threading.Thread(target=self.edun_usersum, args=(cr_id, cr_serial, cr_companyid, cr_all_companyid,), name="edun_s")
            cr_t.start()
        return

    def cloth_removal_all(self, cra_companyid, cra_all_companyid, cra_serial):
        """Background refresh: recompute per-bucket alarm counts and the
        per-company sums for every company in `cra_all_companyid` and every
        period type.
        """
        cra_newdb = self.mongodb_alarm_con()
        cra_time = self.get_datetime()

        for cra_i in range(len(self.date_type)):
            cra_sum_all_json = {}
            cra_sum_json = {}
            cra_cache_sum_name = self.date_type[cra_i] + '_' + "mgdsum"
            for cra_k, cra_v in cra_all_companyid.items():
                cra_cache_name = self.date_type[cra_i] + '_' + str(cra_v) + '_' + cra_serial
                if cache.ttl(cra_cache_name) < 26000:
                    cra_all_list = self.getBetweenDM(cra_time[cra_i][0], cra_time[cra_i][1], cra_i)
                    cra_data_json = {}
                    cra_data_sum = 0
                    for cra_x in cra_newdb.alarms.aggregate([{'$match': {"companyId": cra_v, "type": 2, "createDate": {'$gte': cra_time[cra_i][0], '$lte': cra_time[cra_i][1]}, "defenceType": {"$regex": self.regex}}}, {"$project": {"new_time_stamp": {"$substr": ["$createDate", self.location_step[cra_i][0], self.location_step[cra_i][1]]}}}, {'$group': {'_id': '$new_time_stamp', 'count': {'$sum': 1}}}]):
                        cra_data_json[int(cra_x['_id'])] = cra_x['count']
                        cra_data_sum += cra_x['count']
                        cra_all_list.pop(cra_all_list.index(int(cra_x['_id'])))
                    cra_sum_json[cra_k] = cra_data_sum
                    # Zero-fill buckets that saw no alarms.
                    if len(cra_all_list) > 0:
                        for cra_n in cra_all_list:
                            cra_data_json[cra_n] = 0
                    cache.set(cra_cache_name, cra_data_json, timeout=28800)
                else:
                    cra_get_data = cache.get(cra_cache_name)
                    # ROBUSTNESS: the entry may expire between the ttl check
                    # and the get; treat a miss as zero alarms.
                    cra_sum_json[cra_k] = sum(cra_get_data.values()) if cra_get_data else 0
            cra_sum_all_json[self.date_type[cra_i]] = cra_sum_json
            cache.set(cra_cache_sum_name, cra_sum_all_json, timeout=30000)

    def edun_usersum(self, eus_id, eus_serial, eus_companyid, eus_all_companyid):
        """Cache the cumulative registered-user count per bucket for one
        company and period.

        Each bucket holds the base total (users registered before the period
        start) plus all increments up to and including that bucket, so the
        series is a running total; empty buckets copy the previous value.
        """
        eus_cache_name = self.date_type[eus_id] + '_' + str(eus_companyid) + '_' + eus_serial + '_' + 'sql'
        eus_cache_data = cache.get(eus_cache_name)
        if eus_cache_data is None:
            eus_time = self.get_datetime()
            eus_olddate = eus_time[eus_id][0].strftime("%Y%m%d")
            eus_newdate = (eus_time[eus_id][1] + timedelta(days=-1)).strftime("%Y%m%d")
            eus_beforedate = (eus_time[eus_id][0] + timedelta(days=-1)).strftime("%Y%m%d")
            # SECURITY: coerce the caller-supplied company id through int()
            # before interpolating it into SQL; the dates are generated
            # locally and safe.
            eus_cid = str(int(eus_companyid))
            eus_sql_inc = "SELECT RIGHT (t.newdata,2), COUNT(1) AS num FROM y_info_account a, (SELECT DATE_FORMAT(b.createDate, '%" + self.date_tag[eus_id] + "') AS newdata, b.accountId FROM s_shop c, c_account_shop b WHERE c.shopId = b.shopId AND c.companyId = " + eus_cid + " AND c.isDelete = 0 AND c.shopState = 1 AND c.isUsing = 1 AND b.isDelete = 0 AND DATE_FORMAT(b.createDate, '%Y%m%d') >= " + eus_olddate + " AND DATE_FORMAT(b.createDate, '%Y%m%d') <= " + eus_newdate + " ) t WHERE t.accountId = a.accountId AND a.isDelete = 0 GROUP BY t.newdata"
            eus_sql_sum = "SELECT COUNT(1) AS num FROM y_info_account a, (SELECT b.accountId FROM s_shop c, c_account_shop b WHERE c.shopId = b.shopId AND c.companyId = " + eus_cid + " AND c.isDelete = 0 AND c.shopState = 1 AND c.isUsing = 1 AND b.isDelete = 0 AND DATE_FORMAT(b.createDate, '%Y%m%d') <= " + eus_beforedate + " ) t WHERE t.accountId = a.accountId AND a.isDelete = 0"
            eus_cursor = self.mysql_con()
            eus_cursor.execute(eus_sql_inc)
            eus_inc_data = eus_cursor.fetchall()
            eus_cursor.execute(eus_sql_sum)
            eus_sum_data = eus_cursor.fetchall()[0][0]
            eus_all_list = self.getBetweenDM(eus_time[eus_id][0], eus_time[eus_id][1], eus_id)
            eus_and_json = {}
            eus_num = 0
            eus_json_num = 0
            eus_top_num = eus_all_list[0]
            eus_end_num = 0
            for eus_i in eus_inc_data:
                if eus_num == 0:
                    # First populated bucket: base total + its increment.
                    eus_and_json[int(eus_i[0])] = eus_sum_data + eus_i[1]
                else:
                    eus_and_json[int(eus_i[0])] = eus_and_json[eus_json_num] + eus_i[1]
                try:
                    # Remember the running total just before the bucket
                    # labelled 1 (a month/year boundary) so empty buckets at
                    # the start of the next span can be backfilled below.
                    if eus_all_list[eus_all_list.index(int(eus_i[0])) + 1] == 1:
                        eus_end_num = eus_and_json[int(eus_i[0])]
                except (ValueError, IndexError):
                    pass
                eus_all_list.pop(eus_all_list.index(int(eus_i[0])))
                eus_json_num = int(eus_i[0])
                eus_num += 1
            # Backfill buckets that saw no registrations.
            if len(eus_all_list) > 0:
                for eus_n in eus_all_list:
                    if eus_top_num == eus_n:
                        eus_and_json[eus_n] = eus_sum_data
                    elif eus_n == 1:
                        eus_and_json[eus_n] = eus_end_num
                    else:
                        eus_and_json[eus_n] = eus_and_json[eus_n - 1]
            # BUGFIX: the result was cached under eus_cache_data (always None
            # on this branch) instead of the computed key, so it could never
            # be found again and was recomputed on every call.
            cache.set(eus_cache_name, eus_and_json, timeout=28800)

            if self.check_Thread("all_edun"):
                eus_t = threading.Thread(target=self.edun_usersum_all, args=(eus_companyid, eus_all_companyid, eus_serial,), name="all_edun")
                eus_t.start()
        else:
            print('%s 数据已经缓存' % eus_cache_name)

    def edun_usersum_all(self, eua_companyid, eua_all_companyid, eua_serial):
        """Background refresh: recompute the cumulative user series for every
        company in `eua_all_companyid` and every period type.
        """
        eua_cursor = self.mysql_con()
        eua_time = self.get_datetime()
        for eua_i in range(len(self.date_type)):
            for eua_k, eua_v in eua_all_companyid.items():
                eua_all_list = self.getBetweenDM(eua_time[eua_i][0], eua_time[eua_i][1], eua_i)
                eua_cache_name = self.date_type[eua_i] + '_' + str(eua_v) + '_' + eua_serial + '_' + 'sql'
                # BUGFIX: cache.ttl was called with the bucket list instead of
                # the cache key string, so the freshness check never matched
                # an existing entry.
                if cache.ttl(eua_cache_name) < 26000:
                    eua_olddate = eua_time[eua_i][0].strftime("%Y%m%d")
                    eua_newdate = (eua_time[eua_i][1] + timedelta(days=-1)).strftime("%Y%m%d")
                    eua_beforedate = (eua_time[eua_i][0] + timedelta(days=-1)).strftime("%Y%m%d")
                    # SECURITY: int() coercion before SQL interpolation.
                    eua_cid = str(int(eua_v))
                    eua_sql_inc = "SELECT RIGHT (t.newdata,2), COUNT(1) AS num FROM y_info_account a, (SELECT DATE_FORMAT(b.createDate, '%" + self.date_tag[eua_i] + "') AS newdata, b.accountId FROM s_shop c, c_account_shop b WHERE c.shopId = b.shopId AND c.companyId = " + eua_cid + " AND c.isDelete = 0 AND c.shopState = 1 AND c.isUsing = 1 AND b.isDelete = 0 AND DATE_FORMAT(b.createDate, '%Y%m%d') >= " + eua_olddate + " AND DATE_FORMAT(b.createDate, '%Y%m%d') <= " + eua_newdate + " ) t WHERE t.accountId = a.accountId AND a.isDelete = 0 GROUP BY t.newdata"
                    eua_sql_sum = "SELECT COUNT(1) AS num FROM y_info_account a, (SELECT b.accountId FROM s_shop c, c_account_shop b WHERE c.shopId = b.shopId AND c.companyId = " + eua_cid + " AND c.isDelete = 0 AND c.shopState = 1 AND c.isUsing = 1 AND b.isDelete = 0 AND DATE_FORMAT(b.createDate, '%Y%m%d') <= " + eua_beforedate + " ) t WHERE t.accountId = a.accountId AND a.isDelete = 0"
                    eua_cursor.execute(eua_sql_inc)
                    eua_inc_data = eua_cursor.fetchall()
                    eua_cursor.execute(eua_sql_sum)
                    eua_sum_data = eua_cursor.fetchall()[0][0]
                    eua_and_json = {}
                    eua_num = 0
                    eua_json_num = 0
                    eua_top_num = eua_all_list[0]
                    eua_end_num = 0
                    for eua_x in eua_inc_data:
                        if eua_num == 0:
                            # First populated bucket: base total + increment.
                            eua_and_json[int(eua_x[0])] = eua_sum_data + eua_x[1]
                        else:
                            eua_and_json[int(eua_x[0])] = eua_and_json[eua_json_num] + eua_x[1]
                        try:
                            # Running total just before bucket 1, used to
                            # backfill the boundary gap below.
                            if eua_all_list[eua_all_list.index(int(eua_x[0])) + 1] == 1:
                                eua_end_num = eua_and_json[int(eua_x[0])]
                        except (ValueError, IndexError):
                            pass
                        eua_all_list.pop(eua_all_list.index(int(eua_x[0])))
                        eua_json_num = int(eua_x[0])
                        eua_num += 1

                    # Backfill buckets that saw no registrations.
                    if len(eua_all_list) > 0:
                        for eua_n in eua_all_list:
                            if eua_n == eua_top_num:  # first bucket was empty
                                eua_and_json[eua_n] = eua_sum_data
                            elif eua_n == 1:  # first day/month of a span
                                eua_and_json[eua_n] = eua_end_num
                            else:
                                eua_and_json[eua_n] = eua_and_json[eua_n - 1]
                    try:
                        cache.set(eua_cache_name, eua_and_json, timeout=28800)
                    except Exception:
                        print(self.date_type, eua_i, eua_v, eua_serial)
        eua_cursor.close()








