# -*- coding: utf-8 -*- 
"""
@Author : Chan ZiWen
@Date : 2022/7/19 10:21
File Description:

"""
import json
import sys
import time
from concurrent.futures import ThreadPoolExecutor, as_completed
from datetime import datetime, timedelta
from functools import wraps
from threading import Thread

import numpy as np
import pandas as pd
import psutil
import requests
from flask import jsonify, request, Blueprint, current_app
from pyhive import hive

from common.feishu import FeishuMsgHandler

recogMobile = Blueprint('recogMobile', __name__)

# Shared parameters for the away/go-home pattern matching.
# The day is split by these clock times into an "away" window (04:00-11:00)
# and a "home" window (14:00-23:59). Note the leading space: each entry is
# appended directly to a "YYYY-MM-DD" date string before strptime.
timeRanges = [' 04:00', ' 11:00', ' 14:00', ' 23:59']
# Matching template: a run of `a` online minutes adjacent to `b` offline
# minutes (window length c = a + b), compared element-wise in awayGoHome.
a, b = 20, 30
c = a + b
# presumably "online then offline" = device left home — confirm with awayGoHome
filters_before = [1] * a + [0] * b
filters_before = np.array(filters_before)
# presumably "offline then online" = device came home — confirm with awayGoHome
filters_after = [0] * b + [1] * a
filters_after = np.array(filters_after)
interval_15 = 1296000  # 15 days in seconds; not referenced in this file chunk — TODO confirm

# Remembers which row ranges have already been requested, per date:
# {date: ['start,end', 'start,end', ...]} (cleared when a new date arrives)
dict_requests = {}


def asyncf(f):
    """Decorator: run *f* fire-and-forget in a background thread.

    Fixes vs. the original:
      * ``functools.wraps`` preserves the wrapped function's name/docstring
        (the original wrapper masked them, confusing debugging and logs).
      * The started ``Thread`` is returned so callers *may* join it; the
        original discarded the handle (callers that ignore the return value
        are unaffected, so this is backward compatible).

    :param f: callable to execute asynchronously.
    :return: wrapper that starts ``f(*args, **kwargs)`` on a new thread and
        returns the ``Thread`` object.
    """
    @wraps(f)
    def wrapper(*args, **kwargs):
        thr = Thread(target=f, args=args, kwargs=kwargs)
        thr.start()
        return thr

    return wrapper


def _round_minute(ts):
    """Round an epoch-second timestamp to the nearest minute boundary."""
    second = datetime.fromtimestamp(ts).second
    return ts + (60 - second) if second >= 30 else ts - second


def awayGoHome(log_time, baseInfo):
    """Return True when a device's LAN-presence log matches both a
    "left home" pattern and a "came home" pattern on the same day.

    Always-online devices (laptops/desktops used for working from home) are
    filtered out by requiring, inside each window, a length-``c`` run that
    matches the module templates with at most 2 mismatched minutes:

    * away window (04:00-11:00): matched against ``filters_before``
    * home window (14:00-23:59): matched against ``filters_after``

    :param log_time: iterable of millisecond epoch timestamps (str or int);
        each marks a moment the device was seen on the LAN.
    :param baseInfo: ``[awayHome_start, awayHome_end, goHome_start,
        goHome_end, df_awayHome, df_goHome]`` — window bounds in epoch
        seconds plus minute-indexed DataFrames whose 'isOnline' column is
        all zeros.
    :return: bool — True only when both patterns are found.
    """
    awayHome_start, awayHome_end, goHome_start, goHome_end, df_awayHome, df_goHome = baseInfo

    # BUG FIX: baseInfo is shared by every task in batch_analysis's thread
    # pool; the original wrote 'isOnline' marks into the shared DataFrames
    # and never reset them, so writes raced across threads and marks from
    # previously analysed devices leaked into later ones. Work on private
    # copies so each call starts from an all-zero presence table.
    df_awayHome = df_awayHome.copy()
    df_goHome = df_goHome.copy()

    for time_i in log_time:
        time_i = int(time_i) // 1000  # milliseconds -> seconds

        if awayHome_start <= time_i <= awayHome_end:
            # as in the original, a label rounded up past the grid is simply
            # appended to the frame by .loc
            df_awayHome.loc[_round_minute(time_i), 'isOnline'] = 1
        elif goHome_start <= time_i <= goHome_end:
            df_goHome.loc[_round_minute(time_i), 'isOnline'] = 1

    res_flag = 0
    awayHome_values = df_awayHome['isOnline'].values
    goHome_values = df_goHome['isOnline'].values

    # Slide a length-c window over the away-window series and count
    # mismatches against the "online then offline" template; <=2 wrong
    # minutes counts as a hit. (The loop itself only runs when the series
    # is longer than 2*c; the >= a guard is kept from the original.)
    if len(awayHome_values) >= a:
        for i in range(c, len(awayHome_values) - c):
            window = awayHome_values[(i - c):i]
            if c - np.sum(window == filters_before) <= 2:
                res_flag += 1
                break

    # Same scan over the home-window series with the mirrored template.
    if len(goHome_values) >= a:
        for i in range(c, len(goHome_values) - c):
            window = goHome_values[(i - c):i]
            if c - np.sum(window == filters_after) <= 2:
                res_flag += 1
                break

    # Mobile device only when both the leaving and the returning pattern hit.
    return res_flag == 2


def Connect(configs):
    """Open a Hive connection (LDAP auth) from a config mapping.

    :param configs: dict with 'host', 'port', 'user', 'password', 'database'.
    :return: an open ``pyhive.hive.Connection`` (caller is responsible for
        closing it).
    """
    params = {
        'host': configs['host'],
        'port': configs['port'],
        'username': configs['user'],
        'password': configs['password'],
        'database': configs['database'],
    }
    return hive.Connection(auth='LDAP', **params)


def Batch_select(date: str = None, a: int = None, b: int = None, config_hive: dict = None):
    """Fetch the (active_id, mac, collect_set(log_time)) rows for one day.

    Rows are numbered with ``row_number() over()`` and only those whose
    number falls between ``a`` and ``b`` (inclusive) are returned, so large
    days can be pulled in slices. Average runtime observed: ~548s (433s at
    night).

    NOTE: date/a/b are interpolated straight into the SQL; `date` is
    validated upstream by ``validate``.

    :return: list of fetched row tuples.
    """
    conn = Connect(config_hive)
    cur = conn.cursor()
    t0 = time.time()

    sql = (
        f'select * from (select active_id, mac, collect_set(log_time), row_number() over() as rowsa '
        f'from {config_hive["table"]} where `day` = "{date}" group by tv_mac, active_id, mac) as t '
        f'where t.rowsa between {a} and {b}'
    )
    print(sql)
    cur.execute(sql)
    rows = cur.fetchall()

    cur.close()
    conn.close()

    print(f"Duration(get source data) {a} - {b} time: ", time.time() - t0)
    print(f"All device's data length: {len(rows)} ")
    return rows


def single_fn(MD_id_mac_time_dict, active_id: str = None, mac: str = None, log_time: str = None, date: str = None,
              baseInfo: list = None):
    """Analyse one (active_id, mac) pair and record it when it behaves like
    a mobile device (leaves and returns home on `date`).

    :param MD_id_mac_time_dict: shared result dict, keyed by active_id; a
        recognized device adds/extends an entry like
        {"activeId": ..., "analysisDatetime": date, "mobileMac": "m1,m2"}.
        NOTE(review): this dict is mutated from many threads without a lock
        (check-then-set on the same key could lose a mac) — confirm.
    :param log_time: collect_set(log_time) serialized by Hive as
        "[t1,t2,...]" (millisecond timestamps).
    :param baseInfo: precomputed window bounds + DataFrames for awayGoHome.
    :return: None
    """
    try:
        # strip the surrounding brackets and split into individual timestamps
        log_time = log_time.strip(']').strip('[').split(',')
    except AttributeError as err:
        # BUG FIX: the original only printed the error and then still fed the
        # raw (non-string) value to awayGoHome; skip this row instead.
        # (It also bound the exception as `a`, shadowing the module constant.)
        print(f'{err}')
        return

    if awayGoHome(log_time, baseInfo):
        if active_id not in MD_id_mac_time_dict:
            MD_id_mac_time_dict[active_id] = {
                    "activeId": active_id,
                    "analysisDatetime": date,
                    "mobileMac": mac,
            }
        else:
            # same device seen with another mac: append it comma-separated
            MD_id_mac_time_dict[active_id]["mobileMac"] += (',' + mac)


def batch_analysis(date: str = None, a: int = None, b: int = None, config_hive: dict = None, wCKUrl: str = None):
    """ multi-thread processing
    procedure:
        1, creating threads
        2, ordering data by log_time
        3, calling single mac analysis function

    Pulls rows `a`..`b` of (active_id, mac, log_times) for `date` from Hive,
    fans the per-device analysis out over a 40-worker thread pool, then
    posts the recognized mobile devices to ClickHouse via `wCKUrl`.
    """
    MD_id_mac_time_dict = {}
    res_list = Batch_select(date, a, b, config_hive)

    # Shared parameters: bounds of the "away" (04:00-11:00) and "home"
    # (14:00-23:59) windows of `date`, as 10-digit epoch seconds.
    awayHome_start = int(time.mktime(time.strptime(date + timeRanges[0], "%Y-%m-%d %H:%M")))    # 10-digit (second) timestamp
    awayHome_end = int(time.mktime(time.strptime(date + timeRanges[1], "%Y-%m-%d %H:%M")))
    goHome_start = int(time.mktime(time.strptime(date + timeRanges[2], "%Y-%m-%d %H:%M")))
    goHome_end = int(time.mktime(time.strptime(date + timeRanges[3], "%Y-%m-%d %H:%M")))
    # one index entry per minute inside each window
    awayHomeRange = [i + awayHome_start for i in range(0, (awayHome_end - awayHome_start), 60)]
    goHomeRange = [i + goHome_start for i in range(0, (goHome_end - goHome_start), 60)]
    # list to Dataframe: minute-indexed presence tables, initially all offline
    df_awayHome = pd.DataFrame(
        {'isOnline': [0] * ((awayHome_end - awayHome_start) // 60)},
        index=awayHomeRange
    )
    df_goHome = pd.DataFrame(
        {'isOnline': [0] * ((goHome_end - goHome_start) // 60)},
        index=goHomeRange
    )

    # NOTE(review): baseinfo (including the two DataFrames) and
    # MD_id_mac_time_dict are shared by all 40 worker threads without any
    # locking, and the workers mutate them — this looks racy; confirm.
    baseinfo = [awayHome_start, awayHome_end, goHome_start, goHome_end, df_awayHome, df_goHome]

    executor_r = ThreadPoolExecutor(40)  # thread-pool size
    print('Created a threadPool which max_worker is 40.')
    features = []
    for res in res_list:
        active_id, mac, log_time = res[0], res[1], res[2]
        if active_id != "":
            features.append(executor_r.submit(single_fn, MD_id_mac_time_dict, active_id, mac, log_time, date, baseinfo))

    print('Waiting all threads shutdown')
    # single_fn always returns None, so the Feishu branch below looks
    # unreachable; fi.result() does, however, re-raise any exception thrown
    # inside a worker — presumably that is the intent. TODO confirm.
    for fi in as_completed(features):
        if fi.result() is not None:
            FeishuMsgHandler().sendFeiShu(f"{fi}")
    del executor_r, features

    # save to clickhouse
    MD_id_mac_time = list(MD_id_mac_time_dict.values())
    print(f"begin request clickhouse api")
    save2ck(date, MD_id_mac_time, wCKUrl)
    print(f"{date}(mobile recognized)The length of data saving to the Clickhouse : {len(MD_id_mac_time)} ")
    del MD_id_mac_time_dict


def post(date, datas, url, headers):
    """POST `datas` as JSON to `url` and check the service's response code.

    :raises RuntimeError: when the service answers with a code other
        than 1000 (the message carries `date` and the full response).
    """
    body = requests.post(url, data=json.dumps(datas), headers=headers).text
    response = json.loads(body)
    if response['code'] == 1000:
        return
    raise RuntimeError(f" {date}  ({response})")


def save2ck(date, data, wCKUrl):
    """Persist the recognized devices to ClickHouse, at most 10000 per POST.

    Does nothing when `data` is empty; otherwise posts consecutive slices
    of up to 10000 items (identical request sequence to the original
    two-branch implementation).
    """
    if not data:
        return
    headers = {"Content-Type": "application/json"}
    batch_size = 10000
    for offset in range(0, len(data), batch_size):
        post(date, data[offset:offset + batch_size], wCKUrl, headers)


@asyncf
def run(date, startRow, endRow, config_hive, wCKUrl):
    """Run one day's analysis end-to-end; executes on a background thread
    because of @asyncf, so the HTTP handler returns immediately."""
    print(f"Begin of the ({date})")
    t0 = time.time()
    batch_analysis(date, startRow, endRow, config_hive, wCKUrl)
    print(f"Total duration(read & analysis) time: {(time.time() - t0) / 60}(m) ")


def validate(date_text):
    """Check that `date_text` parses as YYYY-MM-DD.

    :return: 0 when valid, 1 when invalid (callers treat truthy as error).
    """
    try:
        datetime.strptime(date_text, '%Y-%m-%d')
    except ValueError:
        return 1
    return 0


@recogMobile.route('/res', methods=['GET'])
def main():
    """Trigger one day's mobile-device recognition asynchronously.

    Query params:
        date: analysis day, YYYY-MM-DD
        s, e: inclusive row-number range to pull from Hive

    e.g.
    http://172.20.146.61:8688/analyze/recogMobile/res?date=2022-08-11&s=1&e=50000
    http://172.20.148.89:8688/analyze/recogMobile/res?date=2022-08-21&s=50001&e=100000
    http://172.20.148.89:8688/analyze/recogMobile/res?date=2022-08-21&s=100001&e=130000

    Returns 404 with a status message on invalid input or when the same
    (date, s, e) combination was already requested.
    """
    config_hive = {
        'host': current_app.config['HIVE_HOST'],
        'port': current_app.config['HIVE_PORT'],
        'user': current_app.config['HIVE_USER'],
        'password': current_app.config['HIVE_PASSWORD'],
        'database': "coocaa_rds",
        'table': "rds_zdfw_sc_router_device"
    }
    wCKUrl = current_app.config['BATCH_SAVE_MOBILE_MAC']

    register_dict = request.args
    date = register_dict['date']
    if validate(date):
        return jsonify({"status": "Incorrect data format, should be YYYY-MM-DD"}), 404
    # BUG FIX: a missing or non-numeric 's'/'e' used to raise an uncaught
    # KeyError/ValueError and surface as a 500; answer like the other
    # validation failures instead.
    try:
        startRow = int(register_dict['s'])
        endRow = int(register_dict['e'])
    except (KeyError, ValueError):
        return jsonify({"status": "Incorrect row range, 's' and 'e' must be integers"}), 404

    # Only the most recent date's ranges are remembered: a new date wipes
    # the history, so duplicates are only detected within one date.
    if date not in dict_requests:
        dict_requests.clear()
        dict_requests[date] = [f"{startRow},{endRow}"]
    else:
        tmp = f"{startRow},{endRow}"
        if tmp in dict_requests[date]:
            return jsonify({"status": "Warning: has been requested"}), 404
        else:
            dict_requests[date].append(tmp)
    # fire-and-forget: run is wrapped by @asyncf and executes on a thread
    run(date, startRow, endRow, config_hive, wCKUrl)
    return jsonify({"status": "success"}), 200


@recogMobile.route('/counts', methods=['GET'])
def counts():
    """Report how many distinct (active_id, mac) pairs Hive holds for a day.

    e.g. http://172.20.148.89:8688/analyze/recogMobile/counts?date=2022-08-22

    Sends a Feishu alert when the requested day has no data at all.
    """
    # analysis from hive
    config_hive = {
        'host': current_app.config['HIVE_HOST'],
        'port': current_app.config['HIVE_PORT'],
        'user': current_app.config['HIVE_USER'],
        'password': current_app.config['HIVE_PASSWORD'],
        'database': "coocaa_rds",
        'table': "rds_zdfw_sc_router_device"
    }

    register_dict = request.args
    date = register_dict['date']
    if validate(date):
        return jsonify({"status": "Incorrect data format, should be YYYY-MM-DD"}), 404

    con = Connect(config_hive)
    cursor = con.cursor()
    sql = f'select count(distinct active_id, mac) from {config_hive["table"]} where `day` = "{date}"'
    print(sql)
    try:
        cursor.execute(sql)
        result = cursor.fetchall()
    finally:
        # BUG FIX: close the cursor/connection even when the query raises
        # (the original leaked both on error).
        cursor.close()
        con.close()

    # A COUNT query comes back as [(count,)]; unwrap defensively.
    # (Replaces the original bare `except:` with explicit checks.)
    count = result[0][0] if result and len(result[0]) > 0 else 0

    if count == 0:
        # BUG FIX: sendFeiShu was called on the class itself, so `date` was
        # bound as `self`, and the "{0}" placeholder was never formatted;
        # instantiate and format like the call in batch_analysis.
        FeishuMsgHandler().sendFeiShu(f"移动mac识别分析，hive缺少{date}的数据，赶紧排查")
    return jsonify({"Counts": count}), 200



