# -*- coding: utf-8 -*- 
"""
@Author : Chan ZiWen
@Date : 2022/7/13 16:56
File Description:

"""
import time
import sys
import json
import psutil
import requests
import vthread
import numpy as np
import pandas as pd

from pyhive import hive
from clickhouse_driver import Client
from datetime import datetime, timedelta


# Hive connection settings (source of router device-log rows)
# NOTE(review): credentials are hardcoded in source control -- consider
# loading them from environment variables or a secrets store.
config_hive = {
    'host': "192.168.1.88",
    'port': 10000,
    'user': "lifeservice",
    'password': "HnJs8OiOQX",
    'database': "coocaa_rds",
    'table': "rds_zdfw_sc_router_device"}


# ClickHouse connection settings (analysis-result sink)
config_ck = {
    'host': "172.20.151.175",
    'url': "http://172.20.151.175:8123",
    'port': 9000,
    'user': "life_assistant_data",
    'password': "qZmtqgmqq",
    'database': "proactive_service_data",
    'table': "life_time"}

# ClickHouse batch-write HTTP API endpoint
wCKUrl = "http://beta-life-assistant.skysrt.com/conf/mobile/mac/saveBatch"


# " HH:MM" boundaries: morning (leave-home) and evening (return-home) windows
timeRanges = [' 04:00', ' 11:00', ' 14:00', ' 23:59']
interval_15 = 1296000    # 1296000 s == 15 days; not referenced in this file -- presumably used elsewhere

# Size the worker-thread pool from available memory
pc_mem = psutil.virtual_memory()
# NOTE(review): 1024**2 is a MiB factor despite the "gb" name, so thread_n is
# available-MiB // 50 -- confirm the intended sizing.
div_gb_factor = (1024**2)
thread_n = int((pc_mem.available / div_gb_factor) // 50)
# Thread pool, group 1, used to parallelise per-device analysis; pool size is
# thread_n (the "40 threads" in the original comment was stale).
pool1 = vthread.pool(thread_n, gqueue=1)  # worker pool, group name 1

# active_id -> result dict, accumulated by single_fn across worker threads
MD_id_mac_time_dict = {}      # List[dict]
# NOTE(review): described as "final 15-day time" data but never written in
# this file -- verify against the rest of the project.
final_time_info = []


def awayGoHome(date: str, log_time: list, timeRanges: list = None, filters_before: list = None, filters_after: list = None,
               Home_time: list = None, HomeRange: list = None, threshold: list = None):
    """Detect both a leave-home and a return-home pattern for one device on one day.

    The day is split into a morning window (default 04:00-11:00) expected to
    show online-then-offline ("left home") and an evening window (default
    14:00-23:59) expected to show offline-then-online ("came home").  A
    minute-resolution online/offline series is built per window from the
    observed timestamps and slid against a binary step template; a window
    matches when some c-minute slice agrees with the template to within
    2 minutes.  Per the original notes, restricting to these two windows
    avoids classifying always-online devices (laptops/PCs used at home)
    as mobile devices.

    :param date: analysis day "YYYY-MM-DD"; only used when Home_time is None.
    :param log_time: millisecond epoch timestamps (str or int) at which the
                     device was seen online on the LAN, shape (n,).
    :param timeRanges: four " HH:MM" strings bounding the two windows.
    :param filters_before: leave-home template; defaults to a ones + b zeros.
    :param filters_after: return-home template; defaults to b zeros + a ones.
    :param Home_time: precomputed [away_start, away_end, go_start, go_end]
                      as 10-digit epoch seconds (bypasses date parsing).
    :param HomeRange: precomputed minute grids [awayHomeRange, goHomeRange].
    :param threshold: [a, b] template segment lengths; window length c = a + b.
    :return: True iff both windows contain a matching transition.
    """
    if not timeRanges:
        timeRanges = [' 04:00', ' 11:00', ' 14:00', ' 23:59']

    if not threshold:
        a, b, c = 20, 30, 50
    else:
        a, b = threshold[0], threshold[1]
        c = a + b

    # `is None` rather than truthiness: a caller-supplied numpy array is
    # ambiguous in boolean context and would raise ValueError here.
    if filters_before is None:
        filters_before = np.array([1] * a + [0] * b)
    else:
        filters_before = np.asarray(filters_before)

    if filters_after is None:
        filters_after = np.array([0] * b + [1] * a)
    else:
        filters_after = np.asarray(filters_after)

    if not Home_time:
        # 10-digit (second) epoch bounds derived from the local-time date string
        awayHome_start = int(time.mktime(time.strptime(date + timeRanges[0], "%Y-%m-%d %H:%M")))
        awayHome_end = int(time.mktime(time.strptime(date + timeRanges[1], "%Y-%m-%d %H:%M")))
        goHome_start = int(time.mktime(time.strptime(date + timeRanges[2], "%Y-%m-%d %H:%M")))
        goHome_end = int(time.mktime(time.strptime(date + timeRanges[3], "%Y-%m-%d %H:%M")))
    else:
        awayHome_start, awayHome_end, goHome_start, goHome_end = Home_time[:4]

    if not HomeRange:
        awayHomeRange = [i + awayHome_start for i in range(0, (awayHome_end - awayHome_start), 60)]
        goHomeRange = [i + goHome_start for i in range(0, (goHome_end - goHome_start), 60)]
    else:
        awayHomeRange = HomeRange[0]
        goHomeRange = HomeRange[1]

    # One row per minute, all offline initially.  Sizing from len(range)
    # instead of (end - start)//60 keeps the column and index lengths
    # consistent even when a window is not an exact multiple of 60 seconds
    # (the original computation could mismatch and raise).
    df_awayHome = pd.DataFrame({'isOnline': [0] * len(awayHomeRange)}, index=awayHomeRange)
    df_goHome = pd.DataFrame({'isOnline': [0] * len(goHomeRange)}, index=goHomeRange)

    away_nums, go_nums = 0, 0    # observed online minutes per window

    for time_i in log_time:
        time_i = int(time_i) // 1000            # ms -> s
        second = datetime.fromtimestamp(time_i).second
        # round to the nearest whole minute so it lands on the index grid
        if second >= 30:
            time_i_ = time_i + (60 - second)
        else:
            time_i_ = time_i - second
        if awayHome_start <= time_i <= awayHome_end:
            df_awayHome.loc[time_i_, 'isOnline'] = 1
            away_nums += 1
        elif goHome_start <= time_i <= goHome_end:
            df_goHome.loc[time_i_, 'isOnline'] = 1
            go_nums += 1

    res_flag = 0    # number of windows (0..2) with a matching transition

    awayHome_values = df_awayHome['isOnline'].values
    goHome_values = df_goHome['isOnline'].values

    # Slide a c-minute window over the morning series; a match means the
    # overlap with the "online then offline" template is within 2 of a.
    if len(awayHome_values) >= a:
        for i in range(c, len(awayHome_values) - c):
            temp = awayHome_values[(i - c):i]
            if abs(np.logical_and(temp, filters_before).sum() - a) <= 2:
                res_flag += 1
                break

    # Same for the evening series against the "offline then online" template.
    if len(goHome_values) >= a:
        for i in range(c, len(goHome_values) - c):
            temp = goHome_values[(i - c):i]
            if abs(np.logical_and(temp, filters_after).sum() - a) <= 2:
                res_flag += 1
                break

    return res_flag == 2


def Connect(configs):
    """Open an LDAP-authenticated Hive connection from a config dict.

    :param configs: dict with 'host', 'port', 'user', 'password', 'database'.
    :return: an open pyhive ``hive.Connection``.
    """
    params = dict(
        host=configs['host'],
        port=configs['port'],
        auth='LDAP',
        username=configs['user'],
        password=configs['password'],
        database=configs['database'],
    )
    return hive.Connection(**params)


def Batch_select(date: str = None, a: int = None, b: int = None):
    """Fetch one page of grouped (active_id, mac, log_time set) rows from hive.

    Rows are grouped by (active_id, mac) for the given day, numbered with
    row_number(), and only rows a..b (inclusive) of that numbering are
    returned -- the paging used by batch_analysis().

    Original timing note: the query averages ~548 s (433 s at night).

    :param date: day partition "YYYY-MM-DD".
    :param a: first row number of the page (1-based, inclusive).
    :param b: last row number of the page (inclusive).
    :return: list of fetched tuples (active_id, mac, log_time set, row number).
    """
    con = Connect(config_hive)
    cursor = con.cursor()
    start = time.time()

    # (The original computed four awayHome/goHome timestamps here; they were
    # never used by the query, so that dead code has been removed.)
    cursor.execute(
        f'select * from (select active_id, mac, collect_set(log_time), row_number() over() as rowsa '
        f'from {config_hive["table"]} where `day` = "{date}" group by active_id, mac) t '
        f'where t.rowsa between {a} and {b}')

    y = cursor.fetchall()
    cursor.close()
    con.close()
    print(f"Duration(get source data) {a} - {b} time: ", time.time() - start)
    print(f"All device's data length: {len(y)} ")
    return y


@pool1
def single_fn(active_id: str = None, mac: str = None, log_time: str = None, date: str = None):
    """Analyse one (active_id, mac) pair for a leave/return-home pattern.

    Runs on a vthread worker.  On a positive result the pair is merged into
    the module-level MD_id_mac_time_dict keyed by active_id, with all
    matching macs for that id joined by ','.

    :param active_id: device/account id used as the aggregation key.
    :param mac: mobile-device mac address.
    :param log_time: "[ts1,ts2,...]"-style string of millisecond timestamps
                     (a pre-split list is also tolerated).
    :param date: analysis day "YYYY-MM-DD".
    :return: None; results accumulate in MD_id_mac_time_dict.
    """
    try:
        # hive's collect_set() arrives as a "[...]" string; split into items
        log_time = log_time.strip(']').strip('[').split(',')
    except AttributeError as a:
        print(f'{a}')
        # Original fell through and crashed on sorted(None); skip unusable
        # input instead (a list-like value can still proceed below).
        if log_time is None:
            return
    log_time = sorted(log_time)
    ans = awayGoHome(date, log_time)
    del log_time
    if ans:
        # NOTE(review): this read-modify-write on a shared dict is not
        # synchronised; concurrent workers updating the same active_id could
        # race -- confirm whether a lock is needed under vthread.
        if active_id not in MD_id_mac_time_dict:
            MD_id_mac_time_dict[active_id] = {
                    "activeId": active_id,
                    "analysisDatetime": date,
                    "mobileMac": mac,
            }
        else:
            MD_id_mac_time_dict[active_id]["mobileMac"] += (',' + mac)


def batch_analysis(date: str = None, rows: int = 50000):
    """Page through the day's hive rows and analyse them on the thread pool.

    Per page: fetch up to `rows` grouped rows, dispatch each
    (active_id, mac, log_time) to single_fn, wait for the pool to drain,
    then flush the accumulated results to ClickHouse.

    :param date: analysis day "YYYY-MM-DD".
    :param rows: page size for the hive query.
    """
    page = 0
    while True:
        lo = rows * page + 1
        hi = rows * (page + 1)
        batch = Batch_select(date, lo, hi)
        page += 1

        for row in batch:
            active_id, mac, log_time = row[0], row[1], row[2]
            if active_id == "":
                continue
            single_fn(active_id, mac, log_time, date)       # datetime

        # block until every queued worker has finished this page
        vthread.pool.waitall()

        # flush this page's results to ClickHouse and reset the accumulator
        results = list(MD_id_mac_time_dict.values())
        save2ck(date, results)
        print(f"The length of data saving to the Clickhouse : {len(results)} ")
        MD_id_mac_time_dict.clear()

        # a short page means the last page has been processed
        if len(batch) < rows:
            break


def post(date, datas, url, headers):
    """POST a JSON payload to the write API; raise if the API rejects it.

    :param date: analysis day, only used in the error message.
    :param datas: JSON-serialisable payload (list of result dicts).
    :param url: endpoint URL.
    :param headers: HTTP headers (expects a JSON content type).
    :raises RuntimeError: when the API response code is not 1000.
    """
    body = json.dumps(datas)
    reply = json.loads(requests.post(url, data=body, headers=headers).text)
    if reply['code'] != 1000:
        raise RuntimeError(f" {date}  ({reply})")


def save2ck(date, data):
    """Persist analysis results through the ClickHouse write API in batches.

    Payload items look like:
    [
    {"activeId":"34","mac":"3243","tvMac":"23423","oneDayTime":15013452,"final15dTime":"12:00","analysisDatetime":"2022-02-12"},
    {"mode":1,"activeId":"34","mac":"3243","tvMac":"23423","oneDayTime":15013452,"final15dTime":"12:00","analysisDatetime":"2022-02-12"},
    ]

    :param date: analysis day, only used for error reporting in post().
    :param data: list of result dicts; posted in batches of at most 10,000.
    :return: None
    """
    headers = {"Content-Type": "application/json"}
    batch_size = 10000
    # Chunk with range() instead of the original's reliance on the for-loop
    # variable leaking out of its loop for the remainder batch -- same
    # batches, but no fragile cross-scope state.  An empty `data` posts
    # nothing, as before.
    for begin in range(0, len(data), batch_size):
        post(date, data[begin:begin + batch_size], wCKUrl, headers)


def main(date):
    """Run the full daily pipeline: analysis, then ClickHouse table optimize.

    Original timing notes: reading hive data takes ~430-633 s for a page of
    ~51000 rows.

    :param date: analysis day "YYYY-MM-DD".
    """
    started = time.time()
    batch_analysis(date)
    minutes = (time.time() - started) / 60
    print(f"Total duration(read & analysis) time: {minutes}(m) ")
    optimizer()


def Parsers():
    """Pick the analysis date from the command line.

    The last CLI argument containing '-' wins (expected "YYYY-MM-DD");
    with no such argument, yesterday's date is used.

    :return: the chosen date string.
    """
    chosen = str(datetime.now().date() - timedelta(days=1))
    for token in sys.argv[1:]:
        if '-' in token:
            chosen = token.strip(' ')
    return chosen


def optimizer():
    """Ask ClickHouse to OPTIMIZE (merge parts of) the target table."""
    client = Client(
        user=config_ck['user'],
        password=config_ck['password'],
        host=config_ck['host'],
        port=config_ck['port'],
        database=config_ck['database'],
    )
    client.execute(f"optimize table {config_ck['table']}")


# Script entry point: analyse yesterday's date, or a "YYYY-MM-DD" passed on
# the command line.
if __name__ == '__main__':
    date = Parsers()
    print(f"We will set {thread_n} threads in the threading pool.")
    print(f"Begin of the ({date})")
    main(date)