# -*- coding: utf-8 -*-
"""
@Author : Chan ZiWen
@Date : 2022/6/30 15:47
File Description:
    For each day, over the full user population, find devices that leave/return home:
        daytime (04:00-12:00)
        night (12:00-24:00)
"""
import os
import time
import argparse
import yaml
import json
import requests
import vthread
from datetime import datetime, timedelta
from pyhive import hive
from utils import awayGoHome

# Configuration is loaded from hometime.yaml next to this script.
work_dir = os.path.dirname(__file__)
args = yaml.safe_load(open(os.path.join(work_dir, "hometime.yaml")))

timeRanges = args['timeRanges']        # four "HH:MM"-style boundaries (away/return windows)
config = args['config_hive']           # Hive connection settings + source table name
interval_15 = args['interval_15']      # 15-day interval setting (used by other modules)

# Thread pool for parallel per-device analysis.
pool1 = vthread.pool(40, gqueue=1)  # 40 worker threads, queue group 1

# Shared result store: maps active_id -> record dict (mutated by pool threads).
MD_id_mac_time_dict = {}
# Accumulator for the final 15-day time data.
final_time_info = []


def Connect(configs):
    """Open and return a Hive connection (LDAP auth) from a config mapping.

    :param configs: mapping with 'host', 'port', 'user', 'password', 'database'.
    :return: a live ``hive.Connection``; caller is responsible for closing it.
    """
    return hive.Connection(
        host=configs['host'],
        port=configs['port'],
        auth='LDAP',
        username=configs['user'],
        password=configs['password'],
        database=configs['database'],
    )


def Batch_select(date: str = None):
    """Fetch all (active_id, mac, collect_set(log_time)) rows for one day.

    Groups the day's raw rows by (active_id, mac) so each device pair carries
    the full set of its log timestamps as a single string column.

    :param date: day partition to query, formatted "YYYY-MM-DD".
    :return: list of (active_id, mac, log_time_set_str) tuples from Hive.
    """
    con = Connect(config)
    cursor = con.cursor()
    start = time.time()
    try:
        # NOTE: this query averages ~548s during the day, ~433s at night.
        cursor.execute(
            f'select active_id, mac, collect_set(log_time) '
            f'from {config["table"]} '
            f'where `day` = "{date}" '
            f'group by active_id, mac')
        y = cursor.fetchall()
    finally:
        # Release the cursor and connection even if the query fails;
        # the original leaked both on any exception.
        cursor.close()
        con.close()
    print("Duration(get all source data) time: ", time.time() - start)
    print(f"All device's data length: {len(y)} ")
    return y


@pool1
def single_fn(active_id: str = None, mac: str = None, log_time: str = None, date: str = None):
    """Analyse one (active_id, mac) pair's day of log timestamps.

    Runs on the vthread pool. When ``awayGoHome`` reports a hit, merges the
    pair into the shared ``MD_id_mac_time_dict``, keyed by active_id;
    additional MACs for the same active_id are appended comma-separated.

    :param active_id: user/home identifier.
    :param mac: mobile device MAC address.
    :param log_time: collect_set result, a string like "[t1,t2,...]"
                     (tolerated if already a list).
    :param date: analysis day, "YYYY-MM-DD".
    """
    # Hive serialises collect_set as a bracketed string; parse it.
    if isinstance(log_time, str):
        log_time = log_time.strip(']').strip('[').split(',')
    elif log_time is None:
        # Original printed the AttributeError and then crashed on
        # sorted(None); skip the pair instead.
        return
    log_time = sorted(log_time)
    ans = awayGoHome(date, log_time)

    if ans:
        record = {
            "activeId": active_id,
            "analysisDatetime": date,
            "mobileMac": mac,
        }
        # setdefault is atomic in CPython, closing the check-then-insert
        # race between the 40 pool threads that the original if/else had.
        # (The += below is still best-effort for concurrent same-id hits,
        # matching original behaviour.)
        existing = MD_id_mac_time_dict.setdefault(active_id, record)
        if existing is not record:
            existing["mobileMac"] += (',' + mac)


def batch_analysis(date: str = None):
    """Fan one day's grouped rows out to the analysis thread pool.

    Fetches every (active_id, mac, log_times) row for *date*, skips rows
    with an empty active_id, and submits the rest to ``single_fn`` (which
    executes on the vthread pool).
    """
    for active_id, mac, log_time in Batch_select(date):
        if active_id != "":
            single_fn(active_id, mac, log_time, date)


def post(date, datas, url, headers):
    """POST *datas* as JSON to *url*; raise RuntimeError unless code == 1000.

    :param date: analysis day, included in the error message on failure.
    :param datas: JSON-serialisable payload (list of record dicts).
    :raises RuntimeError: when the service replies with a non-1000 code.
    """
    payload = json.dumps(datas)
    reply = json.loads(requests.post(url, data=payload, headers=headers).text)
    if reply['code'] != 1000:
        raise RuntimeError(f" {date}  ({reply})")


def save2ck(date, data):
    """Upload analysis records to the storage service in batches of ≤10000.

    Records look like:
    {"activeId":"34","mac":"3243","tvMac":"23423","oneDayTime":15013452,
     "final15dTime":"12:00","analysisDatetime":"2022-02-12"}

    :param date: analysis day (only used for error reporting in ``post``).
    :param data: list of record dicts; an empty list sends nothing.
    """
    url = args["url"]
    headers = {"Content-Type": "application/json"}
    batch_size = 10000
    # Stride slicing replaces the original quotient/remainder bookkeeping,
    # which relied on the loop index leaking out of its for-loop.
    for i in range(0, len(data), batch_size):
        post(date, data[i:i + batch_size], url, headers)


def main(date):
    """Run the full pipeline for one day: analyse, wait for the pool, upload, reset.

    Observed timings: reading hive data takes 430-633s for ~51000 rows.
    """
    began = time.time()
    batch_analysis(date)
    vthread.pool.waitall()
    print(f"Total duration(read & analysis) time: {(time.time() - began) / 60}(m) ")
    save2ck(date, list(MD_id_mac_time_dict.values()))
    # Reset shared state so a subsequent day starts clean.
    MD_id_mac_time_dict.clear()


def Parsers():
    """Parse the command line: optional -d/--date ("YYYY-MM-DD"), default None."""
    arg_parser = argparse.ArgumentParser("For the mobile device analysis the parser")
    arg_parser.add_argument('-d', '--date', type=str, default=None)
    return arg_parser.parse_args()


if __name__ == '__main__':
    parsers = Parsers()
    date = parsers.date
    if date is None:
        # Default to yesterday, formatted as a "YYYY-MM-DD" string:
        # downstream code concatenates `date` with time strings
        # (see Batch_select), which raises TypeError on a date object.
        date = (datetime.now() - timedelta(days=1)).strftime("%Y-%m-%d")
    print(f"Begin of the ({date})")
    main(date)



