from __future__ import absolute_import
import logging
import json
import traceback
import random
import string
import requests
from requests.cookies import cookiejar_from_dict
from django.utils import timezone
from django.db.models import Q
from conf.conf import data_mgt_apis, anonymized_data_url, scene_data_url
from data_mgt.models import AnonymizedData, SceneData, SceneDataCount
from data_mgt.models import ObsAsyncJobStatus, AsyncJobAnonymizedAdd, AsyncJobSceneCut
from monitor.models import SceneDataMonitor
from common.obs_tools import object_restore_status
from common.tools import scenario_conversion_main_scene
from ploto.celery import app as celery_app


logger = logging.getLogger(__name__)


@celery_app.task
def get_obs_job_status():
    """Poll pending OBS restore jobs and sync restoration status to the data tables.

    Flow:
    1. Fetch every ObsAsyncJobStatus row whose next_run_time has already passed.
    2. Jobs are split by job_data_type: 0 = anonymized data, 1 = scene data.
    3. Anonymized data: if the OBS object reports "restored", collect its job_id;
       otherwise push next_run_time forward by 30 minutes (job_type == 0,
       standard restore) or 5 minutes (expedited restore).
    4. Scene data: every job_id is tracked in job_id_list; when an object is
       restored the id is popped again and only counted as a success if no other
       job seen in this batch still shares the same id. Unrestored jobs are
       rescheduled as in step 3.
    5. Collected anonymized ids: set AnonymizedData.restoration_status = 3 and
       delete the matching ObsAsyncJobStatus rows with job_data_type == 0.
    6. Collected scene ids: set SceneData.restoration_status = 3 and delete the
       matching ObsAsyncJobStatus rows with job_data_type == 1.
    """
    job_list = ObsAsyncJobStatus.objects.filter(next_run_time__lt=timezone.now())
    if not job_list:
        logger.info("OBSAsync not have job list")
        return
    anonymize_success_job_id = []
    scene_success_job_id = []
    job_id_list = []  # scene-data job ids seen in this batch (duplicate tracking)
    for job_info in job_list:
        if job_info.job_data_type == 0:
            # Anonymized-data job.
            restore_status = object_restore_status(job_info.job_bucket, job_info.job_object)
            if restore_status == "restored":
                anonymize_success_job_id.append(job_info.job_id)
                continue
            if job_info.job_type == 0:
                # Standard-speed restore: check again in 30 minutes.
                job_info.next_run_time = timezone.now() + timezone.timedelta(seconds=1800)
            else:
                # Expedited restore: check again in 5 minutes.
                job_info.next_run_time = timezone.now() + timezone.timedelta(seconds=300)
            job_info.save()
        else:
            # Scene-data job.
            job_id_list.append(job_info.job_id)
            restore_status = object_restore_status(job_info.job_bucket, job_info.job_object)
            if restore_status == "restored":
                # Remove the id just pushed; any remaining copy in the list comes
                # from an earlier, still-unrestored job with the same id.
                job_id_list.pop(-1)
                if job_info.job_id not in job_id_list:
                    scene_success_job_id.append(job_info.job_id)
                    continue
            else:
                if job_info.job_type == 0:
                    # Standard-speed restore: check again in 30 minutes.
                    job_info.next_run_time = timezone.now() + timezone.timedelta(seconds=1800)
                else:
                    # Expedited restore: check again in 5 minutes.
                    job_info.next_run_time = timezone.now() + timezone.timedelta(seconds=300)
                job_info.save()
    if anonymize_success_job_id:
        # Sync restoration status for anonymized data that is now restored.
        AnonymizedData.objects.filter(id__in=anonymize_success_job_id).update(restoration_status=3)
        ObsAsyncJobStatus.objects.filter(Q(job_data_type=0) & Q(job_id__in=anonymize_success_job_id)).delete()
        logger.info("update Anonymize Data restoration status")
    if scene_success_job_id:
        SceneData.objects.filter(id__in=scene_success_job_id).update(restoration_status=3)
        ObsAsyncJobStatus.objects.filter(Q(job_data_type=1) & Q(job_id__in=scene_success_job_id)).delete()
        logger.info("update scene Data restoration status")


@celery_app.task
def async_count_scene_data(scenario_id, size, city):
    """Accumulate size/count statistics for a newly added scene datum.

    Increments (or creates) the SceneDataCount row and the data-screen
    SceneDataMonitor row keyed by (main_scene, scenario_id).

    Args:
        scenario_id: scenario identifier. NOTE: the id formats of the two
            platforms are not yet unified; assumed identical for now.
        size: scene data size; may arrive as a string, hence int() below.
        city: region used when a SceneDataMonitor row has to be created.
    """
    main_scene = scenario_conversion_main_scene(scenario_id)
    count_qs = SceneDataCount.objects.filter(main_scene=main_scene, scenario_id=scenario_id)
    count_obj = count_qs.first()
    if count_obj is not None:
        # BUG FIX: the original update filtered on scenario_id only, which could
        # clobber SceneDataCount rows belonging to a different main_scene.
        # (main_scene, scenario_id) is assumed unique — TODO confirm model constraint.
        count_qs.update(size=count_obj.size + int(size),
                        scenario_number=count_obj.scenario_number + 1)
    else:
        SceneDataCount.objects.create(main_scene=main_scene, scenario_id=scenario_id, size=size, scenario_number=1)
    # Update the monitor (data-screen) statistics the same way.
    monitor_qs = SceneDataMonitor.objects.filter(main_scene=main_scene, scenario_id=scenario_id)
    monitor_obj = monitor_qs.first()
    if monitor_obj is not None:
        # Update the subdivided scenario under the main scene.
        monitor_qs.update(size=monitor_obj.size + int(size),
                          scenario_number=monitor_obj.scenario_number + 1,
                          complete_time=timezone.now())
    else:
        SceneDataMonitor.objects.create(region=city, size=size, main_scene=main_scene, scenario_id=scenario_id,
                                        scenario_number=1, bag_id=0, complete_time=timezone.now())


@celery_app.task
def async_delete_scene_data(pk):
    """Roll back count/monitor statistics after a SceneData row is removed.

    Looks up the scene data by primary key, subtracts its size and decrements
    the scenario counters in SceneDataCount and SceneDataMonitor; a counter
    whose scenario_number reaches zero has its row deleted instead.
    """
    objects = SceneData.objects.filter(id=pk)
    # BUG FIX: a QuerySet is never None, so the original `is None` checks could
    # not fire and a missing row crashed with IndexError on objects[0]; the
    # first check also lacked a `return`.
    if not objects:
        logger.error("场景数据不存在")
        return
    scene = objects[0]
    scenario_id = scene.scenario_id
    main_scene = scenario_conversion_main_scene(scenario_id)

    # Adjust the aggregate counters.
    scene_obj = SceneDataCount.objects.filter(main_scene=main_scene, scenario_id=scenario_id)
    if not scene_obj:
        logger.error("场景统计数据不存在")
        return
    update_size = scene_obj[0].size - scene.size
    update_scenario_number = scene_obj[0].scenario_number - 1
    if update_scenario_number:
        SceneDataCount.objects.filter(main_scene=main_scene, scenario_id=scenario_id).update(
            size=update_size, scenario_number=update_scenario_number)
    else:
        # Last scene of this scenario: remove the counter row entirely.
        SceneDataCount.objects.filter(main_scene=main_scene, scenario_id=scenario_id).delete()

    # Adjust the monitor (data-screen) counters the same way.
    monitor_obj = SceneDataMonitor.objects.filter(main_scene=main_scene, scenario_id=scenario_id)
    if not monitor_obj:
        logger.error("数据大屏场景统计数据不存在")
        return
    update_size = monitor_obj[0].size - scene.size
    update_scenario_number = monitor_obj[0].scenario_number - 1
    if update_scenario_number:
        SceneDataMonitor.objects.filter(main_scene=main_scene, scenario_id=scenario_id).update(
            size=update_size, scenario_number=update_scenario_number)
    else:
        SceneDataMonitor.objects.filter(main_scene=main_scene, scenario_id=scenario_id).delete()


@celery_app.task
def async_count_training_data(training_id):
    """Count statistics for training data. Placeholder — not implemented yet."""
    pass


@celery_app.task
def anonymized_data_parse():
    """Sync bag parse results from the ISV platform into AnonymizedData.

    For each pending AsyncJobAnonymizedAdd job, query the bag-detail API; once
    a bag has been parsed, copy its tag and an assembled remark onto the
    matching AnonymizedData row and delete the job record. Unparsed bags are
    left for the next run.
    """
    try:
        get_bag_api = data_mgt_apis.get("bags_detail", None)
        if get_bag_api is None:
            logger.error('Not got api url to check bags detail from isv platform.\
                        Pls check conf data_mgt_apis["bags_detail"]')
            return

        job_lst = AsyncJobAnonymizedAdd.objects.all()
        headers = {"Content-Type": "application/json"}
        for job_info in job_lst:
            get_params = {"bag_id": job_info.data_id}
            response = requests.get(url=get_bag_api, params=get_params, headers=headers)
            res_json = json.loads(response.text)
            if response.status_code != 200:
                logger.error(
                    "get bag detail from isv platfrom failed.bag_id:%s, error message :%s", job_info.data_id,
                    res_json.get("errmsg"))
                continue
            detail = res_json.get("data", {})
            # BUG FIX: the condition was inverted — the skip branch (matching its
            # own log message) must run when the bag has NOT been parsed yet.
            if not detail.get("has_parse", False):
                logger.info("bag(%s) has not parsed. will check next times.", job_info.data_id)
                continue
            # Bag parsed: copy tag and assembled remark onto the anonymized row.
            anony = AnonymizedData.objects.get(id=job_info.anonymized_id)
            anony.tag = detail.get("tag", "")
            remark = ""
            for key in Holomatic.bags_keys:
                val = detail.get(key, None)
                if val:
                    remark += "{}:{},".format(key, val)
            # BUG FIX: `+= detail.get("remarks", None)` raised TypeError when the
            # field was missing; fall back to an empty string instead.
            remark += detail.get("remarks") or ""
            anony.remark = remark
            # BUG FIX: the original never persisted the updated row.
            anony.save()

            # Delete the job info so the bag is not re-processed.
            job_info.delete()
    except Exception as e:
        # Top-level boundary: log once with the traceback instead of three times.
        logger.error("anonymized_data_parse failed: %s, %s", repr(e), traceback.format_exc())


def send_to_next(data, user_data, url):
    """POST *data* as JSON to *url*, authenticated with the caller's session.

    user_data must contain "sessionid" and "token"; both are forwarded as HTTP
    headers and as cookies. A failed request (status >= 300) is only logged.
    """
    session_id = user_data["sessionid"]
    csrf_token = user_data["token"]
    headers = {
        "Content-Type": "application/json",
        "sessionid": session_id,
        "X-CSRFToken": csrf_token,
    }
    cookies = cookiejar_from_dict({"sessionid": session_id, "csrftoken": csrf_token})
    response = requests.post(url=url, data=json.dumps(data), headers=headers, cookies=cookies)
    if response.status_code >= 300:
        logger.error("数据添加失败")


@celery_app.task()
def async_check_anonymize():
    """Poll anonymization tasks on the ISV platform and forward finished ones.

    For every AsyncJobAnonymizedAdd record, fetch the task detail; tasks still
    running are left for the next poll. Finished tasks are converted into
    anonymized-data payloads (with a random name suffix to avoid collisions),
    their job records deleted, and the collected payloads POSTed to the
    anonymized-data service using the last polled task's credentials.
    """
    task_list = AsyncJobAnonymizedAdd.objects.all()
    if task_list.count() == 0:
        logger.info("Anonymize task list is empty")
        return
    anony_success_list = []
    user_data = {}

    url = data_mgt_apis["bags_detail"]
    for task in task_list:
        params = {"task_id": task.task_id}
        headers = {"Content-Type": "application/json"}
        # For local testing against the mock: resp = resp_anonymize_test()
        resp = requests.get(url=url, params=params, headers=headers)
        user_data = {"username": task.username, "sessionid": task.sessionid, "token": task.token}
        # BUG FIX: json.loads() needs the body text, not the Response object.
        resp_json = json.loads(resp.text)
        errmsg = resp_json.get('errmsg', '')
        if resp_json.get("status_code") != 200:
            logger.error(
                "get task detail from isv platfrom failed.task_id:%s, error message :%s", task.task_id,
                errmsg)
            continue

        detail = resp_json.get('data', '')
        # BUG FIX: compare ints with ==, not `is` (identity is unreliable).
        if detail.get("status") == Holomatic.task_status_active:
            logger.info("task(%s) is running. will check next times.", task.task_id)
            continue
        anony = {}
        try:
            # Insert a random 7-letter tag before the extension so repeated
            # imports of the same bag do not collide on name.
            name = detail.get('name')
            str_name = name.split('.')
            str_name[-1] = '_' + ''.join(random.sample(string.ascii_letters, 7)) + '.' + str_name[-1]
            name = ''.join(str_name)
            anony['name'] = name
            anony['size'] = detail.get('size', 0)
            anony['car_id'] = detail.get('car_id', '')
            anony['data_type'] = detail.get('data_type', '')
            anony['region'] = detail.get('region', '')
            anony['url'] = detail.get('url', '')
            anony['is_delete'] = False
            anony['source_type'] = detail.get('source_type', 0)
            anony['source_id'] = detail.get('source_id', '')
            anony['tag'] = detail.get("tag", '')
            anony['remark'] = detail.get("remark", '')
            anony['collect_time'] = detail.get('collect_time', '')
            anony['storage_type'] = detail.get('storage_type', 0)
            anony['task_id'] = detail.get('task_id', '')
        except Exception as e:
            logger.error("数据格式错误或任务未完成:%s, %s", repr(e), traceback.format_exc())
            # BUG FIX: skip this task — the original fell through, appending a
            # partial payload and deleting the job anyway.
            continue
        anony_success_list.append(anony)
        task.delete()
    if anony_success_list:
        send_to_next(anony_success_list, user_data, anonymized_data_url)


@celery_app.task()
def async_check_scene_cut():
    """Poll scene-cut tasks on the ISV platform and forward finished ones.

    Completed tasks become scene-data payloads (name and from_uuid taken from
    the source AnonymizedData row), their job records are deleted, and the
    payloads are POSTed to the scene-data service using the last polled task's
    credentials.
    """
    task_list = AsyncJobSceneCut.objects.all()
    if task_list.count() == 0:
        logger.info("Scenecut task list is empty")
        return
    scene_cut_success_list = []
    user_data = {}
    url = data_mgt_apis["scene_cut_task_detail"]
    for task in task_list:
        params = {"task_id": task.task_id}
        headers = {"Content-Type": "application/json"}
        # For local testing against the mock: resp = resp_scene_cut_test()
        resp = requests.get(url=url, params=params, headers=headers)
        user_data = {"username": task.username, "sessionid": task.sessionid, "token": task.token}
        # BUG FIX: json.loads() needs the body text, not the Response object.
        resp_json = json.loads(resp.text)
        errmsg = resp_json.get('errmsg', '')
        if resp_json.get("status_code") != 200:
            logger.error(
                "get task detail from isv platfrom failed.task_id:%s, error message :%s", task.task_id, errmsg)
            continue
        data = resp_json.get('data', '')
        # BUG FIX: compare ints with ==, not `is` (identity is unreliable).
        if data.get("status") == Holomatic.task_status_active:
            logger.info("task(%s) is running. will check next times.", task.task_id)
            continue
        scene = {}
        try:
            # Fetch the source anonymized row once (the original queried twice).
            anony = AnonymizedData.objects.get(id=task.anonymized_id.id)
            # The name field is used to match SceneData's fromdata foreign key.
            scene['name'] = anony.name
            scene['size'] = data.get('size', 0)
            scene['data_type'] = data.get('data_type', '')
            scene['region'] = data.get('region', '')
            scene['url'] = data.get('url', '')
            scene['is_delete'] = False
            scene['source_type'] = data.get('source_type', '')
            scene['source_id'] = data.get('source_id', '')
            scene['tag'] = data.get("tag", '')
            scene['remark'] = data.get("remark", '')
            scene['collect_time'] = data.get('collect_time', '')
            scene['storage_type'] = data.get('storage_type', '')
            scene['task_id'] = data.get('task_id', '')
            scene['car_id'] = data.get('car_id', '')
            scene['scenario_id'] = data.get('scenario_id', '')
            scene['from_uuid'] = anony.uuid
        except Exception as e:
            logger.error("数据格式错误:%s, %s", repr(e), traceback.format_exc())
            # BUG FIX: skip this task — the original fell through, appending a
            # partial payload and deleting the job anyway.
            continue
        scene_cut_success_list.append(scene)
        task.delete()
    if scene_cut_success_list:
        send_to_next(scene_cut_success_list, user_data, scene_data_url)


class Holomatic:
    """Constants mirroring the ISV (Holomatic) platform task API."""

    task_status_active = 1  # running
    task_status_pending = 2  # waiting to run
    task_status_scheduled = 3  # scheduled (not enabled yet)
    task_status_retry = 4  # retrying
    task_status_archived = 5  # failed and archived
    task_status_completed = 6  # extraction completed
    # 7 is a reserved status, unused
    task_status_canceled = 8  # task canceled

    # Bag-detail keys that are folded into the remark string.
    bags_keys = ["md5", "start", "end", "mileage", "road_types", "participant_types", "traffic_types",
                 "line_types", "has_map"]


def resp_anonymize_test():
    """Return a canned JSON body mimicking the third-party anonymize API."""
    detail = {
        "id": "123123",
        "status": 6,
        "create_at": 1650624326149,
        "update_at": 1650628238192,
        "task_id": "123",
        "params": "",
        "channel": "holo_data",
        "target": "cut_scene",
        "name": "holomatic_2022-05_test.holobag",
        "size": 1000,
        "car_id": "粤B123456",
        "data_type": 1,
        "region": "GZ",
        "url": "obs://ploto-test/anonymize/holomatic_2022-05-26-14-53-55_734_483.holobag",
        "is_delete": False,
        "source_type": 0,
        "source_id": 0,
        "tag": "人|车|路",
        "remark": "test-test-test",
        "collect_time": "2022-05-03 11:00:00.000000",
        "storage_type": 0,
    }
    envelope = {
        "status_code": 200,
        "errno": 0,
        "errmsg": "OK",
        "data": detail,
        "details": None,
    }
    return json.dumps(envelope)


def resp_scene_cut_test():
    """Return a canned JSON body mimicking the third-party scene-cut API."""
    detail = {
        "id": "123123",
        "status": 6,
        "task_id": "123",
        "car_id": "粤B123456",
        "remark": "test-test-test",
        "params": "obs://holomatic-storage/output/huawei-demo/112",
        "target": "cut_scene",
        "name": "holomatic_2022-05_test_scene_data.holobag",
        "size": 1000,
        "data_type": 1,
        "region": "GZ",
        "url": "obs://ploto-test/holo/test_data__2021-06-04-14-53-55.bag__perception_04-1__321",
        "is_delete": False,
        "source_type": 0,
        "source_id": 0,
        "tag": "人|车|路",
        "collect_time": "2022-05-03 11:00:00.000000",
        "storage_type": 0,
        "scenario_id": "follow_big_car",
    }
    envelope = {
        "status_code": 200,
        "errno": 0,
        "errmsg": "OK",
        "data": detail,
        "details": None,
    }
    return json.dumps(envelope)
