from __future__ import absolute_import
import logging
import json
import traceback
import datetime
import requests
from requests.cookies import cookiejar_from_dict
from django.utils import timezone
from django.db.models import Q
from conf.conf import data_mgt_apis, scene_data_url
from data_mgt.models import AnonymizedData, SceneData, SceneDataCount, DataSetInfo, DataImportTask
from data_mgt.models import ObsAsyncJobStatus, AsyncJobAnonymizedAdd, AsyncJobSceneCut
from monitor.models import SceneDataMonitor
from common.obs_tools import obs_client, object_restore_status, get_job_type
from common.tools import scenario_conversion_main_scene, check_session
from common.train_tool import DataSet
from ploto.celery import app as celery_app


logger = logging.getLogger(__name__)


@celery_app.task
def get_obs_job_status():
    """
    1、获取当前时间以前的任务信息
    2、对于任务信息 分脱敏数据和场景数据处理
    3、对脱敏数据 获取恢复状态  已恢复：加入到脱敏数据成功列表  未恢复：下一次执行时间+5分钟或者30分钟
    4、对场景数据 记录所有的id 已恢复：删除记录表的id 若当前id不在记录表中存在，则加入场景数据成功列表
       未恢复：下一次执行时间+5分钟或者30分钟
    5、脱敏数据成功列表有数据，则更新AnonymizedData的恢复状态字段，
    并且删除ObsAsyncJobStatus表中data_type为脱敏数据且id在脱敏数据成功列表中数据
    6、场景数据成功列表有数据，则更新SceneData的恢复状态字段，
    并且删除ObsAsyncJobStatus表中data_type为场景数据且id在场景数据成功列表中数据
    """
    job_list = ObsAsyncJobStatus.objects.filter(next_run_time__lt=timezone.now())
    if not job_list:
        logger.info("OBSAsync not have job list")
        return
    anonymize_success_job_id = []
    scene_success_job_id = []
    job_id_list = []
    for job_info in job_list:
        if job_info.job_data_type == 0:
            # anonymized data
            restore_status = object_restore_status(job_info.job_bucket, job_info.job_object)
            if restore_status == "restored":
                anonymize_success_job_id.append(job_info.job_id)
                continue
            if job_info.job_type == 0:
                # normal retrieval: poll again in 30 minutes
                job_info.next_run_time = timezone.now() + timezone.timedelta(seconds=1800)
            else:
                # other retrieval types: poll again in 5 minutes
                job_info.next_run_time = timezone.now() + timezone.timedelta(seconds=300)
            job_info.save()
        else:
            # scene data: a single restore job may cover several OBS objects sharing the same job_id
            job_id_list.append(job_info.job_id)
            restore_status = object_restore_status(job_info.job_bucket, job_info.job_object)
            if restore_status == "restored":
                job_id_list.pop(-1)
                # only mark the job restored when no other object of the same job is still pending
                if job_info.job_id not in job_id_list:
                    scene_success_job_id.append(job_info.job_id)
                    continue
            else:
                if job_info.job_type == 0:
                    # normal retrieval: poll again in 30 minutes
                    job_info.next_run_time = timezone.now() + timezone.timedelta(seconds=1800)
                else:
                    # other retrieval types: poll again in 5 minutes
                    job_info.next_run_time = timezone.now() + timezone.timedelta(seconds=300)
                job_info.save()
    if anonymize_success_job_id:
        # sync the restoration status of the anonymized data that has been restored
        AnonymizedData.objects.filter(id__in=anonymize_success_job_id).update(restoration_status=3)
        ObsAsyncJobStatus.objects.filter(Q(job_data_type=0) & Q(job_id__in=anonymize_success_job_id)).delete()
        logger.info("update Anonymize Data restoration status")
    if scene_success_job_id:
        SceneData.objects.filter(id__in=scene_success_job_id).update(restoration_status=3)
        ObsAsyncJobStatus.objects.filter(Q(job_data_type=1) & Q(job_id__in=scene_success_job_id)).delete()
        logger.info("update scene Data restoration status")


@celery_app.task
def async_count_scene_data(scenario_id, size, city):
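    """
    Accumulate size and count statistics for a scenario.

    Increment (or create) the SceneDataCount row for the scenario and mirror the
    same update into SceneDataMonitor for the data dashboard.
    """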
    # the scenario_id format has not been unified between the two sides yet; assume it is consistent for now
    main_scene = scenario_conversion_main_scene(scenario_id)
    objects = SceneDataCount.objects.filter(main_scene=main_scene, scenario_id=scenario_id)
    if objects:
        for obj in objects:
            update_size = obj.size + int(size)
            update_number = obj.scenario_number + 1
            SceneDataCount.objects.filter(main_scene=main_scene,
                                          scenario_id=scenario_id).update(size=update_size,
                                                                          scenario_number=update_number)
    else:
        SceneDataCount.objects.create(main_scene=main_scene, scenario_id=scenario_id, size=size, scenario_number=1)
    # update the monitor (dashboard) data
    object_monitor = SceneDataMonitor.objects.filter(main_scene=main_scene, scenario_id=scenario_id)
    if object_monitor:
        for obj in object_monitor:
            update_size = obj.size + int(size)
            update_number = obj.scenario_number + 1
            # update the sub-scenario statistics under the main scene
            SceneDataMonitor.objects.filter(main_scene=main_scene,
                                            scenario_id=scenario_id).update(size=update_size,
                                                                            scenario_number=update_number,
                                                                            complete_time=timezone.now())
    else:
        SceneDataMonitor.objects.create(region=city, size=size, main_scene=main_scene, scenario_id=scenario_id,
                                        scenario_number=1, bag_id=0, complete_time=timezone.now())


@celery_app.task
def async_delete_scene_data(pk):
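    """
    Roll back the statistics of a deleted scene data record.

    Subtract the record's size and count from SceneDataCount and SceneDataMonitor,
    and drop the counter rows once the scenario count reaches zero.
    """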
    objects = SceneData.objects.filter(id=pk)
    if not objects:
        logger.error("scene data does not exist")
        return
    scenario_id = objects[0].scenario_id
    main_scene = scenario_conversion_main_scene(scenario_id)
    # update the scene data counters
    scene_obj = SceneDataCount.objects.filter(main_scene=main_scene, scenario_id=scenario_id)
    if not scene_obj:
        logger.error("scene data count record does not exist")
        return
    update_size = scene_obj[0].size - objects[0].size
    update_scenario_number = scene_obj[0].scenario_number - 1
    if update_scenario_number:
        SceneDataCount.objects.filter(main_scene=main_scene, scenario_id=scenario_id).update(size=update_size,
                                                                        scenario_number=update_scenario_number)
    else:
        SceneDataCount.objects.filter(main_scene=main_scene, scenario_id=scenario_id).delete()

    # update the monitor (dashboard) statistics
    monitor_obj = SceneDataMonitor.objects.filter(main_scene=main_scene, scenario_id=scenario_id)
    if not monitor_obj:
        logger.error("scene data monitor record does not exist")
        return
    update_size = monitor_obj[0].size - objects[0].size
    update_scenario_number = monitor_obj[0].scenario_number - 1
    if update_scenario_number:
        SceneDataMonitor.objects.filter(main_scene=main_scene, scenario_id=scenario_id).update(size=update_size,
                                                                        scenario_number=update_scenario_number)
    else:
        SceneDataMonitor.objects.filter(main_scene=main_scene, scenario_id=scenario_id).delete()


@celery_app.task
def async_import_dataset():
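    """
    Submit pending dataset import tasks.

    Collect the datasets that currently have no running import task, then trigger
    each queued DataImportTask whose destination dataset is ready and remove it
    from the queue.
    """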
    ds = DataSet()
    resp = ds.get_datasets(page=1, page_size=100, name="")
    resp_json = json.loads(resp.text)['datasets']
    ready_dataset = set()
    for element in resp_json:
        dataset_id = element['dataset_id']
        cur_dataset_info = json.loads(ds.get_import_data_info(dataset_id).text)
        if cur_dataset_info['total_count'] == 0:
            ready_dataset.add(dataset_id)
        elif cur_dataset_info['import_tasks'][0]['status'] != "RUNNING":
            ready_dataset.add(dataset_id)
    task_list = DataImportTask.objects.all()
    if task_list.count() == 0:
        logger.info("dataset import task list is empty")
        return
    for task in task_list:
        # only submit the import when the destination dataset is ready
        if task.des_dataset_id in ready_dataset:
            ds.import_task(dataset_id=task.des_dataset_id, data_url=task.import_path)
            logger.info("dataset import success")
            task.delete()
            ready_dataset.remove(task.des_dataset_id)


@celery_app.task
def anonymized_data_parse():
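    """
    Poll the ISV platform for bag parsing results.

    For each pending AsyncJobAnonymizedAdd job, fetch the bag detail; once the bag
    has been parsed, copy its tag and key attributes into the corresponding
    AnonymizedData record and delete the job.
    """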
    try:
        get_bag_api = data_mgt_apis.get("bags_detail", None)
        if get_bag_api is None:
            logger.error('No API url configured for checking bag details on the ISV platform. '
                         'Please check conf data_mgt_apis["bags_detail"]')
            return

        job_lst = AsyncJobAnonymizedAdd.objects.all()
        get_params = {"bag_id": None}
        headers = {"Content-Type": "application/json"}
        for job_info in job_lst:
            get_params["bag_id"] = job_info.data_id
            response = requests.get(url=get_bag_api, params=get_params, headers=headers)
            res_json = json.loads(response.text)
            if response.status_code != 200:
                logger.error(
                    "get bag detail from isv platform failed. bag_id:%s, error message:%s", job_info.data_id,
                    res_json.get("errmsg"))
                continue
            detail = res_json.get("data", {})
            if detail.get("has_parse", False):
                logger.info("bag(%s) has not parsed. will check next times.", job_info.data_id)
            else:
                # update the anonymized data with the parsed bag details
                anony = AnonymizedData.objects.get(id=job_info.anonymized_id)
                anony.tag = detail.get("tag", "")
                remark = ""
                for key in Holomatic.bags_keys:
                    val = detail.get(key, None)
                    if val:
                        remark += "{}:{},".format(key, val)
                remark += detail.get("remarks", "") or ""
                anony.remark = remark
                anony.save()

                # delete the job info once the record has been updated
                job_info.delete()
    except Exception as e:
        logger.error("anonymized_data_parse failed: %s, %s", repr(e), traceback.format_exc())


def send_to_next(data, user_data, url):
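    """Forward scene data to the given url, reusing the caller's session id and CSRF token."""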
    headers = {"Content-Type": "application/json"}
    headers["sessionid"] = user_data["sessionid"]
    headers["X-CSRFToken"] = user_data["token"]
    cookies_dict = {}
    cookies_dict["sessionid"] = user_data["sessionid"]
    cookies_dict["csrftoken"] = user_data["token"]
    cookies = cookiejar_from_dict(cookies_dict)
    response_post = requests.post(url=url, data=json.dumps(data), headers=headers, cookies=cookies)
    if response_post.status_code >= 300:
        logger.error("数据添加失败:{}".format(response_post.status_code))


@celery_app.task()
def async_check_scene_cut():
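    """
    Poll the scene-cut tasks queued in AsyncJobSceneCut.

    Completed tasks are turned into scene data entries and posted to the scene-data
    service; failed or canceled tasks are dropped; running tasks are checked again
    on the next run.
    """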
    task_list = AsyncJobSceneCut.objects.all()
    if task_list.count() == 0:
        logger.info("Scenecut task list is empty")
        return
    scene_cut_success_list = []
    url = data_mgt_apis["scene_cut_task_detail"]
    for task in task_list:
        params = {"task_id": task.task_id}
        headers = {"Content-Type": "application/json"}
        task_detail_resp = requests.get(url=url, params=params, headers=headers)
        user_data = {"username": task.username, "sessionid": task.sessionid, "token": task.token}
        resp_json = task_detail_resp.json()
        if check_session(task.sessionid):
            logger.info("当前会话已过期")
            continue
        # the task does not exist on the remote platform
        if resp_json.get("errno", 0) > 0:
            logger.error(
                "get task detail failed. Task_id:%s, error message :%s", task.task_id, resp_json.get('errmsg', ''))
            task.delete()
            continue
        data = resp_json.get('data', {})
        task_status = data.get("status")
        # the task is waiting to run or still running
        if task_status == Holomatic.task_status_active or task_status == Holomatic.task_status_pending:
            logger.info("Task: %s is running. will check next times.", task.task_id)
            continue

        # the task failed (archived) or was canceled
        elif task_status == Holomatic.task_status_canceled or task_status == Holomatic.task_status_archived:
            logger.info("Run task: %s failed.", task.task_id)
            task.delete()
            continue
        elif task_status == Holomatic.task_status_completed:
            scene = {}
            try:
                anonymized_obj = AnonymizedData.objects.filter(id=task.anonymized_id.id)[0]
                scene['name'] = anonymized_obj.name
                scene['data_type'] = anonymized_obj.data_type
                scene['region'] = anonymized_obj.region
                scene['source_type'] = anonymized_obj.source_type
                scene['source_id'] = anonymized_obj.source_id
                scene['collect_time'] = anonymized_obj.collect_time.strftime("%Y-%m-%d %H:%M:%S")
                scene['car_id'] = anonymized_obj.car_id
                scene['from_uuid'] = anonymized_obj.uuid
                scene['storage_type'] = anonymized_obj.storage_type
                scene['tag'] = anonymized_obj.tag
                scene['remark'] = anonymized_obj.remark
                scene['task_id'] = data.get('task_id', '')
                scene['is_delete'] = False
                scene['last_cut_time'] = datetime.datetime.today().strftime("%Y-%m-%d %H:%M:%S")
                scene['url'] = task.obs_output_dir + '/' + str(task.data_id)
                scene['bag_id'] = task.data_id

            except Exception as e:
                logger.error("malformed task data: %s, %s", repr(e), traceback.format_exc())
                continue
            scene_cut_success_list.append(scene)
            task.delete()
        if scene_cut_success_list:
            send_to_next(scene_cut_success_list, user_data, scene_data_url)
            scene_cut_success_list = []


class Holomatic:
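    """Task status codes and bag attribute keys used by the external (Holomatic) platform."""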
    task_status_active = 1  # running
    task_status_pending = 2  # waiting to run
    task_status_scheduled = 3  # scheduled (not enabled yet)
    task_status_retry = 4  # retrying
    task_status_archived = 5  # archived as failed
    task_status_completed = 6  # extraction completed
    # 7 is a reserved status, unused
    task_status_canceled = 8  # task canceled

    bags_keys = ["md5", "start", "end", "mileage", "road_types", "participant_types", "traffic_types",
                 "line_types", "has_map"]


def resp_scene_cut_test():
    # third-party scene data (mocked response for testing)
    text = json.dumps(
        {
            "errno": 0,
            "errmsg": "OK",
            "data": {
                "id": "123123",
                "status": 6,
                "task_id": "123"
            },
            "details": None
        })
    return text
