# coding=utf-8

import datetime
import json
import os
import shlex
import subprocess

import celery

from kd_server.settings import SPARK_SUBMIT_JAR
from plantform.models import Task
from report.models import Report
from utils.kaida2.start import start_make_data

@celery.task()
def spark_async_submit(conf_string, task_id):
    """
    Asynchronously run the Spark submit jar and generate the report data.

    :param conf_string: whitespace-separated command-line arguments appended
        to the ``java -jar <SPARK_SUBMIT_JAR>`` invocation
    :param task_id: primary key of the Task row to update; also the foreign
        key used to look up the matching Report row
    :return: the literal string ``"success"`` (Celery result payload; the real
        outcome is recorded on the Task/Report rows via their status fields)
    """
    task_detail = Task.objects.get(pk=task_id)

    # Build the report first. Status codes: 2 = done, 3 = failed.
    # Failures are recorded on the row instead of propagating, so the Spark
    # submission below still runs either way (same as the original behavior).
    report_ins = Report.objects.get(task_id=task_id)
    try:
        report_ins.report_info = json.dumps(start_make_data(report_ins.keywords))
        report_ins.report_status = 2
    except Exception as e:
        report_ins.report_status = 3
        report_ins.report_info = json.dumps({"error": str(e)})
    report_ins.save()

    # Run the Spark submit jar.
    # Use an argv list with shell=False (subprocess.run) instead of the old
    # os.popen(string) call, so that:
    #   * conf_string cannot be shell-injected, and
    #   * a non-zero exit code raises CalledProcessError and is recorded as
    #     a failure instead of being silently treated as success.
    # shlex.split preserves the whitespace-splitting the shell used to do.
    # TODO: consider upgrading to a remote execution call.
    try:
        cmd = ["java", "-jar", SPARK_SUBMIT_JAR] + shlex.split(conf_string)
        completed = subprocess.run(
            cmd,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            universal_newlines=True,
            check=True,
        )
        # Persist stdout and mark the task finished (2 = success).
        task_detail.task_status = 2
        task_detail.task_results = completed.stdout
        task_detail.end_datetime = datetime.datetime.now()
        task_detail.save()
    except Exception as e:
        # Covers a failed spawn (OSError) and a non-zero jar exit
        # (CalledProcessError). Mark the task failed (3) and keep the error.
        task_detail.task_status = 3
        task_detail.task_results = ""
        task_detail.end_datetime = datetime.datetime.now()
        task_detail.error_info = str(e)
        task_detail.save()
    return "success"
