from fastapi import HTTPException
from dag_creator import DAGTemplate, ConfigTemplate,  create_dag, create_dag_args
from custom_operator import PythonOperator, SparkKubernetesOperator
from vo import BuildIDAGItem
from read_file_from_minio import MinioDataSource, ConvertCsvToDataframeDecorator
from response import create_response
from jinja2 import Template, FileSystemLoader, Environment
from minio import Minio, S3Error
import custom_config

# Module-level Jinja2 environment: templates are loaded from the local
# ./j2_files directory (relative to the process working directory).
j2_loader = FileSystemLoader('./j2_files')
env = Environment(loader=j2_loader)

def build_dag_py(item: BuildIDAGItem) -> str:
    """Render the Python source code of an Airflow DAG from *item*.

    The result is assembled section by section: user-specified Python
    dependencies, DAG default args + DAG definition, the (optional) data
    source, operator definitions, and finally the task execution order.

    Args:
        item: User request describing dependencies, data source, operators
            and the desired task ordering.

    Returns:
        The generated DAG file contents as a single string.

    Raises:
        HTTPException: 404 when the requested Minio bucket or object
            does not exist.
    """
    # Accumulate sections and join once at the end (avoids quadratic +=).
    parts = []

    # Imports for the user-specified Python dependencies.
    dependencies_j2 = env.get_template('./dependencies.j2')
    parts.append(dependencies_j2.render(python_dependencies=item.python_dependencies))

    # DAG default-args configuration.
    config = ConfigTemplate()
    # DAG definition.
    dag = DAGTemplate()
    dag.dag_id = item.user_id + "-dag"
    dag.description = item.user_id + " create a dag!"

    parts.append(create_dag_args(config) + "\n")
    parts.append(create_dag(dag) + "\n")

    # Data source selection — only Minio is supported here.
    if item.user_data_source.datasource_type == "Minio":
        minio_client = Minio(
            custom_config.MINIO_API_URL,
            access_key=custom_config.MINIO_ACCESS_KEY,
            secret_key=custom_config.MINIO_SECRET_KEY,
            secure=False
        )
        # Verify the requested bucket and object exist before generating code.
        if not minio_client.bucket_exists(item.user_data_source.bucket_name):
            raise HTTPException(status_code=404, detail="Bucket not found")
        try:
            # stat_object fetches metadata only; the previous get_object call
            # streamed the whole object and leaked the unclosed HTTP response.
            minio_client.stat_object(item.user_data_source.bucket_name,
                                     item.user_data_source.object_name)
        except S3Error as e:
            # Narrow catch: connection/other errors should propagate rather
            # than be misreported as a missing object.
            raise HTTPException(status_code=404, detail="Object not found") from e

        # Build the data-source snippet rendered into the DAG file.
        datasource = MinioDataSource()
        # NOTE(review): the existence check above validates
        # item.user_data_source.bucket_name, but the generated DAG reads from
        # item.user_id + "-bucket". Kept as-is — confirm the mismatch is intended.
        datasource.bucket_name = item.user_id + "-bucket"
        datasource.object_name = item.user_data_source.object_name
        # Optional conversion decorator.
        if item.user_data_source.datasource_convert == "csv to dataframe":
            # Generated code converts the CSV object into a pandas DataFrame.
            decorated_datasource = ConvertCsvToDataframeDecorator(
                datasource, item.user_data_source.convert_result_name)
            parts.append(decorated_datasource.create_data_source_object() + "\n")
        elif item.user_data_source.datasource_convert is None:
            # No conversion requested: emit the raw data-source snippet.
            parts.append(datasource.create_data_source_object() + "\n")

    # Definitions of the functions referenced by PythonOperators.
    if item.python_functions is not None:
        python_functions_j2 = env.get_template('./Python_functon.j2')
        parts.append(python_functions_j2.render(
            python_functions=item.python_functions
        ) + "\n")

    # Operator definitions.
    for operator in item.operators:
        if operator.operator_type == 'PythonOperator':
            python_operator = PythonOperator()
            python_operator.operator_name = operator.operator_name
            # Callable and kwargs executed by the generated PythonOperator.
            python_operator.python_callable = operator.python_function_defined
            python_operator.op_kwargs = operator.python_function_args
            python_operator.task_id = item.user_id + "-" + operator.operator_name
            parts.append(python_operator.create_python_operator() + "\n")
        elif operator.operator_type == 'SparkKubernetesOperator':
            spark_kubernetes_operator = SparkKubernetesOperator()
            spark_kubernetes_operator.operator_name = operator.operator_name
            # Spark application path inside the driver image.
            spark_kubernetes_operator.code_path = "local:///home/" + operator.spark_application
            spark_kubernetes_operator.task_id = item.user_id + "-" + operator.operator_name
            parts.append(spark_kubernetes_operator.create_spark_kubernetes_operator() + "\n")

    # Task execution order.
    if item.dag_sequence is not None:
        # Linear chain: a >> b >> c (join replaces manual concat + [:-4] slice).
        parts.append(" >> ".join(item.dag_sequence))
    elif item.dag_sequence_tree is not None:
        # Tree structure: each child declares its parent via set_upstream.
        parts.append("".join(
            sub_operator + ".set_upstream(" + parent_operator + ")" + '\n'
            for parent_operator, sub_operators in item.dag_sequence_tree.items()
            for sub_operator in sub_operators
        ))

    return "".join(parts)