package com.cl.spark.base;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.cl.spark.dto.SparkParam;
import com.cl.spark.dto.SparkResult;
import com.cl.spark.enums.SparkNodeEnum;
import com.cl.spark.util.SparkUtil;
import lombok.Data;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.functions;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.storage.StorageLevel;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.util.StringUtils;

import java.util.*;
import java.util.stream.Collectors;

@Data
public abstract class BaseSparkNode {

    @Autowired
    public SparkUtil sparkUtil;

    /**
     * Template entry point for a node execution.
     * <p>
     * When the node expression requests it (via {@code fieldAppendTableNameFlag == "是"})
     * or any upstream result carries the flag, and exactly-resolvable table names exist
     * for the first two upstream results, every data column of both datasets is renamed
     * to {@code <tableName>__<column>} (the {@code fields1}/{@code fields2} arrays of the
     * expression are rewritten to match) before delegating to {@link #handleProcess}.
     * Otherwise the parameter is passed through unchanged.
     *
     * @param sparkParam node input: expression JSON plus upstream {@link SparkResult}s
     * @return the (possibly column-prefixed) result of {@link #handleProcess}
     */
    public SparkResult handle(SparkParam sparkParam) {
        boolean allFieldAppendTableNameFlag = false;
        JSONObject expression = sparkParam.getNodeExpression();
        if (expression != null) {

            // "是" ("yes") in the expression switches the table-name-prefix behavior on.
            if (expression.containsKey("fieldAppendTableNameFlag")) {
                String fieldAppendTableNameFlagString = expression.getString("fieldAppendTableNameFlag");
                if ("是".equals(fieldAppendTableNameFlagString)) {
                    allFieldAppendTableNameFlag = true;
                }
            }

            List<SparkResult> sparkResultList = sparkParam.getSparkResultList();

            // Prefixing only applies to nodes combining at least two upstream results.
            if (sparkResultList != null && sparkResultList.size() > 1) {
                // NOTE(review): assumes getFieldAppendTableNameFlag() never returns a null
                // Boolean — a null would NPE during unboxing here. Confirm on SparkResult.
                allFieldAppendTableNameFlag = allFieldAppendTableNameFlag
                        || sparkResultList.stream().anyMatch(SparkResult::getFieldAppendTableNameFlag);

                Dataset<Row> data1 = sparkResultList.get(0).getDataset();
                Dataset<Row> data2 = sparkResultList.get(1).getDataset();

                String tableName1 = resolveTableName(sparkResultList.get(0).getTableName(), data1);
                String tableName2 = resolveTableName(sparkResultList.get(1).getTableName(), data2);

                if (allFieldAppendTableNameFlag && tableName1 != null && tableName2 != null) {
                    // Rewrite the expression's field lists only when BOTH are present,
                    // mirroring the renaming applied to the datasets below.
                    JSONArray fields1 = expression.getJSONArray("fields1");
                    JSONArray fields2 = expression.getJSONArray("fields2");
                    if (fields1 != null && fields2 != null) {
                        expression.put("fields1", prefixFieldNames(fields1, tableName1));
                        expression.put("fields2", prefixFieldNames(fields2, tableName2));
                    }

                    data1 = prefixColumns(data1, tableName1);
                    data2 = prefixColumns(data2, tableName2);

                    sparkResultList.get(0).setDataset(data1);
                    sparkResultList.get(1).setDataset(data2);
                    sparkParam.setSparkResultList(sparkResultList);
                    sparkParam.setNodeExpression(expression);

                    SparkResult res = handleProcess(sparkParam);
                    // Downstream consumers expect these marker columns; blank them out
                    // because the merged result no longer belongs to a single table.
                    res.setDataset(res.getDataset().withColumn("TABLE_NAME", functions.lit("")));
                    res.setDataset(res.getDataset().withColumn("DB_CODE", functions.lit("")));
                    res.setFieldAppendTableNameFlag(allFieldAppendTableNameFlag);
                    return res;
                }
            }
        }
        // No prefixing applicable — run the node on the untouched input.
        SparkResult res = handleProcess(sparkParam);
        return res;
    }

    /**
     * Resolves the table name of an upstream result: prefers the explicit name,
     * otherwise falls back to the {@code TABLE_NAME} column of the dataset's first row.
     * {@code isEmpty()}/{@code first()} each trigger a Spark job, so the dataset is
     * only probed when the explicit name is missing.
     *
     * @param explicitName the name carried by the {@link SparkResult}, may be null
     * @param data         the result's dataset
     * @return the resolved table name, or {@code null} if none could be determined
     */
    private static String resolveTableName(String explicitName, Dataset<Row> data) {
        if (explicitName != null) {
            return explicitName;
        }
        if (data.isEmpty()) {
            return null;
        }
        Row first = data.first();
        if (Arrays.asList(first.schema().fieldNames()).contains("TABLE_NAME")) {
            return first.getAs("TABLE_NAME");
        }
        return null;
    }

    /**
     * Returns a new array with every entry prefixed as {@code <tableName>__<entry>}.
     *
     * @param fields    original field names from the node expression
     * @param tableName resolved table name used as the prefix
     * @return a freshly built, prefixed {@link JSONArray}
     */
    private static JSONArray prefixFieldNames(JSONArray fields, String tableName) {
        JSONArray prefixed = new JSONArray();
        for (int i = 0; i < fields.size(); i++) {
            prefixed.add(tableName + "__" + fields.get(i));
        }
        return prefixed;
    }

    /**
     * Renames every data column of {@code data} to {@code <tableName>__<column>},
     * skipping columns already carrying the prefix and the {@code TABLE_NAME} /
     * {@code DB_CODE} marker columns.
     *
     * @param data      dataset to rename
     * @param tableName resolved table name used as the prefix
     * @return the dataset with renamed columns (unchanged if the name contains "null")
     */
    private static Dataset<Row> prefixColumns(Dataset<Row> data, String tableName) {
        // Loop-invariant guard hoisted out of the per-column loop: a table name
        // containing the literal text "null" disables renaming entirely (this looks
        // like a defensive check against an earlier null-concatenation — verify).
        if (tableName.contains("null")) {
            return data;
        }
        for (StructField field : data.schema().fields()) {
            String fieldName = field.name();
            if (!fieldName.startsWith(tableName)
                    && !fieldName.equals("TABLE_NAME")
                    && !fieldName.equals("DB_CODE")) {
                data = data.withColumnRenamed(fieldName, tableName + "__" + fieldName);
            }
        }
        return data;
    }

    /**
     * Flattens {@code sparkParam.getFieldsList()} (an array of field arrays) into a
     * single de-duplicated array, keyed by {@code tableFieldName} (preferred) or
     * {@code code}, sorted by that key (TreeMap ordering). A non-empty
     * {@code fieldMapping} overwrites the field's {@code code}.
     * <p>
     * NOTE(review): the field is inserted into the map BEFORE {@code fieldMapping}
     * overwrites {@code code}, so a field keyed by its original {@code code} keeps
     * that key while its {@code code} value changes — confirm this is intended.
     *
     * @param sparkParam carrier of the raw per-source field lists
     * @return merged, key-sorted field definitions
     */
    public JSONArray getFields(SparkParam sparkParam) {
        Map<String, JSONObject> map = new TreeMap<>();
        for (int i = 0; i < sparkParam.getFieldsList().size(); i++) {
            JSONArray fields = sparkParam.getFieldsList().getJSONArray(i);
            for (int j = 0; j < fields.size(); j++) {
                JSONObject field = fields.getJSONObject(j);

                if (field.containsKey("tableFieldName") && field.getString("tableFieldName") != null) {
                    map.put(field.getString("tableFieldName"), field);
                } else if (field.containsKey("code") && field.getString("code") != null) {
                    map.put(field.getString("code"), field);
                }
                // hasLength is null-safe, so the containsKey guard alone suffices.
                if (field.containsKey("fieldMapping")
                        && StringUtils.hasLength(field.getString("fieldMapping"))) {
                    field.put("code", field.getString("fieldMapping"));
                }
            }
        }
        JSONArray res = new JSONArray();
        res.addAll(map.values());
        return res;
    }

    /**
     * Runs the concrete node's {@link #process} and caches its dataset
     * (MEMORY_AND_DISK) so downstream nodes reuse it without recomputation.
     *
     * @param sparkParam node input
     * @return the node result with a persisted dataset
     */
    public SparkResult handleProcess(SparkParam sparkParam) {
        SparkResult result = process(sparkParam);
        result.getDataset().persist(StorageLevel.MEMORY_AND_DISK());
        return result;
    }

    /**
     * Builds a minimal field descriptor containing only {@code tableFieldName}.
     *
     * @param field the table field name
     * @return a new single-entry {@link JSONObject}
     */
    public JSONObject newFieldJSONObject(String field) {
        JSONObject fieldObj = new JSONObject();
        fieldObj.put("tableFieldName", field);
        return fieldObj;
    }

    /**
     * Node-specific computation. Implementations must return a non-null result
     * whose dataset is safe to persist (see {@link #handleProcess}).
     *
     * @param sparkParam node input
     * @return the computed result
     */
    public abstract SparkResult process(SparkParam sparkParam);

    /**
     * @return the node type identifier used for dispatch/registration
     */
    public abstract SparkNodeEnum getType();

}
