package com.cl.spark.base;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.cl.spark.dto.SparkParam;
import com.cl.spark.dto.SparkResult;
import com.cl.spark.enums.SparkNodeEnum;
import com.cl.spark.util.SparkUtil;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.types.StructField;
import org.springframework.beans.factory.annotation.Autowired;
import scala.annotation.meta.field;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;


/**
 * Template base class for Spark processing nodes.
 *
 * <p>{@link #handle(SparkParam)} optionally prefixes every column of the first two
 * input datasets with that dataset's {@code TABLE_NAME} value (read from the first
 * row) when the node expression carries {@code fieldAppendTableNameFlag == "是"},
 * then delegates to the subclass {@link #process(SparkParam)} and attaches the
 * flattened field metadata from {@link #getFields(SparkParam)} to the result.
 */
public abstract class BaseSparkNode {

    /** Expression key toggling table-name prefixing of column/field names. */
    private static final String FIELD_APPEND_FLAG_KEY = "fieldAppendTableNameFlag";
    /** Separator inserted between table name and original column name. */
    private static final String TABLE_FIELD_SEPARATOR = "__";

    @Autowired
    public SparkUtil sparkUtil;

    protected BaseSparkNode() {
    }

    /**
     * Entry point: applies optional table-name prefixing, runs the subclass
     * {@link #process(SparkParam)}, and attaches field metadata to the result.
     *
     * @param sparkParam node input (datasets, expression, field metadata)
     * @return the processing result; {@link SparkResult#failed} with the exception
     *         message if anything throws
     */
    public SparkResult handle(SparkParam sparkParam) {
        try {
            JSONObject expression = sparkParam.getNodeExpression();
            if (expression != null && isFieldAppendEnabled(expression)) {
                prefixFieldsWithTableName(sparkParam, expression);
            }

            SparkResult res = process(sparkParam);
            res.setFields(getFields(sparkParam));
            return res;
        } catch (Exception e) {
            // NOTE(review): prefer an SLF4J logger over printStackTrace; kept as-is
            // to avoid introducing a new dependency here.
            e.printStackTrace();
            return SparkResult.failed(e.getMessage());
        }
    }

    /**
     * Returns {@code true} when the expression's {@code fieldAppendTableNameFlag}
     * equals "是" (yes). A missing key yields {@code null} from fastjson and thus
     * {@code false}, matching the original containsKey-then-compare logic.
     */
    private static boolean isFieldAppendEnabled(JSONObject expression) {
        return "是".equals(expression.getString(FIELD_APPEND_FLAG_KEY));
    }

    /**
     * Renames the columns of the first two datasets to {@code tableName__column},
     * records the renamed column lists in the expression under {@code fields1} /
     * {@code fields2}, and writes the updated datasets back into the param.
     * No-op when fewer than two datasets are present.
     */
    private void prefixFieldsWithTableName(SparkParam sparkParam, JSONObject expression) {
        List<Dataset<Row>> datasetList = sparkParam.getDatasetList();
        if (datasetList == null || datasetList.size() <= 1) {
            return;
        }

        // BUG FIX: the original added the second dataset's renamed columns to the
        // first dataset's array ("newFields1") and stored an always-empty array
        // under "fields2". Handling each dataset through one helper fixes that.
        Dataset<Row> data1 = appendTableNameToColumns(datasetList.get(0), expression, "fields1");
        Dataset<Row> data2 = appendTableNameToColumns(datasetList.get(1), expression, "fields2");

        List<Dataset<Row>> newDataList = new ArrayList<>();
        newDataList.add(data1);
        newDataList.add(data2);
        sparkParam.setDatasetList(newDataList);
        sparkParam.setNodeExpression(expression);
    }

    /**
     * Prefixes every column of {@code data} with the {@code TABLE_NAME} value read
     * from its first row, stores the prefixed names in {@code expression} under
     * {@code fieldsKey}, and returns the renamed dataset.
     *
     * <p>Robustness fix: the original called {@code first()} before any emptiness
     * check, so an empty dataset threw {@code NoSuchElementException} and failed
     * the whole node; an empty dataset is now returned unchanged. {@code count()}
     * (a full Spark job) is also invoked only once instead of twice.
     */
    private static Dataset<Row> appendTableNameToColumns(Dataset<Row> data, JSONObject expression, String fieldsKey) {
        if (data.count() == 0) {
            // No row to read TABLE_NAME from and nothing to rename.
            return data;
        }
        String tableName = data.first().getAs("TABLE_NAME");

        String[] columns = data.columns();
        if (columns.length > 0) {
            JSONArray newFields = new JSONArray();
            for (String column : columns) {
                newFields.add(tableName + TABLE_FIELD_SEPARATOR + column);
            }
            expression.put(fieldsKey, newFields);
        }

        // Mirrors the original guards: skip renaming for a null table name or one
        // containing the literal text "null" (presumably a junk-name sentinel from
        // upstream — TODO confirm), and never double-prefix an already-prefixed column.
        if (tableName != null && !tableName.contains("null")) {
            for (StructField field : data.schema().fields()) {
                String fieldName = field.name();
                if (!fieldName.startsWith(tableName)) {
                    data = data.withColumnRenamed(fieldName, tableName + TABLE_FIELD_SEPARATOR + fieldName);
                }
            }
        }
        return data;
    }

    /**
     * Flattens the per-input field metadata into a single array. When more than one
     * input is present and the append flag is on, each entry's {@code tableField}
     * is rewritten to {@code tableName__originalField}; the original name is kept
     * under {@code table_field} so repeated calls do not stack prefixes.
     *
     * @param sparkParam node input carrying the field metadata lists
     * @return flattened (possibly prefixed) field metadata; never {@code null}
     */
    public JSONArray getFields(SparkParam sparkParam) {
        if (sparkParam.getFieldsList() == null) {
            return new JSONArray();
        }

        JSONArray fields = new JSONArray();
        sparkParam.getFieldsList().stream().filter(Objects::nonNull).forEach(fields::addAll);

        // Robustness fix: the original dereferenced getNodeExpression() without a
        // null check here, even though handle() treats a null expression as valid.
        JSONObject expression = sparkParam.getNodeExpression();
        if (sparkParam.getFieldsList().size() > 1 && expression != null && isFieldAppendEnabled(expression)) {
            for (int i = 0; i < fields.size(); i++) {
                JSONObject field = fields.getJSONObject(i);
                if (!field.containsKey("table_field")) {
                    field.put("table_field", field.getString("tableField"));
                }
                field.put("tableField",
                        field.getString("tableName") + TABLE_FIELD_SEPARATOR + field.getString("table_field"));
            }
        }
        return fields;
    }

    /** Subclass hook performing the node's actual Spark work. */
    public abstract SparkResult process(SparkParam sparkParam);

    /** Identifies which node type this implementation handles. */
    public abstract SparkNodeEnum getType();


}
