package com.cl.spark.node;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.cl.spark.base.BaseSparkNode;
import com.cl.spark.dto.SparkParam;
import com.cl.spark.dto.SparkResult;
import com.cl.spark.enums.SparkNodeEnum;
import org.apache.spark.sql.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;

@Component
public class FieldValueMappingNode extends BaseSparkNode {

    @Autowired
    private SparkSession sparkSession;

    /**
     * Rewrites a target field based on conditional matches against a source field.
     *
     * <p>Node expression contract (all read from {@code sparkParam.getNodeExpression()}):
     * <ul>
     *   <li>{@code sourceField}/{@code targetField} — single-element arrays naming the columns;</li>
     *   <li>{@code filterValues}/{@code targetValues} — parallel arrays: when the source field
     *       matches {@code filterValues[i]} (per {@code filterFunction}), the target field
     *       receives {@code targetValues[i]};</li>
     *   <li>{@code mappingType} — "追加" appends the mapped value to the existing target value;
     *       any other value replaces it.</li>
     * </ul>
     *
     * @param sparkParam carries the upstream dataset (first result) and the node expression
     * @return the transformed dataset wrapped in a {@link SparkResult}
     * @throws IllegalArgumentException if filterValues/targetValues are missing or differ in length
     */
    @Override
    public SparkResult process(SparkParam sparkParam) {
        Dataset<Row> dataset = sparkParam.getSparkResultList().get(0).getDataset();
        JSONObject expression = sparkParam.getNodeExpression();

        // Parse node configuration.
        String sourceField = expression.getJSONArray("sourceField").getString(0);
        String targetField = expression.getJSONArray("targetField").getString(0);
        JSONArray filterValues = expression.getJSONArray("filterValues");
        String filterFunction = expression.getString("filterFunction");
        JSONArray targetValues = expression.getJSONArray("targetValues");
        String mappingType = expression.getString("mappingType");  // "追加" = append, otherwise replace

        // filterValues and targetValues are parallel arrays and must line up.
        if (filterValues == null || targetValues == null || filterValues.size() != targetValues.size()) {
            throw new IllegalArgumentException("filterValues and targetValues must be of the same length.");
        }

        // No conditions configured: leave the dataset untouched.
        if (filterValues.size() == 0) {
            return SparkResult.success(dataset);
        }

        // Build a single CASE WHEN ... WHEN ... chain over all condition/value pairs.
        Column mappingChain = null;
        for (int i = 0; i < filterValues.size(); i++) {
            Column condition = generateFilterColumn(sourceField, filterFunction, filterValues.getString(i));
            Column mappedValue = functions.lit(targetValues.getString(i));
            mappingChain = (mappingChain == null)
                    ? functions.when(condition, mappedValue)
                    : mappingChain.when(condition, mappedValue);
        }

        Column mappingExpr;
        if ("追加".equals(mappingType)) {
            // Append mode: concatenate the mapped value onto the existing target value.
            // BUG FIX: the when-chain has no default, so it evaluates to NULL for rows
            // matching no condition, and concat(target, NULL) is NULL in Spark SQL —
            // which wiped the existing target value on every non-matching row.
            // Defaulting the chain to "" makes non-matching rows keep their value.
            mappingExpr = functions.when(functions.col(targetField).isNotNull(),
                            functions.concat(functions.col(targetField),
                                    mappingChain.otherwise(functions.lit(""))))
                    .otherwise(functions.col(targetField));
        } else {
            // Replace mode: matching rows get the mapped value, others keep the old one.
            mappingExpr = mappingChain.otherwise(functions.col(targetField));
        }

        return SparkResult.success(dataset.withColumn(targetField, mappingExpr));
    }


    @Override
    public SparkNodeEnum getType() {
        return SparkNodeEnum.FIELD_VALUE_MAPPING;
    }

    /**
     * Translates a (field, operator-label, value) triple into a Spark boolean {@link Column}.
     *
     * @param field          column name to test
     * @param filterFunction operator label (Chinese UI strings, e.g. "大于" = greater-than)
     * @param filterValue    comparison value; compared lexically via {@code lit(String)}
     * @return a boolean predicate column
     * @throws IllegalArgumentException if field/function are blank or the operator is unknown
     */
    public Column generateFilterColumn(String field, String filterFunction, String filterValue) {
        if (!StringUtils.hasLength(field) || !StringUtils.hasLength(filterFunction)) {
            // IllegalArgumentException (still a RuntimeException) better signals bad config.
            throw new IllegalArgumentException(
                    String.format("field: %s function %s value %s", field, filterFunction, filterValue));
        }
        Column column = new Column(field);
        Column filterValueLit = functions.lit(filterValue);
        switch (filterFunction) {
            case "大于":
                return column.gt(filterValueLit);
            case "小于":
                return column.lt(filterValueLit);
            case "不包含":
                // FIX: was functions.expr(field + " not like '%" + value + "%'") — raw SQL
                // built by string concatenation breaks (or can be abused) when the value
                // contains a quote. Use the Column API, symmetric with the "包含" case.
                return functions.not(column.like("%" + filterValue + "%"));
            case "包含":
                return column.like("%" + filterValue + "%");
            case "等于":
                return column.equalTo(filterValueLit);
            case "不等于":
                return column.notEqual(filterValueLit);
            case "大于等于":
                return column.geq(filterValueLit);
            case "小于等于":
                return column.leq(filterValueLit);
            case "列表包含(或)":
                // NOTE(review): unanchored regex — matches as a substring, and commas in the
                // values are treated as alternation separators. Confirm this is intended.
                return column.rlike(filterValue.replace(",", "|"));
            case "空值":
                return column.isNull().or(functions.length(column).equalTo(0));
            case "非空值":
                return column.isNotNull().and(functions.length(column).gt(0));
            case "非纯数字":
                // FIX: isNaN() tests for floating-point NaN, not "not purely numeric",
                // and is meaningless on string columns. Anchored digit regex instead.
                return functions.not(column.rlike("^[0-9]+$"));
            case "以值为开始":
                return column.startsWith(filterValueLit);
            case "以值为结束":
                return column.endsWith(filterValueLit);
            default:
                // FIX: previously an unknown operator silently returned the bare column
                // (not a boolean predicate), producing wrong results downstream. Fail fast.
                throw new IllegalArgumentException("Unsupported filterFunction: " + filterFunction);
        }
    }

}
