package com.cl.spark.node;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.cl.spark.base.BaseSparkNode;
import com.cl.spark.dto.SparkParam;
import com.cl.spark.dto.SparkResult;
import com.cl.spark.enums.SparkNodeEnum;
import org.apache.spark.sql.*;
import org.apache.spark.sql.types.DataTypes;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;

import java.math.BigDecimal;
import java.util.Objects;

@Component
public class ComputeNode extends BaseSparkNode {
    // NOTE(review): injected but not referenced in this class — presumably kept for
    // subclass/base-class use; verify before removing.
    @Autowired
    SparkSession sparkSession;

    /**
     * Applies a single-column compute transformation to the first upstream dataset.
     *
     * <p>Reads this node's expression for the source column ({@code fields[0]}), an optional
     * explicit result-column name ({@code resultFields[0]}), the compute-function name and an
     * optional numeric operand, then appends the computed column to the dataset via
     * {@code withColumn}.
     *
     * @param sparkParam carries the upstream results and this node's JSON expression
     * @return a successful {@link SparkResult} wrapping the dataset with the new column attached
     * @throws IllegalArgumentException if no source field is configured or the compute function
     *     is missing/unsupported
     */
    @Override
    public SparkResult process(SparkParam sparkParam) {
        Dataset<Row> dataset = sparkParam.getSparkResultList().get(0).getDataset();
        JSONObject expression = sparkParam.getNodeExpression();

        JSONArray fields = expression.getJSONArray("fields");
        if (fields == null || fields.isEmpty()) {
            // Fail fast with a clear message instead of an opaque NPE / IndexOutOfBoundsException.
            throw new IllegalArgumentException("Compute node requires at least one source field");
        }
        String field = fields.getString(0);

        JSONArray resultFields = expression.getJSONArray("resultFields");
        String resultField = "";
        if (resultFields != null && !resultFields.isEmpty()) {
            resultField = resultFields.getString(0);
        }

        String computeFunction = expression.getString("computeFunction");
        BigDecimal computeValue = expression.getBigDecimal("computeValue");

        Column computeColumn = generateComputeColumn(field, computeFunction, computeValue);
        // An explicit result field wins; otherwise derive "<field><suffix>" from the function.
        String newColumn =
                StringUtils.hasLength(resultField)
                        ? resultField
                        : generateNewColumn(field, computeFunction);
        Dataset<Row> result = dataset.withColumn(newColumn, computeColumn);
        return SparkResult.success(result);
    }

    /**
     * Derives the default result-column name: the source column name plus a suffix identifying
     * the compute function (e.g. {@code price_plus}).
     *
     * @param column source column name
     * @param computeFunction display name (Chinese) of the compute function
     * @return the suffixed column name; the input unchanged when the function is unrecognized
     */
    public String generateNewColumn(String column, String computeFunction) {
        String suffix;
        switch (computeFunction) {
            case "加":
                suffix = "_plus";
                break;
            case "减":
                suffix = "_minus";
                break;
            case "乘":
                suffix = "_multiply";
                break;
            case "除以":
                suffix = "_divide";
                break;
            case "取余":
                suffix = "_mod";
                break;
            case "绝对值":
                suffix = "_abs";
                break;
            case "平方根":
                suffix = "_sqrt";
                break;
            case "幂运算":
                suffix = "_pow";
                break;
            case "取整":
                suffix = "_floor";
                break;
            case "取顶":
                suffix = "_ceil";
                break;
            case "取小数部分":
                suffix = "_fractionalPart";
                break;
            case "非空字符数":
                suffix = "_noSpaceLength";
                break;
            case "截取前N位":
                suffix = "_substringStart";
                break;
            case "截取后N位":
                suffix = "_substringEnd";
                break;
            case "去除两侧空字符":
                suffix = "_trim";
                break;
            case "取最小地址单位":
                suffix = "_minAddressUnit";
                break;
            case "去除括号值后提取中英文和数字":
                suffix = "_extractChineseAndAlphanumeric";
                break;
            default:
                // Unknown function: keep the original name; generateComputeColumn is the
                // authority that rejects unsupported functions.
                suffix = "";
                break;
        }
        return column + suffix;
    }

    /**
     * Builds the Spark {@link Column} expression for the given compute function.
     *
     * @param field source column name; must be non-empty
     * @param computeFunction display name (Chinese) of the compute function; must be non-empty
     * @param computeValue numeric operand; required by the power and substring functions,
     *     ignored by the unary ones
     * @return the computed column expression
     * @throws IllegalArgumentException if {@code field}/{@code computeFunction} is empty or the
     *     function is unsupported
     * @throws NullPointerException if a function that needs {@code computeValue} is given none
     */
    public Column generateComputeColumn(
            String field, String computeFunction, BigDecimal computeValue) {
        if (!StringUtils.hasLength(field) || !StringUtils.hasLength(computeFunction)) {
            throw new IllegalArgumentException(
                    String.format("field: %s function %s value %s", field, computeFunction, computeValue));
        }
        Column source = functions.col(field);
        switch (computeFunction) {
            case "加":
                return source.plus(computeValue);
            case "减":
                return source.minus(computeValue);
            case "乘":
                return source.multiply(computeValue);
            case "除以":
                return source.divide(computeValue);
            case "取余":
                return source.mod(computeValue);
            case "绝对值":
                return functions.abs(source);
            case "平方根":
                return functions.sqrt(source);
            case "幂运算":
                return functions.pow(
                        source,
                        Objects.requireNonNull(computeValue, "computeValue is required for 幂运算")
                                .doubleValue());
            case "取整":
                return functions.floor(source);
            case "取顶":
                return functions.ceil(source);
            case "取小数部分":
                return functions.callUDF("FractionalPartUDF", source.cast(DataTypes.DoubleType));
            case "非空字符数":
                // Character count after stripping all whitespace.
                return functions.length(functions.regexp_replace(source, "\\s", ""));
            case "截取前N位":
                // Spark SQL substring positions are 1-based.
                return functions.substring(
                        source,
                        1,
                        Objects.requireNonNull(computeValue, "computeValue is required for 截取前N位")
                                .intValue());
            case "截取后N位": {
                int n =
                        Objects.requireNonNull(computeValue, "computeValue is required for 截取后N位")
                                .intValue();
                // NOTE(review): builds raw SQL from the column name, which breaks for names that
                // need backtick quoting (spaces, dots). Consider functions.substring with a
                // negative start — confirm its behavior for strings shorter than N first.
                return functions.expr(
                        String.format("substring(%s, length(%s) - %d + 1, %d)", field, field, n, n));
            }
            case "去除两侧空字符":
                return functions.trim(source);
            case "取最小地址单位":
                return functions.callUDF("AddressParseUDF", source.cast(DataTypes.StringType));
            case "去除括号值后提取中英文和数字":
                return functions.callUDF(
                        "ExtractChineseAndAlphanumericUDF", source.cast(DataTypes.StringType));
            default:
                throw new IllegalArgumentException("Unsupported compute function: " + computeFunction);
        }
    }

    /** Identifies this node as the COMPUTE step in the node-type registry. */
    @Override
    public SparkNodeEnum getType() {
        return SparkNodeEnum.COMPUTE;
    }
}
