package com.cl.spark.node;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.cl.spark.base.BaseSparkNode;
import com.cl.spark.dto.SparkParam;
import com.cl.spark.dto.SparkResult;
import com.cl.spark.enums.SparkNodeEnum;
import org.apache.spark.sql.Column;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.functions;
import org.springframework.stereotype.Component;

import java.util.Arrays;
import java.util.Locale;

import static org.apache.spark.sql.functions.*;

@Component
@Component
public class JsonHandleNode extends BaseSparkNode {

    /**
     * Applies a JSON-configured condition-mapping expression to the first
     * upstream dataset and returns the transformed result.
     *
     * <p>The node expression is expected to contain a {@code jsonValue} string,
     * itself a JSON object with a {@code name} (currently only
     * {@code "singleConditionMapping"} is supported) and a {@code conditions}
     * array of mapping rules.
     *
     * @param sparkParam node parameters; the input dataset is taken from the
     *                   first entry of {@code getSparkResultList()}
     * @return a successful {@link SparkResult} wrapping the transformed dataset
     * @throws IllegalArgumentException if the expression's {@code name} is not
     *                                  a supported mapping type
     */
    @Override
    public SparkResult process(SparkParam sparkParam) {
        // Input dataset: by convention this node consumes the first upstream result.
        Dataset<Row> dataset = sparkParam.getSparkResultList().get(0).getDataset();

        // The node expression carries the mapping rules as a nested JSON string.
        JSONObject expression = sparkParam.getNodeExpression();
        String jsonValue = expression.getString("jsonValue");
        JSONObject jsonValueObject = JSONObject.parseObject(jsonValue);

        // Dispatch on the mapping type declared in the expression.
        String name = jsonValueObject.getString("name");
        JSONArray conditions = jsonValueObject.getJSONArray("conditions");

        if ("singleConditionMapping".equals(name)) {
            dataset = applySingleConditionMapping(dataset, conditions);
        } else {
            throw new IllegalArgumentException("Unsupported condition mapping name: " + name);
        }

        return SparkResult.success(dataset);
    }

    /**
     * Applies each single-condition mapping rule in order: when the rule's
     * condition on {@code sourceField} holds for a row, {@code targetField}
     * is set to {@code targetValue}; otherwise the existing value is kept.
     *
     * <p>Rules are applied sequentially, so a later rule sees the effect of an
     * earlier rule on the same target field.
     *
     * @param dataset    input dataset
     * @param conditions array of rule objects with keys {@code operation},
     *                   {@code sourceField}, {@code sourceValue},
     *                   {@code targetField}, {@code targetValue}
     * @return the dataset with all rules applied
     */
    private Dataset<Row> applySingleConditionMapping(Dataset<Row> dataset, JSONArray conditions) {
        for (int i = 0; i < conditions.size(); i++) {
            JSONObject condition = conditions.getJSONObject(i);
            String operation = condition.getString("operation");
            String sourceField = condition.getString("sourceField");
            String sourceValue = condition.getString("sourceValue");
            String targetField = condition.getString("targetField");
            String targetValue = condition.getString("targetValue");

            // Materialize the target column first so the otherwise(col(targetField))
            // branch below always has a column to preserve.
            if (!Arrays.asList(dataset.columns()).contains(targetField)) {
                dataset = dataset.withColumn(targetField, lit(null));
            }

            Column conditionColumn = buildConditionColumn(sourceField, operation, sourceValue);

            // Guard against null source values so comparisons don't silently
            // evaluate to null — EXCEPT for the "isnull" operation, where the
            // guard would make the condition (isNotNull AND isNull), i.e.
            // always false, and the rule could never fire.
            if (!"isnull".equalsIgnoreCase(operation)) {
                conditionColumn = col(sourceField).isNotNull().and(conditionColumn);
            }

            // Rows matching the condition get targetValue; all others keep
            // their current targetField value.
            dataset = dataset.withColumn(targetField, when(conditionColumn, targetValue).otherwise(col(targetField)));
        }

        return dataset;
    }

    /**
     * Builds the Spark {@link Column} predicate for a single rule.
     *
     * <p>Operation names are matched case-insensitively. Comparison values are
     * passed as strings; Spark applies its usual implicit casts against the
     * column's type.
     *
     * @param field     source column name
     * @param operation operation keyword (e.g. {@code eq}, {@code contains},
     *                  {@code between}, {@code in})
     * @param value     comparison value; for {@code between} a "min,max" pair,
     *                  for {@code in} a comma-separated list
     * @return the condition column
     * @throws IllegalArgumentException if the operation is null/unsupported or
     *                                  a {@code between} value is malformed
     */
    private Column buildConditionColumn(String field, String operation, String value) {
        if (operation == null) {
            throw new IllegalArgumentException("Condition operation must not be null for field: " + field);
        }
        // Locale.ROOT avoids locale-sensitive lowercasing (e.g. Turkish dotless i).
        switch (operation.toLowerCase(Locale.ROOT)) {
            // Numeric / ordering comparisons
            case "eq":
                return col(field).equalTo(value);
            case "neq":
                return col(field).notEqual(value);
            case "gt":
                return col(field).gt(value);
            case "lt":
                return col(field).lt(value);
            case "gte":
                return col(field).geq(value);
            case "lte":
                return col(field).leq(value);

            // String predicates
            case "contains":
                return col(field).contains(value);
            case "startswith":
                return col(field).startsWith(value);
            case "endswith":
                return col(field).endsWith(value);
            case "regex":
                return col(field).rlike(value); // regular-expression match
            case "isnull":
                return col(field).isNull(); // field is null
            case "isnotnull":
                return col(field).isNotNull(); // field is not null
            case "isempty":
                return col(field).equalTo(""); // field is the empty string
            case "isnotempty":
                return col(field).notEqual(""); // field is not the empty string
            case "trim":
                return functions.trim(col(field)).equalTo(value); // compare after stripping both sides
            case "lower":
                return functions.lower(col(field)).equalTo(value.toLowerCase(Locale.ROOT)); // case-insensitive via lowercase
            case "upper":
                return functions.upper(col(field)).equalTo(value.toUpperCase(Locale.ROOT)); // case-insensitive via uppercase

            // Range / set membership
            case "between":
                String[] range = value.split(","); // value is "min,max"
                if (range.length != 2) {
                    throw new IllegalArgumentException("Invalid range format for 'between' operation: " + value);
                }
                return col(field).between(range[0], range[1]);
            case "in":
                String[] values = value.split(","); // value is "value1,value2,value3"
                return col(field).isin((Object[]) values);
            default:
                throw new IllegalArgumentException("Unsupported operation: " + operation);
        }
    }

    /** @return the node type handled by this component. */
    @Override
    public SparkNodeEnum getType() {
        return SparkNodeEnum.JSON_HANDLE;
    }
}
