package com.cl.spark.node;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.cl.spark.base.BaseSparkNode;
import com.cl.spark.dto.SparkParam;
import com.cl.spark.dto.SparkResult;
import com.cl.spark.enums.SparkNodeEnum;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.functions;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

@Component
public class CleanNode extends BaseSparkNode {

    @Autowired
    private SparkSession sparkSession;

    /**
     * Applies every configured cleaning rule to the upstream dataset.
     *
     * <p>The node expression is expected to carry one optional JSON array per
     * rule key (e.g. {@code "notNullFields": ["a", "b"]}); each array lists the
     * column names the rule applies to. Missing or empty arrays are skipped.
     *
     * @param sparkParam carries the upstream {@link SparkResult} list (the first
     *                   entry's dataset is cleaned) and the node expression JSON
     * @return a successful {@link SparkResult} wrapping the cleaned dataset
     */
    @Override
    public SparkResult process(SparkParam sparkParam) {
        Dataset<Row> dataset = sparkParam.getSparkResultList().get(0).getDataset();
        JSONObject expression = sparkParam.getNodeExpression();

        // JSON keys in the node expression; each maps to one cleaning rule below.
        String[] fieldsTypes = {
                "fillUnknownFields",
                "notNullFields",
                "notEmptyFields",
                "notNumberFields",
                "isNumberFields",
                "cleanSpaceFields",
                "extractNumberFields",
                "lowerFields",
                "upperFields"
        };

        for (String fieldsType : fieldsTypes) {
            JSONArray fields = expression.getJSONArray(fieldsType);
            dataset = cleanData(dataset, fields, fieldsType);
        }

        return SparkResult.success(dataset);
    }

    /**
     * Applies one cleaning rule to the listed columns of the dataset.
     *
     * <p>Accepts the rule name either with or without the {@code "Fields"}
     * suffix (e.g. {@code "notNullFields"} or {@code "notNull"}); the suffix is
     * stripped before dispatch. Previously the switch only matched the short
     * form while {@link #process} passed the suffixed form, so every configured
     * rule fell through to the default branch and threw.
     *
     * @param dataset   dataset to clean; returned unchanged when {@code fields}
     *                  is {@code null} or empty
     * @param fields    column names the rule applies to (may be {@code null})
     * @param fieldType rule name, with or without the {@code "Fields"} suffix
     * @return the dataset with the rule applied to each listed column
     * @throws IllegalArgumentException if the rule name is not recognized
     */
    public Dataset<Row> cleanData(Dataset<Row> dataset, JSONArray fields, String fieldType) {
        if (fields != null && !fields.isEmpty()) {
            // Normalize: "fillUnknownFields" -> "fillUnknown" so both the JSON
            // key form and the bare rule name dispatch correctly.
            String rule = fieldType.endsWith("Fields")
                    ? fieldType.substring(0, fieldType.length() - "Fields".length())
                    : fieldType;
            switch (rule) {
                case "fillUnknown":
                    // Replace nulls with the literal string "unknown".
                    for (Object field : fields) {
                        dataset = dataset.na().fill("unknown", new String[]{field.toString()});
                    }
                    break;
                case "notNull":
                    for (Object field : fields) {
                        dataset = dataset.filter(functions.col(field.toString()).isNotNull());
                    }
                    break;
                case "notEmpty":
                    // Keep rows where the column is non-null AND non-empty.
                    for (Object field : fields) {
                        dataset = dataset.filter(functions.col(field.toString()).isNotNull().and(functions.length(functions.col(field.toString())).gt(0)));
                    }
                    break;
                case "notNumber":
                    // A failed cast to double yields null, i.e. "not a number".
                    for (Object field : fields) {
                        dataset = dataset.filter(functions.col(field.toString()).cast("double").isNull());
                    }
                    break;
                case "isNumber":
                    for (Object field : fields) {
                        dataset = dataset.filter(functions.col(field.toString()).cast("double").isNotNull());
                    }
                    break;
                case "cleanSpace":
                    for (Object field : fields) {
                        dataset = dataset.withColumn(field.toString(), functions.trim(functions.col(field.toString())));
                    }
                    break;
                case "extractNumber":
                    // Keep only the first run of digits; non-matches become "".
                    for (Object field : fields) {
                        dataset = dataset.withColumn(field.toString(), functions.regexp_extract(functions.col(field.toString()), "\\d+", 0));
                    }
                    break;
                case "lower":
                    for (Object field : fields) {
                        dataset = dataset.withColumn(field.toString(), functions.lower(functions.col(field.toString())));
                    }
                    break;
                case "upper":
                    for (Object field : fields) {
                        dataset = dataset.withColumn(field.toString(), functions.upper(functions.col(field.toString())));
                    }
                    break;
                default:
                    throw new IllegalArgumentException("Unsupported field type: " + fieldType);
            }
        }
        return dataset;
    }

    /** Identifies this node as the CLEAN step in the Spark node registry. */
    @Override
    public SparkNodeEnum getType() {
        return SparkNodeEnum.CLEAN;
    }
}
