package com.cl.spark.node;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.cl.spark.base.BaseSparkNode;
import com.cl.spark.dto.SparkParam;
import com.cl.spark.dto.SparkResult;
import com.cl.spark.enums.SparkNodeEnum;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.functions;
import org.apache.spark.sql.Column;
import org.springframework.stereotype.Component;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

@Component
@Component
public class CloneColumnNode extends BaseSparkNode {

    /**
     * Adds a column named by {@code targetField} to the upstream dataset whose
     * value is the first non-null value among the configured {@code cloneFields},
     * implemented with SQL {@code COALESCE}.
     *
     * <p>Node expression contract (from {@code sparkParam.getNodeExpression()}):
     * <ul>
     *   <li>{@code cloneFields} — JSON array of source column names, tried in order</li>
     *   <li>{@code targetField} — name of the column to create/overwrite</li>
     * </ul>
     *
     * @param sparkParam carries the upstream results; the first result's dataset is used
     * @return a successful {@link SparkResult} wrapping the (possibly augmented) dataset
     * @throws IllegalStateException if {@code targetField} is missing, or Spark
     *         rejects the transformation (e.g. a clone field absent from the schema)
     */
    @Override
    public SparkResult process(SparkParam sparkParam) {
        Dataset<Row> dataset = sparkParam.getSparkResultList().get(0).getDataset();
        JSONObject expression = sparkParam.getNodeExpression();

        JSONArray fieldsArray = expression.getJSONArray("cloneFields");
        String targetField = expression.getString("targetField");

        // Nothing configured to clone: pass the input through unchanged.
        if (fieldsArray == null || fieldsArray.isEmpty()) {
            return SparkResult.success(dataset);
        }
        if (targetField == null || targetField.isEmpty()) {
            // Previously this NPE'd inside the try block and was silently swallowed.
            throw new IllegalStateException(
                    "CloneColumnNode: 'targetField' is missing from the node expression");
        }

        // Note: no dataset.isEmpty() guard here. The old check forced a full Spark
        // action on every call just to decide whether to add the column, and skipping
        // withColumn on empty input made the output schema differ between empty and
        // non-empty runs. withColumn on an empty dataset is cheap and schema-stable.
        Column[] fieldColumns = new Column[fieldsArray.size()];
        for (int i = 0; i < fieldsArray.size(); i++) {
            fieldColumns[i] = functions.col(fieldsArray.getString(i));
        }

        try {
            // coalesce(c1, c2, ...) yields the first non-null value per row.
            dataset = dataset.withColumn(targetField, functions.coalesce(fieldColumns));
        } catch (Exception e) {
            // The old e.printStackTrace() + SparkResult.success() hid configuration
            // errors from callers. Surface them with context and the original cause.
            throw new IllegalStateException(
                    "CloneColumnNode failed to clone fields " + fieldsArray
                            + " into column '" + targetField + "'", e);
        }

        return SparkResult.success(dataset);
    }

    /** Identifies this node implementation within the node registry. */
    @Override
    public SparkNodeEnum getType() {
        return SparkNodeEnum.CLONE_COLUMN;
    }
}
