package com.cl.spark.node;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.cl.spark.base.BaseSparkNode;
import com.cl.spark.dto.SparkParam;
import com.cl.spark.dto.SparkResult;
import com.cl.spark.enums.SparkNodeEnum;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.functions;
import org.springframework.stereotype.Component;

@Component
public class AppendColumnValueNode extends BaseSparkNode {

    /**
     * Extends the parent field list with the backup column that {@link #process}
     * creates when it renames the target field to {@code "field_source_" + field}.
     *
     * @param sparkParam carries the node expression; {@code fields[0]} is the target field
     * @return the parent's fields plus the {@code field_source_}-prefixed backup field
     */
    @Override
    public JSONArray getFields(SparkParam sparkParam) {
        JSONArray fields = super.getFields(sparkParam);
        JSONObject expression = sparkParam.getNodeExpression();
        String field = expression.getJSONArray("fields").getString(0);
        // Must mirror the rename performed in process(): the original column
        // survives under the "field_source_" prefix.
        fields.add(newFieldJSONObject("field_source_" + field));
        return fields;
    }

    /**
     * Overwrites the target field with a constant value taken from the first row's
     * {@code columnName} column. The original values are preserved under
     * {@code "field_source_" + field}.
     *
     * <p>On any failure inside the try block the (possibly partially renamed) dataset
     * is passed through unchanged — this node is best-effort.
     *
     * @param sparkParam expression with {@code fields[0]} (target field) and
     *                   {@code columnName} (source of the literal value); upstream
     *                   dataset is taken from the first entry of the result list
     * @return a successful {@link SparkResult} wrapping the transformed dataset
     */
    @Override
    public SparkResult process(SparkParam sparkParam) {
        Dataset<Row> dataset = sparkParam.getSparkResultList().get(0).getDataset();
        JSONObject expression = sparkParam.getNodeExpression();

        String field = expression.getJSONArray("fields").getString(0);
        String columnName = expression.getString("columnName");
        if (!dataset.isEmpty()) {
            try {
                // Keep the original values reachable under a prefixed column
                // before the field is overwritten below.
                dataset = dataset.withColumnRenamed(field, "field_source_" + field);
                // BUGFIX: first() retrieves a single row to the driver; the previous
                // collectAsList().get(0) materialized the ENTIRE dataset just to
                // read row 0, which can OOM the driver on large inputs.
                // NOTE(review): if columnName equals field, the column no longer
                // exists after the rename and getAs throws (swallowed below) —
                // confirm whether callers rely on that, or columnName is always
                // a different column.
                Object constantValue = dataset.first().getAs(columnName);
                dataset = dataset.withColumn(field, functions.lit(constantValue));
            } catch (Exception e) {
                // Best-effort: the dataset flows through untouched on failure.
                // NOTE(review): printStackTrace hides errors from operators —
                // consider an SLF4J logger and/or returning a failed SparkResult.
                // Kept as-is to preserve existing behavior.
                e.printStackTrace();
            }
        }

        return SparkResult.success(dataset);
    }

    /** Node-type discriminator used by the framework to route to this node. */
    @Override
    public SparkNodeEnum getType() {
        return SparkNodeEnum.APPEND_COLUMN_VALUE;
    }

}
