package com.cl.spark.nodenew;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.cl.spark.base.BaseSparkNode;
import com.cl.spark.dto.SparkParam;
import com.cl.spark.dto.SparkResult;
import com.cl.spark.enums.SparkNodeEnum;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.springframework.stereotype.Component;

import java.util.ArrayList;
import java.util.List;

@Component
@Component
public class DropDuplicatesNode extends BaseSparkNode {

    /**
     * Entry point for this node: deduplicates the first input dataset on the
     * columns named in the node expression and wraps the result in a success.
     *
     * @param sparkParam node input carrying the data list and node expression
     * @return success result containing the deduplicated rows as a JSONArray
     */
    @Override
    public SparkResult process(SparkParam sparkParam) {
        return SparkResult.success(dropDuplicates(sparkParam));
    }

    /**
     * Deduplicates the first dataset in {@code sparkParam} on the column names
     * listed in the expression's {@code "fields"} array.
     *
     * <p>If the {@code "fields"} array is missing or empty, the dataset is
     * deduplicated on all columns (full-row dedup) instead of passing an empty
     * key list to Spark — the original code would NPE on a missing array.
     *
     * @param sparkParam carries dataList (element 0 is the source rows) and the
     *                   node expression; assumes dataList has at least one
     *                   element, as the other nodes in this project do
     * @return a JSONArray of the rows remaining after deduplication
     */
    public JSONArray dropDuplicates(SparkParam sparkParam) {
        JSONArray res = new JSONArray();
        List<JSONArray> dataList = sparkParam.getDataList();
        Dataset<Row> allDataset = sparkUtil.jsonArrayToDataset(dataList.get(0));
        JSONObject expression = sparkParam.getNodeExpression();

        // Guard against a missing expression or "fields" key: fall back to
        // full-row deduplication rather than throwing an NPE.
        JSONArray fields = expression == null ? null : expression.getJSONArray("fields");
        if (fields == null || fields.isEmpty()) {
            allDataset = allDataset.dropDuplicates();
        } else {
            List<String> fieldsList = new ArrayList<>(fields.size());
            for (int i = 0; i < fields.size(); i++) {
                fieldsList.add(fields.getString(i));
            }
            allDataset = allDataset.dropDuplicates(fieldsList.toArray(new String[0]));
        }

        // Materialize the surviving rows back into JSON objects for the caller.
        allDataset.collectAsList().forEach(row -> res.add(JSONObject.parseObject(row.json())));
        return res;
    }

    /** Identifies this node as the DROP_DUPLICATES step type. */
    @Override
    public SparkNodeEnum getType() {
        return SparkNodeEnum.DROP_DUPLICATES;
    }
}
