package com.cl.spark.node;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.cl.spark.base.BaseSparkNode;
import com.cl.spark.dto.SparkParam;
import com.cl.spark.dto.SparkResult;
import com.cl.spark.enums.SparkNodeEnum;
import jdk.nashorn.internal.scripts.JD;
import org.apache.spark.sql.Column;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.functions;
import org.springframework.stereotype.Component;
import scala.annotation.meta.param;

import java.util.*;
import java.util.stream.Collectors;

@Component
public class GroupNode extends BaseSparkNode {

    // Aggregation method labels carried in the node expression (values are part
    // of the external contract — do not change).
    private static final String METHOD_SUM = "求和";
    private static final String METHOD_COUNT = "计数";
    private static final String METHOD_AVG = "平均值";
    private static final String METHOD_MERGE = "合并";

    @Override
    public SparkResult process(SparkParam sparkParam) {
        return SparkResult.success(group(sparkParam));
    }

    /**
     * Groups the first upstream dataset by the configured {@code groupFields} and applies
     * the configured {@code statisticsMethod} to every field in {@code statisticsFields}.
     *
     * <p>Expression layout (read from {@code sparkParam.getNodeExpression()}):
     * <ul>
     *   <li>{@code groupFields}      — JSON array of column names to group by</li>
     *   <li>{@code statisticsFields} — JSON array of column names to aggregate; when absent
     *       or empty, falls back to counting the first group field</li>
     *   <li>{@code statisticsMethod} — one of 求和 (sum), 计数 (count), 平均值 (avg),
     *       合并 (merge: distinct values joined with commas)</li>
     * </ul>
     *
     * @param sparkParam node parameters; the input dataset is taken from the first spark result
     * @return the grouped/aggregated dataset
     * @throws IllegalArgumentException if {@code statisticsMethod} is not one of the
     *         supported labels (the original code failed later with an
     *         {@code IndexOutOfBoundsException} on {@code agg.get(0)})
     */
    public static Dataset<Row> group(SparkParam sparkParam) {

        JSONObject expression = sparkParam.getNodeExpression();
        String statisticsMethod = expression.getString("statisticsMethod");

        Dataset<Row> dataset = sparkParam.getSparkResultList().get(0).getDataset();
        JSONArray groupFields = expression.getJSONArray("groupFields");
        JSONArray statisticsFields = expression.getJSONArray("statisticsFields");

        List<Column> groupColumnList = new ArrayList<>(groupFields.size());
        for (int i = 0; i < groupFields.size(); i++) {
            groupColumnList.add(dataset.col(groupFields.getString(i)));
        }

        List<String> statisticsColumnList = new ArrayList<>();
        // Treat an empty array the same as a missing one: the original null-only check let
        // an empty array through and later crashed on agg.get(0).
        if (statisticsFields != null && !statisticsFields.isEmpty()) {
            for (int i = 0; i < statisticsFields.size(); i++) {
                statisticsColumnList.add(statisticsFields.getString(i));
            }
        } else {
            // No statistics fields configured: default to counting the first group field.
            statisticsMethod = METHOD_COUNT;
            statisticsColumnList.add(groupFields.getString(0));
        }

        Column[] groupColumns = groupColumnList.toArray(new Column[0]);

        // Merge path: collect the distinct values of EACH statistics column per group and
        // join them with commas. The original returned inside the loop, silently dropping
        // every statistics column after the first.
        if (METHOD_MERGE.equalsIgnoreCase(statisticsMethod)) {
            List<Column> mergeAggs = new ArrayList<>(statisticsColumnList.size());
            for (String field : statisticsColumnList) {
                mergeAggs.add(functions.collect_set(field).alias(field + "_set"));
            }
            Dataset<Row> merged = dataset.groupBy(groupColumns)
                    .agg(mergeAggs.get(0), mergeAggs.subList(1, mergeAggs.size()).toArray(new Column[0]));
            for (String field : statisticsColumnList) {
                merged = merged
                        .withColumn(field + "_concat", functions.concat_ws(",", functions.col(field + "_set")))
                        .drop(field + "_set");
            }
            return merged;
        }

        List<Column> agg = new ArrayList<>(statisticsColumnList.size());
        for (String field : statisticsColumnList) {
            if (METHOD_SUM.equals(statisticsMethod)) {
                agg.add(functions.sum(field).alias(field + "_sum"));
            } else if (METHOD_COUNT.equals(statisticsMethod)) {
                agg.add(functions.count(field).alias(field + "_count"));
            } else if (METHOD_AVG.equals(statisticsMethod)) {
                agg.add(functions.avg(field).alias(field + "_avg"));
            } else {
                // Fail fast with context instead of an IndexOutOfBoundsException below.
                throw new IllegalArgumentException("Unsupported statisticsMethod: " + statisticsMethod);
            }
        }
        return dataset.groupBy(groupColumns).agg(agg.get(0), agg.subList(1, agg.size()).toArray(new Column[0]));
    }

    @Override
    public SparkNodeEnum getType() {
        return SparkNodeEnum.GROUP;
    }
}
