package com.cl.ks.flow.handler;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.cl.ks.entity.ClueAutoResult;
import com.cl.ks.entity.KsFlowGraph;
import com.cl.ks.entity.SparkNode;
import com.cl.ks.flow.KsFlowGraphHandler;
import com.cl.ks.flow.base.BaseFlowNodeHandler;
import com.cl.ks.flow.enums.NodeHandlerEnum;
import com.cl.ks.service.ClueAutoResultService;
import com.cl.ks.service.KsFlowGraphService;
import com.cl.spark.dto.SparkResult;
import com.cl.spark.node.GroupNode;
import org.apache.spark.sql.Column;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.RelationalGroupedDataset;
import org.apache.spark.sql.Row;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Component;

import java.util.*;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicInteger;

import static org.apache.spark.sql.functions.col;

@Component
public class KsFlowGraphChildGroupByHandler extends BaseFlowNodeHandler {
    private final KsFlowGraphService ksFlowGraphService;

    // @Lazy breaks the circular dependency: KsFlowGraphHandler dispatches to node
    // handlers, and this handler calls back into it to run the child flow graph.
    @Lazy
    @Autowired
    private KsFlowGraphHandler ksFlowGraphHandler;
    @Autowired
    private ClueAutoResultService clueAutoResultService;

    public KsFlowGraphChildGroupByHandler(KsFlowGraphService ksFlowGraphService) {
        this.ksFlowGraphService = ksFlowGraphService;
    }

    /**
     * Splits the node's input dataset by the configured group fields and runs the
     * referenced child flow graph once per group, unioning all child outputs.
     * <p>
     * Expression contract (read from {@code nodeExpression}):
     * <ul>
     *   <li>{@code id} — JSON array whose first element is the child flow graph id</li>
     *   <li>{@code groupFields} — JSON array of column names to group by</li>
     * </ul>
     * Previously persisted {@link ClueAutoResult} rows for the child graph are deleted
     * before the per-group runs.
     *
     * @param processParam current node context (spark node, parent flow graph, inputs)
     * @return success with the union of all per-group child results, or a failed
     *         result when no group produced output
     * @throws RuntimeException if the child flow graph id does not exist, or the
     *         child execution fails / is interrupted (interrupt status is restored)
     */
    @Override
    public SparkResult process(ProcessParam processParam) {
        JSONObject expression = processParam.getSparkNode().getNodeExpression();
        JSONArray id = expression.getJSONArray("id");
        KsFlowGraph ksFlowGraphChild = ksFlowGraphService.findById(id.getInteger(0))
                .orElseThrow(() -> new RuntimeException("流程图不存在"));
        // Propagate the parent's creator so child results are attributed consistently.
        ksFlowGraphChild.setCreatedBy(processParam.getKsFlowGraph().getCreatedBy());
        ksFlowGraphChild.setCreatedByShowName(processParam.getKsFlowGraph().getCreatedByShowName());

        Dataset<Row> dataset = processParam.getSparkNode().getInputSparkResultList()
                .values().stream().findFirst().get().getDataset();
        Dataset<Row> groupDataset = GroupNode.group(SparkNodeHandler.newSparkParam(processParam));
        JSONArray groupFields = expression.getJSONArray("groupFields");

        // Clear earlier auto-results for this child graph before re-running it.
        ClueAutoResult deleteParam = new ClueAutoResult();
        deleteParam.setFilterId(ksFlowGraphChild.getId());
        deleteParam.setFilterName(ksFlowGraphChild.getName());
        clueAutoResultService.deleteAll(deleteParam);

        List<SparkResult> sparkResultList = new ArrayList<>();
        for (Row row : groupDataset.collectAsList()) {
            // Restrict the full input to the rows belonging to this group.
            Dataset<Row> filtered = dataset.filter(buildGroupFilter(groupFields, row));

            Map<String, SparkResult> inputResults = new HashMap<>();
            inputResults.put(String.valueOf(processParam.getKsFlowGraph().getId()),
                    SparkResult.success(filtered));
            // Re-parse the node JSON per group so each run starts from a clean node tree.
            SparkNode childSparkNode = JSON.parseObject(ksFlowGraphChild.getSparkNodeJson(), SparkNode.class);
            childSparkNode.setInputSparkResultList(inputResults);
            ksFlowGraphChild.setSparkNode(childSparkNode);
            // Intermediate per-group runs must not persist their own flow results.
            ksFlowGraphChild.setSaveKsFlowResultFlag(false);
            try {
                sparkResultList.add(ksFlowGraphHandler.handleKsFlowGraph(ksFlowGraphChild)
                        .get().getInputSparkResultList().values().stream().findFirst().get());
            } catch (InterruptedException e) {
                // Restore the interrupt status before converting to unchecked.
                Thread.currentThread().interrupt();
                throw new RuntimeException(e);
            } catch (ExecutionException e) {
                throw new RuntimeException(e);
            }
        }

        if (sparkResultList.isEmpty()) {
            return SparkResult.failed("子流程无结果");
        }
        return SparkResult.success(sparkResultList.stream()
                .map(SparkResult::getDataset)
                .reduce(Dataset::unionAll)
                .orElse(null));
    }

    /**
     * Builds a conjunctive filter matching {@code row}'s values on every group field.
     * <p>
     * NOTE(review): the first field is compared against the raw row value while the
     * remaining fields are compared against {@code String.valueOf(...)} — preserved
     * as-is from the original logic, but confirm the asymmetry is intentional for
     * non-string group columns.
     */
    private static Column buildGroupFilter(JSONArray groupFields, Row row) {
        String firstGroupField = groupFields.getString(0);
        Column column = col(firstGroupField).equalTo(row.getAs(firstGroupField));
        for (int i = 1; i < groupFields.size(); i++) {
            String groupField = groupFields.getString(i);
            String groupValue = String.valueOf(row.getAs(groupField));
            column = column.and(col(groupField).equalTo(groupValue));
        }
        return column;
    }

    /** @return the node type this handler is registered for */
    @Override
    public NodeHandlerEnum getType() {
        return NodeHandlerEnum.KS_FLOW_GRAPH_CHILD_GROUP_BY;
    }
}
