package com.cl.ks.flow.handler;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.cl.ks.entity.SysSourceConfig;
import com.cl.ks.entity.SysSourceMappingTable;
import com.cl.ks.entity.SysSourceMappingTableField;
import com.cl.ks.flow.base.BaseFlowNodeHandler;
import com.cl.ks.flow.enums.NodeHandlerEnum;
import com.cl.ks.service.SysSourceConfigService;
import com.cl.ks.service.SysSourceMappingTableFieldService;
import com.cl.ks.service.SysSourceMappingTableService;
import com.cl.ks.utils.DataSourceMap;
import com.cl.ks.utils.DbUtil;
import com.cl.mvc.service.BaseService;
import com.cl.spark.dto.SparkParam;
import com.cl.spark.dto.SparkResult;
import com.jandar.ds.pojo.DbInfo;
import com.querydsl.jpa.impl.JPAQueryFactory;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;

import static org.apache.spark.sql.functions.lit;

@Component
public class CreateTableHandler extends BaseFlowNodeHandler {

    private static final Logger LOG = Logger.getLogger(CreateTableHandler.class.getName());

    private final DbUtil dbUtil;
    private final JPAQueryFactory queryFactory;

    public CreateTableHandler(DbUtil dbUtil, JPAQueryFactory queryFactory) {
        this.dbUtil = dbUtil;
        this.queryFactory = queryFactory;
    }

    /**
     * Materializes the node's single input dataset into a physical table.
     * <p>
     * Steps, in order: resolve the target table name (explicit {@code tableName} in the
     * node expression wins over the generated {@code ks_flow_graph_<id>_<code>} name),
     * register the table on the graph's create-table list, tag every row with
     * {@code DB_CODE}/{@code TABLE_NAME} columns, create the table or add any columns
     * the dataset has that the table lacks, enrich the expression with the JDBC
     * connection details and flags, register a source-config mapping, and finally
     * delegate the actual write to the {@code "createTable"} spark node.
     *
     * @param processParam carries the current spark node, its input results, and the
     *                     owning flow graph; must have at least one input SparkResult
     * @return the result of the delegated {@code "createTable"} spark node, or an
     *         immediate success wrapping the (tagged) dataset when the input row
     *         count is zero — in that case no DDL/DML is executed
     * @throws IllegalStateException if the node has no input SparkResult
     */
    @Override
    public SparkResult process(ProcessParam processParam) {
        JSONObject expression = processParam.getSparkNode().getNodeExpression();
        String tableName = resolveTableName(processParam, expression);
        String dbCode = expression.getString("dbCode");
        String name = expression.getString("name");

        // Record what we are about to create so the graph can track/clean up its tables.
        processParam.getKsFlowGraph().getCreateTableList().add(dbCode + ":" + tableName);

        SparkResult inputSparkResult = processParam.getSparkNode().getInputSparkResultList().values().stream()
                .findFirst()
                .orElseThrow(() -> new IllegalStateException(
                        "CreateTableHandler requires at least one input SparkResult for node "
                                + processParam.getSparkNode().getCode()));

        // Tag every row with its destination; note this happens BEFORE the schema is
        // read below, so DB_CODE/TABLE_NAME are also candidates for table columns.
        inputSparkResult.setDataset(inputSparkResult.getDataset()
                .withColumn("DB_CODE", lit(dbCode))
                .withColumn("TABLE_NAME", lit(tableName)));

        Set<String> fieldList = collectMissingFields(dbCode, tableName, inputSparkResult);
        Map<String, String> commentMap = new HashMap<>();
        for (String fieldName : fieldList) {
            // Column comment defaults to the column name itself.
            commentMap.put(fieldName, fieldName);
        }

        // Nothing to write: skip DDL, credentials, and source-config registration.
        if (inputSparkResult.getCount() == 0) {
            return SparkResult.success(inputSparkResult.getDataset());
        }

        // An "id" column (any case, the set is case-insensitive) is not created directly;
        // it is replaced by DEFAULT_UNIQUE_KEY — presumably "id" is reserved for the
        // table's own key column. NOTE(review): confirm against createTableOrAddColumns.
        if (fieldList.contains("id")) {
            fieldList.add("DEFAULT_UNIQUE_KEY");
        }
        fieldList.remove("id");
        if (!fieldList.isEmpty()) {
            dbUtil.createTableOrAddColumns(dbCode, tableName, fieldList, commentMap);
        }

        // Full-refresh runs are flagged false; only an explicitly-true graph flag
        // marks the write as incremental.
        expression.put("incrementFlag", Boolean.TRUE.equals(processParam.getKsFlowGraph().getIncrementFlag()));

        // Hand the writer node everything it needs: connection details + target table.
        SparkParam sparkParam = new SparkParam();
        DbInfo dbInfo = dbUtil.getDbInfo(dbCode);
        expression.put("url", dbInfo.getUrl());
        expression.put("username", dbInfo.getUsername());
        expression.put("password", dbInfo.getPassword());
        expression.put("tableName", tableName);
        sparkParam.setNodeExpression(expression);
        sparkParam.setSparkResultList(Collections.singletonList(inputSparkResult));

        // Register the new table as a data source scoped to the graph's power/permission set.
        String power = BaseService.cleanData("," + processParam.getKsFlowGraph().getPower() + ",");
        Integer sourceConfigId = dbUtil.createSourceConfigAndMapping(dbCode, tableName, power, name + "(输出表)");
        processParam.getKsFlowGraph().getSourceConfigIdList().add(String.valueOf(sourceConfigId));

        return sparkNodeFactory.getSparkNodeByCode("createTable").handle(sparkParam);
    }

    /**
     * Resolves the physical table name: an explicit non-empty {@code tableName} in the
     * node expression, otherwise a name generated from the graph id and node code.
     */
    private String resolveTableName(ProcessParam processParam, JSONObject expression) {
        String fixedTableName = expression.getString("tableName");
        if (fixedTableName != null && !fixedTableName.isEmpty()) {
            return fixedTableName;
        }
        return "ks_flow_graph_" + processParam.getKsFlowGraph().getId() + "_"
                + processParam.getSparkNode().getCode().replace("-", "");
    }

    /**
     * Returns the dataset columns that do not yet exist on the target table, as a
     * case-insensitive set ("Id"/"ID"/"id" collapse into one entry, first spelling wins).
     */
    private Set<String> collectMissingFields(String dbCode, String tableName, SparkResult inputSparkResult) {
        List<String> existFields = new ArrayList<>();
        try {
            existFields = dbUtil.listFieldName(dbCode, tableName);
        } catch (Exception e) {
            // Best-effort by design: the lookup fails when the table does not exist yet,
            // in which case every dataset column counts as missing. Log, don't crash.
            LOG.log(Level.WARNING,
                    "Could not list fields of " + dbCode + ":" + tableName
                            + "; assuming the table does not exist yet", e);
        }
        Set<String> missing = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
        for (String fieldName : inputSparkResult.getDataset().schema().fieldNames()) {
            // NOTE(review): existFields.contains is case-SENSITIVE while the set is not;
            // a case mismatch with the DB metadata would re-add an existing column — confirm.
            if (!existFields.contains(fieldName)) {
                missing.add(fieldName);
            }
        }
        return missing;
    }

    @Override
    public NodeHandlerEnum getType() {
        return NodeHandlerEnum.CREATE_TABLE;
    }
}
