package com.northpool.operator.analysis.dataset;

import com.northpool.operator.annotation.InputParam;
import com.northpool.operator.annotation.InputParams;
import com.northpool.operator.annotation.OperatorInfo;
import com.northpool.operator.common.stpclient.StpClientOperatorBase;
import com.northpool.operator.param.ParamTool;
import com.northpool.operator.param.custom.DatasetParam;
import com.northpool.operator.type.TypeEnum;
import com.northpool.resources.datasource.IDataSource;
import com.northpool.resources.datasource.db.DbDataSource;
import com.northpool.resources.datatable.operate.ITableOperator;
import com.northpool.stpclient.bean.BaseResponse;
import com.northpool.stpclient.database.DatabaseUtils;
import com.northpool.stpclient.modules.geoprocess.operator.IPipelineOperator;
import com.northpool.stpclient.operator.bean.DatasetBean;
import com.northpool.stpclient.operator.bean.OperatorBean;
import com.northpool.stpclient.operator.bean.ParamBean;
import com.northpool.stpclient.operator.bean.PipelineBean;
import org.apache.commons.lang3.StringUtils;

import java.io.File;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

@OperatorInfo(
        ref = "PropsSplitOperator",
        name = "按属性分割",
        description = "将通过分割字段参数值为每个唯一的属性组合创建要素类或表。"
)
public class PropsSplitOperator extends StpClientOperatorBase {

    /** Hard cap on the number of distinct split values (one output table per value). */
    private static final int MAX_CATEGORIES = 30;

    // Source dataset location (resolved from the "dataset" input param).
    String dataSourceId;
    String tableName;
    // Optional SQL WHERE fragment restricting the source rows.
    String sqlFilter;
    // Comma-separated list of columns to copy; empty means all columns.
    String fields;
    // Column whose distinct values drive the split.
    String splitField;
    // Target datasource and table-name prefix for the per-value output tables.
    String targetDataSourceId;
    String tableNamePrefix;

    @InputParams({
            @InputParam(ref = "dataset", name = "数据集", desc = "数据集包含数据源ID和数据表名称的JSON描述，如: {\"dataSourceId\":\"xxx-xxx\",\"tableName\":\"table\"}", type = TypeEnum.DB, required = true, testValue = ""),
            @InputParam(ref = "sqlFilter", name = "SQL过滤条件A", desc = "SQL过滤条件, 如: dlmc = '旱地'", type = TypeEnum.STRING, required = false, testValue = "dlbm_1 like '%01%'"),
            @InputParam(ref = "splitField", name = "分割字段", desc = "多个字段逗号隔开，为空则为全部字段", type = TypeEnum.STRING, testValue = "dlmc_1", required = true),
            @InputParam(ref = "fields", name = "选择字段", desc = "数据字段列表，多个逗号隔开，如: f_id,dlmc,dlbm", type = TypeEnum.STRING, required = false),
            @InputParam(ref = "targetDataSourceId", name = "目标数据源ID", desc = "目标数据源ID", type = TypeEnum.DATASOURCE, required = true, testValue = "storage"),
            @InputParam(ref = "tableNamePrefix", name = "目标数据集前缀", desc = "目标数据集前缀", type = TypeEnum.STRING, required = true, testValue = "result.dltb2022_4301_")
    })
    public PropsSplitOperator() {
        super();
    }

    /**
     * Splits the source table into one target table per distinct value of
     * {@code splitField}: queries the distinct values, builds one PostgresSink
     * operator per value, and submits them as a single pipeline.
     *
     * <p>Exits early (success) when there are no distinct values, and fails
     * when there are more than {@link #MAX_CATEGORIES}.
     *
     * @param params operator inputs; see the {@code @InputParams} declaration
     *               on the constructor for the expected keys
     * @throws Exception if parameter resolution, the distinct-value query, or
     *                   pipeline submission fails
     */
    @Override
    public void execute(ParamTool params) throws Exception {
        DatasetParam dataset = params.getDataset("dataset");
        this.dataSourceId = dataset.getDataSourceId();
        this.tableName = dataset.getTableName();

        this.sqlFilter = params.getStr("sqlFilter");
        this.splitField = params.getStr("splitField");
        this.fields = params.getStr("fields");
        this.targetDataSourceId = params.getStr("targetDataSourceId");
        this.tableNamePrefix = params.getStr("tableNamePrefix");

        PipelineBean pipeline = new PipelineBean();
        pipeline.setResultFile(System.getProperty("user.dir") + File.separator + "result_file");
        DatasetBean dataSet = new DatasetBean();
        dataSet.setDataSourceId(dataSourceId);
        dataSet.setTableName(tableName);
        dataSet.setFields(fields);
        dataSet.setSqlFilter(sqlFilter);
        pipeline.setDataSet(dataSet);

        DatabaseUtils.init();
        String dataSourceJson = DatabaseUtils.getDataSourceJson(this.dataSourceId);
        DbDataSource ds = (DbDataSource) IDataSource.fromJSON(dataSourceJson);
        ITableOperator tableOperator = ds.getTableOperator(this.tableName);
        // The number of categories (output tables) must not exceed MAX_CATEGORIES;
        // fetch one extra row so overflow can be detected without a full count.
        String schema = tableOperator.getSchema();
        String where = StringUtils.isBlank(this.sqlFilter) ? "" : "where " + this.sqlFilter;
        // NOTE(review): splitField and sqlFilter are interpolated into the SQL text.
        // They are identifiers/WHERE fragments from operator configuration and cannot
        // be bound as parameters — confirm they never carry end-user input.
        String sql = "SELECT DISTINCT(\"" + this.splitField + "\") from \"" + schema + "\".\""
                + tableOperator.getTableName() + "\" " + where + " limit " + (MAX_CATEGORIES + 1);

        List<Map<String, Object>> resultList = ds.queryBySql(sql, null);

        if (resultList.isEmpty()) {
            // Nothing to split — exit successfully.
            this.log("分割字段的类别数量为0，跳过执行");
            this.successExit();
            // Guard against successExit() not halting execution: falling through
            // would divide by resultList.size() == 0 below.
            return;
        }
        if (resultList.size() > MAX_CATEGORIES) {
            this.error("分割字段类别数量过多，超过阈值30");
            this.failedExit();
            // Previously fell through and submitted the oversized pipeline anyway.
            return;
        }

        List<OperatorBean> ops = new LinkedList<>();
        // Share a budget of 64 parallel slots across the sinks, capped at 16 each;
        // the lower bound keeps parallelism sane even if the budget formula changes.
        int parallelism = Math.max(1, Math.min(64 / resultList.size(), 16));
        for (Map<String, Object> row : resultList) {
            Object raw = row.get(this.splitField);
            String value;
            String splitSqlFilter;
            if (raw == null) {
                // A NULL category must be matched with IS NULL; the old code produced
                // the never-matching filter field='null'. The target table name stays
                // prefix + "null", as the original string concatenation yielded.
                value = "null";
                splitSqlFilter = this.splitField + " is null";
            } else {
                // String.valueOf tolerates non-String split columns (numbers, etc.),
                // where the old (String) cast threw ClassCastException.
                value = String.valueOf(raw);
                // Escape embedded quotes so a value like O'Brien does not break the filter.
                splitSqlFilter = this.splitField + "='" + value.replace("'", "''") + "'";
            }
            String targetTableName = tableNamePrefix + value;
            ops.add(buildPgSink(targetDataSourceId, targetTableName, splitSqlFilter, dataSourceId, tableName, parallelism));
            this.log("分类名称: {}", value);
        }

        pipeline.setOperators(ops);
        IPipelineOperator iPipelineOperator = this.client.pipelineOperator();
        BaseResponse response = iPipelineOperator.startPipeline(pipeline);
        iPipelineOperator.onStart(response.getRequestId(), () -> {
            this.log("按属性分割开始");
        });
        iPipelineOperator.onSuccess(response.getRequestId(), () -> {
            this.log("按属性分割成功");
            String log = iPipelineOperator.log(response.getRequestId());
            this.log(log);
            this.successExit();
        });
        iPipelineOperator.onError(response.getRequestId(), () -> {
            this.error("按属性分割失败");
            String log = iPipelineOperator.log(response.getRequestId());
            this.error(log);
            this.failedExit();
        });
        this.waiting(24);
    }

    /**
     * Builds a PostgresSink pipeline operator that copies the rows matching
     * {@code sqlFilter} from the template table into the target table.
     *
     * @param dataSourceId         target datasource ID
     * @param tableName            target table name
     * @param sqlFilter            row filter selecting one category's rows
     * @param templateDataSourceId datasource of the source (template) table
     * @param templateTableName    source (template) table name
     * @param parallelism          Flink-style operator parallelism for the sink
     * @return a configured sink operator bean (primary key creation enabled)
     */
    public OperatorBean buildPgSink(String dataSourceId, String tableName, String sqlFilter, String templateDataSourceId, String templateTableName, Integer parallelism) {
        OperatorBean operatorBean = new OperatorBean();
        operatorBean.setClassName("com.northpool.stpprocess.operator.basic.sink.PostgresSink");

        List<ParamBean> params = new LinkedList<>();
        params.add(new ParamBean("dataSourceId", dataSourceId));
        params.add(new ParamBean("tableName", tableName));
        params.add(new ParamBean("sqlFilter", sqlFilter));
        params.add(new ParamBean("templateDataSourceId", templateDataSourceId));
        params.add(new ParamBean("templateTableName", templateTableName));
        params.add(new ParamBean("createPK", "true"));
        operatorBean.setStartParam(params);
        operatorBean.setParallelism(parallelism);
        return operatorBean;
    }
}
