package com.apex.flink.batch;

import com.apex.env.Execution;
import com.apex.flink.FlinkEnvironment;
import com.apex.flink.utils.TableUtil;
import com.apex.plugin.BasePlugin;
import com.typesafe.config.Config;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.operators.DataSink;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.BatchTableEnvironment;
import org.apache.flink.types.Row;

import java.util.ArrayList;
import java.util.List;
import java.util.Objects;

@SuppressWarnings("rawtypes")
public class FlinkBatchExecution implements Execution<FlinkBatchSource, FlinkBatchTransform, FlinkBatchSink> {

    private final FlinkEnvironment flinkEnvironment;

    private Config config;

    /** Name of the temporary view registered for source data; read from the environment's query-table setting. */
    private String tableName;

    /**
     * Name under which transform results are registered as a temporary view.
     * NOTE(review): "apexosft_resutl" looks misspelled ("result"?), but external SQL may already
     * reference this exact table name, so it is kept byte-identical — confirm before renaming.
     */
    private final String resultTableName = "apexosft_resutl_table";

    /** SQL statements to execute; supplied externally via {@link #setSqlAll(List)}. */
    private List<String> sqlAll;

    public FlinkBatchExecution(FlinkEnvironment flinkEnvironment) {
        this.flinkEnvironment = flinkEnvironment;
    }

    /**
     * Wires sources, transforms and sinks into a Flink batch pipeline and executes it.
     *
     * <p>Each source's data set is collected and, when a query table name is configured,
     * registered as a temporary view. Transforms read from that view and register their
     * output under {@link #resultTableName}; sinks then consume either the transformed
     * result or the raw data of the first source.
     *
     * @param sources    batch sources producing the input data sets (must not be empty)
     * @param transforms transforms applied between sources and sinks
     * @param sinks      batch sinks consuming the final data set
     * @throws IllegalStateException if no sources are provided
     * @throws RuntimeException      if the Flink job fails to execute
     */
    @Override
    public void start(List<FlinkBatchSource> sources, List<FlinkBatchTransform> transforms, List<FlinkBatchSink> sinks) {
        if (sources == null || sources.isEmpty()) {
            // Fail fast with a clear message instead of an IndexOutOfBoundsException at data.get(0).
            throw new IllegalStateException("At least one batch source is required");
        }
        tableName = flinkEnvironment.getQueryTable();

        List<DataSet> data = new ArrayList<>(sources.size());
        for (FlinkBatchSource source : sources) {
            DataSet dataSet = source.getData(flinkEnvironment);
            data.add(dataSet);
            // Only register a temporary view when a query table name was configured.
            if (!StringUtils.isBlank(tableName)) {
                registerSourceTable(source, dataSet);
            }
        }

        DataSet input = data.get(0);
        DataSet transformDataSet = null;

        for (FlinkBatchTransform transform : transforms) {
            transformDataSet = fromSourceTable(transform);
            // NOTE(review): when no query table is configured, fromSourceTable() returns null and
            // all transforms are skipped — sinks then receive the raw source data. Confirm this
            // SQL-only transform behavior is intended.
            if (transformDataSet != null) {
                input = transform.processBatch(flinkEnvironment, transformDataSet);
                if (input == null) {
                    // A transform may return null; register the first source's data instead.
                    registerResultTable(transform, data.get(0));
                } else {
                    registerResultTable(transform, input);
                }
            }
        }

        DataSink<Row> result = null;
        for (FlinkBatchSink sink : sinks) {
            // No transform ran: sinks consume the raw source data directly.
            // Otherwise they read back the registered result table.
            DataSet dataSet = (transformDataSet == null) ? input : fromResultTable(sink);
            result = sink.outputBatch(flinkEnvironment, dataSet);
        }

        // execute() is only meaningful when at least one sink attached output to the plan.
        if (result != null) {
            try {
                flinkEnvironment.getBatchEnvironment().execute(flinkEnvironment.getJobName());
            } catch (Exception e) {
                // Propagate with the original cause instead of silently swallowing the failure.
                throw new RuntimeException(
                        "Flink batch job '" + flinkEnvironment.getJobName() + "' failed", e);
            }
        }
    }

    /** No preparation required for batch execution. */
    @Override
    public void prepare(Void plugin) {

    }

    @Override
    public Config getConfig() {
        return config;
    }

    @Override
    public void setConfig(Config config) {
        this.config = config;
    }

    /** Always reports {@code false}; execution status is not tracked by this implementation. */
    @Override
    public boolean status() {
        return false;
    }

    /**
     * Registers {@code dataSet} as a temporary view under {@link #tableName},
     * unless a table with that name already exists.
     */
    private void registerSourceTable(BasePlugin plugin, DataSet dataSet) {
        BatchTableEnvironment tableEnvironment = flinkEnvironment.getBatchTableEnvironment();
        if (!TableUtil.tableExists(tableEnvironment, tableName)) {
            tableEnvironment.createTemporaryView(tableName, dataSet);
        }
    }

    /**
     * Registers {@code dataSet} as a temporary view under {@link #resultTableName},
     * unless a table with that name already exists.
     */
    private void registerResultTable(BasePlugin plugin, DataSet dataSet) {
        BatchTableEnvironment tableEnvironment = flinkEnvironment.getBatchTableEnvironment();
        if (!TableUtil.tableExists(tableEnvironment, resultTableName)) {
            tableEnvironment.createTemporaryView(resultTableName, dataSet);
        }
    }

    /**
     * Reads the registered source table back as a {@link DataSet}.
     *
     * @return the data set behind {@link #tableName}, or {@code null} when no query table is configured
     */
    private DataSet fromSourceTable(BasePlugin plugin) {
        BatchTableEnvironment tableEnvironment = flinkEnvironment.getBatchTableEnvironment();
        if (StringUtils.isBlank(tableName)) {
            return null;
        }
        Table table = tableEnvironment.from(tableName);
        return TableUtil.tableToDataSet(tableEnvironment, table);
    }

    /** Reads the registered result table ({@link #resultTableName}) back as a {@link DataSet}. */
    private DataSet fromResultTable(BasePlugin plugin) {
        BatchTableEnvironment tableEnvironment = flinkEnvironment.getBatchTableEnvironment();
        Table table = tableEnvironment.from(resultTableName);
        return TableUtil.tableToDataSet(tableEnvironment, table);
    }

    /**
     * Sets the SQL statements to execute.
     *
     * @param sqlAll the SQL statements, one per element
     */
    public void setSqlAll(List<String> sqlAll) {
        this.sqlAll = sqlAll;
    }

    /** Returns the SQL statements to execute, or {@code null} if none were set. */
    public List<String> getSqlAll() {
        return sqlAll;
    }

}
