package com.apex.spark.structuredstream;

import com.apex.env.Execution;
import com.apex.spark.SparkEnvironment;
import com.apex.spark.transform.SparkTransform;
import com.typesafe.config.Config;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.streaming.StreamingContext;

import java.util.ArrayList;
import java.util.List;
import java.util.Objects;

/**
 * spark 结构化流处理环境入口
 */
/**
 * Entry point for the Spark Structured Streaming execution environment.
 *
 * <p>Drives a source → transform → sink pipeline: reads datasets from each
 * configured source, registers them as temp views for SQL transforms, chains
 * the transforms, and hands the final dataset to every sink.
 */
public class SparkStructuredStreamingExecution implements Execution<SparkStructuredStreamingSource,
        SparkTransform, SparkStructuredStreamingSink> {
    // Pipeline configuration, injected via setConfig(Config).
    private Config config;
    // Shared Spark execution context used by sources, transforms and sinks.
    private final SparkEnvironment sparkEnvironment;

    /**
     * Creates a structured-streaming execution bound to the given environment.
     *
     * @param sparkEnvironment shared Spark execution context
     */
    public SparkStructuredStreamingExecution(SparkEnvironment sparkEnvironment) {
        this.sparkEnvironment = sparkEnvironment;
    }

    /**
     * Runs the pipeline: sources → transforms → sinks.
     *
     * @param sources    data sources; at least one is required
     * @param transforms business transforms, applied in order, each consuming
     *                   the previous transform's output
     * @param sinks      output targets that receive the final dataset
     * @throws IllegalStateException if no source is configured
     */
    @Override
    public void start(List<SparkStructuredStreamingSource> sources, List<SparkTransform> transforms,
                      List<SparkStructuredStreamingSink> sinks) {
        // Fail fast instead of the IndexOutOfBoundsException the old
        // unconditional datasets.get(0) produced on an empty source list.
        if (sources == null || sources.isEmpty()) {
            throw new IllegalStateException("At least one source must be configured");
        }

        String tableName = sparkEnvironment.getQueryTable();
        List<Dataset<Row>> datasets = new ArrayList<>(sources.size());
        for (SparkStructuredStreamingSource source : sources) {
            Dataset<Row> dataset = source.getData(sparkEnvironment);
            datasets.add(dataset);
            // NOTE(review): every source is registered under the SAME view name,
            // so with multiple sources later ones overwrite earlier ones — confirm
            // whether per-source names were intended.
            registerTempView(tableName, dataset);
        }

        // Chain the transforms: each one consumes the previous result. Starting
        // from the first source dataset also guarantees sinks never receive null
        // when the transform list is empty (the old code left result == null).
        // A transform returning null is treated as a pass-through.
        Dataset<Row> result = datasets.get(0);
        for (SparkTransform transform : transforms) {
            Dataset<Row> transformed = transform.process(sparkEnvironment, result);
            if (!Objects.isNull(transformed)) {
                result = transformed;
            }
        }

        for (SparkStructuredStreamingSink sink : sinks) {
            sink.outputStructuredStreaming(sparkEnvironment, result);
        }
    }

    /**
     * Reports the execution status.
     *
     * @return {@code false} — status tracking is not implemented yet
     */
    @Override
    public boolean status() {
        return false;
    }

    /**
     * Prepares the execution environment. No-op for structured streaming.
     *
     * @param plugin unused
     */
    @Override
    public void prepare(Void plugin) {
    }

    /**
     * Returns the pipeline configuration.
     *
     * @return the configuration, or {@code null} if not yet set
     */
    @Override
    public Config getConfig() {
        return config;
    }

    /**
     * Sets the pipeline configuration.
     *
     * @param config pipeline configuration
     */
    @Override
    public void setConfig(Config config) {
        this.config = config;
    }

    /**
     * Registers a dataset as a Spark SQL temporary view, replacing any
     * existing view with the same name.
     *
     * @param tableName temp view name
     * @param dataSet   upstream dataset to expose to SQL transforms
     */
    private void registerTempView(String tableName, Dataset<Row> dataSet) {
        dataSet.createOrReplaceTempView(tableName);
    }
}
