package com.apex.flink.sink;

import com.apex.flink.FlinkEnvironment;
import com.apex.flink.batch.FlinkBatchSink;
import com.apex.flink.stream.FlinkStreamSink;
import com.apex.flink.utils.ConfigKeyName;
import com.apex.flink.utils.TableUtil;
import com.typesafe.config.Config;
import org.apache.flink.api.common.io.FileOutputFormat;
import org.apache.flink.api.common.serialization.Encoder;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.io.TextOutputFormat;
import org.apache.flink.api.java.operators.DataSink;
import org.apache.flink.api.java.typeutils.RowTypeInfo;
import org.apache.flink.core.fs.FileSystem;
import org.apache.flink.core.fs.Path;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSink;
import org.apache.flink.streaming.api.functions.sink.filesystem.StreamingFileSink;
import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.bridge.java.BatchTableEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;
import org.apache.flink.types.Row;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.PrintStream;
import java.nio.charset.StandardCharsets;
import java.util.Objects;

public class FileSink implements FlinkBatchSink<Row,Row> , FlinkStreamSink<Row,Row> {
    private static final Logger logger = LoggerFactory.getLogger(FileSink.class);

    private  String PATH;
    private  String format;
    private  String WRITE_MODE;

    private Config config;

    private FileOutputFormat outputFormat;

    private Path filePath;

    private static final String HIVE_CATALOG = "hive";
    private static String HIVE_DATABASE;
    private static String HIVE_CONF_DIR;
    private static String tableName;
    private static String sql;

    @Override
    public void prepare(FlinkEnvironment plugin) {
        if (config.hasPath(ConfigKeyName.FILE_SINK_PATH)){
            PATH = config.getString(ConfigKeyName.FILE_SINK_PATH);
            format = config.getString(ConfigKeyName.FILE_SINK_TYPE);
            WRITE_MODE = config.getString(ConfigKeyName.FILE_SINK_MODE);
        }
        if (config.hasPath(ConfigKeyName.FILE_SINK_HIVE_CONFDIR)){
            HIVE_CONF_DIR = config.getString(ConfigKeyName.FILE_SINK_HIVE_CONFDIR);
            HIVE_DATABASE = config.getString(ConfigKeyName.FILE_SINK_HIVE_DATABASE);
            tableName = config.getString(ConfigKeyName.FILE_SINK_FLINK_HIVE_TEMP_NAME);
            sql = config.getString(ConfigKeyName.FILE_SINK_FLINK_HIVE_WRITER_SQL);
        }

    }

    @Override
    public Config getConfig() {
        return config;
    }

    @Override
    public void setConfig(Config config) {
        this.config = config;
    }

    @Override
    public DataSink<Row> outputBatch(FlinkEnvironment env, DataSet<Row> dataSet) {
        filePath = new Path(PATH);
        switch (format) {
            case "json":
                RowTypeInfo rowTypeInfo = (RowTypeInfo) dataSet.getType();
                outputFormat = new JsonRowOutputFormat(filePath, rowTypeInfo);
                break;
            case "csv":
                CsvRowOutputFormat csvFormat = new CsvRowOutputFormat(filePath);
                outputFormat = csvFormat;
                break;
            case "text":
                outputFormat = new TextOutputFormat(filePath);
                break;
            default:
                logger.warn(" unknown file_format [{}],only support json,csv,text", format);
                break;

        }
        DataSink<Row> dataSink;
        if (Objects.isNull(outputFormat)){
            dataSink = null;
        }else {
            outputFormat.setWriteMode(FileSystem.WriteMode.valueOf(WRITE_MODE));
            dataSink= dataSet.output(outputFormat);
        }
        return dataSink;
    }

    @Override
    public DataStreamSink<Row> outputStream(FlinkEnvironment env, DataStream<Row> dataStream) {
        final StreamingFileSink<Row> sink = StreamingFileSink
                .forRowFormat(filePath, (Encoder<Row>) (element, stream) -> {
                    PrintStream out = new PrintStream(stream);
                    out.println(element);
                })
                .build();
        return dataStream.addSink(sink);
    }

    public void writeToHive(StreamTableEnvironment tableEnv, DataStream<Row> dataStream){
        HiveCatalog hiveCatalog = new HiveCatalog(HIVE_CATALOG, HIVE_DATABASE, HIVE_CONF_DIR);
        tableEnv.registerCatalog(HIVE_CATALOG, hiveCatalog);
        tableEnv.useCatalog("hive");
        TableUtil.dataStreamToTable(tableEnv,tableName,dataStream);
        tableEnv.sqlUpdate(sql);
    }
}
