package cn.com.bluemoon.bd.flink.creater.sql;

import cn.com.bluemoon.bd.flink.creater.source.KafkaSourceCreater;
import cn.com.bluemoon.bd.flink.func.filter.DistinctFilterByField;
import cn.com.bluemoon.bd.flink.utils.FieldUtils;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

import java.io.InputStream;


/**
 * Registers Kafka-sourced streams as temporary views in a Flink
 * {@link StreamTableEnvironment}, with optional per-key de-duplication.
 *
 * <p>All {@code register*} overloads follow the same pipeline:
 * create a Kafka string source → (optionally keyBy / filter / de-duplicate)
 * → map or flatMap into {@code T} → register under {@code tableName} with the
 * comma-separated {@code fields} expression.
 */
public class TableCreater {

    /** Default JSON field used as the de-duplication / keyBy key. */
    public final static String DATA_ID_FIELD = "data_id";

    private final StreamExecutionEnvironment env;
    private final StreamTableEnvironment tableEnv;
    private final KafkaSourceCreater kafkaSourceCreater;

    /**
     * Creates a TableCreater whose Kafka source configuration is read from a stream.
     *
     * @param env      execution environment used to create the Kafka sources
     * @param tableEnv table environment the views are registered in
     * @param is       configuration input stream handed to {@link KafkaSourceCreater}
     */
    public TableCreater(StreamExecutionEnvironment env, StreamTableEnvironment tableEnv, InputStream is) {
        // Delegate so the field assignments live in exactly one place.
        this(env, tableEnv, new KafkaSourceCreater(is));
    }

    /**
     * Creates a TableCreater with an already-built Kafka source creater.
     *
     * @param env                execution environment used to create the Kafka sources
     * @param tableEnv           table environment the views are registered in
     * @param kafkaSourceCreater factory for the Kafka string sources
     */
    public TableCreater(StreamExecutionEnvironment env, StreamTableEnvironment tableEnv, KafkaSourceCreater kafkaSourceCreater) {
        this.env = env;
        this.tableEnv = tableEnv;
        this.kafkaSourceCreater = kafkaSourceCreater;
    }

    // ------------------------------------------------------------------
    // Private plumbing shared by every register*/create* overload.
    // ------------------------------------------------------------------

    /** Creates the raw Kafka string source for {@code topic}. */
    private DataStreamSource<String> createSource(String topic, String[] args) {
        return kafkaSourceCreater.createSource(env, topic, args);
    }

    /** Creates the raw Kafka string source with an explicit partition count. */
    private DataStreamSource<String> createSource(String topic, int partitionNum, String[] args) {
        return kafkaSourceCreater.createSource(env, topic, partitionNum, args);
    }

    /** Key selector extracting the given field from each JSON-encoded record. */
    private static KeySelector<String, String> jsonFieldKey(String field) {
        return s -> JSONObject.parseObject(s).getString(field);
    }

    /** Registers {@code stream} as a temporary view named {@code tableName}. */
    private <T> void register(String tableName, String fields, SingleOutputStreamOperator<T> stream) {
        tableEnv.createTemporaryView(tableName, stream, FieldUtils.convertFieldArray(fields));
    }

    // ------------------------------------------------------------------
    // De-duplicating registration.
    // ------------------------------------------------------------------

    /**
     * Registers a table de-duplicated on the default {@link #DATA_ID_FIELD} key,
     * applying {@code filter} before the de-duplication step.
     *
     * @param tableName   view name
     * @param tableFields comma-separated field expression for the view
     * @param filter      record-level filter applied to the raw JSON strings
     * @param mapper      converts each JSON string into a {@code T}
     * @param clazz       concrete element type (needed for {@code returns})
     * @param topic       Kafka topic to consume
     * @param args        main-method arguments forwarded to the source creater
     * @param <T>         stream element type
     */
    public <T> void registerDistinctTable(String tableName, String tableFields, FilterFunction<String> filter, MapFunction<String, T> mapper, Class<T> clazz, String topic, String[] args) {
        registerDistinctTable(tableName, DATA_ID_FIELD, tableFields, filter, mapper, clazz, topic, args);
    }

    /**
     * Registers a table de-duplicated on the default {@link #DATA_ID_FIELD} key,
     * without a pre-filter.
     *
     * @param tableName   view name
     * @param tableFields comma-separated field expression for the view
     * @param mapper      converts each JSON string into a {@code T}
     * @param clazz       concrete element type (needed for {@code returns})
     * @param topic       Kafka topic to consume
     * @param args        main-method arguments forwarded to the source creater
     * @param <T>         stream element type
     */
    public <T> void registerDistinctTable(String tableName, String tableFields, MapFunction<String, T> mapper, Class<T> clazz, String topic, String[] args) {
        registerDistinctTable(tableName, DATA_ID_FIELD, tableFields, mapper, clazz, topic, args);
    }

    /**
     * Registers a table de-duplicated on {@code groupByField}.
     *
     * @param tableName    view name
     * @param groupByField JSON field whose value is the de-duplication key
     * @param tableFields  comma-separated field expression for the view
     * @param mapper       converts each JSON string into a {@code T}
     * @param clazz        concrete element type (needed for {@code returns})
     * @param topic        Kafka topic to consume
     * @param args         main-method arguments forwarded to the source creater
     * @param <T>          stream element type
     */
    public <T> void registerDistinctTable(String tableName, String groupByField, String tableFields, MapFunction<String, T> mapper, Class<T> clazz, String topic, String[] args) {
        SingleOutputStreamOperator<T> source = createSource(topic, args)
                .keyBy(jsonFieldKey(groupByField))
                // DistinctFilterByField drops repeats per key via keyed state.
                .filter(new DistinctFilterByField(groupByField))
                .map(mapper)
                .returns(clazz);
        register(tableName, tableFields, source);
    }

    /**
     * Registers a table de-duplicated on {@code groupByField}, applying
     * {@code filter} before the de-duplication step.
     *
     * @param tableName    view name
     * @param groupByField JSON field whose value is the de-duplication key
     * @param tableFields  comma-separated field expression for the view
     * @param filter       record-level filter applied to the raw JSON strings
     * @param mapper       converts each JSON string into a {@code T}
     * @param clazz        concrete element type (needed for {@code returns})
     * @param topic        Kafka topic to consume
     * @param args         main-method arguments forwarded to the source creater
     * @param <T>          stream element type
     */
    public <T> void registerDistinctTable(String tableName, String groupByField, String tableFields, FilterFunction<String> filter, MapFunction<String, T> mapper, Class<T> clazz, String topic, String[] args) {
        SingleOutputStreamOperator<T> source = createSource(topic, args)
                .keyBy(jsonFieldKey(groupByField))
                // NOTE(review): the stream is keyed a second time right after this
                // filter. The first keyBy only matters if the supplied filter uses
                // keyed state; otherwise it is an avoidable extra shuffle — confirm
                // with callers before simplifying.
                .filter(filter)
                .keyBy(jsonFieldKey(groupByField))
                .filter(new DistinctFilterByField(groupByField))
                .map(mapper)
                .returns(clazz);
        register(tableName, tableFields, source);
    }

    // ------------------------------------------------------------------
    // Plain registration (no de-duplication).
    // ------------------------------------------------------------------

    /**
     * Registers a table whose Kafka partition count is taken from the main-method
     * arguments; no filter.
     *
     * @param tableName view name
     * @param fields    comma-separated field expression for the view
     * @param mapper    converts each JSON string into a {@code T}
     * @param clazz     concrete element type (needed for {@code returns})
     * @param topic     Kafka topic to consume
     * @param args      main-method arguments forwarded to the source creater
     * @param <T>       stream element type
     */
    public <T> void registerTable(String tableName, String fields, MapFunction<String, T> mapper, Class<T> clazz, String topic, String[] args) {
        SingleOutputStreamOperator<T> source = createSource(topic, args)
                .map(mapper)
                .returns(clazz);
        register(tableName, fields, source);
    }

    /**
     * Registers a table keyed by {@code keyByField} (primary key); no filter.
     * Kafka partition count is taken from the main-method arguments.
     *
     * @param tableName  view name
     * @param fields     comma-separated field expression for the view
     * @param keyByField JSON field used as the keyBy key
     * @param mapper     converts each JSON string into a {@code T}
     * @param clazz      concrete element type (needed for {@code returns})
     * @param topic      Kafka topic to consume
     * @param args       main-method arguments forwarded to the source creater
     * @param <T>        stream element type
     */
    public <T> void registerTable(String tableName, String fields, String keyByField, MapFunction<String, T> mapper, Class<T> clazz, String topic, String[] args) {
        SingleOutputStreamOperator<T> source = createSource(topic, args)
                .keyBy(jsonFieldKey(keyByField))
                .map(mapper)
                .returns(clazz);
        register(tableName, fields, source);
    }

    /**
     * Registers a table with a filter; Kafka partition count is taken from the
     * main-method arguments.
     *
     * @param tableName view name
     * @param fields    comma-separated field expression for the view
     * @param filter    record-level filter applied to the raw JSON strings
     * @param mapper    converts each JSON string into a {@code T}
     * @param clazz     concrete element type (needed for {@code returns})
     * @param topic     Kafka topic to consume
     * @param args      main-method arguments forwarded to the source creater
     * @param <T>       stream element type
     */
    public <T> void registerTable(String tableName, String fields, FilterFunction<String> filter, MapFunction<String, T> mapper, Class<T> clazz, String topic, String[] args) {
        SingleOutputStreamOperator<T> source = createSource(topic, args)
                .filter(filter)
                .map(mapper)
                .returns(clazz);
        register(tableName, fields, source);
    }

    /**
     * Registers a table keyed by {@code keyByField} (primary key) with a filter;
     * Kafka partition count is taken from the main-method arguments.
     *
     * @param tableName  view name
     * @param fields     comma-separated field expression for the view
     * @param keyByField JSON field used as the keyBy key
     * @param filter     record-level filter applied to the raw JSON strings
     * @param mapper     converts each JSON string into a {@code T}
     * @param clazz      concrete element type (needed for {@code returns})
     * @param topic      Kafka topic to consume
     * @param args       main-method arguments forwarded to the source creater
     * @param <T>        stream element type
     */
    public <T> void registerTable(String tableName, String fields, String keyByField, FilterFunction<String> filter, MapFunction<String, T> mapper, Class<T> clazz, String topic, String[] args) {
        SingleOutputStreamOperator<T> source = createSource(topic, args)
                .keyBy(jsonFieldKey(keyByField))
                .filter(filter)
                .map(mapper)
                .returns(clazz);
        register(tableName, fields, source);
    }

    /**
     * Registers a table from canal-1.1.4-formatted data.
     *
     * @param tableName  view name
     * @param fields     comma-separated field expression for the view
     * @param keyByField key selector over the flattened {@code T} elements
     * @param flatMapper flattens array-style payloads into individual records
     * @param filter     filter applied to the flattened {@code T} elements
     * @param clazz      concrete element type (needed for {@code returns})
     * @param topic      Kafka topic to consume
     * @param args       main-method arguments forwarded to the source creater
     * @param <T>        stream element type
     */
    public <T> void registerTableWithCanalJson(String tableName, String fields, KeySelector<T, String> keyByField, FlatMapFunction<String, T> flatMapper, FilterFunction<T> filter, Class<T> clazz, String topic, String[] args) {
        SingleOutputStreamOperator<T> source = createSource(topic, args)
                .flatMap(flatMapper)
                .keyBy(keyByField)
                .filter(filter)
                .returns(clazz);
        register(tableName, fields, source);
    }

    /**
     * Registers a table with a filter and a flat-map converter.
     *
     * @param tableName view name
     * @param fields    comma-separated field expression for the view
     * @param filter    record-level filter applied to the raw JSON strings
     * @param mapper    flat-maps each JSON string into zero or more {@code T}
     * @param clazz     concrete element type (needed for {@code returns})
     * @param topic     Kafka topic to consume
     * @param args      main-method arguments forwarded to the source creater
     * @param <T>       stream element type
     */
    public <T> void registerTable(String tableName, String fields, FilterFunction<String> filter, FlatMapFunction<String, T> mapper, Class<T> clazz, String topic, String[] args) {
        SingleOutputStreamOperator<T> source = createSource(topic, args)
                .filter(filter)
                .flatMap(mapper)
                .returns(clazz);
        register(tableName, fields, source);
    }

    /**
     * Registers a table keyed by {@code keyByField}, with a filter and a
     * flat-map converter.
     *
     * @param tableName  view name
     * @param fields     comma-separated field expression for the view
     * @param keyByField JSON field used as the keyBy key
     * @param filter     record-level filter applied to the raw JSON strings
     * @param mapper     flat-maps each JSON string into zero or more {@code T}
     * @param clazz      concrete element type (needed for {@code returns})
     * @param topic      Kafka topic to consume
     * @param args       main-method arguments forwarded to the source creater
     * @param <T>        stream element type
     */
    public <T> void registerTable(String tableName, String fields, String keyByField, FilterFunction<String> filter, FlatMapFunction<String, T> mapper, Class<T> clazz, String topic, String[] args) {
        SingleOutputStreamOperator<T> source = createSource(topic, args)
                .keyBy(jsonFieldKey(keyByField))
                .filter(filter)
                .flatMap(mapper)
                .returns(clazz);
        register(tableName, fields, source);
    }

    /**
     * Registers a table with an explicit Kafka partition count.
     *
     * @param tableName    view name
     * @param fields       comma-separated field expression for the view
     * @param filter       record-level filter applied to the raw JSON strings
     * @param mapper       converts each JSON string into a {@code T}
     * @param clazz        concrete element type (needed for {@code returns})
     * @param topic        Kafka topic to consume
     * @param partitionNum explicit Kafka partition count for the source
     * @param args         main-method arguments forwarded to the source creater
     * @param <T>          stream element type
     */
    public <T> void registerTable(String tableName, String fields, FilterFunction<String> filter, MapFunction<String, T> mapper, Class<T> clazz, String topic, int partitionNum, String[] args) {
        SingleOutputStreamOperator<T> source = createSource(topic, partitionNum, args)
                .filter(filter)
                .map(mapper)
                .returns(clazz);
        register(tableName, fields, source);
    }

    /**
     * Registers a table keyed by {@code keyByField} with an explicit Kafka
     * partition count and a filter.
     *
     * @param tableName    view name
     * @param fields       comma-separated field expression for the view
     * @param keyByField   JSON field used as the keyBy key
     * @param filter       record-level filter applied to the raw JSON strings
     * @param mapper       converts each JSON string into a {@code T}
     * @param clazz        concrete element type (needed for {@code returns})
     * @param topic        Kafka topic to consume
     * @param partitionNum explicit Kafka partition count for the source
     * @param args         main-method arguments forwarded to the source creater
     * @param <T>          stream element type
     */
    public <T> void registerTable(String tableName, String fields, String keyByField, FilterFunction<String> filter, MapFunction<String, T> mapper, Class<T> clazz, String topic, int partitionNum, String[] args) {
        SingleOutputStreamOperator<T> source = createSource(topic, partitionNum, args)
                .keyBy(jsonFieldKey(keyByField))
                .filter(filter)
                .map(mapper)
                .returns(clazz);
        register(tableName, fields, source);
    }

    /**
     * Builds a {@link Table} object (without registering it as a named view).
     *
     * @param fields       comma-separated field expression for the table
     * @param filter       record-level filter applied to the raw JSON strings
     * @param mapper       converts each JSON string into a {@code T}
     * @param clazz        concrete element type (needed for {@code returns})
     * @param topic        Kafka topic to consume
     * @param partitionNum explicit Kafka partition count for the source
     * @param args         main-method arguments forwarded to the source creater
     * @param <T>          stream element type
     * @return the resulting Table
     */
    public <T> Table createTable(String fields, FilterFunction<String> filter, MapFunction<String, T> mapper, Class<T> clazz, String topic, int partitionNum, String[] args) {
        SingleOutputStreamOperator<T> source = createSource(topic, partitionNum, args)
                .filter(filter)
                .map(mapper)
                .returns(clazz);
        return tableEnv.fromDataStream(source, FieldUtils.convertFieldArray(fields));
    }

    // ------------------------------------------------------------------
    // Debug helpers.
    // ------------------------------------------------------------------

    /**
     * Prints a registered table's rows as a retract stream; for testing only.
     *
     * @param tableName name of a previously registered view
     */
    public void printTable(String tableName) {
        printTable(tableEnv.sqlQuery("select * from " + tableName));
    }

    /**
     * Prints a table's rows as a retract stream; for testing only.
     *
     * @param table table to print
     */
    public void printTable(Table table) {
        DataStream<Tuple2<Boolean, Row>> stream = tableEnv.toRetractStream(table, Row.class);
        stream.print();
    }

    public static void main(String[] args) {

    }

}
