package com.athui.service;

import com.athui.bean.metadata.ColumnMetaData;
import com.athui.bean.task.SameTask;
import com.athui.utils.common.jdbc.JdbcUtils;
import com.athui.utils.explorer.DataExplorer;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.functions.RichReduceFunction;
import org.apache.flink.api.common.state.BroadcastState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ReadOnlyBroadcastState;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.shaded.guava30.com.google.common.hash.BloomFilter;
import org.apache.flink.shaded.guava30.com.google.common.hash.Funnels;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.KeyedBroadcastProcessFunction;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.io.Serializable;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Map;

/**
 * @description: Abstract Flink job skeleton for column-level data exploration/profiling.
 * @author: zhangzhonghui
 * @create: 2024-08-25 23:43
 * @Version: 1.0
 */
public abstract class ExplorerServer<IN, OUT> extends JdbcUtils implements Serializable {

    /** Map key under which the single row-size limit value is stored in broadcast state. */
    public static final String LIMIT_BROADCAST_VALUE = "limit-broadcast-value";
    /** Name of the broadcast state holding the ColumnMetaData entries, keyed by column index. */
    public static final String COLUMN_METADATA_BROADCAST_STATE = "column-metadata-broadcast-state";
    /** Name of the broadcast state holding the row-size limit. */
    public static final String LIMIT_BROADCAST_STATE = "limit-broadcast-state";

    /** Expected insertions for the per-operator BloomFilter used to count duplicates. */
    private static final int BLOOM_EXPECTED_INSERTIONS = 10_000_000;
    /** Desired false-positive probability for the BloomFilter. */
    private static final double BLOOM_FPP = 0.0002;

    /**
     * Hook invoked before the topology is built; subclasses configure the environment here
     * (parallelism, checkpointing, etc.).
     *
     * @param env the streaming execution environment about to be used
     */
    public abstract void open(StreamExecutionEnvironment env);

    /**
     * Supplies the data-source transformer that converts raw source records (IN)
     * into the intermediate representation (OUT) used for exploration.
     *
     * @return the transformer for this source type
     */
    public abstract DataSourceTransformer<IN, OUT> getTypeTransformer();

    /**
     * Supplies the collection of column metadata descriptors to broadcast.
     *
     * @param collect an (initially empty) collection the implementation may fill and return
     * @return the column metadata to profile against
     */
    public abstract Collection<ColumnMetaData> getColumnMetaDataList(Collection<ColumnMetaData> collect);

    /**
     * Builds the raw input stream.
     *
     * @param env the streaming execution environment
     * @return the source stream of raw records
     */
    public abstract DataStream<IN> source(StreamExecutionEnvironment env);

    /**
     * Writes the final (ColumnMetaData, SameTask) results to the target system.
     *
     * @param stream the fully aggregated result stream
     */
    public abstract void sink(DataStream<Tuple2<ColumnMetaData, SameTask>> stream);

    /**
     * Row-count limit for batch-style summarization.
     *
     * @return the expected total row count, or {@code null} / a value &lt; 1 for
     *         unbounded streaming aggregation
     */
    public abstract Integer getRowSizeLimit();

    /**
     * Name used for the submitted Flink job.
     *
     * @return the job name
     */
    public abstract String getJobName();

    /**
     * Creates the streaming execution environment.
     *
     * @return a new {@link StreamExecutionEnvironment}
     */
    public StreamExecutionEnvironment getExecutionEnvironment() {
        return StreamExecutionEnvironment.getExecutionEnvironment();
    }

    /**
     * Creates a streaming {@link StreamTableEnvironment} on top of a fresh execution environment.
     *
     * @return the table environment
     */
    public StreamTableEnvironment getTableEnvironment() {
        // Build the settings for streaming mode.
        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .inStreamingMode()
                .build();

        // Create the table environment bound to a streaming environment.
        return StreamTableEnvironment.create(getExecutionEnvironment(), settings);
    }

    /**
     * Core processing: joins the source stream with the broadcast column metadata and
     * produces a stream of (ColumnMetaData, SameTask) exploration results.
     * <p>
     * Records that arrive before the broadcast metadata has been seen on a subtask are
     * diverted to a side output and re-processed through a second broadcast join, so no
     * data is lost to the broadcast race. Main and side results are unioned.
     *
     * @param env          the streaming execution environment
     * @param sourceStream the raw source stream
     * @param transformer  converter from IN to OUT and from OUT to per-column values
     * @return the unioned result stream of per-slice column statistics
     */
    public DataStream<Tuple2<ColumnMetaData, SameTask>> process(
            StreamExecutionEnvironment env,
            DataStream<IN> sourceStream,
            DataSourceTransformer<IN, OUT> transformer
    ) {
        // Broadcast the ColumnMetaData collection to every subtask.
        MapStateDescriptor<String, ColumnMetaData> columnMetaDataStateDescriptor =
                new MapStateDescriptor<>(ExplorerServer.COLUMN_METADATA_BROADCAST_STATE, String.class, ColumnMetaData.class);
        BroadcastStream<ColumnMetaData> columnMetaDataBroadcastStream =
                env.fromCollection(getColumnMetaDataList(new ArrayList<>())).broadcast(columnMetaDataStateDescriptor);

        // Side output for records that arrive before the broadcast state is populated.
        OutputTag<Tuple2<OUT, SameTask>> outPutTag =
                new OutputTag<>("last-data", Types.TUPLE(Types.GENERIC(Object.class), Types.GENERIC(SameTask.class)));

        // Main stream: key each record, connect it with the broadcast metadata, explore each column.
        SingleOutputStreamOperator<Tuple2<ColumnMetaData, SameTask>> mainProcessStream = sourceStream.map(new RichMapFunction<IN, Tuple3<String, OUT, SameTask>>() {
            @Override
            public Tuple3<String, OUT, SameTask> map(IN value) throws Exception {
                // Derive a coarse partitioning key from the record hash.
                // NOTE(review): hashCode() % 10 can be negative, producing keys like "key--3";
                // keys stay valid but the 10 buckets are not evenly used — confirm intent.
                String key = String.format("key-%d", value.hashCode() % 10);
                return Tuple3.of(key, transformer.transformBySourceType(value), transformer.task());
            }
        }).returns(Types.TUPLE(Types.STRING, Types.GENERIC(Object.class), Types.GENERIC(SameTask.class)))
                .keyBy((KeySelector<Tuple3<String, OUT, SameTask>, String>) value -> value.f0)
                .connect(columnMetaDataBroadcastStream)
                .process(new KeyedBroadcastProcessFunction<Object, Tuple3<String, OUT, SameTask>, ColumnMetaData, Tuple2<ColumnMetaData, SameTask>>() {
                    // Raw type kept: DataExplorer.repeatCount's parameter type is not visible here
                    // — TODO confirm and parameterize (stringFunnel yields BloomFilter<CharSequence>).
                    private BloomFilter bloomFilter;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        super.open(parameters);
                        bloomFilter = BloomFilter.create(
                                Funnels.stringFunnel(Charset.defaultCharset()), BLOOM_EXPECTED_INSERTIONS, BLOOM_FPP);
                    }

                    @Override
                    public void processElement(Tuple3<String, OUT, SameTask> value, ReadOnlyContext ctx, Collector<Tuple2<ColumnMetaData, SameTask>> out) throws Exception {
                        // Read the broadcast metadata registered by processBroadcastElement.
                        ReadOnlyBroadcastState<String, ColumnMetaData> broadcastState = ctx.getBroadcastState(columnMetaDataStateDescriptor);
                        // iterator().hasNext() is a reliable emptiness check; the default
                        // Iterable.spliterator() reports Long.MAX_VALUE for unknown sizes.
                        boolean hasMetadata = broadcastState.immutableEntries().iterator().hasNext();

                        if (!hasMetadata) {
                            // Broadcast state not populated yet on this subtask:
                            // record the detour and divert the record to the side output.
                            value.f2.setOutPutTagSize(1L);
                            ctx.output(outPutTag, Tuple2.of(value.f1, value.f2));
                        } else {
                            for (Map.Entry<String, ColumnMetaData> entry : broadcastState.immutableEntries()) {
                                ColumnMetaData columnMetaData = new DataExplorer(entry.getValue(), transformer.transformByValue(entry.getKey(), value.f1))
                                        .totalCount()          // total row count
                                        .nullCount()           // null-value count
                                        .textExplorer()        // text-type checks
                                        .numericalExplorer()   // numeric-type checks
                                        .repeatCount(bloomFilter) // duplicate count via bloom filter
                                        .build();
                                // Record one iteration per explored column.
                                value.f2.setIterationSize(1L);

                                out.collect(Tuple2.of(columnMetaData, value.f2));
                            }
                        }
                    }

                    @Override
                    public void processBroadcastElement(ColumnMetaData value, Context ctx, Collector<Tuple2<ColumnMetaData, SameTask>> out) throws Exception {
                        // Store each broadcast ColumnMetaData under its column index.
                        BroadcastState<String, ColumnMetaData> broadcastState = ctx.getBroadcastState(columnMetaDataStateDescriptor);
                        broadcastState.put(value.getIndex(), value);
                    }
                });

        // Side stream: re-process diverted records once the broadcast metadata is available.
        SingleOutputStreamOperator<Tuple2<ColumnMetaData, SameTask>> sideProcessStream = mainProcessStream.getSideOutput(outPutTag)
                .keyBy((KeySelector<Tuple2<OUT, SameTask>, String>) value -> String.valueOf(value.hashCode() % 10))
                .connect(columnMetaDataBroadcastStream)
                .process(new KeyedBroadcastProcessFunction<Object, Tuple2<OUT, SameTask>, ColumnMetaData, Tuple2<ColumnMetaData, SameTask>>() {

                    // Raw type kept for the same reason as the main stream's filter.
                    private BloomFilter bloomFilter;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        super.open(parameters);
                        bloomFilter = BloomFilter.create(
                                Funnels.stringFunnel(Charset.defaultCharset()), BLOOM_EXPECTED_INSERTIONS, BLOOM_FPP);
                    }

                    @Override
                    public void processElement(Tuple2<OUT, SameTask> value, ReadOnlyContext ctx, Collector<Tuple2<ColumnMetaData, SameTask>> out) throws Exception {
                        // Read the broadcast metadata; see main stream for the emptiness-check rationale.
                        ReadOnlyBroadcastState<String, ColumnMetaData> broadcastState = ctx.getBroadcastState(columnMetaDataStateDescriptor);
                        boolean hasMetadata = broadcastState.immutableEntries().iterator().hasNext();

                        if (hasMetadata) {
                            for (Map.Entry<String, ColumnMetaData> entry : broadcastState.immutableEntries()) {
                                // NOTE(review): unlike the main stream, the raw OUT value is passed
                                // without transformer.transformByValue(entry.getKey(), value) — confirm intended.
                                ColumnMetaData columnMetaData = new DataExplorer(entry.getValue(), value)
                                        .totalCount()          // total row count
                                        .nullCount()           // null-value count
                                        .textExplorer()        // text-type checks
                                        .numericalExplorer()   // numeric-type checks
                                        .repeatCount(bloomFilter) // duplicate count via bloom filter
                                        .build();

                                // Record one iteration per explored column.
                                value.f1.setIterationSize(1L);
                                out.collect(Tuple2.of(columnMetaData, value.f1));
                            }
                        } else {
                            System.out.println("No broadcast metadata available in the side stream either ...");
                        }
                    }

                    @Override
                    public void processBroadcastElement(ColumnMetaData value, Context ctx, Collector<Tuple2<ColumnMetaData, SameTask>> out) throws Exception {
                        // Store each broadcast ColumnMetaData under its column index.
                        BroadcastState<String, ColumnMetaData> broadcastState = ctx.getBroadcastState(columnMetaDataStateDescriptor);
                        broadcastState.put(value.getIndex(), value);
                    }
                });

        return mainProcessStream.union(sideProcessStream);
    }

    /**
     * Aggregation stage: filters out invalid columns, reduces per-column partial statistics
     * into totals, applies the broadcast row-size limit to decide when a column's result is
     * complete, and hands the final stream to {@link #sink}.
     *
     * @param env         the streaming execution environment
     * @param unionStream the unioned per-slice result stream from {@link #process}
     */
    public void compute(
            StreamExecutionEnvironment env,
            DataStream<Tuple2<ColumnMetaData, SameTask>> unionStream) {

        // Broadcast the row-size limit (single call: getRowSizeLimit() is abstract and
        // may be expensive or non-idempotent). null means "no limit" -> streaming mode.
        MapStateDescriptor<String, Integer> limitBroadcastStateDescriptor =
                new MapStateDescriptor<>(LIMIT_BROADCAST_STATE, String.class, Integer.class);
        Integer rowSizeLimit = getRowSizeLimit();
        BroadcastStream<Integer> limitBroadcastStream =
                env.fromElements(rowSizeLimit == null ? -1 : rowSizeLimit).broadcast(limitBroadcastStateDescriptor);

        SingleOutputStreamOperator<Tuple2<ColumnMetaData, SameTask>> reduceStream = unionStream
                // Drop columns flagged invalidColumn=true; they are outside the computation scope.
                .filter((FilterFunction<Tuple2<ColumnMetaData, SameTask>>) value -> {
                    // NOTE(review): this sets the invalid-column counter to 1 for EVERY record,
                    // valid or not, so the reduced sum counts all records rather than invalid
                    // columns — confirm whether it should apply only when getInvalidColumn() is true.
                    value.f1.setInvalidColumnCount(1L);
                    return !value.f0.getInvalidColumn();
                })
                .keyBy((KeySelector<Tuple2<ColumnMetaData, SameTask>, Object>) e -> e.f0.getColumnCode())
                .reduce(new RichReduceFunction<Tuple2<ColumnMetaData, SameTask>>() {
                    @Override
                    public Tuple2<ColumnMetaData, SameTask> reduce(Tuple2<ColumnMetaData, SameTask> value1, Tuple2<ColumnMetaData, SameTask> value2) throws Exception {
                        ColumnMetaData cmA = value1.f0;
                        ColumnMetaData cmB = value2.f0;
                        SameTask stA = value1.f1;
                        SameTask stB = value2.f1;

                        // Accumulate partial statistics into cmA (left operand carries the running totals).
                        // Total row count
                        cmA.setTotalCount(cmA.getTotalCount() + cmB.getTotalCount());
                        // null values
                        cmA.setNullValueCount(cmA.getNullValueCount() + cmB.getNullValueCount());
                        // empty values
                        cmA.setEmptyValueCount(cmA.getEmptyValueCount() + cmB.getEmptyValueCount());
                        // distinct (non-repeating) values
                        cmA.setNotRepeatValueCount(cmA.getNotRepeatValueCount() + cmB.getNotRepeatValueCount());
                        // invalid values
                        cmA.setInvalidValueCount(cmA.getInvalidValueCount() + cmB.getInvalidValueCount());
                        // values matching the data of interest
                        cmA.setInvolvedValueCount(cmA.getInvolvedValueCount() + cmB.getInvolvedValueCount());

                        // Numeric columns:
                        // zero values
                        cmA.setZeroValueCount(cmA.getZeroValueCount() + cmB.getZeroValueCount());
                        // minimum
                        cmA.setNumericalMinValue(cmA.getNumericalMinValue().min(cmB.getNumericalMinValue()));
                        // maximum
                        cmA.setNumericalMaxValue(cmA.getNumericalMaxValue().max(cmB.getNumericalMaxValue()));

                        // Text columns:
                        // non-standard values
                        cmA.setNonStandardValueCount(cmA.getNonStandardValueCount() + cmB.getNonStandardValueCount());
                        // minimum text length
                        cmA.setTextMinLength(Math.min(cmA.getTextMinLength(), cmB.getTextMinLength()));
                        // maximum text length
                        cmA.setTextMaxLength(Math.max(cmA.getTextMaxLength(), cmB.getTextMaxLength()));
                        // illegal (non-conforming) values
                        cmA.setIllegalityValueCount(cmA.getIllegalityValueCount() + cmB.getIllegalityValueCount());

                        // Task bookkeeping counters.
                        stA.setInvalidColumnCount(stA.getInvalidColumnCount() + stB.getInvalidColumnCount());
                        stA.setOutPutTagSize(stA.getOutPutTagSize() + stB.getOutPutTagSize());
                        stA.setIterationSize(stA.getIterationSize() + stB.getIterationSize());

                        return Tuple2.of(cmA, stA);
                    }
                });

        // Deduplicate: emit per streaming update, or only the final record when a batch limit is set.
        SingleOutputStreamOperator<Tuple2<ColumnMetaData, SameTask>> resultStream = reduceStream.keyBy((KeySelector<Tuple2<ColumnMetaData, SameTask>, String>) e -> e.f0.getColumnCode())
                .connect(limitBroadcastStream)
                .process(new KeyedBroadcastProcessFunction<Object, Tuple2<ColumnMetaData, SameTask>, Integer, Tuple2<ColumnMetaData, SameTask>>() {
                    @Override
                    public void processElement(Tuple2<ColumnMetaData, SameTask> value, ReadOnlyContext readOnlyContext, Collector<Tuple2<ColumnMetaData, SameTask>> collector) throws Exception {
                        // Read the broadcast limit.
                        ReadOnlyBroadcastState<String, Integer> broadcastState = readOnlyContext.getBroadcastState(limitBroadcastStateDescriptor);
                        Integer limit = broadcastState.get(LIMIT_BROADCAST_VALUE);
                        // The broadcast element may not have been processed on this subtask yet,
                        // in which case 'limit' is null. Fall back to streaming mode (-1) instead
                        // of NPE-ing on auto-unboxing in the comparison below.
                        int effectiveLimit = limit == null ? -1 : limit;

                        ColumnMetaData columnMetaData = value.f0;
                        SameTask sameTask = value.f1;

                        // Record one more iteration for this task.
                        sameTask.setIterationSize(sameTask.getIterationSize() + 1L);
                        if (effectiveLimit < 1) {
                            // Streaming mode: emit every incremental update.
                            collector.collect(Tuple2.of(columnMetaData, sameTask));
                        } else if (columnMetaData.getTotalCount().compareTo((long) effectiveLimit) == 0) {
                            // Batch mode: emit only when the aggregated total reaches the expected row count.
                            collector.collect(Tuple2.of(columnMetaData, sameTask));
                        }
                    }

                    @Override
                    public void processBroadcastElement(Integer limit, Context context, Collector<Tuple2<ColumnMetaData, SameTask>> collector) throws Exception {
                        // Store the limit value under the well-known key.
                        BroadcastState<String, Integer> broadcastState = context.getBroadcastState(limitBroadcastStateDescriptor);
                        broadcastState.put(LIMIT_BROADCAST_VALUE, limit);
                    }
                });

        sink(resultStream);
    }

    /**
     * Wires the full topology (open -> source -> process -> compute -> sink) and submits the job.
     *
     * @throws Exception if job construction or execution fails
     */
    public void run() throws Exception {
        StreamExecutionEnvironment env = getExecutionEnvironment();
        open(env);
        DataStream<Tuple2<ColumnMetaData, SameTask>> processStream = process(env, source(env), getTypeTransformer());
        compute(env, processStream);
        env.execute(getJobName());
    }

    /**
     * Converter between the raw source type IN, the intermediate type OUT, and per-column values.
     */
    public interface DataSourceTransformer<IN, OUT> extends Serializable {

        /**
         * Converts a raw source record into the intermediate representation.
         *
         * @param input the raw input record
         * @return the converted record
         */
        OUT transformBySourceType(IN input);

        /**
         * Extracts the value for one column from an intermediate record.
         *
         * @param index the column index/key
         * @param row   the intermediate record
         * @return the value for that column
         */
        Object transformByValue(String index, OUT row);

        /**
         * @return the task-bookkeeping object attached to every record.
         */
        SameTask task();
    }
}
