package com.group1.realtime.app.dim;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.group1.realtime.app.BaseAppV1;
import com.group1.realtime.bean.TableProcess;
import com.group1.realtime.common.Constant;
import com.group1.realtime.util.FlinkSinkUtil;
import com.group1.realtime.util.JdbcUtil;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.state.BroadcastState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ReadOnlyBroadcastState;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.util.Collector;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.List;

/**
 * DIM-layer application: consumes the ODS business-data topic, keeps only the
 * dimension tables declared in the MySQL config table {@code edu_config.table_process}
 * (read via Flink CDC and broadcast), strips undeclared columns, and writes the
 * rows to Phoenix/HBase.
 */
public class DimApp extends BaseAppV1 {

    public static void main(String[] args) {
        new DimApp().init(2001, 2, "DimApp", Constant.TOPIC_ODS_DB);
    }

    @Override
    protected void handle(StreamExecutionEnvironment env, DataStreamSource<String> stream) {

        // 1. Filter the raw ODS stream down to valid "edu" insert/update records.
        SingleOutputStreamOperator<JSONObject> etlStream = etl(stream);
        // 2. Read the dimension-table configuration via Flink CDC.
        SingleOutputStreamOperator<TableProcess> tpStream = readTableProcess(env);
        // 3. Broadcast the config stream and connect it with the data stream.
        SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> connectedStream = connect(etlStream, tpStream);
        connectedStream.print();
        // 4. Drop columns that are not listed in the sink configuration.
        SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> filteredStream = filterNeedlessColumns(connectedStream);
        // 5. Write the dimension rows to Phoenix.
        writeToPhoenix(filteredStream);

    }

    /**
     * Keys the stream by sink table name (so one sink subtask handles one table)
     * and attaches the Phoenix sink.
     */
    private void writeToPhoenix(SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> filteredStream) {
        filteredStream.keyBy(t -> t.f1.getSinkTable()).addSink(FlinkSinkUtil.getPhoenixSink());
    }

    /**
     * Removes every key from the data JSON that is not declared in the config's
     * sinkColumns list. "operate_type" is kept explicitly because it is injected
     * by {@link #connect} and needed downstream, but never appears in the config.
     */
    private SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> filterNeedlessColumns(SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> connectedStream) {

        return connectedStream.map(new MapFunction<Tuple2<JSONObject, TableProcess>, Tuple2<JSONObject, TableProcess>>() {
            @Override
            public Tuple2<JSONObject, TableProcess> map(Tuple2<JSONObject, TableProcess> value) throws Exception {
                JSONObject data = value.f0;
                // Columns the sink config wants, e.g. "id,name,..." split on commas.
                List<String> columns = Arrays.asList(value.f1.getSinkColumns().split(","));
                // Mutates the JSONObject in place: drop everything not configured.
                data.keySet().removeIf(key -> !columns.contains(key) && !key.equals("operate_type"));
                return value;
            }
        });

    }

    /**
     * Broadcasts the config stream and joins it with the data stream: each data
     * record is matched (by source table name) against the broadcast state; on a
     * match the record's "data" payload plus its operation type is emitted together
     * with the config entry. Config records also trigger table creation in Phoenix.
     */
    private SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> connect(SingleOutputStreamOperator<JSONObject> etlStream, SingleOutputStreamOperator<TableProcess> tpStream) {
        // State descriptor shared by the broadcast side (writes) and data side (reads).
        MapStateDescriptor<String, TableProcess> tpStateDesc = new MapStateDescriptor<>("tpState", String.class, TableProcess.class);
        BroadcastStream<TableProcess> tpBcStream = tpStream.broadcast(tpStateDesc);
        return etlStream.connect(tpBcStream)
                .process(new BroadcastProcessFunction<JSONObject, TableProcess, Tuple2<JSONObject, TableProcess>>() {

                    // Phoenix JDBC connection used only for DDL; opened once per subtask.
                    private Connection conn;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        conn = JdbcUtil.getPhoenixConnection();
                    }

                    @Override
                    public void close() throws Exception {
                        JdbcUtil.closeConnection(conn);
                    }

                    @Override
                    public void processElement(JSONObject obj, ReadOnlyContext ctx, Collector<Tuple2<JSONObject, TableProcess>> out) throws Exception {
                        ReadOnlyBroadcastState<String, TableProcess> state = ctx.getBroadcastState(tpStateDesc);
                        String table = obj.getString("table");
                        TableProcess tp = state.get(table);
                        // A hit means this source table is configured as a dimension table.
                        // NOTE(review): records arriving before their config is broadcast are
                        // silently dropped — acceptable here only if configs load first.
                        if (tp != null) {
                            JSONObject data = obj.getJSONObject("data");
                            // Carry the maxwell/canal operation type along for the sink.
                            data.put("operate_type", obj.getString("type"));
                            out.collect(Tuple2.of(data, tp));
                        }

                    }

                    @Override
                    public void processBroadcastElement(TableProcess tp, Context ctx, Collector<Tuple2<JSONObject, TableProcess>> out) throws Exception {
                        // Defensive: a CDC delete event has no "after" image and maps to null.
                        // Without this guard createTable() would NPE on tp.getSinkTable().
                        if (tp == null) {
                            return;
                        }
                        // Idempotent DDL ("create table if not exists"), safe to run per config record.
                        createTable(tp);
                        saveTpToState(tp, ctx);
                    }

                    /** Stores the config entry keyed by its source table name. */
                    private void saveTpToState(TableProcess tp, Context ctx) throws Exception {
                        BroadcastState<String, TableProcess> state = ctx.getBroadcastState(tpStateDesc);
                        state.put(tp.getSourceTable(), tp);
                    }

                    /**
                     * Creates the sink table in Phoenix. All columns are typed varchar;
                     * the primary key defaults to "id" when none is configured.
                     *
                     * Identifiers cannot be bound as JDBC parameters, hence the string
                     * concatenation; the values come from the trusted config table.
                     */
                    private void createTable(TableProcess tp) {

                        StringBuilder sql = new StringBuilder();
                        sql
                                .append("create table if not exists ")
                                .append(tp.getSinkTable())
                                .append("(")
                                // "a,b,c" -> "a varchar,b varchar,c varchar"
                                .append(tp.getSinkColumns().replaceAll("[^,]+", "$0 varchar"))
                                .append(", constraint pk primary key(")
                                .append(tp.getSinkPk() == null ? "id" : tp.getSinkPk())
                                .append("))")
                                .append(tp.getSinkExtend() == null ? "" : tp.getSinkExtend());

                        System.out.println("建表语句: \n" + sql);

                        // try-with-resources: the statement is closed even when execute() throws
                        // (the original leaked it on the failure path).
                        try (PreparedStatement ps = conn.prepareStatement(sql.toString())) {
                            ps.execute();
                        } catch (SQLException e) {
                            // Preserve the cause so the underlying SQL error is not lost.
                            throw new RuntimeException("在phoenix中建表语句执行失败, 请检查:" + sql, e);
                        }

                    }
                });

    }

    /**
     * Reads the dimension config table with Flink CDC and deserializes each change
     * event's "after" image into a {@link TableProcess}. Delete events (null "after")
     * are filtered out so downstream operators never see a null config.
     */
    private SingleOutputStreamOperator<TableProcess> readTableProcess(StreamExecutionEnvironment env) {

        // NOTE(review): credentials are hard-coded; move host/user/password to
        // configuration before deploying beyond a dev environment.
        MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
                .hostname("hadoop102")
                .port(3306)
                .databaseList("edu_config")
                .tableList("edu_config.table_process")
                .username("root")
                .password("123321")
                .deserializer(new JsonDebeziumDeserializationSchema())
                .build();

        return env.fromSource(mySqlSource, WatermarkStrategy.noWatermarks(), "mysql-cdc-source")
                .map(new MapFunction<String, TableProcess>() {
                    @Override
                    public TableProcess map(String value) throws Exception {
                        // Delete events carry no "after" image; getObject then returns null.
                        return JSON.parseObject(value).getObject("after", TableProcess.class);
                    }
                })
                // Drop null configs produced by CDC delete events.
                .filter(tp -> tp != null);
    }

    /**
     * Keeps only well-formed JSON records from the "edu" database with type
     * insert/update and a non-empty "data" payload, then parses them.
     */
    private SingleOutputStreamOperator<JSONObject> etl(DataStreamSource<String> stream) {

        return stream.filter(
                new FilterFunction<String>() {
                    @Override
                    public boolean filter(String value) throws Exception {

                        try {
                            JSONObject obj = JSON.parseObject(value);
                            String type = obj.getString("type");
                            String data = obj.getString("data");
                            return "edu".equals(obj.getString("database"))
                                    && ("insert".equals(type) || "update".equals(type))
                                    && data != null
                                    && data.length() > 2;  // "{}" is length 2: reject empty payloads
                        } catch (Exception e) {
                            // Malformed JSON: log and drop rather than fail the job.
                            System.out.println("json格式错误......");
                            return false;
                        }
                    }
                }
        ).map(JSON::parseObject);

    }
}
