package com.bw.dim;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.ververica.cdc.connectors.mysql.MySQLSource;
import com.alibaba.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.alibaba.ververica.cdc.debezium.DebeziumSourceFunction;
import com.bw.util.JsonDeserializationSchemaUtil;
import com.bw.util.MyKafkaUtil;
import org.apache.flink.api.common.state.BroadcastState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ReadOnlyBroadcastState;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.util.Collector;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;

/**
 * Flink job that splits a Kafka ODS stream into dimension and fact data.
 *
 * <p>A MySQL-CDC stream of the config table {@code tms01_config.tms_config_dim} is
 * broadcast to all subtasks; it maps {@code source_table -> sink_table}. Records from
 * the Kafka main stream whose {@code table} field matches a configured source table are
 * upserted into the corresponding Phoenix (HBase) dimension table; all other records
 * are forwarded downstream as fact data.
 */
public class Test2 {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 so the broadcast/config ordering is easy to reason about.
        env.setParallelism(1);

        // Main stream: raw change records from Kafka.
        // NOTE(review): the timestamp-suffixed group id forces a fresh consumer group
        // (re-reads from the configured offset reset) on every run — confirm intended.
        DataStream<String> stream = env
                .addSource(MyKafkaUtil.getKafkaConsumer("tms_ods_yk6", "test1" + System.currentTimeMillis()));

        // Config stream: CDC on the dim-routing config table.
        // StartupOptions.initial() snapshots existing rows first, then tails the binlog,
        // so pre-existing routing rows are broadcast on startup.
        DebeziumSourceFunction<String> config = MySQLSource.<String>builder()
                .hostname("hadoop102")
                .port(3306)
                .databaseList("tms01_config") // monitor all tables under inventory database
                .tableList("tms01_config.tms_config_dim")
                .username("root")
                .password("123456")
                .startupOptions(StartupOptions.initial())
                .deserializer(new JsonDeserializationSchemaUtil()) // converts SourceRecord to String
                .build();

        DataStreamSource<String> configStream = env.addSource(config);
        configStream.print();

        // Broadcast state descriptor: source_table -> sink_table.
        MapStateDescriptor<String, String> mapStateDescriptor =
                new MapStateDescriptor<>("tests", String.class, String.class);
        // Turn the config stream into a broadcast stream backed by that state.
        BroadcastStream<String> broadcast = configStream.broadcast(mapStateDescriptor);

        // Connect the main stream with the broadcast config stream.
        BroadcastConnectedStream<String, String> connect = stream.connect(broadcast);

        SingleOutputStreamOperator<JSONObject> connectStream = connect.process(new BroadcastProcessFunction<String, String, JSONObject>() {
            // Phoenix JDBC connection, one per subtask; opened in open(), closed in close().
            Connection conn = null;

            @Override
            public void open(Configuration parameters) throws Exception {
                Class.forName("org.apache.phoenix.jdbc.PhoenixDriver");
                conn = DriverManager.getConnection("jdbc:phoenix:hadoop102:2181");
                // Phoenix defaults to manual commit; auto-commit so each upsert is flushed.
                conn.setAutoCommit(true);
            }

            @Override
            public void close() throws Exception {
                if (conn != null) {
                    conn.close();
                }
            }

            /**
             * Handles one main-stream record: if its {@code table} field is registered
             * in the broadcast state, upsert it into the mapped Phoenix dimension table;
             * otherwise emit it downstream as fact data.
             */
            @Override
            public void processElement(String value, BroadcastProcessFunction<String, String, JSONObject>.ReadOnlyContext readOnlyContext, Collector<JSONObject> collector) throws Exception {
                // Read-only view of the broadcast routing state.
                ReadOnlyBroadcastState<String, String> broadcastState = readOnlyContext.getBroadcastState(mapStateDescriptor);
                JSONObject jsonObject = JSON.parseObject(value);
                // The main-stream record identifies its origin table in "table".
                String source_table = jsonObject.getString("table");

                // Look up the sink (dimension) table for this source table.
                String sinkTable = broadcastState.get(source_table);

                if (sinkTable != null) {
                    // Dimension data: upsert (id, whole-row JSON) into Phoenix.
                    // Table names cannot be bound as JDBC parameters; sinkTable comes
                    // from the trusted config table, not from user input.
                    try (PreparedStatement ps = conn.prepareStatement("upsert into " + sinkTable + " values (?,?)")) {
                        // Primary key from the record payload.
                        ps.setObject(1, jsonObject.getJSONObject("data").getString("id"));
                        // Full payload stored as a single JSON string column.
                        ps.setObject(2, jsonObject.getJSONObject("data").toJSONString());
                        ps.execute();
                    } catch (Exception e) {
                        // Best-effort write: log and keep the stream alive on a bad row.
                        e.printStackTrace();
                    }
                } else {
                    // Fact data: pass through downstream.
                    collector.collect(jsonObject);
                }
            }

            /**
             * Handles one config-table CDC record: registers the source→sink mapping in
             * broadcast state and creates the Phoenix sink table if it does not exist.
             */
            @Override
            public void processBroadcastElement(String value, BroadcastProcessFunction<String, String, JSONObject>.Context context, Collector<JSONObject> collector) throws Exception {
                // Example payload:
                // {"op":"c","data":{"sink_table":"dim_user_info","source_table":"user_info","sink_pk":"id","sink_columns":"id,login_name,nick_name,passwd,real_name,phone_num,email,user_level,birthday,gender,create_time,update_time,is_deleted"},"db":"tms01_config","table":"tms_config_dim"}
                JSONObject data = JSON.parseObject(value).getJSONObject("data");
                String source_table = data.getString("source_table");
                String sink_table = data.getString("sink_table");

                // Register the routing entry in broadcast state.
                BroadcastState<String, String> broadcastState = context.getBroadcastState(mapStateDescriptor);
                broadcastState.put(source_table, sink_table);

                // Ensure the Phoenix sink table exists. DDL identifiers cannot be
                // parameterized; sink_table comes from the trusted config table.
                String sql = "create table if not exists " + sink_table + " (id varchar primary key , line varchar) ";
                // try-with-resources: the original leaked this statement on every config event.
                try (PreparedStatement ps = conn.prepareStatement(sql)) {
                    ps.executeUpdate();
                }
            }
        });

        connectStream.print();

        env.execute();
    }
}
