package com.atguigu.bigdata.edu.realtime.app.dim;


import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;

import com.atguigu.bigdata.edu.realtime.app.BaseAppV1;
import com.atguigu.bigdata.edu.realtime.bean.TableProcess;
import com.atguigu.bigdata.edu.realtime.common.Constant;
import com.atguigu.bigdata.edu.realtime.util.FlinkSinkUtil;
import com.atguigu.bigdata.edu.realtime.util.JdbcUtil;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.state.BroadcastState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ReadOnlyBroadcastState;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.util.Collector;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;

/**
 * @Author lzc
 * @Date 2022/10/5 15:08
 */
/**
 * Dimension-layer (DIM) application.
 *
 * <p>Pipeline: reads the ODS change-log topic, cleans it, joins it (via broadcast
 * state) with a MySQL-CDC-driven configuration stream that describes which tables
 * are dimensions, creates/drops the corresponding Phoenix tables on config changes,
 * strips unneeded columns, and finally writes dimension rows to Phoenix through a
 * custom sink.
 */
public class DimApp extends BaseAppV1 {
    public static void main(String[] args) {
        // args: rest port 2001, parallelism 2, job id / consumer group "DimApp",
        // source topic = ODS db change-log topic
        new DimApp().init(
            2001,
            2,
            "DimApp",
            Constant.TOPIC_ODS_DB
        );
        
    }
    
    @Override
    protected void handle(StreamExecutionEnvironment env,
                          DataStreamSource<String> stream) {
        // Concrete business logic for the DIM layer:
        // 1. Clean the raw stream: drop malformed / irrelevant records
        SingleOutputStreamOperator<JSONObject> etledStream = etl(stream);
        etledStream.print("etledStream");
        // 2. Read the configuration table via Flink CDC
        SingleOutputStreamOperator<TableProcess> tpProcess = readTableProcess(env);
        // 3. Create or drop dimension tables in HBase (through Phoenix) per config
        tpProcess = createDimTable(tpProcess);
        tpProcess.print("tpProcess");
        // 4. Connect the data stream with the (broadcast) config stream
        SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> connectedStream = connect(etledStream, tpProcess);
        // 5. Drop columns that are not configured as sink columns
        SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> resultStream = deleteNoNeedColumns(connectedStream);
        // 6. Write dimension rows to Phoenix via a custom sink
        writeToPhoenix(resultStream);
        
    }
    
    /**
     * Writes the (data, config) pairs to Phoenix with a custom sink.
     *
     * <p>Flink's built-in JdbcSink cannot be used here because it targets a single
     * fixed table, while each record may go to a different dimension table
     * (the target table name comes from the TableProcess config).
     */
    private void writeToPhoenix(SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> resultStream) {
        resultStream.addSink(FlinkSinkUtil.getPhoenixSink());
        
    }
    
    /**
     * Removes from each data record every field that is not listed in the
     * config's sink_columns. The synthetic "op_type" field (added in connect())
     * is always kept so downstream cache invalidation can see insert vs update.
     */
    private SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>>  deleteNoNeedColumns(
        SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> connectedStream) {
        return connectedStream
            .map(new MapFunction<Tuple2<JSONObject, TableProcess>, Tuple2<JSONObject, TableProcess>>() {
                @Override
                public Tuple2<JSONObject, TableProcess> map(Tuple2<JSONObject, TableProcess> t) throws Exception {
                    JSONObject data = t.f0; // dimension record
                    List<String> columns = Arrays.asList(t.f1.getSinkColumns().split(",")); // configured sink columns
                    // Remove every key that is neither a configured column nor "op_type"
                    data.keySet().removeIf(key -> !columns.contains(key) && !"op_type".equals(key));
                    
                    return t;
                }
            });
    }
    
    /**
     * Connects the cleaned data stream with the broadcast config stream.
     *
     * <p>Config records are keyed by {@code sourceTable + ":" + sourceType} in both
     * the broadcast state and a pre-loaded fallback map. The fallback map is filled
     * in open() via plain JDBC so that data records arriving before their config
     * record (broadcast is asynchronous) are not silently dropped.
     */
    private SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> connect(
        SingleOutputStreamOperator<JSONObject> dataStream,
        SingleOutputStreamOperator<TableProcess> confStream) {
        // State key (String): sourceTable + ":" + sourceType; value: TableProcess
        MapStateDescriptor<String, TableProcess> tpStateDesc = new MapStateDescriptor<>("tpState", String.class, TableProcess.class);
        // 1. Turn the config stream into a broadcast stream
        BroadcastStream<TableProcess> tpBcStream = confStream.broadcast(tpStateDesc);
        // 2. Connect the data stream with the broadcast stream
        return dataStream
            .connect(tpBcStream)
            .process(new BroadcastProcessFunction<JSONObject, TableProcess, Tuple2<JSONObject, TableProcess>>() {
    
                // Pre-loaded config snapshot; fallback when broadcast state lags behind
                private HashMap<String, TableProcess> tpMap;
    
                @Override
                public void open(Configuration parameters) throws Exception {
                    // Broadcast state cannot be accessed in open(), so pre-load the whole
                    // config table via plain JDBC into a local map.
                    Connection conn = JdbcUtil.getMysqlConnection();
                    try {
                        String sql = "select * from edu_config.table_process";
                        List<TableProcess> list = JdbcUtil.queryList(conn, sql, null, TableProcess.class);
                        tpMap = new HashMap<>();
                        for (TableProcess tp : list) {
                            tpMap.put(tp.getSourceTable() + ":" + tp.getSourceType(), tp);
                        }
                    } finally {
                        // FIX: this connection was previously never closed (leak per subtask)
                        JdbcUtil.close(conn);
                    }
                }
                
                @Override
                public void processElement(JSONObject obj,
                                           ReadOnlyContext ctx,
                                           Collector<Tuple2<JSONObject, TableProcess>> out) throws Exception {
                    // Look up the config for this record: broadcast state first,
                    // then the pre-loaded map (covers data-before-config races).
                    // NOTE(review): the lookup key hard-codes ":ALL" while state/map
                    // entries are keyed by sourceType — this only matches when
                    // source_type is always "ALL"; confirm against the config table.
                    String key = obj.getString("table") + ":ALL";
                    ReadOnlyBroadcastState<String, TableProcess> state = ctx.getBroadcastState(tpStateDesc);
                    TableProcess tp = state.get(key);
                    if (tp == null) {
                        // Not in broadcast state yet: fall back to the pre-loaded map
                        tp = tpMap.get(key);
                    }
                    
                    // tp may legitimately be null: fact tables and unwanted dimensions
                    if (tp != null) {
                        JSONObject data = obj.getJSONObject("data");
                        // Record the change type (insert/update) for later cache refresh
                        data.put("op_type", obj.getString("type"));
                        out.collect(Tuple2.of(data, tp));
                    }
                }
                
                @Override
                public void processBroadcastElement(TableProcess tp,
                                                    Context ctx,
                                                    Collector<Tuple2<JSONObject, TableProcess>> out) throws Exception {
                    // Store the incoming config record into broadcast state
                    BroadcastState<String, TableProcess> state = ctx.getBroadcastState(tpStateDesc);
                    String key = tp.getSourceTable() + ":" + tp.getSourceType();
                    state.put(key, tp);
                }
            });
        
        
    }
    
    /**
     * Creates or drops dimension tables in Phoenix according to the CDC op of each
     * config record: r/c = create, d = drop, u = drop then recreate.
     * Passes every config record through unchanged.
     */
    private SingleOutputStreamOperator<TableProcess> createDimTable(
        SingleOutputStreamOperator<TableProcess> tpProcess) {
        return tpProcess
            // keep only dimension-table configs
            .filter(tp -> "dim".equals(tp.getSinkType()))
            .process(new ProcessFunction<TableProcess, TableProcess>() {
                
                private Connection conn; // long-lived Phoenix connection, closed in close()
                
                @Override
                public void open(Configuration parameters) throws Exception {
                    // Connect to Phoenix once per task
                    conn = JdbcUtil.getPhoenixConnection();
                }
                
                @Override
                public void close() throws Exception {
                    JdbcUtil.close(conn);
                }
                
                @Override
                public void processElement(TableProcess tp,
                                           Context ctx,
                                           Collector<TableProcess> out) throws Exception {
                    // Build the DDL, e.g.:
                    // create table if not exists user(id varchar, name varchar,
                    //   constraint pk primary key (id)) SALT_BUCKETS = 4
                    String op = tp.getOp();
                    StringBuilder sql;
                    
                    if ("r".equals(op) || "c".equals(op)) {
                        sql = getCreateTableSql(tp);
                    } else if ("d".equals(op)) {
                        sql = getDelTableSql(tp);
                    } else {
                        // u (update): drop first ...
                        PreparedStatement ps = conn.prepareStatement(getDelTableSql(tp).toString());
                        ps.execute();
                        ps.close();
                        // ... then recreate
                        sql = getCreateTableSql(tp);
                    }
                    
                    // Execute the DDL and release the statement
                    PreparedStatement ps = conn.prepareStatement(sql.toString());
                    ps.execute();
                    ps.close();
                    
                    out.collect(tp);
                }
                
                private StringBuilder getDelTableSql(TableProcess tp) {
                    // FIX: "if exists" makes drops idempotent — a replayed "d"/"u" op
                    // no longer fails when the table is already gone
                    return new StringBuilder("drop table if exists " + tp.getSinkTable());
                }
                
                private StringBuilder getCreateTableSql(TableProcess tp) {
                    StringBuilder sql = new StringBuilder();
                    
                    sql
                        .append("create table if not exists ")
                        .append(tp.getSinkTable())
                        .append("(")
                        // every configured column becomes "<col> varchar"
                        .append(tp.getSinkColumns().replaceAll("[^,]+", "$0 varchar"))
                        .append(", constraint pk primary key (")
                        .append(tp.getSinkPk() == null ? "id" : tp.getSinkPk())
                        .append("))")
                        // optional extras, e.g. SALT_BUCKETS = 4
                        .append(tp.getSinkExtend() == null ? "" : tp.getSinkExtend());
                    System.out.println("维度建表语句: " + sql);
                    return sql;
                }
            });
        
    }
    
    /**
     * Reads the config table via Flink CDC (snapshot first, then binlog) and maps
     * each Debezium JSON record to a TableProcess: "d" ops use the "before" image,
     * everything else uses "after"; the op code is stored on the bean.
     */
    private SingleOutputStreamOperator<TableProcess> readTableProcess(StreamExecutionEnvironment env) {
        MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
            .hostname("hadoop162")
            .port(3306)
            .databaseList("edu_config") // captured database
            .tableList("edu_config.table_process") // captured table
            .username("root")
            .password("aaaaaa")
            .deserializer(new JsonDebeziumDeserializationSchema()) // SourceRecord -> JSON String
            // initial(): full snapshot on startup, then switch to binlog streaming
            .startupOptions(StartupOptions.initial())
            .build();
        
        return env
            .fromSource(mySqlSource, WatermarkStrategy.noWatermarks(), "mysql-source")

            .map((MapFunction<String, TableProcess>) value -> {
                JSONObject obj = JSON.parseObject(value);
                String op = obj.getString("op");
                TableProcess tp;
                if ("d".equals(op)) {
                    // delete: the row content lives in "before"
                    tp = obj.getObject("before", TableProcess.class);
                } else {
                    tp = obj.getObject("after", TableProcess.class);
                }
                // NOTE(review): tp is assumed non-null here — a record with both
                // before/after missing would NPE; confirm Debezium payload shape
                tp.setOp(op);
                return tp;
            });
        
    }
    
    /**
     * Cleans the raw ODS stream: keeps only well-formed JSON from the "edu"
     * database with type insert/update and a non-empty "data" payload
     * ("bootstrap-insert" is normalized to "insert" by stripping the prefix).
     */
    private SingleOutputStreamOperator<JSONObject> etl(DataStreamSource<String> stream) {
        return stream
            .filter(new FilterFunction<String>() {
                @Override
                public boolean filter(String value) throws Exception {
                    try {
                        JSONObject obj = JSON.parseObject(value.replaceAll("bootstrap-", ""));
                        String type = obj.getString("type");
                        String data = obj.getString("data");
                        // reaching here proves the JSON parsed successfully
                        return "edu".equals(obj.getString("database"))
                            && ("insert".equals(type) || "update".equals(type))
                            && data != null
                            && data.length() > 2; // "{}" is 2 chars: require real content
                        
                    } catch (Exception e) {
                        System.out.println("你的 json 格式数据异常: " + value);
                        return false;
                    }
                }
            })
            .map(json -> JSON.parseObject(json.replaceAll("bootstrap-", "")));
        
    }
}
