package com.atguigu.ting.realtime.app.dim;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.ting.realtime.app.BaseApp;
import com.atguigu.ting.realtime.bean.TableProcess;
import com.atguigu.ting.realtime.common.Constant;
import com.atguigu.ting.realtime.util.FlinkSinkUtil;
import com.atguigu.ting.realtime.util.JdbcUtil;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.state.BroadcastState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ReadOnlyBroadcastState;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.util.Collector;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import java.util.Set;

/**
 * @Author lzc
 * @Date 2023/3/8 13:52
 */
@Slf4j
public class DimApp extends BaseApp {
    public static void main(String[] args) {
        new DimApp().init(2001, 2, "DimApp", Constant.TOPIC_ODS_DB);
    }
    
    @Override
    public void handle(StreamExecutionEnvironment env,
                       DataStreamSource<String> stream) {
        
        // 1. Drop malformed or irrelevant records from the ODS stream
        SingleOutputStreamOperator<JSONObject> etledStream = etl(stream);
        etledStream.print("etled");
        // 2. Read the dim config table (mysql) through Flink CDC
        SingleOutputStreamOperator<TableProcess> tpStream = readTableProcess(env);
        // 3. Create the matching tables in Phoenix
        tpStream = createPhoenixTable(tpStream);
        tpStream.print("tpStream");
        // 4. Connect the data stream with the broadcast config stream
        SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> dataTpStream = connect(etledStream, tpStream);
        dataTpStream.print("dataTpStream");
        // 5. Drop columns that the sink table does not declare
        SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> resultStream = delNotNeedColumns(dataTpStream);
        // 6. Upsert the dim rows into Phoenix (upsert into ...)
        resultStream.print("result");
        writeToPhoenix(resultStream);
        
    }
    
    private void writeToPhoenix(SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> resultStream) {
        /*
            1. A dedicated phoenix sink?
                Flink ships no official phoenix sink.
            
            2. The jdbc sink?
                Not usable: the jdbc sink writes to a single table, but each record
                here may target a different dim table.
                
            3. So: custom sink.
         */
        resultStream.addSink(FlinkSinkUtil.getPhoenixSink());
        
    }
    
    /**
     * Keeps only the columns listed in the config's sink_columns (plus op_type)
     * in each record's data object.
     */
    private SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> delNotNeedColumns(
        SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> dataTpStream) {
        return dataTpStream
            .map(new MapFunction<Tuple2<JSONObject, TableProcess>, Tuple2<JSONObject, TableProcess>>() {
                @Override
                public Tuple2<JSONObject, TableProcess> map(Tuple2<JSONObject, TableProcess> t) throws Exception {
                    
                    // sink_columns is a comma-separated whitelist, e.g. "id,name"
                    List<String> columns = Arrays.asList(t.f1.getSinkColumns().split(","));
                    
                    // The key set is a live view of the JSON object: removing a key
                    // removes the entry. op_type is always kept for the sink.
                    JSONObject data = t.f0;
                    Set<String> keys = data.keySet();
                    keys.removeIf(key -> !columns.contains(key) && !"op_type".equals(key));
                    return t;
                }
            });
    }
    
    /**
     * Connects the data stream with the broadcast config stream and emits
     * (data, config) pairs for tables that have a dim config entry.
     */
    private SingleOutputStreamOperator<Tuple2<JSONObject, TableProcess>> connect(
        SingleOutputStreamOperator<JSONObject> dataStream,
        SingleOutputStreamOperator<TableProcess> tpStream) {
        // 1. Turn the config stream into a broadcast stream.
        //    key:   "<sourceTable>:<sourceType>"
        //    value: TableProcess
        MapStateDescriptor<String, TableProcess> tpStateDesc = new MapStateDescriptor<>("tpState", String.class, TableProcess.class);
        BroadcastStream<TableProcess> tpBcStream = tpStream.broadcast(tpStateDesc);
        // 2. Connect the data stream with the broadcast stream
        return dataStream
            .connect(tpBcStream)
            .process(new BroadcastProcessFunction<JSONObject, TableProcess, Tuple2<JSONObject, TableProcess>>() {
                // Data side: look up this record's table in the broadcast state.
                // NOTE(review): data records that arrive before the config snapshot
                // has been broadcast are silently dropped; preload the config table
                // in open() if that matters.
                @Override
                public void processElement(JSONObject obj,
                                           ReadOnlyContext ctx,
                                           Collector<Tuple2<JSONObject, TableProcess>> out) throws Exception {
                    
                    ReadOnlyBroadcastState<String, TableProcess> state = ctx.getBroadcastState(tpStateDesc);
                    // assumes dim config rows use source_type = "ALL" -- TODO confirm against the config table
                    String key = getKey(obj.getString("table"), "ALL");
                    
                    TableProcess tp = state.get(key);
                    // tp == null means this table is not a dim table we care about
                    if (tp != null) {
                        JSONObject data = obj.getJSONObject("data");
                        // carry the maxwell operation type along for the sink
                        data.put("op_type", obj.getString("type"));
                        out.collect(Tuple2.of(data, tp));
                    }
                }
                
                // Broadcast side: store the config in the broadcast state.
                // Each config record is processed once per parallel subtask.
                @Override
                public void processBroadcastElement(TableProcess tp,
                                                    Context ctx,
                                                    Collector<Tuple2<JSONObject, TableProcess>> out) throws Exception {
                    log.info("table process config: {}", tp);
                    
                    // 1. Get the broadcast state
                    BroadcastState<String, TableProcess> state = ctx.getBroadcastState(tpStateDesc);
                    // 2. Store the config into the state
                    String key = getKey(tp.getSourceTable(), tp.getSourceType());
                    
                    if ("d".equals(tp.getOp())) {
                        // config row was deleted -> remove it from the state
                        state.remove(key);
                    } else {
                        state.put(key, tp);
                    }
                    
                }
                
                private String getKey(String table, String type) {
                    return table + ":" + type;
                }
                
            });
        
        
    }
    
    /**
     * Creates/drops the Phoenix table for each config record via jdbc,
     * then forwards the record unchanged.
     */
    private SingleOutputStreamOperator<TableProcess> createPhoenixTable(
        SingleOutputStreamOperator<TableProcess> tpStream) {
        return tpStream
            .process(new ProcessFunction<TableProcess, TableProcess>() {
                
                private Connection conn;
                
                @Override
                public void open(Configuration parameters) throws Exception {
                    // one phoenix jdbc connection per subtask, reused for all DDL
                    conn = JdbcUtil.getPhoenixConnection();
                }
                
                @Override
                public void close() throws Exception {
                    JdbcUtil.closeConnection(conn);
                }
                
                @Override
                public void processElement(TableProcess tp,
                                           Context ctx,
                                           Collector<TableProcess> out) throws Exception {
                    // r (snapshot read) / c (insert): create the table
                    // d (delete): drop the table
                    // u (update): drop then re-create
                    String op = tp.getOp();
                    if ("r".equals(op) || "c".equals(op)) {
                        createTable(tp);
                    } else if ("d".equals(op)) {
                        delTable(tp);
                    } else {
                        delTable(tp);
                        createTable(tp);
                    }
                    
                    out.collect(tp);
                }
                
                private void delTable(TableProcess tp) throws SQLException {
                    String sql = "drop table if exists " + tp.getSinkTable();
                    log.info("删表语句: {}", sql);
                    // try-with-resources: the statement is closed even when execute() throws
                    // (the original leaked it on failure)
                    try (PreparedStatement ps = conn.prepareStatement(sql)) {
                        ps.execute();
                    }
                }
                
                
                private void createTable(TableProcess tp) throws SQLException {
                    // e.g. create table if not exists user(id varchar, name varchar, constraint abc primary key(id))
                    StringBuilder sql = new StringBuilder();
                    sql
                        .append("create table if not exists ")
                        .append(tp.getSinkTable())
                        .append("(")
                        // turn "id,name" into "id varchar,name varchar"
                        .append(tp.getSinkColumns().replaceAll("[^,]+", "$0 varchar"))
                        .append(", constraint abc primary key(")
                        .append(tp.getSinkPk() == null ? "id" : tp.getSinkPk())
                        .append("))")
                        // NOTE(review): if sink_extend arrives as the literal string
                        // "null" it is still appended -- verify the upstream config
                        .append(tp.getSinkExtend() == null ? "" : tp.getSinkExtend());
                    log.info("建表语句: {}", sql);
                    try (PreparedStatement ps = conn.prepareStatement(sql.toString())) {
                        ps.execute();
                    }
                }
            });
    }
    
    /**
     * Reads the table_process config table with Flink CDC (snapshot + binlog)
     * and maps each change event to a TableProcess, keeping only dim configs.
     */
    private SingleOutputStreamOperator<TableProcess> readTableProcess(StreamExecutionEnvironment env) {
        // equivalent to appending url?useSSL=false
        Properties props = new Properties();
        props.setProperty("useSSL", "false");
        MySqlSource<String> mySqlSource = MySqlSource.<String>builder()
            .hostname(Constant.MYSQL_HOST)
            .port(Constant.MYSQL_PORT)
            .databaseList("ting_config") // captured database; set tableList to ".*" to sync the whole database
            .tableList("ting_config.table_process") // captured table
            // TODO(review): credentials are hard-coded; move them into Constant/config
            .username("root")
            .password("aaaaaa")
            .jdbcProperties(props)
            .serverTimeZone("Asia/Shanghai")
            .deserializer(new JsonDebeziumDeserializationSchema()) // converts SourceRecord to JSON String
            .startupOptions(StartupOptions.initial()) // snapshot the table first, then tail the binlog
            .build();
        
        return env
            .fromSource(mySqlSource, WatermarkStrategy.noWatermarks(), "mysql-cdc")
            .map(new MapFunction<String, TableProcess>() {
                @Override
                public TableProcess map(String value) throws Exception {
                    JSONObject obj = JSON.parseObject(value);
                    String op = obj.getString("op");
                    // delete events carry the row image in "before";
                    // r/c/u events carry it in "after"
                    TableProcess tp;
                    if ("d".equals(op)) {
                        tp = obj.getObject("before", TableProcess.class);
                    } else {
                        tp = obj.getObject("after", TableProcess.class);
                    }
                    tp.setOp(op);
                    return tp;
                }
            })
            .filter(tp -> "dim".equals(tp.getSinkType())); // keep only dim-table configs
        
        
    }
    
    /**
     * Filters the raw ODS stream down to well-formed maxwell events from the
     * "ting" database with a non-empty data payload, parses them, and
     * normalizes "bootstrap-insert" to "insert".
     */
    private SingleOutputStreamOperator<JSONObject> etl(DataStreamSource<String> stream) {
        
        
        return stream
            .filter(new FilterFunction<String>() {
                @Override
                public boolean filter(String value) throws Exception {
                    try {
                        JSONObject obj = JSON.parseObject(value);
                        String type = obj.getString("type");
                        String data = obj.getString("data");
                        return "ting".equals(obj.getString("database"))
                            && null != obj.getString("table")
                            && ("insert".equals(type)
                            || "update".equals(type)
                            || "delete".equals(type)
                            || "bootstrap-insert".equals(type))
                            && null != data
                            && data.length() > 2; // "{}" (empty payload) is 2 chars
                    } catch (Exception e) {
                        log.warn("数据格式不对,不是正确的 json 数据: {}", value);
                        return false;
                    }
                }
            })
            .map(new MapFunction<String, JSONObject>() {
                @Override
                public JSONObject map(String json) throws Exception {
                    JSONObject obj = JSON.parseObject(json);
                    // Normalize only the type field ("bootstrap-insert" -> "insert").
                    // The original replaceAll("bootstrap-", "") ran on the raw JSON
                    // string and could also corrupt payload values containing that text.
                    String type = obj.getString("type");
                    if (type != null && type.startsWith("bootstrap-")) {
                        obj.put("type", type.substring("bootstrap-".length()));
                    }
                    return obj;
                }
            });
        
    }
}
/*
 create table if not exists user(id varchar, name varchar, constraint abc primary key(id)) SALT_BUCKETS = 4
 create table if not exists user(id varchar, name varchar, constraint abc primary key(id)) null
    建盐表
    
    ->01
    01-02
    02-03
    04-
    
region server
    region
        数据
    
    默认情况下: 一张表一个 region
    
    region 的分裂
        当增长到一个值之后, 会自动一分为二
        
        0.96 之前: 默认 10g
        0.96 之后:
            min(2*n^3*128,10g )
            
    region 的迁移
        尽量把一张表的多个 region 分布到不同的 region server
        
如何避免分裂和迁移:
    预分区
        建表的时候, 提前分好区
        
        
phoenix 如何建预分区表:
    盐表






-----
1. 从 ods 读取数据
    ods_db
    
2. 从流中过滤出需要的维度表数据

3. 把不同的维度表的数据,写入到不同的维度表中
        phoenix 中的表


历史维度数据,也需要同步到维度层!
    maxwell-bootstrap
    
------------
如何找到需要的维度表的数据?
    user_info spu_info  sku_info
    
    filter
        "user_info"=table || "spu_info"=table ....

把需要的表的信息放入到mysql表中: 配置表
 配置表的数据做成流
    user_info spu_info  sku_info
 
 配置流中的数据控制数据流中的数据
 
  数据流去 connect 配置流(广播流)

-----
https://developer.aliyun.com/article/777502


op:
    r  读取的是快照
        before= null
        after=not null
    u  更新
        before=not null
        after=not null
    d  删除
        before=not null
        after=null
    c  新增数据
        before= null
        after=not null
    
    修改主键:
    先 d 再 c
    

 */