package com.bujunjie.study.realtime.dim.app;

import cn.hutool.core.util.ObjectUtil;
import cn.hutool.core.util.StrUtil;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.TypeReference;
import com.alibaba.fastjson.parser.Feature;
import com.bujunjie.study.realtime.common.base.BaseApp;
import com.bujunjie.study.realtime.common.constant.FlinkConstant;
import com.bujunjie.study.realtime.common.enums.FlinkOpEnum;
import com.bujunjie.study.realtime.common.enums.MaxwellOpEnum;
import com.bujunjie.study.realtime.common.util.FlinkSourceUtil;
import com.bujunjie.study.realtime.common.util.HBaseUtil;
import com.bujunjie.study.realtime.dim.function.HBaseSinkFunction;
import com.bujunjie.study.realtime.dim.function.TableProcessFunction;
import com.bujunjie.study.realtime.dim.model.vo.DimTableVO;
import com.bujunjie.study.realtime.dim.model.vo.Mysql2FlinkCdcVO;
import com.bujunjie.study.realtime.dim.model.vo.Mysql2MaxwellVO;
import com.bujunjie.study.realtime.dim.model.vo.ProductVO;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.BroadcastConnectedStream;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.hadoop.hbase.client.Connection;

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Objects;

/**
 * <p>实时数仓的维度数据</p>
 *
 * @author bu.junjie
 * @version 1.0.0
 * @createTime 2025/9/12 15:38
 */
public class DimApp extends BaseApp {

    public static void main(String[] args) throws Exception {
        new DimApp().start(10002, 4, "dim_app", FlinkConstant.TOPIC_DB);
    }

    @Override
    public void handle(StreamExecutionEnvironment env, DataStreamSource<String> kafkaStrDS) {
        // 1. Parse and filter the raw Kafka records produced by Maxwell.
        SingleOutputStreamOperator<Mysql2MaxwellVO<ProductVO>> maxwellBinLogDs = this.etl(kafkaStrDS);
        // 2. Read the dimension-table configuration rows via Flink CDC.
        SingleOutputStreamOperator<DimTableVO> dtDS = this.readTableProcess(env);
        // 3. Create or drop HBase tables according to the configuration operations.
        dtDS = createHBaseTable(dtDS);
        // 4. Filter the business stream against the broadcast configuration so that
        //    only rows belonging to configured dimension tables survive.
        SingleOutputStreamOperator<Tuple2<Map<String, Object>, DimTableVO>> connectDS = this.connect(dtDS, maxwellBinLogDs);
        // 5. Persist the dimension rows into HBase.
        this.writeHBase(connectDS);
    }

    /**
     * <p>Writes the filtered dimension rows into HBase.
     * <ol>
     *     <li>{"tm_name":"Redmi","id":1,"type":"update"},
     *     TableProcessDim(sourceTable=base_trademark, sinkTable=dim_base_trademark, sinkColumns=id,tm_name, sinkFamily=info, sinkRowKey=id, op=r)</li>
     * </ol>
     * </p>
     *
     * @param dimDS stream produced by joining the business stream with the broadcast config stream
     * @author bu.junjie
     * @date 2025/9/15 9:25
     */
    private void writeHBase(SingleOutputStreamOperator<Tuple2<Map<String, Object>, DimTableVO>> dimDS) {
        dimDS.addSink(new HBaseSinkFunction());
    }

    /**
     * <p>Connects the business stream with the broadcast configuration stream and
     * keeps only records that match a configured dimension table.</p>
     *
     * @param dtDS            dimension-table configuration stream
     * @param maxwellBinLogDs business data stream synced by Maxwell
     * @return stream of (row data, matching dimension config) tuples
     * @author bu.junjie
     * @date 2025/9/13 9:13
     */
    private SingleOutputStreamOperator<Tuple2<Map<String, Object>, DimTableVO>> connect(SingleOutputStreamOperator<DimTableVO> dtDS, SingleOutputStreamOperator<Mysql2MaxwellVO<ProductVO>> maxwellBinLogDs) {
        // Broadcast the configuration stream. The descriptor is parameterized
        // (the original raw MapStateDescriptor compiled with an unchecked warning).
        MapStateDescriptor<String, DimTableVO> mapStateDescriptor =
                new MapStateDescriptor<>("mapStateDescriptor", String.class, DimTableVO.class);
        BroadcastStream<DimTableVO> broadcastStream = dtDS.broadcast(mapStateDescriptor);
        // Connect the main stream with the broadcast config stream.
        BroadcastConnectedStream<Mysql2MaxwellVO<ProductVO>, DimTableVO> connectDS = maxwellBinLogDs.connect(broadcastStream);
        // Process both streams; TableProcessFunction does the matching/filtering.
        return connectDS.process(new TableProcessFunction(mapStateDescriptor));
    }


    /**
     * <p>Creates or drops HBase dimension tables driven by the Flink CDC config stream.
     * Runs with parallelism 1 so DDL operations are not issued concurrently.</p>
     *
     * @param dtDS Flink CDC configuration stream
     * @return the same stream, passed through after the DDL side effect
     * @author bu.junjie
     * @date 2025/9/12 17:21
     */
    public SingleOutputStreamOperator<DimTableVO> createHBaseTable(SingleOutputStreamOperator<DimTableVO> dtDS) {
        dtDS = dtDS.map(new RichMapFunction<DimTableVO, DimTableVO>() {

            private Connection hbaseConn;

            @Override
            public void open(Configuration parameters) throws Exception {
                hbaseConn = HBaseUtil.getHBaseConnection();
            }

            @Override
            public void close() throws Exception {
                HBaseUtil.closeHBaseConnection(hbaseConn);
            }

            @Override
            public DimTableVO map(DimTableVO dt) throws Exception {
                // Operation type recorded on the config row (c/r/u/d).
                String op = dt.getOp();
                // Target dimension table name in HBase.
                String sinkTable = dt.getSinkTable();
                // Column families, comma-separated in the config row.
                List<String> sinkFamilyList = Arrays.asList(dt.getSinkFamily().split(StrUtil.COMMA));
                if (FlinkOpEnum.D.getOp().equals(op)) {
                    // delete -> drop the table
                    HBaseUtil.dropHBaseTable(hbaseConn, FlinkConstant.HBASE_NAMESPACE, sinkTable);
                } else if (Arrays.asList(FlinkOpEnum.C.getOp(), FlinkOpEnum.R.getOp()).contains(op)) {
                    // create / initial snapshot read -> create the table
                    HBaseUtil.createHBaseTable(hbaseConn, FlinkConstant.HBASE_NAMESPACE, sinkTable, sinkFamilyList);
                } else {
                    // update -> recreate the table with the new schema
                    HBaseUtil.dropHBaseTable(hbaseConn, FlinkConstant.HBASE_NAMESPACE, sinkTable);
                    HBaseUtil.createHBaseTable(hbaseConn, FlinkConstant.HBASE_NAMESPACE, sinkTable, sinkFamilyList);
                }
                return dt;
            }
        }).setParallelism(1);
        return dtDS;
    }

    /**
     * <p>Reads the dimension configuration table via Flink CDC and maps each change
     * record to a {@link DimTableVO} tagged with its operation type.
     * <ol>
     *     <li>"op":"r": {"before":null,"after":{"source_table":"activity_info","sink_table":"dim_activity_info","sink_family":"info","sink_columns":"id,activity_name,activity_type,activity_desc,start_time,end_time,create_time","sink_row_key":"id"},"source":{"version":"1.9.7.Final","connector":"mysql","name":"mysql_binlog_source","ts_ms":0,"snapshot":"false","db":"gmall2024_config","sequence":null,"table":"table_process_dim","server_id":0,"gtid":null,"file":"","pos":0,"row":0,"thread":null,"query":null},"op":"r","ts_ms":1716812196180,"transaction":null}</li>
     *     <li>"op":"c": {"before":null,"after":{"source_table":"a","sink_table":"a","sink_family":"a","sink_columns":"aaa","sink_row_key":"aa"},"source":{"version":"1.9.7.Final","connector":"mysql","name":"mysql_binlog_source","ts_ms":1716812267000,"snapshot":"false","db":"gmall2024_config","sequence":null,"table":"table_process_dim","server_id":1,"gtid":null,"file":"mysql-bin.000002","pos":11423611,"row":0,"thread":14,"query":null},"op":"c","ts_ms":1716812265698,"transaction":null}</li>
     *     <li>"op":"u": {"before":{"source_table":"a","sink_table":"a","sink_family":"a","sink_columns":"aaa","sink_row_key":"aa"},"after":{"source_table":"a","sink_table":"a","sink_family":"a","sink_columns":"aaabbb","sink_row_key":"aa"},"source":{"version":"1.9.7.Final","connector":"mysql","name":"mysql_binlog_source","ts_ms":1716812311000,"snapshot":"false","db":"gmall2024_config","sequence":null,"table":"table_process_dim","server_id":1,"gtid":null,"file":"mysql-bin.000002","pos":11423960,"row":0,"thread":14,"query":null},"op":"u","ts_ms":1716812310215,"transaction":null}</li>
     *     <li>"op":"d": {"before":{"source_table":"a","sink_table":"a","sink_family":"a","sink_columns":"aaabbb","sink_row_key":"aa"},"after":null,"source":{"version":"1.9.7.Final","connector":"mysql","name":"mysql_binlog_source","ts_ms":1716812341000,"snapshot":"false","db":"gmall2024_config","sequence":null,"table":"table_process_dim","server_id":1,"gtid":null,"file":"mysql-bin.000002","pos":11424323,"row":0,"thread":14,"query":null},"op":"d","ts_ms":1716812340475,"transaction":null}</li>
     * </ol>
     * </p>
     *
     * @param env stream execution environment
     * @return stream of dimension-table configuration objects
     * @author bu.junjie
     * @date 2025/9/12 16:54
     */
    public SingleOutputStreamOperator<DimTableVO> readTableProcess(StreamExecutionEnvironment env) {
        // Build the MySQL CDC source for the config database/table.
        MySqlSource<String> mySqlSource = FlinkSourceUtil.getMySqlSource(FlinkConstant.DATA_BASE, FlinkConstant.DIM_TABLE_NAME);
        // Parallelism 1: the config table is tiny and ordering of DDL events matters.
        DataStreamSource<String> mysqlStrDS = env.fromSource(mySqlSource, WatermarkStrategy.noWatermarks(), "mysql-source").setParallelism(1);
        // Map each JSON change record to the config VO.
        SingleOutputStreamOperator<DimTableVO> tpDS = mysqlStrDS.map(new MapFunction<String, DimTableVO>() {
            @Override
            public DimTableVO map(String value) throws Exception {
                Mysql2FlinkCdcVO<DimTableVO> flinkCdcVO = JSON.parseObject(value, new TypeReference<Mysql2FlinkCdcVO<DimTableVO>>() {
                }, Feature.SupportNonPublicField, Feature.AllowUnQuotedFieldNames);
                // Operation type of the change record.
                String op = flinkCdcVO.getOp();
                DimTableVO dimTableVO;
                if (FlinkOpEnum.D.getOp().equals(op)) {
                    // Delete: take the pre-image of the row.
                    dimTableVO = flinkCdcVO.getBefore();
                } else {
                    // Read / insert / update: take the post-image of the row.
                    dimTableVO = flinkCdcVO.getAfter();
                }
                // Fail fast with a readable message instead of a bare NPE on setOp
                // if the CDC record carries no payload for this operation.
                Objects.requireNonNull(dimTableVO, () -> "CDC record has no payload for op=" + op);
                dimTableVO.setOp(op);
                return dimTableVO;
            }
        });
        return tpDS;
    }


    /**
     * <p>ETL over the raw Kafka (Maxwell) stream: parses each record and keeps only
     * records from the expected database with a recognized operation type and a
     * non-null data payload.</p>
     *
     * @param kafkaStrDS raw Kafka string stream
     * @return parsed and filtered Maxwell record stream
     * @author bu.junjie
     * @date 2025/9/12 15:45
     */
    private SingleOutputStreamOperator<Mysql2MaxwellVO<ProductVO>> etl(DataStreamSource<String> kafkaStrDS) {

        SingleOutputStreamOperator<Mysql2MaxwellVO<ProductVO>> process = kafkaStrDS.process(new ProcessFunction<String, Mysql2MaxwellVO<ProductVO>>() {
            @Override
            public void processElement(String value, ProcessFunction<String, Mysql2MaxwellVO<ProductVO>>.Context ctx, Collector<Mysql2MaxwellVO<ProductVO>> out) throws Exception {
                // Feature is imported at the top of the file; the original used the
                // fully-qualified name redundantly.
                Mysql2MaxwellVO<ProductVO> maxwellVO = JSON.parseObject(value, new TypeReference<Mysql2MaxwellVO<ProductVO>>() {
                }, Feature.SupportNonPublicField, Feature.AllowUnQuotedFieldNames);
                // Source database name.
                String database = maxwellVO.getDatabase();
                // Maxwell operation type (insert/update/...).
                String type = maxwellVO.getType();
                // Changed row payload.
                ProductVO data = maxwellVO.getData();
                if (FlinkConstant.DATA_BASE.equals(database) && MaxwellOpEnum.checkOpType(type) && ObjectUtil.isNotNull(data)) {
                    out.collect(maxwellVO);
                }
            }
        });
        return process;
    }

}
