package com.dy.realtime.dim.app;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.dy.realtime.common.base.BaseApp;
import com.dy.realtime.common.bean.TableProcessDim;
import com.dy.realtime.common.constant.Constant;
import com.dy.realtime.common.util.FlinkSourceUtil;
import com.dy.realtime.common.util.HbaseUtil;
import com.dy.realtime.dim.function.DimProcessFunction;
import com.dy.realtime.dim.function.DimSinkFunction;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;
import org.apache.hadoop.hbase.client.Connection;

import java.io.IOException;
import java.util.Arrays;
import java.util.List;

/**
 * @projectName: MedicalTreatment
 * @package: com.dy.realtime.dim.app
 * @className: DimApp
 * @author: DuanYu
 * @description: Dimension-layer Flink job: filters CDC data from Kafka, broadcasts the
 *               MySQL dimension-config table, maintains the corresponding HBase tables,
 *               and writes dimension rows to HBase.
 * @date: 2025/3/10 13:39
 * @version: 1.0
 */

public class DimApp extends BaseApp {

    public static void main(String[] args) {
        // Topic, consumer group, parallelism 4, web/REST port 10001.
        new DimApp().start(Constant.TOPIC_DB_Medical, Constant.DIM_APP, 4, 10001);
    }

    /**
     * Builds the dimension pipeline:
     * <ol>
     *   <li>ETL-filter the raw CDC JSON stream (keep only real {@code medical} data records).</li>
     *   <li>Read the dimension config table from MySQL via a CDC source.</li>
     *   <li>Create/drop HBase tables as the config changes, then broadcast the config.</li>
     *   <li>Connect data stream and broadcast config, strip columns not declared in
     *       {@code sinkColumns}, and sink the result to HBase.</li>
     * </ol>
     *
     * @param env              the Flink execution environment
     * @param dataStreamSource raw CDC JSON strings from Kafka
     */
    @Override
    public void handle(StreamExecutionEnvironment env, DataStreamSource<String> dataStreamSource) {
        // 1. ETL: drop malformed / bootstrap / foreign-database records.
        SingleOutputStreamOperator<JSONObject> etlStream = getEtlStream(dataStreamSource);

        // 2. Dimension config table as a CDC stream (no event time needed).
        DataStreamSource<String> mysqlSource = env.fromSource(
                FlinkSourceUtil.getMysqlSource(Constant.PROCESS_DATABASE, Constant.PROCESS_DIM_TABLE_NAME),
                WatermarkStrategy.noWatermarks(),
                "Mysql Source");

        // 3. Apply config changes to HBase (create/drop tables) and parse to TableProcessDim.
        SingleOutputStreamOperator<TableProcessDim> createTable = getCreateTable(mysqlSource);

        // Broadcast state descriptor: sourceTable name -> its dimension config.
        MapStateDescriptor<String, TableProcessDim> mapStateDescriptor =
                new MapStateDescriptor<>("map-state", String.class, TableProcessDim.class);
        BroadcastStream<TableProcessDim> broadcast = createTable.broadcast(mapStateDescriptor);

        // 4. Join each data record with its config via broadcast state.
        SingleOutputStreamOperator<Tuple2<JSONObject, TableProcessDim>> process =
                etlStream.connect(broadcast).process(new DimProcessFunction(mapStateDescriptor));

        // Keep only the columns configured for the HBase sink, then write out.
        SingleOutputStreamOperator<Tuple2<JSONObject, TableProcessDim>> data = getData(process);
        data.addSink(new DimSinkFunction());
    }

    /**
     * Removes from the record's {@code data} object every column that is not listed in the
     * config's {@code sinkColumns} (comma-separated), so only configured columns reach HBase.
     *
     * @param process stream of (CDC record, matching dimension config) pairs
     * @return the same stream with each record's {@code data} pruned in place
     */
    private static SingleOutputStreamOperator<Tuple2<JSONObject, TableProcessDim>> getData(SingleOutputStreamOperator<Tuple2<JSONObject, TableProcessDim>> process) {
        return process.map(new MapFunction<Tuple2<JSONObject, TableProcessDim>, Tuple2<JSONObject, TableProcessDim>>() {
            @Override
            public Tuple2<JSONObject, TableProcessDim> map(Tuple2<JSONObject, TableProcessDim> processDimTuple2) throws Exception {
                JSONObject record = processDimTuple2.f0;          // main-stream dimension record
                TableProcessDim config = processDimTuple2.f1;     // its config-table entry
                // Columns allowed to be written to HBase.
                List<String> columns = Arrays.asList(config.getSinkColumns().split(","));
                JSONObject data = record.getJSONObject("data");
                // Defensive: "data" may be absent/non-object; original code would NPE here.
                if (data != null) {
                    data.keySet().removeIf(key -> !columns.contains(key));
                }
                return processDimTuple2;
            }
        });
    }

    /**
     * Parses Debezium-style config-table change events and mirrors them to HBase:
     * create on {@code c}/{@code r}, drop on {@code d}, drop-then-create on {@code u}.
     * Unknown op codes are skipped (the original implementation would NPE or re-emit a
     * stale config because it stored the parsed object in a mutable instance field).
     * Parallelism is pinned to 1 so DDL against HBase is applied in order.
     *
     * @param mysqlSource raw CDC JSON strings for the dimension config table
     * @return stream of parsed {@link TableProcessDim} entries with {@code op} set
     */
    private static SingleOutputStreamOperator<TableProcessDim> getCreateTable(DataStreamSource<String> mysqlSource) {
        return mysqlSource.flatMap(new RichFlatMapFunction<String, TableProcessDim>() {
            private Connection hbaseConnect;

            @Override
            public void open(Configuration parameters) throws Exception {
                hbaseConnect = HbaseUtil.getHbaseConnect();
            }

            @Override
            public void close() throws Exception {
                HbaseUtil.closeHBaseConn(hbaseConnect);
            }

            @Override
            public void flatMap(String s, Collector<TableProcessDim> collector) throws Exception {
                JSONObject jsonObject = JSONObject.parseObject(s);
                String op = jsonObject.getString("op");
                TableProcessDim dim;
                if ("c".equals(op) || "r".equals(op)) {
                    // Insert / snapshot read: create the target HBase table.
                    dim = jsonObject.getObject("after", TableProcessDim.class);
                    createTable(dim, dim.getSinkFamily().split(","));
                } else if ("d".equals(op)) {
                    // Delete: drop the table described by the pre-image.
                    dim = jsonObject.getObject("before", TableProcessDim.class);
                    deleteTable(dim);
                } else if ("u".equals(op)) {
                    // Update: recreate the table with the new definition.
                    dim = jsonObject.getObject("after", TableProcessDim.class);
                    deleteTable(dim);
                    createTable(dim, dim.getSinkFamily().split(","));
                } else {
                    // Unrecognized op code: nothing to apply, nothing to broadcast.
                    return;
                }
                dim.setOp(op);
                collector.collect(dim);
            }

            /** Creates the HBase table for {@code dim} with the given column families. */
            private void createTable(TableProcessDim dim, String[] families) {
                try {
                    HbaseUtil.createHBaseTable(hbaseConnect, Constant.HBASE_NAMESPACE, dim.getSinkTable(), families);
                } catch (IOException e) {
                    // Best-effort DDL: log and continue; the config entry is still broadcast.
                    e.printStackTrace();
                }
            }

            /** Drops the HBase table for {@code dim}. */
            private void deleteTable(TableProcessDim dim) {
                try {
                    HbaseUtil.dropHBaseTable(hbaseConnect, Constant.HBASE_NAMESPACE, dim.getSinkTable());
                } catch (IOException e) {
                    // Best-effort DDL: log and continue.
                    e.printStackTrace();
                }
            }
        }).setParallelism(1);
    }

    /**
     * ETL filter for the raw CDC stream. Keeps a record only when it belongs to the
     * {@code medical} database, is not a bootstrap marker, and carries a non-empty
     * {@code data} payload. Stateless, so a plain {@link FlatMapFunction} suffices.
     *
     * @param dataStreamSource raw CDC JSON strings
     * @return parsed JSON objects that passed the filter
     */
    private static SingleOutputStreamOperator<JSONObject> getEtlStream(DataStreamSource<String> dataStreamSource) {
        return dataStreamSource.flatMap(new FlatMapFunction<String, JSONObject>() {
            @Override
            public void flatMap(String s, Collector<JSONObject> collector) throws Exception {
                // NOTE(review): malformed JSON will throw and fail the task here,
                // same as the original behavior — confirm whether bad records should
                // instead be dropped or routed to a side output.
                JSONObject jsonObject = JSON.parseObject(s);
                String type = jsonObject.getString("type");
                String database = jsonObject.getString("database");
                String data = jsonObject.getString("data");
                if ("medical".equals(database)
                        && !"bootstrap-start".equals(type)
                        && !"bootstrap-complete".equals(type)
                        && data != null && !data.isEmpty()) {
                    collector.collect(jsonObject);
                }
            }
        });
    }
}