package com.atguigu.app.dim;

        /*
        kafka topic_db主流根据flink CDC从mysql table_process读取的配置流（广播流）来过滤出维表信息动态分流
        写到phoenix
         */
// 数据流：web/app->mysql(binlog)->maxwell->kafka(ODS)->FlinkKafkaConsumer（主流）
//                                                                     ==>phoenix(DIM)
//                 mysql（binlog）->flinkCDC(配置流)
// 程 序: Mock   ->mysql(binlog)->maxwell->kafka(ZK) ->DimApp(hdfs、zk、hbase)->phoenix(DIM)

import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.pool.DruidPooledConnection;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONException;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.bean.TableProcess;
import com.atguigu.common.GmallConfig;
import com.atguigu.utils.DruidDSUtil;
import com.atguigu.utils.JdbcUtil;
import com.atguigu.utils.JedisUtil;
import com.atguigu.utils.KafkaUtil;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.state.BroadcastState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ReadOnlyBroadcastState;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.BroadcastConnectedStream;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.flink.util.Collector;
import redis.clients.jedis.Jedis;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.*;


//todo 1.创建执行环境
//todo 2.设置状态后端
//todo 3.读取kafka topic_db主题信息创建主流
//todo 4.flink CDC读取mysql table_process创建配置流
//todo 5.将配置流转换为广播流
//todo 6.连接主流和配置流
//todo 7.根据配置信息（广播流）过滤主流信息
//todo 8.将过滤后的主流维表信息写到phoenix
public class DimApp {
    public static void main(String[] args) throws Exception {
        // todo 1. Execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // todo 2. State backend — intentionally left at Flink's default (not configured here).

        // todo 3. Main stream: Maxwell CDC records of the business DB, consumed from kafka topic_db.
        DataStreamSource<String> stringKafkaDS =
                env.addSource(KafkaUtil.getFlinkKafkaConsumer("topic_db", "dimapp2_220828"));

        // todo 4. Drop non-JSON records, parse the rest into JSONObject.
        // flatMap (not map) so malformed records can simply emit nothing.
        SingleOutputStreamOperator<JSONObject> jsonObjectDS = stringKafkaDS.flatMap(new FlatMapFunction<String, JSONObject>() {
            @Override
            public void flatMap(String value, Collector<JSONObject> out) throws Exception {
                if (value != null) {
                    try {
                        JSONObject jsonObject = JSONObject.parseObject(value);
                        out.collect(jsonObject);
                    } catch (JSONException e) {
                        // Malformed payload: log and drop, keep the job running.
                        System.out.println("非json格式数据");
                    }
                }
            }
        });

        // todo 5. Config stream: Flink CDC reads mysql table_process.
        // StartupOptions.initial(): snapshot the whole config table on startup, then tail the binlog,
        // so the broadcast state sees every existing rule as well as future changes.
        MySqlSource<String> mysqlSource = MySqlSource.<String>builder()
                .hostname("hadoop102")
                .port(3306)
                .username("root")
                .password("123456")
                .databaseList("gmall-220828-config")
                .tableList("gmall-220828-config.table_process")
                .startupOptions(StartupOptions.initial())
                .deserializer(new JsonDebeziumDeserializationSchema())
                .build();
        DataStreamSource<String> mysqlDS = env.fromSource(mysqlSource, WatermarkStrategy.noWatermarks(), "mysqlSource");

        // todo 6. Broadcast the config stream (keyed by source table name -> TableProcess rule).
        MapStateDescriptor<String, TableProcess> mapStateDescriptor =
                new MapStateDescriptor<>("table-process", String.class, TableProcess.class);
        BroadcastStream<String> broadcastDS = mysqlDS.broadcast(mapStateDescriptor);

        // todo 7. Connect main stream with the broadcast config stream.
        BroadcastConnectedStream<JSONObject, String> connectedStream = jsonObjectDS.connect(broadcastDS);

        // todo 8. Filter the main stream (rows and columns) against the broadcast config.
        BroadcastProcessFunction<JSONObject, String, JSONObject> broadcastProcessFunction = new BroadcastProcessFunction<JSONObject, String, JSONObject>() {

            // Preloaded copy of the config, filled once in open(). Covers the race where main-stream
            // records arrive before the broadcast state has been populated on first startup.
            private final HashMap<String, TableProcess> stringTableProcessHashMap = new HashMap<>();

            @Override
            public void open(Configuration parameters) throws Exception {
                // Preload the dim config directly from MySQL so the first records are not dropped.
                // FIX: close the JDBC connection (the original leaked it) — try-with-resources.
                try (Connection connection = DriverManager.getConnection("jdbc:mysql://hadoop102:3306/gmall-220828-config?" +
                        "user=root&password=123456&useUnicode=true&" +
                        "characterEncoding=utf8&serverTimeZone=Asia/Shanghai&useSSL=false")) {
                    List<TableProcess> tableProcesses = JdbcUtil.queryList(connection,
                            "select * from table_process where sink_type='dim'", TableProcess.class);

                    for (TableProcess tableProcess : tableProcesses) {
                        // The query already filters sink_type='dim'; keep the guard as defense in depth.
                        if ("dim".equals(tableProcess.getSinkType())) {
                            // Create the Phoenix table up front if it does not exist yet.
                            checkThenCreateTable(tableProcess);
                            stringTableProcessHashMap.put(tableProcess.getSourceTable(), tableProcess);
                        }
                    }
                }
            }

            /*
             * Broadcast side input: one Debezium JSON envelope per table_process change, e.g.
             * {"before":null,
             *  "after":{"source_table":"base_trademark","sink_table":"dim_base_trademark",
             *           "sink_columns":"id,tm_name","sink_pk":"id","sink_extend":null,"sink_type":"dim"},
             *  "source":{...},"op":"r","ts_ms":...,"transaction":null}
             */
            // Config-stream handler:
            // 1.1 parse the envelope into a TableProcess
            // 1.2 create the Phoenix table if missing
            // 1.3 put/remove the rule in broadcast state
            @Override
            public void processBroadcastElement(String value, Context ctx, Collector<JSONObject> out) throws Exception {
                BroadcastState<String, TableProcess> broadcastState = ctx.getBroadcastState(mapStateDescriptor);

                JSONObject jsonObject = JSONObject.parseObject(value);
                if ("d".equals(jsonObject.getString("op"))) {
                    // Rule deleted in MySQL: remove it from state too, otherwise stale rules would
                    // keep matching main-stream records forever.
                    String sourceTable = jsonObject.getJSONObject("before").getString("source_table");
                    broadcastState.remove(sourceTable);
                    stringTableProcessHashMap.remove(sourceTable);
                } else {
                    // insert / update / snapshot-read ("r"): the current row is in "after".
                    TableProcess tableProcess = JSON.parseObject(jsonObject.getString("after"), TableProcess.class);

                    if ("dim".equals(tableProcess.getSinkType())) {
                        // 1.2 make sure the Phoenix table exists before any row is routed to it
                        checkThenCreateTable(tableProcess);
                        // 1.3 register/refresh the rule
                        broadcastState.put(tableProcess.getSourceTable(), tableProcess);
                    }
                }
            }

            /**
             * Creates the Phoenix dim table if it does not exist.
             * sink_columns "id,tm_name" + sink_extend "SALT_BUCKETS=3" becomes:
             *   create table if not exists db.sink_table(id varchar primary key,tm_name varchar)SALT_BUCKETS=3
             * All columns are varchar; the pk column is emitted exactly once.
             */
            private void checkThenCreateTable(TableProcess tableProcess) throws SQLException {
                String sinkTable = tableProcess.getSinkTable();
                String sinkColumns = tableProcess.getSinkColumns();
                String sinkPk = tableProcess.getSinkPk();
                String sinkExtend = tableProcess.getSinkExtend();

                // Default the primary key to "id" when the config omits it; Phoenix upserts on pk
                // conflict overwrite the old row instead of failing, which is what we want for dims.
                if (sinkPk == null || "".equals(sinkPk)) {
                    sinkPk = "id";
                }

                StringBuilder createSql = new StringBuilder();
                createSql.append("create table if not exists ")
                        .append(GmallConfig.PHOENIX_DIM_DATABASE)
                        .append(".")
                        .append(sinkTable)
                        .append("(")
                        .append(sinkPk)
                        .append(" varchar primary key");

                // FIX: skip the pk column wherever it appears in sink_columns. The original only
                // skipped it in non-last positions, so a pk listed last was emitted twice.
                for (String column : sinkColumns.split(",")) {
                    if (!column.equals(sinkPk)) {
                        createSql.append(",").append(column).append(" varchar");
                    }
                }
                createSql.append(")");

                if (sinkExtend != null) {
                    createSql.append(sinkExtend);
                }

                System.out.println("phoenix的建表语句（不存在则创建）为：" + createSql);

                // Borrow a pooled Phoenix connection per call (open() runs only once, so a connection
                // acquired there could never be returned). try-with-resources guarantees the return
                // even when execute() throws; FIX: the original leaked both on the error path and
                // dropped the cause from the rethrown exception.
                try (DruidPooledConnection phoenixConn = DruidDSUtil.getPhoenixConn();
                     PreparedStatement preparedStatement = phoenixConn.prepareStatement(createSql.toString())) {
                    preparedStatement.execute();
                } catch (SQLException e) {
                    throw new RuntimeException("phoenix创建表：" + sinkTable + "失败", e);
                }
            }

            /*
             * Main-stream side input: one Maxwell envelope per business-table change, e.g.
             * {"database":"gmall","table":"cart_info","type":"update","ts":...,
             *  "data":{...current columns...},"old":{...changed columns' previous values...}}
             */
            // Main-stream handler:
            // 2.1 read the broadcast state
            // 2.2 filter rows (table must have a dim rule; type must be insert/update/bootstrap-insert)
            //     and columns (keep only sink_columns)
            // 2.3 tag the record with sink_table and emit
            @Override
            public void processElement(JSONObject value, ReadOnlyContext ctx, Collector<JSONObject> out) throws Exception {
                // 2.1 broadcast state (read-only on the non-broadcast side)
                ReadOnlyBroadcastState<String, TableProcess> broadcastState = ctx.getBroadcastState(mapStateDescriptor);

                // 2.2 row filter: keep insert/update/bootstrap-insert; drop delete,
                // bootstrap-start and bootstrap-complete.
                String mainTable = value.getString("table");
                TableProcess hashMapTableProcess = stringTableProcessHashMap.get(mainTable);
                String type = value.getString("type");
                TableProcess tableProcess = broadcastState.get(mainTable);

                if ((tableProcess != null || hashMapTableProcess != null)
                        && ("insert".equals(type) || "update".equals(type) || "bootstrap-insert".equals(type))) {
                    // Prefer the preloaded rule if present (covers startup ordering).
                    if (hashMapTableProcess != null) {
                        tableProcess = hashMapTableProcess;
                    }
                    String sinkType = tableProcess.getSinkType();
                    if ("dim".equals(sinkType)) {
                        String sinkColumns = tableProcess.getSinkColumns();
                        JSONObject data = value.getJSONObject("data");

                        // Column filter: remove every "data" field not listed in sink_columns.
                        // Removal must go through the iterator — removing from the map while
                        // iterating its key set throws ConcurrentModificationException.
                        List<String> columnList = Arrays.asList(sinkColumns.split(","));
                        Iterator<Map.Entry<String, Object>> iterator = data.entrySet().iterator();
                        while (iterator.hasNext()) {
                            Map.Entry<String, Object> next = iterator.next();
                            if (!columnList.contains(next.getKey())) {
                                iterator.remove();
                            }
                        }

                        // 2.3 tag with the Phoenix target table and emit
                        value.put("sink_table", tableProcess.getSinkTable());
                        out.collect(value);

                    } else if ("dwd".equals(sinkType)) {
                        System.out.println("过滤了主流的dwd事实表数据");
                    }

                } else if ("bootstrap-start".equals(type) || "bootstrap-complete".equals(type)) {
                    System.out.println("过滤了bootstrap-start或bootstrap-complete：" + value);
                } else if (tableProcess == null) {
                    System.out.println("配置信息里没有主流的：" + mainTable + "表");
                }
            }
        };

        SingleOutputStreamOperator<JSONObject> dimResultDS = connectedStream.process(broadcastProcessFunction);

        /*
         * dimResultDS records are the Maxwell envelope with "data" reduced to sink_columns
         * and an extra "sink_table":"dim_xxx" field.
         */
        // todo 9. Sink the filtered dim records to Phoenix (and refresh the Redis cache on updates).
        RichSinkFunction<JSONObject> dimSinkFunction = new RichSinkFunction<JSONObject>() {
            private DruidDataSource druidDataSource;

            @Override
            public void open(Configuration parameters) throws Exception {
                // Druid pool for Phoenix; individual connections are borrowed per invoke().
                druidDataSource = DruidDSUtil.getDruidDataSource();
            }

            @Override
            public void invoke(JSONObject value, Context context) throws Exception {
                // 1. destination table and the already-filtered row
                String sink_table = value.getString("sink_table");
                JSONObject afterFilterData = value.getJSONObject("data");

                // 2. upsert into db.tableName(id,name) values('1001','zhangsan')
                //    Phoenix uses upsert for insert and update alike; no deletes needed here.
                Set<String> columns = afterFilterData.keySet();
                Collection<Object> values = afterFilterData.values();

                // 3. Cache-aside maintenance: on update, rewrite the Redis copy so readers never
                //    see a stale dim row. FIX: return the Jedis connection in finally (was leaked).
                if ("update".equals(value.getString("type"))) {
                    Jedis jedis = JedisUtil.getJedis();
                    try {
                        String redisKey = "DIM:" + sink_table.toUpperCase() + ":" + afterFilterData.getString("id");

                        JSONObject jsonObject = new JSONObject();
                        for (Map.Entry<String, Object> entry : afterFilterData.entrySet()) {
                            // Upper-case keys to match Phoenix's column casing.
                            // String.valueOf guards against null column values (toString() would NPE).
                            jsonObject.put(entry.getKey().toUpperCase(), String.valueOf(entry.getValue()));
                        }
                        jedis.set(redisKey, jsonObject.toJSONString());
                        jedis.expire(redisKey, 24 * 3600);
                    } finally {
                        jedis.close();
                    }
                }

                // Quote every value: MySQL may deliver ints, but all Phoenix columns are varchar.
                String phoenixUpsertSql = "upsert into " + GmallConfig.PHOENIX_DIM_DATABASE + "." + sink_table +
                        "(" + StringUtils.join(columns, ",") + ") values('"
                        + StringUtils.join(values, "','")
                        + "')";

                System.out.println("phoenix dim的插入语句为：" + phoenixUpsertSql);

                // 4. Execute and commit (DML needs an explicit commit; DDL auto-commits).
                //    FIX: try-with-resources returns statement + connection to the pool on all paths.
                try (DruidPooledConnection phoenixConn = druidDataSource.getConnection();
                     PreparedStatement preparedStatement = phoenixConn.prepareStatement(phoenixUpsertSql)) {
                    preparedStatement.execute();
                    phoenixConn.commit();
                }
            }
        };

        dimResultDS.addSink(dimSinkFunction);

        // todo 10. Launch the job
        env.execute("DimApp");
    }
}
