package com.bw.gmall.realtime.dim.app;
import com.bw.gmall.realtime.common.base.BaseApp;
import com.bw.gmall.realtime.common.bean.TableProcessDim;
import com.bw.gmall.realtime.common.util.DimsinkFunction;
import com.bw.gmall.realtime.common.util.FlinkSourceUtil;
import com.bw.gmall.realtime.common.util.HBaseUtil;
import com.bw.gmall.realtime.common.util.JdbcUtil;
import com.bw.gmall.realtime.dim.function.dim_processfunction;
import com.esotericsoftware.minlog.Log;
import com.ververica.cdc.debezium.StringDebeziumDeserializationSchema;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.state.BroadcastState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ReadOnlyBroadcastState;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.bw.gmall.realtime.common.base.BaseApp;
import com.bw.gmall.realtime.common.constant.Constant;
import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.util.Collector;
import org.apache.hadoop.hbase.client.Connection;

import java.io.IOException;
import java.lang.annotation.ElementType;
import java.sql.PreparedStatement;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;

/**
 * DIM-layer application.
 *
 * <p>Pipeline: consume business records from Kafka ({@code topic_db}), clean them,
 * read the dimension config table via MySQL CDC, create/drop the matching HBase
 * tables, broadcast the config to the main stream, project each row onto the
 * configured columns, and sink the dimension rows to HBase.
 */
public class DimApp extends BaseApp {
    public static void main(String[] args) {
        new DimApp().start(Constant.TOPIC_DB, Constant.DIM_APP, 4, 10001);
    }

    @Override
    public void handle(StreamExecutionEnvironment env, DataStreamSource<String> stream) {
        // 1. Clean the consumed data (main stream).
        SingleOutputStreamOperator<JSONObject> etlStream = etl(stream);

        // 2. Read the dimension config table via CDC. Parallelism must be 1 so the
        //    binlog changes are consumed in order.
        DataStreamSource<String> processStream = env.fromSource(
                FlinkSourceUtil.getMysqlSource(Constant.PROCESS_DATABASE, Constant.PROCESS_DIM_TABLE_NAME),
                WatermarkStrategy.noWatermarks(),
                "cdc_stream").setParallelism(1);

        // 3. Apply the config changes as HBase DDL (create/drop dimension tables).
        SingleOutputStreamOperator<TableProcessDim> createTableStream = createTable(processStream);

        // 4. Broadcast the config stream and connect it with the main stream.
        MapStateDescriptor<String, TableProcessDim> mapStateDescriptor =
                new MapStateDescriptor<>("broadcast_state", String.class, TableProcessDim.class);
        BroadcastStream<TableProcessDim> broadcast = createTableStream.broadcast(mapStateDescriptor);
        BroadcastConnectedStream<JSONObject, TableProcessDim> connect = etlStream.connect(broadcast);
        SingleOutputStreamOperator<Tuple2<JSONObject, TableProcessDim>> processedStream =
                connect.process(new dim_processfunction(mapStateDescriptor));

        // 5. Keep only the columns declared in the config table.
        SingleOutputStreamOperator<Tuple2<JSONObject, TableProcessDim>> filterStream =
                getFilterStream(processedStream);

        // 6. Write the dimension rows to HBase.
        filterStream.addSink(new DimsinkFunction());
    }

    /**
     * Cleans the raw Maxwell-style JSON from Kafka.
     *
     * <p>Keeps records whose {@code database} is {@code gmall}, whose {@code type}
     * is not a bootstrap start/complete marker, and that carry a non-empty
     * {@code data} payload. Malformed records are logged and skipped (best effort:
     * a bad record must never kill the job).
     *
     * @param stream raw JSON strings from Kafka
     * @return parsed, validated records
     */
    private SingleOutputStreamOperator<JSONObject> etl(DataStreamSource<String> stream) {
        return stream.flatMap(new RichFlatMapFunction<String, JSONObject>() {
            @Override
            public void flatMap(String s, Collector<JSONObject> collector) throws Exception {
                if (s == null) {
                    return;
                }
                try {
                    JSONObject jsonObject = JSON.parseObject(s);

                    String db = jsonObject.getString("database");
                    String type = jsonObject.getString("type");
                    String data = jsonObject.getString("data");

                    if ("gmall".equals(db)
                            && !"bootstrap-start".equals(type)
                            && !"bootstrap-complete".equals(type)
                            && data != null && data.length() > 0) {
                        collector.collect(jsonObject);
                    }
                } catch (Exception e) {
                    // Deliberately swallow after logging: drop the bad record, keep the job alive.
                    Log.warn("Skipping record that is not valid JSON: " + s);
                }
            }
        });
    }

    /**
     * Maintains the HBase dimension tables from the CDC stream of the config table.
     *
     * <p>CDC op mapping: {@code r}/{@code c} → create table, {@code d} → drop table,
     * {@code u} → drop then re-create (the column-family list may have changed).
     * Unknown ops are logged and ignored. Runs with parallelism 1 so DDL is applied
     * in binlog order.
     *
     * @param processStream raw Debezium-style CDC JSON of the config table
     * @return the parsed config records, each tagged with its op
     */
    public SingleOutputStreamOperator<TableProcessDim> createTable(DataStreamSource<String> processStream) {
        return processStream.flatMap(new RichFlatMapFunction<String, TableProcessDim>() {

            private Connection hbaseConnect;

            @Override
            public void open(Configuration parameters) throws Exception {
                hbaseConnect = HBaseUtil.getHBaseConnection();
            }

            @Override
            public void close() throws Exception {
                // Guard: open() may have failed before the connection was assigned.
                if (hbaseConnect != null) {
                    hbaseConnect.close();
                }
            }

            @Override
            public void flatMap(String s, Collector<TableProcessDim> collector) throws Exception {
                JSONObject jsonObject = JSON.parseObject(s);
                String op = jsonObject.getString("op");

                // Local, not a field: the original kept this as instance state, which
                // leaked between records and NPE'd on unknown ops.
                TableProcessDim tableProcessDim;
                if ("r".equals(op) || "c".equals(op)) {
                    tableProcessDim = jsonObject.getObject("after", TableProcessDim.class);
                    createDimTable(tableProcessDim);
                } else if ("d".equals(op)) {
                    tableProcessDim = jsonObject.getObject("before", TableProcessDim.class);
                    deleteDimTable(tableProcessDim);
                } else if ("u".equals(op)) {
                    tableProcessDim = jsonObject.getObject("after", TableProcessDim.class);
                    // Re-create so a changed family list takes effect.
                    deleteDimTable(tableProcessDim);
                    createDimTable(tableProcessDim);
                } else {
                    Log.warn("Ignoring unknown CDC op: " + op);
                    return;
                }
                tableProcessDim.setOp(op);
                collector.collect(tableProcessDim);
            }

            // Creates the sink table with the configured column families (best effort).
            private void createDimTable(TableProcessDim dim) {
                try {
                    String[] families = dim.getSinkFamily().split(",");
                    HBaseUtil.createHBaseTable(hbaseConnect, Constant.HBASE_NAMESPACE, dim.getSinkTable(), families);
                } catch (IOException e) {
                    Log.warn("Failed to create HBase table " + dim.getSinkTable(), e);
                }
            }

            // Drops the sink table (best effort).
            private void deleteDimTable(TableProcessDim dim) {
                try {
                    HBaseUtil.dropHBaseTable(hbaseConnect, Constant.HBASE_NAMESPACE, dim.getSinkTable());
                } catch (IOException e) {
                    Log.warn("Failed to drop HBase table " + dim.getSinkTable(), e);
                }
            }
        }).setParallelism(1);
    }

    /**
     * Projects each row's {@code data} payload onto the columns listed in the
     * config table's {@code sinkColumns}; every other key is removed in place.
     *
     * @param processedStream (row, config) pairs from the broadcast join
     * @return the same pairs with the row trimmed to the configured columns
     */
    public SingleOutputStreamOperator<Tuple2<JSONObject, TableProcessDim>> getFilterStream(
            SingleOutputStreamOperator<Tuple2<JSONObject, TableProcessDim>> processedStream) {
        return processedStream.map(
                new MapFunction<Tuple2<JSONObject, TableProcessDim>, Tuple2<JSONObject, TableProcessDim>>() {
            @Override
            public Tuple2<JSONObject, TableProcessDim> map(
                    Tuple2<JSONObject, TableProcessDim> tuple) throws Exception {
                JSONObject data = tuple.f0.getJSONObject("data");
                List<String> columns = Arrays.asList(tuple.f1.getSinkColumns().split(","));

                // Remove every key not declared in the config table.
                data.keySet().removeIf(key -> !columns.contains(key));
                return tuple;
            }
        });
    }
}
