package com.wsjj.gmall.dim.app;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.wsjj.gmall.base.BaseApp;
import com.wsjj.gmall.bean.TableProcessDim;
import com.wsjj.gmall.constant.Constant;
import com.wsjj.gmall.dim.function.HBaseSinkFunction;
import com.wsjj.gmall.dim.function.TableProcessFunction;
import com.wsjj.gmall.dim.function.createTableProcess;
import com.wsjj.gmall.util.FlinkSourceUtil;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.BroadcastConnectedStream;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;


public class DimApp extends BaseApp {

    /**
     * Entry point: starts the DIM job on port 10001 with parallelism 4,
     * consuming {@code Constant.TOPIC_DB} with consumer group "dim_d".
     */
    public static void main(String[] args) {
        new DimApp().start(10001, 4, Constant.TOPIC_DB, "dim_d");
    }

    /**
     * Pipeline:
     * <ol>
     *   <li>ETL-filter the Kafka topic_db stream down to valid gmall change records.</li>
     *   <li>Read the dimension config table (table_process_dim) via Flink CDC.</li>
     *   <li>Create/maintain the corresponding HBase dimension tables.</li>
     *   <li>Broadcast the config stream and connect it with the main stream,
     *       forwarding only dimension-table records.</li>
     *   <li>Write matched dimension rows to HBase.</li>
     * </ol>
     */
    @Override
    public void handle(StreamExecutionEnvironment env, DataStreamSource<String> stream) {
        // TODO(review): leftover debug print with a meaningless label — remove before production.
        stream.print("adsjlkjkdsjds:");

        // TODO 1. Parse and filter the raw Kafka stream.
        SingleOutputStreamOperator<JSONObject> etlStream = etl(stream);
        etlStream.print("=========etStream=================");

        // TODO 2. Read the config table change stream via Flink CDC (parallelism 1, see readTableProcess).
        SingleOutputStreamOperator<TableProcessDim> streamOperator = readTableProcess(env).setParallelism(1);

        // TODO 3. Create dimension tables in HBase as config rows arrive.
        SingleOutputStreamOperator<TableProcessDim> tpDS = streamOperator.map(new createTableProcess()).setParallelism(1);

        // TODO 4. Broadcast the config stream to the main stream.
        MapStateDescriptor<String, TableProcessDim> mapStateDescriptor =
                new MapStateDescriptor<>("dim_broadcast", String.class, TableProcessDim.class);
        BroadcastStream<TableProcessDim> broadcatPS = tpDS.broadcast(mapStateDescriptor);

        BroadcastConnectedStream<JSONObject, TableProcessDim> connectDS = etlStream.connect(broadcatPS);

        // Keep only records belonging to dimension tables; each element pairs the
        // data row with its matching config entry.
        SingleOutputStreamOperator<Tuple2<JSONObject, TableProcessDim>> process =
                connectDS.process(new TableProcessFunction(mapStateDescriptor));
        process.print("process======：");

        // TODO 5. Sink dimension rows into HBase.
        process.addSink(new HBaseSinkFunction());
    }

    /**
     * ETL: parse each record once, then keep only gmall change events
     * (insert / update / delete / bootstrap-insert) carrying a non-empty data payload.
     *
     * <p>Fix: the original filtered on the raw string (parsing it) and then re-parsed
     * the same string in a second map, parsing every record twice. Parsing first and
     * filtering on the parsed object yields the same output stream with one parse.
     *
     * @param stream raw JSON strings from the topic_db Kafka topic
     * @return stream of parsed change records that passed the filter
     */
    private SingleOutputStreamOperator<JSONObject> etl(DataStreamSource<String> stream) {
        SingleOutputStreamOperator<JSONObject> parsed = stream.map(new MapFunction<String, JSONObject>() {
            @Override
            public JSONObject map(String value) throws Exception {
                return JSON.parseObject(value);
            }
        });

        return parsed.filter(new FilterFunction<JSONObject>() {
            @Override
            public boolean filter(JSONObject jsonObject) throws Exception {
                String database = jsonObject.getString("database");
                String type = jsonObject.getString("type");
                String data = jsonObject.getString("data");
                // data.length() > 2 rules out empty payloads such as "{}".
                return "gmall".equals(database)
                        && ("delete".equals(type) || "insert".equals(type)
                            || "update".equals(type) || "bootstrap-insert".equals(type))
                        && data != null && data.length() > 2;
            }
        });
    }

    /**
     * Reads table_process_dim from the gmall_config database via Flink CDC and
     * converts each Debezium-style JSON record into a {@link TableProcessDim}.
     *
     * <p>Parallelism is pinned to 1 on both the source and the map so the config
     * change stream preserves its original order.
     *
     * @param env the execution environment to attach the CDC source to
     * @return stream of config rows, each tagged with its change op
     */
    private SingleOutputStreamOperator<TableProcessDim> readTableProcess(StreamExecutionEnvironment env) {
        // Read config rows (and subsequent changes) from MySQL via Flink CDC.
        MySqlSource<String> mySqlSource = FlinkSourceUtil.getMySqlSource("gmall_config", "table_process_dim");

        DataStreamSource<String> source = env.fromSource(mySqlSource, WatermarkStrategy.noWatermarks(), "cdc-source");
        source.setParallelism(1);

        return source.map(new MapFunction<String, TableProcessDim>() {
            @Override
            public TableProcessDim map(String value) throws Exception {
                JSONObject obj = JSON.parseObject(value);
                String op = obj.getString("op");
                // Deletes carry the row image in "before"; create/read/update carry it in "after".
                TableProcessDim tableProcessDim = "d".equals(op)
                        ? obj.getObject("before", TableProcessDim.class)
                        : obj.getObject("after", TableProcessDim.class);
                if (tableProcessDim == null) {
                    // Fix: the original NPE'd on setOp for records missing the expected
                    // image; fail with the offending record for diagnosability instead.
                    throw new IllegalStateException("CDC record missing before/after image: " + value);
                }
                tableProcessDim.setOp(op);
                return tableProcessDim;
            }
        }).setParallelism(1);
    }

}
