package com.atguigu.udmp.stream;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.udmp.stream.bean.UserEvent;
import com.atguigu.udmp.stream.bean.UserEventDefine;
import com.atguigu.udmp.stream.common.function.DorisMapFunction;

import com.atguigu.udmp.stream.common.util.MyDorisSinkUtil;
import com.atguigu.udmp.stream.common.util.MyKafkaUtil;
import com.atguigu.udmp.stream.function.CheckPartitionFunction;
import com.atguigu.udmp.stream.function.UserEventExtractProcessFunction;
import com.atguigu.udmp.stream.process.OrderJoinProcess;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

import java.util.ArrayList;
import java.util.List;

/**
 * Flink streaming job that extracts user events from raw Kafka log/db data.
 *
 * <p>Pipeline:
 * <ol>
 *   <li>Consume raw JSON records from Kafka topics {@code topic_log} and {@code topic_db}.</li>
 *   <li>Join related order fact streams ({@link OrderJoinProcess}).</li>
 *   <li>Read user-event definitions from MySQL via Flink CDC and broadcast them.</li>
 *   <li>Extract user events from the main stream against the broadcast definitions.</li>
 *   <li>Validate partition fields, then sink the events to Doris table {@code udmp.user_event}.</li>
 * </ol>
 */
public class UserEventApp {

    // Consumer group for the Kafka source.
    private static final String KAFKA_GROUP_ID = "user_event_app";

    // MySQL CDC connection settings for the user_event_define dimension table.
    // NOTE(review): credentials are hard-coded; move to external config/args
    // (e.g. ParameterTool) before production use.
    private static final String MYSQL_HOST = "hadoop102";
    private static final int MYSQL_PORT = 3306;
    private static final String MYSQL_DATABASE = "udmp";
    private static final String MYSQL_TABLE = "udmp.user_event_define";
    private static final String MYSQL_USER = "root";
    private static final String MYSQL_PASSWORD = "000000";

    public static void main(String[] args) throws Exception {
        // 1. Set up the execution environment and consume Kafka data.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // 1.1 Topics and consumer group for the raw-data source.
        List<String> topicList = new ArrayList<>();
        topicList.add("topic_log");
        topicList.add("topic_db");

        // Checkpoint every 5s so the Kafka offsets / CDC state are recoverable.
        // NOTE(review): no state backend / checkpoint storage is configured here;
        // confirm it is set via flink-conf, otherwise defaults apply.
        env.enableCheckpointing(5000L);

        // 1.2 Build the Kafka consumer and wrap it as a stream.
        FlinkKafkaConsumer<String> kafkaConsumer = MyKafkaUtil.getKafkaConsumer(topicList, KAFKA_GROUP_ID);
        DataStreamSource<String> kafkaStrDS = env.addSource(kafkaConsumer);

        // Parse each raw record into a JSONObject.
        SingleOutputStreamOperator<JSONObject> dataJsonObjectStream =
                kafkaStrDS.map(jsonString -> JSON.parseObject(jsonString));

        // 2. Merge related fact streams (e.g. order detail with order info) into the main stream.
        DataStream<JSONObject> dataJsonObjAfterJoinStream = OrderJoinProcess.process(dataJsonObjectStream);

        // 3. Read user-event definitions from MySQL via Flink CDC (initial snapshot + binlog).
        MySqlSource<String> userEventDefineSource = MySqlSource.<String>builder()
                .hostname(MYSQL_HOST)
                .port(MYSQL_PORT)
                .databaseList(MYSQL_DATABASE) // set captured database
                .tableList(MYSQL_TABLE) // set captured table
                .username(MYSQL_USER)
                .password(MYSQL_PASSWORD)
                .deserializer(new JsonDebeziumDeserializationSchema()) // converts SourceRecord to JSON String
                .includeSchemaChanges(false)
                .startupOptions(StartupOptions.initial())
                .build();

        // CDC sources must run at parallelism 1 here to preserve change order.
        DataStreamSource<String> userEventDefineStrStream =
                env.fromSource(userEventDefineSource, WatermarkStrategy.noWatermarks(), "user_event_define")
                        .setParallelism(1);
        SingleOutputStreamOperator<JSONObject> userEventDefineStream =
                userEventDefineStrStream.map(jsonString -> JSON.parseObject(jsonString));

        // 4. Broadcast the event definitions and connect them to the main stream.
        // Broadcast-state descriptor: key = event-definition id, value = event definition.
        MapStateDescriptor<Long, UserEventDefine> userEventDefineMapStateDescriptor =
                new MapStateDescriptor<>("user_event_define", Long.class, UserEventDefine.class);

        BroadcastStream<JSONObject> userEventDefineBroadcastStream =
                userEventDefineStream.broadcast(userEventDefineMapStateDescriptor);

        BroadcastConnectedStream<JSONObject, JSONObject> broadcastConnectedStream =
                dataJsonObjAfterJoinStream.connect(userEventDefineBroadcastStream);

        // 4.1 Load definitions into broadcast state; 4.2 extract user events from the data stream.
        SingleOutputStreamOperator<UserEvent> userEventStream =
                broadcastConnectedStream.process(new UserEventExtractProcessFunction(userEventDefineMapStateDescriptor));

        // 5. Partition check on each extracted event.
        SingleOutputStreamOperator<UserEvent> userEventCheckedStream =
                userEventStream.map(new CheckPartitionFunction());

        // 6. Serialize to Doris-compatible JSON strings and sink to the detail table.
        // Parameterized as <String>: DorisMapFunction emits the JSON string consumed by the sink.
        SingleOutputStreamOperator<String> jsonStrStream = userEventCheckedStream.map(new DorisMapFunction());
        jsonStrStream.sinkTo(MyDorisSinkUtil.getDorisSink("udmp.user_event", "user_event"));

        // Name the job so it is identifiable in the Flink UI.
        env.execute("UserEventApp");
    }
}
