package com.atguigu.one.app.dwd.play;

import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.pool.DruidPooledConnection;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONAware;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.one.utils.DateFormatUtil;
import com.atguigu.one.utils.DruidDSUtil;
import com.atguigu.one.utils.JdbcUtil;
import com.atguigu.one.utils.MyKafkaUtil;
import org.apache.flink.api.common.functions.RichFilterFunction;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;

import java.util.List;

/**
 *
 * dwd 播放主题代码
 *   代码逻辑：
 *     1.获取执行环境
 *     2.读取Kafka topic_db video_info 表中的数据 创建流
 *     3.转换为JSON对象&过滤数据
 *     4.将数据写出到Kafka
 *     5.启动任务
 *
 * dwd表需要字段：
 *  user_chapter_process：
 *      position_sec（时长位置）、chapter_id、user_id、id、create_time、course_id
 *  数据：
     {
         "database": "edu",
         "table": "user_chapter_process",
         "type": "insert",
         "ts": 1661133549,
         "xid": 48246,
         "commit": true,
         "data": {
                     "id": 5404,
                     "course_id": 194,
                     "chapter_id": 24385,
                     "user_id": 615,
                     "position_sec": 79,
                     "create_time": "2022-08-22 09:59:09",
                     "update_time": null,
                     "deleted": "0"
                 }
     }
 *
 * @author liuYiZhao
 * @Date: 2022-08-22  9:32
 */
public class DwdPlay {

    /** Source Kafka topic carrying raw CDC (Maxwell) change events. */
    private static final String TOPIC = "topic_db";
    /** Target Kafka topic for the DWD play-fact records. */
    private static final String TARGET_TOPIC = "dwd_play";
    /** Consumer group id for this job. */
    private static final String GROUP_ID = "dwd_play_group";
    /** Only rows from this table are kept. */
    private static final String SOURCE_TABLE = "user_chapter_process";

    public static void main(String[] args) throws Exception {

        //TODO 1. Create the execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        //TODO 2. Read the CDC stream of topic_db from Kafka
        DataStreamSource<String> kafkaDS =
                env.addSource(MyKafkaUtil.getFlinkKafkaConsumer(TOPIC, GROUP_ID));

        //TODO 3. Parse to JSON and keep only user_chapter_process rows
        SingleOutputStreamOperator<JSONObject> jsonObjDS =
                kafkaDS.process(new ProcessFunction<String, JSONObject>() {
                    @Override
                    public void processElement(String value, Context ctx, Collector<JSONObject> out) {

                        JSONObject dbData;
                        try {
                            // A single malformed record must not kill the job;
                            // log and skip instead of letting the parse exception propagate.
                            dbData = JSON.parseObject(value);
                        } catch (Exception e) {
                            System.err.println("Skipping non-JSON record: " + value);
                            return;
                        }

                        // Constant-first equals: records without a "table" key
                        // (e.g. heartbeats) would otherwise throw an NPE.
                        if (SOURCE_TABLE.equals(dbData.getString("table"))) {
                            JSONObject data = dbData.getJSONObject("data");
                            if (data != null) {
                                // "deleted" is a soft-delete flag not needed downstream.
                                data.remove("deleted");
                                out.collect(data);
                            }
                        }
                    }
                });

        //TODO 4. Write the result to Kafka (print is for local debugging)
        jsonObjDS.print(">>>>>>>>");

        jsonObjDS.map(JSONAware::toJSONString)
                .addSink(MyKafkaUtil.getFlinkKafkaProducer(TARGET_TOPIC));

        //TODO 5. Submit the job
        env.execute("DwdPlay");

    }
}
