package com.atguigu.edu.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.edu.bean.CommentBean;
import com.atguigu.edu.util.KafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.util.Collector;

import java.time.Duration;

/**
 * DWS-layer job: reads course-comment events from the
 * {@code dwd_interaction_cource_comment} Kafka topic and maps each raw JSON
 * record into a {@link CommentBean} (one record = one user interaction),
 * as the first stage of a per-course comment aggregation.
 */
public class DwsInteractionComment {
    public static void main(String[] args) throws Exception {
        // TODO 1 Prepare the stream / table execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // TODO 2 Configure checkpointing and the state backend (enable when running on the cluster).
                 /*
                 env.enableCheckpointing(5 * 60 * 1000L, CheckpointingMode.EXACTLY_ONCE );
                 env.getCheckpointConfig().setCheckpointTimeout( 3 * 60 * 1000L );
                 env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
                 env.setStateBackend(new HashMapStateBackend());
                 env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
                 System.setProperty("HADOOP_USER_NAME", "atguigu");
                  */

        // TODO 3 Read the dwd_interaction_cource_comment topic.
        // NOTE(review): "cource" looks like a typo for "course", but the name must match
        // the producer side exactly — confirm against the DWD job before renaming.
        String topicName = "dwd_interaction_cource_comment";
        String groupId = "dws_interaction_comment";
        DataStreamSource<String> commentStream = env.addSource(KafkaUtil.getKafkaConsumer(topicName, groupId));

        // TODO 4 Convert each raw JSON string into a CommentBean.
        // Fix: the original discarded the mapped stream, making the map() dead code.
        // Keep a reference so later stages (keyBy/window/aggregate) can attach to it.
        SingleOutputStreamOperator<CommentBean> beanStream =
                commentStream.map(new MapFunction<String, CommentBean>() {
                    @Override
                    public CommentBean map(String s) throws Exception {
                        JSONObject jsonObject = JSON.parseObject(s);
                        // userCount = 1 per record; course_id is read from the
                        // CDC-style "data" payload of the DWD event.
                        return CommentBean.builder()
                                .userCount(1L)
                                .courseId(jsonObject.getJSONObject("data").getString("course_id"))
                                .build();
                    }
                });

        // Temporary stdout sink for local debugging; replace with the real DWS sink
        // (e.g. ClickHouse/Doris writer) when the aggregation is implemented.
        beanStream.print(">>>>");

        // Fix: without execute() the job graph is built but never submitted,
        // so the original program exited immediately without doing anything.
        env.execute("DwsInteractionComment");
    }
}
