package lianxi.job;

import com.alibaba.fastjson.JSON;
import lianxi.bean.PageBean;
import lianxi.function.PageBeanMapFunction;
import net.bwie.realtime.jtp.utils.KafkaUtil;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.client.program.StreamContextEnvironment;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * DWS job: per-minute page-view window aggregation over the DWD traffic page log.
 *
 * <p>Reads raw page-log JSON records from Kafka, keys them by device id so that
 * keyed state can later be used for UV (unique visitor) deduplication, and maps
 * each record into a {@code PageBean}.
 *
 * <p>NOTE(review): this job is incomplete — the windowing/aggregation step and
 * the sink are not implemented yet (see TODOs below).
 */
public class JtpTrafficPageViewMinuteWindowDwsJob {

    public static void main(String[] args) throws Exception {

        // 1. Execution environment.
        // Use the public factory on StreamExecutionEnvironment rather than the
        // internal StreamContextEnvironment class, which is a Flink-internal
        // implementation detail and not part of the stable API.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // 2. Read the source: page-log records from the DWD Kafka topic.
        DataStream<String> pageStream = KafkaUtil.consumerKafka(env, "dwd-traffic-page-log");
        pageStream.print();


        // 3. Process the data.
        DataStream<String> resultStream = handle(pageStream);

        // 4. Output the result.
        // TODO(review): no sink is wired up yet, so resultStream is unused
        // (and currently null — see handle()). Add a Kafka/DB sink here.

        // 5. Submit the job.
        env.execute("JtpTrafficPageViewMinuteWindowDwsJob");
    }

    /**
     * Builds the processing pipeline for the page-log stream.
     *
     * @param stream raw page-log records as JSON strings; each record is assumed
     *               to carry a {@code common.mid} device-id field — TODO confirm
     *               against the upstream DWD schema
     * @return the aggregated result stream; currently {@code null} because the
     *         window aggregation step is not implemented yet
     */
    private static DataStream<String> handle(DataStream<String> stream) {
        // 1. Key by device id (common.mid) for UV computation: keyed state will
        //    later record whether this device has already visited today.
        KeyedStream<String, String> midStream = stream.keyBy(new KeySelector<String, String>() {
            @Override
            public String getKey(String value) throws Exception {
                return JSON.parseObject(value).getJSONObject("common").getString("mid");
            }
        });

        // 2. Wrap each raw JSON record into a PageBean.
        SingleOutputStreamOperator<PageBean> mapStream = midStream.map(new PageBeanMapFunction());

        // TODO(review): missing steps — assign watermarks, open a one-minute
        // window, aggregate PV/UV, and serialize the result back to JSON strings.
        // Returning null until the pipeline is completed; callers must not
        // dereference the result yet.
        return null;
    }

}
