package net.bwie.realtime.jtp.dws.log.job;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import net.bwie.realtime.jtp.common.utils.JdbcUtil;
import net.bwie.realtime.jtp.common.utils.KafkaUtil;
import net.bwie.realtime.jtp.dws.log.bean.PageViewBean;
import net.bwie.realtime.jtp.dws.log.funcation.PageViewBeanMapFunction;
import org.apache.commons.lang3.time.FastDateFormat;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.time.Duration;
import java.util.HashMap;

/**
 * DWS traffic job: consumes DWD page-view log records from Kafka, aggregates them
 * into 1-minute event-time tumbling windows keyed by (brand, channel, province, is_new),
 * and writes one comma-separated report row per key/window
 * (pv_count, uv_count, session_count, pv_during_time) into ClickHouse.
 */
public class JtpTrafficPageViewMinuteWindowDwsJob {

    public static void main(String[] args) throws Exception {

        // 1. Execution environment. Parallelism 1 keeps the job simple for a
        //    single-writer report sink; raise it together with the sink capacity.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // 2. Source: DWD page-view log topic.
        DataStream<String> pageStream = KafkaUtil.consumerKafka(env, "dwd_log_page_view_log");
        //  pageStream.print();

        // 3. Transform: windowed aggregation producing CSV-style report rows.
        DataStream<String> resultStream = handle(pageStream);

        // 4. Sink: upsert the report rows into ClickHouse. Column order must match
        //    the field order produced in handle(): window times, key fields, metrics, ts.
        JdbcUtil.sinkToClickhouseUpsert(resultStream,
                "insert into jtp_log_report.dws_log_page_view_window_report\n" +
                        "(\n" +
                        " window_start_time,\n" +
                        " window_end_time,\n" +
                        " brand,\n" +
                        " channel,\n" +
                        " province,\n" +
                        " is_new,\n" +
                        " pv_count,\n" +
                        " pv_during_time,\n" +
                        " uv_count,\n" +
                        " session_count,\n" +
                        " ts\n" +
                        ") values (?,?,?,?,?,?,?,?,?,?,?);");

        // 5. Trigger execution.
        env.execute();

    }

    /**
     * Builds the aggregation pipeline: key by device id so the map function can use
     * keyed state to flag first visits (UV), assign event-time watermarks from the
     * record timestamp, then window by dimension key and sum the per-record metrics.
     *
     * @param pageStream raw DWD page-view log records as JSON strings
     * @return a stream of comma-separated report rows:
     *         start,end,brand,channel,province,isNew,pv,uv,session,duration,ts
     */
    private static DataStream<String> handle(DataStream<String> pageStream) {

        // 1. Key by device id (mid) so PageViewBeanMapFunction can keep per-device
        //    keyed state to decide whether this is the first visit today (UV).
        KeyedStream<String, String> keyedStream = pageStream.keyBy(new KeySelector<String, String>() {
            @Override
            public String getKey(String value) throws Exception {
                return JSON.parseObject(value).getJSONObject("common").getString("mid");
            }
        });

        // 2. Convert each JSON record to a PageViewBean carrying the metric fields.
        DataStream<PageViewBean> beanStream = keyedStream.map(new PageViewBeanMapFunction());

        // 3. Watermarks: zero out-of-orderness tolerance — late records are dropped
        //    by the event-time windows below.
        DataStream<PageViewBean> timeStream = beanStream.assignTimestampsAndWatermarks(
                WatermarkStrategy.<PageViewBean>forBoundedOutOfOrderness(Duration.ofSeconds(0))
                        .withTimestampAssigner(new SerializableTimestampAssigner<PageViewBean>() {
                            @Override
                            public long extractTimestamp(PageViewBean element, long recordTimestamp) {
                                return element.getTs();
                            }
                        })
        );

        // 4. Key by the report dimensions. NOTE(review): the key is a comma-joined
        //    string and the output row is comma-separated too — a comma inside any
        //    dimension value would shift the sink columns; verify upstream values.
        KeyedStream<PageViewBean, String> dimKeyedStream = timeStream.keyBy(new KeySelector<PageViewBean, String>() {
            @Override
            public String getKey(PageViewBean value) throws Exception {
                return value.getBrand() + "," + value.getChannel() + "," + value.getProvince() + "," + value.getIsNew();
            }
        });

        // 5. 1-minute event-time tumbling windows; sum the metrics per key/window.
        WindowedStream<PageViewBean, String, TimeWindow> windowedStream =
                dimKeyedStream.window(TumblingEventTimeWindows.of(Time.minutes(1)));

        SingleOutputStreamOperator<String> resultOperator = windowedStream.apply(new WindowFunction<PageViewBean, String, String, TimeWindow>() {
            // FastDateFormat is thread-safe, unlike SimpleDateFormat, so one shared
            // instance per operator is fine.
            final FastDateFormat fastDateFormat = FastDateFormat.getInstance("yyyy-MM-dd HH:mm:ss");

            @Override
            public void apply(String key, TimeWindow window, Iterable<PageViewBean> input, Collector<String> out) throws Exception {

                String startTime = fastDateFormat.format(window.getStart());
                String endTime = fastDateFormat.format(window.getEnd());

                // Primitive accumulators: avoids autoboxing a new Long on every +=.
                long pvCount = 0L;
                long uvCount = 0L;
                long sessionCount = 0L;
                long pvDuringTime = 0L;
                for (PageViewBean pageViewBean : input) {
                    pvCount += pageViewBean.getPageCount();
                    uvCount += pageViewBean.getUvCount();
                    sessionCount += pageViewBean.getSessionCount();
                    pvDuringTime += pageViewBean.getPvDuringTime();
                }

                // Row layout must stay in sync with the ClickHouse insert in main():
                // start,end,<brand,channel,province,isNew>,pv,uv,session,duration,ts
                String outString = startTime + "," + endTime + "," + key + "," + pvCount + "," + uvCount + "," + sessionCount + "," + pvDuringTime
                        + "," + System.currentTimeMillis();

                out.collect(outString);
            }
        });

        return resultOperator;
    }

    /**
     * Province-code → province-name dictionary (GB/T 2260 top-level codes).
     * Static because this class is only entered through the static {@code main};
     * currently unused here — presumably kept for a dimension-enrichment step.
     *
     * @return a mutable map from 6-digit province code to Chinese province name
     */
    private static HashMap<String, String> getAreaDic() {
        HashMap<String, String> map = new HashMap<>();
        map.put("110000","北京");
        map.put("120000","天津");
        map.put("140000","山西");
        map.put("150000","内蒙古");
        map.put("130000","河北");
        map.put("310000","上海");
        map.put("320000","江苏");
        map.put("330000","浙江");
        map.put("340000","安徽");
        map.put("350000","福建");
        map.put("360000","江西");
        map.put("370000","山东");
        map.put("710000","台湾");
        map.put("230000","黑龙江");
        map.put("220000","吉林");
        map.put("210000","辽宁");
        map.put("610000","陕西");
        map.put("620000","甘肃");
        map.put("630000","青海");
        map.put("640000","宁夏");
        map.put("650000","新疆");
        map.put("410000","河南");
        map.put("420000","湖北");
        map.put("430000","湖南");
        map.put("440000","广东");
        map.put("450000","广西");
        map.put("460000","海南");
        map.put("810000","香港");
        map.put("820000","澳门");
        map.put("510000","四川");
        map.put("520000","贵州");
        map.put("530000","云南");
        map.put("500000","重庆");
        map.put("540000","西藏");
        return map;
    }

}
