package com.bw.yk04;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringEncoder;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.core.fs.Path;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.filesystem.StreamingFileSink;
import org.apache.flink.streaming.api.functions.sink.filesystem.rollingpolicies.DefaultRollingPolicy;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.text.SimpleDateFormat;
import java.time.Duration;
import java.util.HashSet;
import java.util.Properties;
import java.util.concurrent.TimeUnit;

/**
 * DWS-layer Flink job: consumes DWD page logs from Kafka, computes daily
 * PV / UV / total visit duration per (region, channel, brand), writes the
 * aggregates to ClickHouse, and spills late-arriving records to HDFS files
 * via a side output.
 */
public class FlinkTM678 {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Kafka source: the DWD-layer page log topic.
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "hadoop-single:9092");
        properties.setProperty("group.id", "group1");
        DataStream<String> stream = env.addSource(new FlinkKafkaConsumer<>("dwd_page_log", new SimpleStringSchema(), properties));

        // Parse each record to JSON and assign event-time timestamps with a
        // bounded-out-of-orderness watermark (at most 2 s of disorder).
        SingleOutputStreamOperator<JSONObject> mapDS = stream.map(new MapFunction<String, JSONObject>() {
            @Override
            public JSONObject map(String s) throws Exception {
                return JSON.parseObject(s);
            }
        }).assignTimestampsAndWatermarks(WatermarkStrategy
                .<JSONObject>forBoundedOutOfOrderness(Duration.ofSeconds(2))
                .withTimestampAssigner(new SerializableTimestampAssigner<JSONObject>() {
                    @Override
                    public long extractTimestamp(JSONObject element, long recordTimestamp) {
                        // Fall back to the record's built-in timestamp when
                        // "ts" is absent instead of NPE-ing on auto-unboxing.
                        Long ts = element.getLong("ts");
                        return ts != null ? ts : recordTimestamp;
                    }
                }));

        // Task 6 (DWS layer): daily tumbling event-time window, keyed by
        // region (ar) / channel (ch) / brand (ba); aggregate page views (PV),
        // unique visitors (UV) and total visit duration. Watermark above
        // tolerates up to 2 s of out-of-order data.
        // Task 8 (DWS layer): records arriving after the window has fired go
        // to a side output and are persisted as files.
        OutputTag<JSONObject> lateTag = new OutputTag<JSONObject>("late-data") {};
        // keyBy -> window -> process
        SingleOutputStreamOperator<TM6Entity> xt6 = mapDS.keyBy(new KeySelector<JSONObject, Tuple3<String, String, String>>() {
                    @Override
                    public Tuple3<String, String, String> getKey(JSONObject jsonObject) throws Exception {
                        JSONObject common = jsonObject.getJSONObject("common");
                        return new Tuple3<>(common.getString("ar"), common.getString("ch"), common.getString("ba"));
                    }
                    // -8 h offset aligns the daily window to UTC+8 (Beijing time).
                }).window(TumblingEventTimeWindows.of(Time.days(1), Time.hours(-8)))
                .sideOutputLateData(lateTag) // task 8: capture late records
                .process(new ProcessWindowFunction<JSONObject, TM6Entity, Tuple3<String, String, String>, TimeWindow>() {
                    @Override
                    public void process(Tuple3<String, String, String> key, ProcessWindowFunction<JSONObject, TM6Entity, Tuple3<String, String, String>, TimeWindow>.Context context, Iterable<JSONObject> iterable, Collector<TM6Entity> collector) throws Exception {
                        // Primitive counters avoid per-iteration autoboxing.
                        long pv = 0L;
                        long duringTime = 0L;
                        HashSet<String> mids = new HashSet<>();
                        for (JSONObject jo : iterable) {
                            pv++;
                            // getLongValue() yields 0 for a missing/null
                            // "during_time" instead of throwing an NPE.
                            // NOTE(review): assumes every page log carries a
                            // "page" object — confirm against the DWD schema.
                            duringTime += jo.getJSONObject("page").getLongValue("during_time");
                            mids.add(jo.getJSONObject("common").getString("mid"));
                        }
                        // SimpleDateFormat is not thread-safe, so it is built
                        // per invocation rather than shared across threads.
                        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
                        String stt = sdf.format(context.window().getStart());
                        String edt = sdf.format(context.window().getEnd());
                        collector.collect(new TM6Entity(stt, edt, key.f0, key.f1, key.f2, pv, (long) mids.size(), duringTime));
                    }
                });

        xt6.print("主流聚合>>>>");
        xt6.addSink(new ClickhouseSink());

        // Task 8: side output of late data — print it and roll it into HDFS
        // files (rollover every 15 s, close after 5 s inactivity, 1 GiB cap).
        DataStream<JSONObject> xt6_late = xt6.getSideOutput(lateTag);
        xt6_late.print("侧流输出迟到数据>>>>>");
        final StreamingFileSink<String> sink = StreamingFileSink
                .forRowFormat(new Path("hdfs://hadoop-single:8020/tk4"), new SimpleStringEncoder<String>("UTF-8"))
                .withRollingPolicy(
                        DefaultRollingPolicy.builder()
                                .withRolloverInterval(TimeUnit.SECONDS.toMillis(15))
                                .withInactivityInterval(TimeUnit.SECONDS.toMillis(5))
                                .withMaxPartSize(1024 * 1024 * 1024)
                                .build())
                .build();

        xt6_late.map(x -> x.toString()).addSink(sink);

        env.execute();
    }
}
