package com.bw.yk03;

import com.alibaba.fastjson.JSON;

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.time.Duration;
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.HashSet;
import java.util.Properties;

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

public class FlinkDWS {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        //5）、（DWS层）编写Flink DataStream程序，实时消费DWD宽表数据，进行每日下单数据实时统计，按照省份、商品维度分组，统计销售订单总数、销售额和商品数量：（8分）
        //6）、（DWS层）自定义窗口延迟关闭时间，及延迟数据侧边流输出，保存Kafka消息队列和同时写入数据库表中；（7分）
        //7）、（DWS层）clickhouse数据库创建表，保存下单汇总聚合宽表数据，采用upsert方式，主键不存在插入数据，主键相同时更新数据；（7分）
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "hadoop-single:9092");
        properties.setProperty("group.id", "group1");
        DataStream<String> stream = env.addSource(new FlinkKafkaConsumer<>("dwd_db_orders", new SimpleStringSchema(), properties));
        SingleOutputStreamOperator<OrderWide> mapDS = stream.map(x -> JSON.parseObject(x, OrderWide.class))
                .assignTimestampsAndWatermarks(WatermarkStrategy
                        .<OrderWide>forBoundedOutOfOrderness(Duration.ofSeconds(0))
                        .withTimestampAssigner((event, timestamp) -> {
                            SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
                            try {
                                return sdf.parse(event.getOrderInfo().getCreate_time()).getTime();
                            } catch (ParseException e) {
                                throw new RuntimeException(e);
                            }
                        }));

        //按照省份id,省份名称，商品id,商品名称分组
        //窗口开始时间，结束时间，省份id,省份名称，商品id,商品名称,销售订单总数、销售额和商品数量
        OutputTag<OrderWide> tag = new OutputTag<OrderWide>("late-data") {
        };
        SingleOutputStreamOperator<DwsBean> xt567 = mapDS.keyBy(new KeySelector<OrderWide, Tuple4<String, String, String, String>>() {
                    @Override
                    public Tuple4<String, String, String, String> getKey(OrderWide orderWide) throws Exception {
                        Tuple4<String, String, String, String> o = new Tuple4<>(orderWide.getOrderInfo().getProvince_id() + "", orderWide.getBaseProvince().getName(),
                                orderWide.getOrderDetail().getSku_id() + "", orderWide.getOrderDetail().getSku_name());
                        return o;
                    }
                }).window(TumblingEventTimeWindows.of(Time.seconds(10)))
                .sideOutputLateData(tag)
                .process(new ProcessWindowFunction<OrderWide, DwsBean, Tuple4<String, String, String, String>, TimeWindow>() {
                    @Override
                    public void process(Tuple4<String, String, String, String> t, ProcessWindowFunction<OrderWide, DwsBean, Tuple4<String, String, String, String>, TimeWindow>.Context context, Iterable<OrderWide> iterable, Collector<DwsBean> collector) throws Exception {
                        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
                        String stt = sdf.format(context.window().getStart());
                        String edt = sdf.format(context.window().getEnd());
                        //计算下单数 、 下单金额、下单商品数
                        //001  1
                        //001  2
                        //001  3
                        HashSet<String> orderIds = new HashSet<>();//TODO 实际上不需要对订单id去重，因为按照商品分组后 订单id再有重复,
                        Double orderAmount = 0.0;
                        Long skuCnts = 0l;
                        for(OrderWide ow: iterable){
                            orderIds.add(ow.getOrderDetail().getOrder_id()+"");
                            orderAmount = orderAmount + ow.getOrderDetail().getSplit_total_amount().doubleValue();
                            skuCnts = skuCnts + ow.getOrderDetail().getSku_num();
                        }
                        collector.collect(new DwsBean(stt,edt,t.f0,t.f1,t.f2,t.f3,orderIds.size(),orderAmount,skuCnts));
                    }
                });

        xt567.print();
        xt567.addSink(new FlinkDWSSinkClickhouse());
        env.execute();
    }
}
