package com.atguigu.bigdata.gmall.realtime.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.bigdata.gmall.realtime.app.BaseAppV1;
import com.atguigu.bigdata.gmall.realtime.bean.TradeProvinceOrderWindow;
import com.atguigu.bigdata.gmall.realtime.common.Constant;
import com.atguigu.bigdata.gmall.realtime.function.DimAsyncFunction;
import com.atguigu.bigdata.gmall.realtime.util.AtguiguUtil;
import com.atguigu.bigdata.gmall.realtime.util.FlinkSinkUtil;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.functions.RichFilterFunction;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.math.BigDecimal;
import java.time.Duration;
import java.util.HashSet;
import java.util.concurrent.TimeUnit;

/**
 * @Author lzc
 * @Date 2022/10/21 08:56
 */
/**
 * DWS job: per province and per 5-second event-time window, counts distinct
 * orders and sums the order amount, enriches each result with the province
 * name from the dimension table, and writes the rows to ClickHouse.
 *
 * Pipeline: Kafka (dwd trade order detail) -> de-dup by detail id -> POJO
 * -> keyBy(province) + tumbling window aggregation -> async dim join
 * -> ClickHouse sink.
 */
public class Dws_10_DwsTradeProvinceOrderWindow extends BaseAppV1 {
    public static void main(String[] args) {
        new Dws_10_DwsTradeProvinceOrderWindow().init(
            4010,
            2,
            "Dws_10_DwsTradeProvinceOrderWindow",
            Constant.TOPIC_DWD_TRADE_ORDER_DETAIL
        );
    }
    
    @Override
    protected void handle(StreamExecutionEnvironment env,
                          DataStreamSource<String> stream) {
        // 1. De-duplicate by order-detail id: keep only the first record per id.
        //    (The right-side columns of the dwd left join are not needed here.)
        SingleOutputStreamOperator<JSONObject> distinctedStream = distinctByOrderDetailId(stream);
        // 2. Parse each JSON record into a POJO
        SingleOutputStreamOperator<TradeProvinceOrderWindow> beanStream = parseToPojo(distinctedStream);
        // 3. Windowed aggregation per province
        SingleOutputStreamOperator<TradeProvinceOrderWindow> beanStreamWithoutDim = windowAndAgg(beanStream);
        // 4. Enrich with dimension data: side cache + async I/O optimization
        SingleOutputStreamOperator<TradeProvinceOrderWindow> resultStream = joinDim(beanStreamWithoutDim);
        // 5. Sink the final result to ClickHouse
        writeToClickHouse(resultStream);
    }
    
    /** Writes the aggregated province beans to the ClickHouse result table. */
    private void writeToClickHouse(SingleOutputStreamOperator<TradeProvinceOrderWindow> resultStream) {
        resultStream.addSink(FlinkSinkUtil.getClickHouseSink("dws_trade_province_order_window", TradeProvinceOrderWindow.class));
    }
    
    /**
     * Asynchronously looks up the province name in {@code dim_base_province}
     * by province id and sets it on each bean. Uses unordered async I/O with
     * a 120-second lookup timeout; results may be emitted out of order.
     */
    private SingleOutputStreamOperator<TradeProvinceOrderWindow> joinDim(SingleOutputStreamOperator<TradeProvinceOrderWindow> stream) {
        return AsyncDataStream.unorderedWait(
            stream,
            new DimAsyncFunction<TradeProvinceOrderWindow>() {
                @Override
                public String table() {
                    return "dim_base_province";
                }
                
                @Override
                public String id(TradeProvinceOrderWindow input) {
                    return input.getProvinceId();
                }
                
                @Override
                public void addDim(JSONObject dim, TradeProvinceOrderWindow input) {
                    // dimension column names are stored upper-cased
                    input.setProvinceName(dim.getString("NAME"));
                }
            },
            120,
            TimeUnit.SECONDS
        );
    }
    
    /**
     * Keys the stream by province id and aggregates it in 5-second tumbling
     * event-time windows (watermarks tolerate 3 s of out-of-orderness).
     * The reduce step incrementally merges order-id sets and sums amounts;
     * the window function fills in the window bounds, the distinct order
     * count (set size) and a fresh processing-time ts.
     */
    private SingleOutputStreamOperator<TradeProvinceOrderWindow> windowAndAgg(SingleOutputStreamOperator<TradeProvinceOrderWindow> beanStream) {
        return beanStream
            .assignTimestampsAndWatermarks(
                WatermarkStrategy
                    .<TradeProvinceOrderWindow>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                    // NOTE: removed a leftover debug System.out.println here —
                    // it executed once per record on the hot path and flooded
                    // the task-manager logs
                    .withTimestampAssigner((bean, ts) -> bean.getTs())
            )
            .keyBy(TradeProvinceOrderWindow::getProvinceId)
            .window(TumblingEventTimeWindows.of(org.apache.flink.streaming.api.windowing.time.Time.seconds(5)))
            .reduce(
                new ReduceFunction<TradeProvinceOrderWindow>() {
                    @Override
                    public TradeProvinceOrderWindow reduce(TradeProvinceOrderWindow value1,
                                                           TradeProvinceOrderWindow value2) throws Exception {
                        // Merge value2's order ids into value1's set so the
                        // distinct order count can be read off the set size later
                        value1.getOrderIdSet().addAll(value2.getOrderIdSet());
                        value1.setOrderAmount(value1.getOrderAmount().add(value2.getOrderAmount()));
                        return value1;
                    }
                },
                new ProcessWindowFunction<TradeProvinceOrderWindow, TradeProvinceOrderWindow, String, TimeWindow>() {
                    @Override
                    public void process(String provinceId,
                                        Context ctx,
                                        Iterable<TradeProvinceOrderWindow> elements,
                                        Collector<TradeProvinceOrderWindow> out) throws Exception {
                        // With an incremental reduce the iterable holds exactly
                        // one pre-aggregated element
                        TradeProvinceOrderWindow bean = elements.iterator().next();
                        
                        bean.setStt(AtguiguUtil.toDatTime(ctx.window().getStart()));
                        bean.setEdt(AtguiguUtil.toDatTime(ctx.window().getEnd()));
                        
                        // Distinct order count = number of unique order ids collected
                        bean.setOrderCount((long) bean.getOrderIdSet().size());
                        
                        bean.setTs(System.currentTimeMillis());
                        
                        out.collect(bean);
                    }
                }
            );
    }
    
    /**
     * Maps each deduplicated JSON order-detail record into a
     * {@link TradeProvinceOrderWindow} bean. The order id is seeded into a
     * set so downstream aggregation can count distinct orders; ts is
     * converted from seconds to milliseconds for event-time semantics.
     */
    private SingleOutputStreamOperator<TradeProvinceOrderWindow> parseToPojo(
        SingleOutputStreamOperator<JSONObject> stream) {
        return stream
            .map(new MapFunction<JSONObject, TradeProvinceOrderWindow>() {
                @Override
                public TradeProvinceOrderWindow map(JSONObject value) throws Exception {
                    String provinceId = value.getString("province_id");
                    HashSet<String> orderIdSet = new HashSet<>();
                    orderIdSet.add(value.getString("order_id"));
                    
                    BigDecimal orderAmount = value.getBigDecimal("split_total_amount");
                    // Kafka ts is in seconds; Flink event time needs milliseconds
                    long ts = value.getLong("ts") * 1000;
                    
                    // stt/edt/provinceName are filled in by later operators;
                    // orderCount starts at 0 and is derived from the id set
                    return new TradeProvinceOrderWindow(
                        "", "",
                        provinceId, "",
                        orderIdSet, 0L, orderAmount,
                        ts
                    );
                }
            });
    }
    
    /**
     * De-duplicates the raw stream by order-detail id, keeping only the FIRST
     * record seen per id (the later left-join enriched copies carry no fields
     * this job needs). Keyed ValueState with a 10 s TTL bounds state size,
     * since duplicates for one detail id arrive close together.
     */
    private SingleOutputStreamOperator<JSONObject> distinctByOrderDetailId(DataStreamSource<String> stream) {
        return stream
            .map(JSON::parseObject)
            .keyBy(obj -> obj.getString("id"))
            .filter(new RichFilterFunction<JSONObject>() {
                
                // null => this detail id has not been seen yet
                private ValueState<Boolean> isFirstState;
                
                @Override
                public void open(Configuration parameters) throws Exception {
                    ValueStateDescriptor<Boolean> desc = new ValueStateDescriptor<>("isFirstState", Boolean.class);
                    // TTL so per-id state is dropped after 10 s instead of
                    // growing without bound; newBuilder replaces the
                    // deprecated `new StateTtlConfig.Builder(...)` form
                    desc.enableTimeToLive(StateTtlConfig.newBuilder(Time.seconds(10)).build());
                    isFirstState = getRuntimeContext().getState(desc);
                }
                
                @Override
                public boolean filter(JSONObject value) throws Exception {
                    if (isFirstState.value() == null) {
                        // First occurrence of this detail id: mark and keep it
                        isFirstState.update(false);
                        return true;
                    }
                    // Duplicate — drop
                    return false;
                }
            });
    }
}
/*
从 Kafka 读取业务数据，筛选订单表数据，统计各省份各窗口订单数和订单金额，

1. 数据源:
    dwd 层下单明细
        粒度是详情粒度
        
2. 按照 详情 id 进行去重

3. 订单数
    详情id      订单id    省份id       金额
    1            1         1          100
    2            1         1          200
    3            1         1          250
    4            2         1          250
    4            2         1          250
    
    
    按照省份 id, 统计订单数
            sql:   count(distinct order_id)
            
            流中怎么实现去重:
               它们属于同一个省份, 事件时间又一样, 所以分组开窗聚合的时候肯定会进到同一个窗口内
               
               用普通的 set 集合存储订单 id, 等到最后的聚合结果出来的时候, 只要查看
               set集合的长度
               
 ....

 */
