package net.bwie.realtime.jtp.dws.douyin.log.job;

import com.alibaba.fastjson.JSON;
import net.bwie.realtime.jtp.dws.douyin.log.bean.EventLog6;
import net.bwie.realtime.jtp.dws.douyin.log.bean.OrderSourceMetric3;
import net.bwie.realtime.jtp.dws.douyin.log.functions.OrderSourceRatioWindowFunction3;
import net.bwie.realtime.jtp.utils.DorisUtil;
import net.bwie.realtime.jtp.utils.KafkaUtil;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.aggregation.SumAggregationFunction;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.SlidingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;

/**
 * Flink streaming job: computes the per-anchor order-source ratio from live
 * event logs ("下单" / order events) over a 5s sliding window (1s slide) and
 * writes the formatted JSON metrics to Doris.
 *
 * <p>Pipeline: Kafka (ods_live_event_log) → parse → filter order events →
 * keyBy anchorId → sliding window → {@link OrderSourceRatioWindowFunction3} →
 * JSON string → Doris (douyin_realtime_report.dws_order_source_ratio).
 */
public class DouYinOrderSourceRatio3 {
    public static void main(String[] args) throws Exception {
        // 1. Initialize the Flink execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);          // parallelism 1 for testing; tune for production
        env.enableCheckpointing(3000L); // checkpoint every 3s for fault tolerance

        // 2. Read the Kafka source (order events live in ods_live_event_log)
        DataStream<String> kafkaDataStream = KafkaUtil.consumerKafka(env, "ods_live_event_log");

        // 3. Processing: parse → filter → window aggregate → format
        DataStream<String> resultStream = handle(kafkaDataStream);

        // 4. Print results for local debugging
        resultStream.print("订单来源占比指标→");

        // 5. Sink the results to Doris
        DorisUtil.saveToDoris(
                resultStream,
                "douyin_realtime_report",  // database name
                "dws_order_source_ratio"   // table name
        );

        // 6. Submit the job.
        // FIX: job name previously said "...Monitor6", inconsistent with this
        // "3" pipeline — corrected so monitoring dashboards match the class.
        env.execute("DouYinOrderSourceRatioMonitor3");
    }

    /**
     * Builds the processing pipeline: parses raw Kafka JSON into
     * {@link EventLog6}, keeps only order ("下单") events, aggregates them per
     * anchor over a 5s/1s sliding processing-time window, and serializes the
     * resulting {@link OrderSourceMetric3} records as JSON strings.
     *
     * @param stream raw JSON event strings from Kafka
     * @return a stream of JSON-formatted order-source ratio metrics
     */
    private static DataStream<String> handle(DataStream<String> stream) {
        // Step 1: parse Kafka JSON.
        // FIX: JSON.parseObject was previously uncaught — one malformed
        // message would fail the whole job. Parse errors now yield null,
        // which the existing null filter below drops.
        SingleOutputStreamOperator<EventLog6> parsedStream = stream
                .map(jsonStr -> {
                    try {
                        return JSON.parseObject(jsonStr, EventLog6.class);
                    } catch (Exception ignored) {
                        // Malformed record: drop instead of crashing the job.
                        return null;
                    }
                })
                .filter(event -> event != null);  // drop nulls (incl. parse failures)

        // Step 2: keep only "下单" (order) events; equals() on the constant
        // is null-safe against a missing eventType field.
        SingleOutputStreamOperator<EventLog6> orderEventStream = parsedStream
                .filter(event -> "下单".equals(event.getEventType()));

        // Step 3: key by anchor ID, then apply a sliding processing-time window
        WindowedStream<EventLog6, Long, TimeWindow> windowStream = orderEventStream
                .keyBy(EventLog6::getAnchorId)  // key: anchor ID
                .window(SlidingProcessingTimeWindows.of(
                        Time.seconds(5),  // window size: 5 seconds
                        Time.seconds(1)   // slide: 1 second
                ));

        // Step 4: per-window ratio computation via the custom window function
        SingleOutputStreamOperator<OrderSourceMetric3> metricStream = windowStream
                .apply(new OrderSourceRatioWindowFunction3());

        // Step 5: format each metric as a JSON string; null fields fall back
        // to empty string / zero so the output schema is always complete.
        SingleOutputStreamOperator<String> resultStream = metricStream
                .map(metric -> String.format(
                        "{\"window_start_time\":\"%s\",\"window_end_time\":\"%s\",\"cur_date\":\"%s\",\"anchor_id\":\"%d\",\"source_type\":\"%s\",\"order_count\":\"%d\",\"total_order_count\":\"%d\",\"order_ratio\":\"%.2f\"}",
                        metric.getWindowStartTime() != null ? metric.getWindowStartTime() : "",
                        metric.getWindowEndTime() != null ? metric.getWindowEndTime() : "",
                        metric.getCurDate() != null ? metric.getCurDate() : "",
                        metric.getAnchorId() != null ? metric.getAnchorId() : 0,
                        metric.getSourceType() != null ? metric.getSourceType() : "",
                        metric.getOrderCount() != null ? metric.getOrderCount() : 0,
                        metric.getTotalOrderCount() != null ? metric.getTotalOrderCount() : 0,
                        metric.getOrderRatio() != null ? metric.getOrderRatio() : 0.00
                ));

        return resultStream;
    }
}
