package org.example.realtime.traffic.dwd.log.job;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import org.example.realtime.jtp.common.utils.JdbcUtil;
import org.example.realtime.jtp.common.utils.KafkaUtil;

import java.time.Duration;
/**
 * @Title: TransportationWindowDwsJob
 * @Author Lianzy
 * @Package org.example.realtime.traffic.dwd.log.job
 * @Date 2025/5/29 18:07
 * @description Per-road congestion aggregation: consumes JSON traffic events from Kafka,
 *              assigns event-time from the "ts" field, tumbles 10-second event-time windows
 *              keyed by road id, and writes (road_id, severe-congestion count, average speed)
 *              to ClickHouse. NOTE(review): the original description said "top-10 congested
 *              roads", but no top-N ranking is implemented here — confirm intended scope.
 */
public class TransportationWindowDwsJob {

    /**
     * Speed below which a vehicle counts toward the severe-congestion total.
     * Taken from the previously commented-out intent (speed &lt; 60); units are
     * presumably km/h — TODO confirm against the event producer.
     */
    private static final double SEVERE_SPEED_THRESHOLD = 60.0;

    public static void main(String[] args) throws Exception {
        // 1. Execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // 2. Source: raw JSON traffic events from Kafka
        DataStream<String> kafkaDataStream = KafkaUtil.consumerKafka(env, "traffic_events");
        kafkaDataStream.print();

        // 3./4. Transformation + sink (windowed aggregation, then ClickHouse upsert)
        handle(kafkaDataStream);

        // 5. Trigger execution
        env.execute("TransportationWindowDwsJob");
    }

    /**
     * Builds the aggregation pipeline: event-time watermarks from "ts", keyBy "roadId",
     * 10-second tumbling windows producing "roadId,severeCount,avgSpeed" rows, and a
     * ClickHouse upsert sink.
     *
     * @param kafkaDataStream raw JSON events; each record is expected to contain
     *                        "ts" (epoch millis), "roadId" and "speed" fields
     */
    private static void handle(DataStream<String> kafkaDataStream) {
        // Zero allowed out-of-orderness: watermark tracks the max observed timestamp exactly.
        SingleOutputStreamOperator<String> watermarks = kafkaDataStream.assignTimestampsAndWatermarks(
                WatermarkStrategy.<String>forBoundedOutOfOrderness(Duration.ofSeconds(0))
                        .withTimestampAssigner(new SerializableTimestampAssigner<String>() {
                            @Override
                            public long extractTimestamp(String element, long recordTimestamp) {
                                // NOTE(review): NPEs if "ts" is missing — assumes upstream always sets it.
                                return JSON.parseObject(element).getLong("ts");
                            }
                        })
        );

        KeyedStream<String, String> byRoad = watermarks.keyBy(
                json -> JSON.parseObject(json).getString("roadId")
        );

        WindowedStream<String, String, TimeWindow> windowStream = byRoad.window(
                TumblingEventTimeWindows.of(Time.seconds(10))
        );

        SingleOutputStreamOperator<String> process = windowStream.process(
                new ProcessWindowFunction<String, String, String, TimeWindow>() {
                    @Override
                    public void process(String roadId, Context context, Iterable<String> elements,
                                        Collector<String> out) {
                        // roadId is the window key — no need to re-parse it from the first payload.
                        long severeCount = 0;   // vehicles below SEVERE_SPEED_THRESHOLD
                        long vehicleCount = 0;  // all vehicles in the window (denominator for the average)
                        double totalSpeed = 0.0;
                        for (String element : elements) {
                            // Parse each record once and reuse the extracted speed.
                            double speed = JSON.parseObject(element).getDouble("speed");
                            if (speed < SEVERE_SPEED_THRESHOLD) {
                                severeCount++;
                            }
                            vehicleCount++;
                            totalSpeed += speed;
                        }
                        // BUG FIX: the sink column is "avgspeed" but the original emitted the raw
                        // speed SUM, and "severeCount" counted every vehicle. Emit the true average
                        // and the threshold-based severe count instead.
                        double avgSpeed = vehicleCount == 0 ? 0.0 : totalSpeed / vehicleCount;
                        out.collect(roadId + "," + severeCount + "," + avgSpeed);
                    }
                });

        JdbcUtil.sinkToClickhouseUpsert(process, "insert into traffic_monitoring.dwd_congestion (" +
                " road_id,severeCount,avgspeed)\n" +
                "values (?,?,?)");
    }
}
