package com.flinkBigWork;

import com.flinkBigWork.entity.Entity;
import com.flinkBigWork.util.OneSecondAggregate2;
import com.flinkBigWork.util.OneSecondRes2;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;

import java.time.Duration;
import java.time.LocalTime;
import java.time.LocalDateTime;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
import java.util.Properties;
import java.util.concurrent.TimeUnit;

/**
 * Main 类
 *
 * @author 熊苏鹏1
 * @date 2025/3/4 11:31
 * @description 启动类
 */
/**
 * Flink streaming job: consumes CSV records from Kafka, parses each line into an
 * {@link Entity}, assigns event-time timestamps from the record's own time field,
 * and aggregates the stream over 1-second tumbling event-time windows.
 */
public class Main {

    /** Kafka topic this job consumes from. */
    private static final String TOPIC = "foo";

    /** Minimum number of comma-separated fields expected in one CSV record. */
    private static final int FIELD_COUNT = 10;

    /** Cached formatter — DateTimeFormatter is immutable and thread-safe, so it is safe to share. */
    private static final DateTimeFormatter TIME_FORMATTER =
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
//        env.setParallelism(6); // keep commented so the parallelism set in the web UI takes effect
        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(
                333, // number of restart attempts
                org.apache.flink.api.common.time.Time.of(10, TimeUnit.SECONDS) // delay between attempts
        ));

        // Kafka consumer configuration
        Properties props = new Properties();
        props.put("bootstrap.servers", "192.168.88.101:19092,192.168.88.101:29092,192.168.88.101:39092");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "2"); // consumer group id
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        // "latest": start from the newest messages when there is no committed offset
        // (the original comment claimed "earliest", but the configured value wins)
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");

        // Kafka source reading raw CSV lines
        FlinkKafkaConsumer<String> source = new FlinkKafkaConsumer<>(
                TOPIC,                     // Kafka topic
                new SimpleStringSchema(),  // message deserializer
                props                      // consumer configuration
        );

        // Kafka source -> parsed Entity stream with event-time watermarks
        DataStream<Entity> stockStream = env.addSource(source)
                .filter(value -> !value.startsWith("id")) // drop the CSV header row
                .map((MapFunction<String, Entity>) value -> {
                    String[] fields = value.split(",");
                    if (fields.length < FIELD_COUNT) {
                        System.err.println("Invalid input data: " + value);
                        return null;
                    }
                    try {
                        // Convert the CSV line into the entity class
                        return convertToEntity(value);
                    } catch (NumberFormatException e) {
                        System.err.println("Number format error in data: " + value);
                        return null;
                    }
                })
                .filter(value -> value != null) // discard malformed records
                // Watermark strategy: tolerate records up to 1 second out of order;
                // timestamps are extracted from the entity's own timestamp field.
                .assignTimestampsAndWatermarks(WatermarkStrategy
                        .<Entity>forBoundedOutOfOrderness(Duration.ofSeconds(1))
                        .withTimestampAssigner(new SerializableTimestampAssigner<Entity>() {
                            @Override
                            public long extractTimestamp(Entity entity, long recordTimestamp) {
                                return parseTimeToTimestamp(entity.getTimestamp());
                            }
                        }));

        // Aggregate over 1-second tumbling event-time windows across the whole stream
        SingleOutputStreamOperator<String> result2 = stockStream
                .windowAll(TumblingEventTimeWindows.of(Time.seconds(1)))
                .aggregate(new OneSecondAggregate2(), new OneSecondRes2());
        result2.print(); // sink: without one the windowed result is never materialized

        // Submit the job graph. Without this call the pipeline above is only
        // declared, never executed — the original program silently did nothing.
        env.execute("one-second-window-aggregation");
    }

    /**
     * Parses one CSV line into an {@link Entity}.
     *
     * <p>Expected field order: id, studentId, studentName, province, windowId,
     * dish, price, timestamp, costTime, queueLength.
     *
     * @param data a comma-separated record with at least {@value #FIELD_COUNT} fields
     * @return the populated entity
     * @throws IllegalArgumentException if the line has fewer than {@value #FIELD_COUNT} fields
     * @throws NumberFormatException    if a numeric field cannot be parsed
     */
    public static Entity convertToEntity(String data) {
        String[] parts = data.split(",");
        if (parts.length < FIELD_COUNT) {
            // Fail fast with a descriptive message instead of an opaque ArrayIndexOutOfBoundsException
            throw new IllegalArgumentException(
                    "Expected at least " + FIELD_COUNT + " fields, got " + parts.length + ": " + data);
        }
        Entity entity = new Entity();
        entity.setId(Integer.parseInt(parts[0]));
        entity.setStudentId(parts[1]);
        entity.setStudentName(parts[2]);
        entity.setProvince(parts[3]);
        entity.setWindowId(parts[4]);
        entity.setDish(parts[5]);
        // Price arrives as a decimal string and is truncated toward zero.
        // NOTE(review): confirm truncation (not rounding) is the intended behavior.
        entity.setPrice((int) Double.parseDouble(parts[6]));
        entity.setTimestamp((parts[7]));
        entity.setCostTime(Integer.parseInt(parts[8]));
        entity.setQueueLength(Integer.parseInt(parts[9]));
        return entity;
    }

    /**
     * Converts a "yyyy-MM-dd HH:mm:ss" time string to epoch milliseconds,
     * interpreting the time as UTC.
     *
     * @param timeStr the time string to parse
     * @return epoch milliseconds, or {@code 0L} if the string cannot be parsed
     *         (NOTE(review): 0 maps bad records to 1970 — consider dropping them instead)
     */
    public static long parseTimeToTimestamp(String timeStr) {
        try {
            LocalDateTime dateTime = LocalDateTime.parse(timeStr, TIME_FORMATTER);
            return dateTime.toInstant(ZoneOffset.UTC).toEpochMilli();
        } catch (DateTimeParseException e) {
            System.err.println("Date format error in data: " + timeStr);
            return 0L;
        }
    }
}
