package com.flinkBigWork.alltask;


import com.flinkBigWork.entity.Entity;
import com.flinkBigWork.util.*;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import redis.clients.jedis.Jedis;

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.time.Duration;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
import java.util.Date;
import java.util.Properties;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;


// Reads trade records from Kafka (topic "foo"), then performs data cleaning,
// conversion to Entity objects, event-time timestamp/watermark assignment, and
// four parallel 60-second tumbling-window aggregations.

// redis数据库0是本地使用kafka搞的，这里只需要启动kafka集群
// redis数据库1是在node1:8081,即flink网页上面搞的，这里需要启动kafka集群和flink集群

// 对了如果修改了flink代码的话一定要记得把旧的jar包给删掉，双击maven中的package按键重新生成jar包
// 还有就是每次在node1:8081,即flink网页上面提交了任务后，不用的时候记得把这个任务给结束掉，
// 不然下回启动flink集群进入该页面时还是会执行
// 在node1:8081,即flink网页上面填写提交任务的参数时，第二个参数写1
//离线测试只需要开启redis就行

public class flink_test {

    /**
     * Timestamp pattern of incoming records, e.g. "2024-01-01 12:00:00".
     * DateTimeFormatter is thread-safe, so it is safe to share as a constant
     * (unlike the SimpleDateFormat it replaces).
     */
    private static final DateTimeFormatter TS_FORMAT =
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

    /**
     * Entry point: builds and executes the Flink streaming job.
     *
     * @param args Flink CLI / web-UI arguments (parallelism etc. — see notes above)
     * @throws Exception if job construction or execution fails
     */
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
//        env.setParallelism(1);           // keep commented out so the parallelism submitted on the web UI takes effect
        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(
                333, // number of restart attempts
                org.apache.flink.api.common.time.Time.of(10, TimeUnit.SECONDS) // delay between attempts
        ));

        String topic = "foo";

        // 2. Kafka configuration
        Properties props = new Properties();
        props.put("bootstrap.servers", "192.168.88.101:19092,192.168.88.101:29092,192.168.88.101:39092");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "2"); // consumer group id
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest"); // start from the LATEST offset when no committed offset exists

        // 3. Create the Kafka consumer
        FlinkKafkaConsumer<String> source = new FlinkKafkaConsumer<>(
                topic,                     // Kafka topic
                new SimpleStringSchema(),  // message decoder
                props                      // Kafka configuration
        );

        // Throughput bookkeeping shared by the (possibly parallel) map instances:
        // lastReportMillis = wall-clock time of the last throughput report,
        // recordCounter    = records processed since that report.
        AtomicLong lastReportMillis = new AtomicLong(System.currentTimeMillis());
        AtomicLong recordCounter = new AtomicLong(0L);

        DataStream<Entity> stockStream = env.addSource(source)
                .filter(value -> !value.startsWith("time")) // drop CSV header rows
                .map((MapFunction<String, Entity>) value -> {
                    String[] fields = value.split(",");
                    // convertToEntity reads indices 0..9, so at least 10 fields are required
                    // (the previous check of < 9 allowed 9-field rows to throw
                    // ArrayIndexOutOfBoundsException downstream).
                    if (fields.length < 10) {
                        System.err.println("Invalid input data: " + value);
                        return null;
                    }
                    try {
                        long nowMillis = System.currentTimeMillis();
                        recordCounter.getAndIncrement();
                        // Every 1000 records, report throughput (records/second) once.
                        if (recordCounter.get() >= 1000) {
                            recordCounter.set(0);
                            long elapsedMillis = nowMillis - lastReportMillis.get();
                            long recordsPerSecond = 0;
                            if (elapsedMillis != 0) { // guard against division by zero
                                // 1000 records in elapsedMillis ms -> records per second
                                recordsPerSecond = 1000L * 1000L / elapsedMillis;
                            }
                            // try-with-resources guarantees the connection is returned
                            // even if the db-select (or the push) fails.
                            try (Jedis jedis = jedisPool.getJedis()) {
                                jedisPool.select_db0(jedis); // switch to redis db 0
//                                jedis.rpush("speed", String.valueOf(recordsPerSecond));
                            }
                            System.out.println(recordsPerSecond);
                            lastReportMillis.set(nowMillis);
                        }
                        return convertToEntity(value);
                    } catch (NumberFormatException e) {
                        System.err.println("Number format error in data: " + value);
                        return null; // malformed records are dropped by the filter below
                    }
                })
                .filter(value -> value != null)
                .assignTimestampsAndWatermarks(WatermarkStrategy
                        .<Entity>forBoundedOutOfOrderness(Duration.ofSeconds(1)) // tolerate up to 1s of out-of-order events
                        .withTimestampAssigner(
                                (SerializableTimestampAssigner<Entity>) (entity, previousTs) ->
                                        convertToTimestamp(entity.getTimestamp())));

        // Four independent 60-second tumbling event-time window aggregations.
        SingleOutputStreamOperator<String> result1 = stockStream
                .windowAll(TumblingEventTimeWindows.of(Time.seconds(60)))
                .aggregate(new OneSecondAggregate1(), new OneSecondRes1());
        SingleOutputStreamOperator<String> result2 = stockStream
                .windowAll(TumblingEventTimeWindows.of(Time.seconds(60)))
                .aggregate(new OneSecondAggregate2(), new OneSecondRes2());
        SingleOutputStreamOperator<String> result3 = stockStream
                .windowAll(TumblingEventTimeWindows.of(Time.seconds(60)))
                .aggregate(new OneSecondAggregate3(), new OneSecondRes3());
        SingleOutputStreamOperator<String> result4 = stockStream
                .windowAll(TumblingEventTimeWindows.of(Time.seconds(60)))
                .aggregate(new OneSecondAggregate4(), new OneSecondRes4());

        env.execute();
    }

    /**
     * Parses a "yyyy-MM-dd HH:mm:ss" timestamp into epoch milliseconds,
     * interpreted in the system default time zone (matching the previous
     * SimpleDateFormat behaviour). Note that DateTimeFormatter parses strictly,
     * whereas SimpleDateFormat was lenient by default.
     *
     * @param dateTimeStr timestamp text, e.g. "2024-01-01 12:00:00"
     * @return epoch milliseconds, or -1 if the text cannot be parsed
     */
    public static long convertToTimestamp(String dateTimeStr) {
        try {
            return LocalDateTime.parse(dateTimeStr, TS_FORMAT)
                    .atZone(ZoneId.systemDefault())
                    .toInstant()
                    .toEpochMilli();
        } catch (DateTimeParseException e) {
            System.err.println("Error parsing date: " + dateTimeStr);
            e.printStackTrace();
            return -1; // sentinel error value, as before
        }
    }

    /**
     * Converts a "date time" string to milliseconds since midnight.
     * NOTE: intentionally ignores the date part, so results are only comparable
     * within a single day — kept for the commented-out assigner above.
     *
     * @param time "d/M/y HH:mm:ss"-style text with a single space separator
     * @return milliseconds since midnight of the (ignored) date
     */
    private static long parseTimeToSeconds(String time) {
        String[] dateTimeParts = time.split(" ");
        String[] timeParts = dateTimeParts[1].split(":");

        int hours = Integer.parseInt(timeParts[0]);
        int minutes = Integer.parseInt(timeParts[1]);
        int seconds = Integer.parseInt(timeParts[2]);

        return (hours * 3600L + minutes * 60L + seconds) * 1000L;
    }

    /**
     * Converts one comma-separated record into an {@link Entity}.
     * Expected field layout:
     * id,studentId,studentName,province,windowId,dish,price,timestamp,costTime,queueLength
     *
     * @param data one CSV record with at least 10 comma-separated fields
     * @return the populated Entity
     * @throws IllegalArgumentException if fewer than 10 fields are present
     * @throws NumberFormatException    if a numeric field cannot be parsed
     */
    public static Entity convertToEntity(String data) {
        String[] parts = data.split(",");
        if (parts.length < 10) {
            throw new IllegalArgumentException(
                    "Expected at least 10 fields, got " + parts.length + ": " + data);
        }
        Entity entity = new Entity();
        entity.setId(Integer.parseInt(parts[0]));
        entity.setStudentId(parts[1]);
        entity.setStudentName(parts[2]);
        entity.setProvince(parts[3]);
        entity.setWindowId(parts[4]);
        entity.setDish(parts[5]);
        entity.setPrice((int) Double.parseDouble(parts[6]));
        entity.setTimestamp(parts[7]);
        entity.setCostTime(Integer.parseInt(parts[8]));
        entity.setQueueLength(Integer.parseInt(parts[9]));
        return entity;
    }
}
