package com.alison.datastream.exactlyonce;

import cn.hutool.core.io.FileUtil;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.runtime.state.FunctionInitializationContext;
import org.apache.flink.runtime.state.FunctionSnapshotContext;
import org.apache.flink.streaming.api.checkpoint.CheckpointedFunction;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.RichSourceFunction;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.util.ShutdownHookUtil;

import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.time.Instant;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Supplier;

/**
 * Demo Flink job: a custom unbounded source generates JSON events (timestamp,
 * sequence number, random user/item/category ids) and writes them to the Kafka
 * topic {@code flink_input_topic} through {@link FlinkKafkaProducer}.
 *
 * @Author alison
 * @Date 2024/4/10 1:02
 * @Version 1.0
 */
public class E1_FlinkKafkaProducer {

    // Flink job name. NOTE(review): the original used FlinkKafkaProducer.class.getSimpleName(),
    // naming the job after the Kafka connector class instead of this driver class —
    // presumably a copy-paste slip; use this class's own simple name.
    private static String taskName = E1_FlinkKafkaProducer.class.getSimpleName();

    // Shared RNG for the generated ids. Accessed only from the single source
    // thread here, so plain Random is sufficient.
    public static final Random random = new Random();

    // NOTE(review): appears unused in this file; kept for compatibility in case
    // other code in the package references it — confirm before deleting.
    final static String path = "D:\\workspace\\lab\\learnbigdata\\learnflink\\flink-datastream\\src\\main\\java\\com\\alison\\datastream\\exactlyonce\\";

    /**
     * Builds and runs the pipeline: {@code MySource -> FlinkKafkaProducer("flink_input_topic")}.
     *
     * @param args unused
     * @throws Exception propagated from {@code env.execute}
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Kafka producer (sink) configuration.
        Properties sinkProperties = new Properties();
        sinkProperties.setProperty("bootstrap.servers", "192.168.56.101:9092");
        sinkProperties.setProperty("group.id", "kafkaProducerGroup");
        sinkProperties.setProperty("client.id.prefix", "flinkInputTopicClient");

        // This constructor defaults to AT_LEAST_ONCE; use the overload taking
        // FlinkKafkaProducer.Semantic.EXACTLY_ONCE for transactional writes.
        FlinkKafkaProducer<String> kafkaSink =
                new FlinkKafkaProducer<>("flink_input_topic", new SimpleStringSchema(), sinkProperties);

        SourceFunction<String> mySource = new MySource();
        env.addSource(mySource).addSink(kafkaSink).name("kafkaSink");
        env.execute(taskName);
    }

    /**
     * Unbounded source that emits one JSON record per loop iteration with a
     * monotonically increasing {@code seq} and random user/item/category ids.
     * Emits at maximum rate — there is no throttling in the loop.
     */
    private static class MySource extends RichSourceFunction<String> {
        // volatile: cancel() is invoked from a different thread than run();
        // without it the running loop may never observe the flag flip.
        private volatile boolean isRunning = true;
        // Primitive long avoids the per-iteration autoboxing of the original boxed Long.
        private long count = 1L;

        @Override
        public void run(SourceContext<String> ctx) throws Exception {
            // Loop-invariant; no need to look it up per record.
            TimeZone tz = TimeZone.getTimeZone("Asia/Shanghai");
            while (isRunning) {
                // Single clock read so the timestamp and the offset lookup agree.
                // Shifting epoch millis by the zone offset makes Instant.toString()
                // render Asia/Shanghai wall-clock time (albeit with a 'Z' suffix).
                long now = System.currentTimeMillis();
                Instant instant = Instant.ofEpochMilli(now + tz.getOffset(now));
                String outline = String.format(
                        "{\"ts\": \"%s\",\"seq\": \"%d\",\"user_id\": \"%s\", \"item_id\":\"%s\", \"category_id\": \"%s\"}",
                        instant.toString(),
                        count,
                        random.nextInt(10),
                        random.nextInt(100),
                        random.nextInt(1000)
                );
                count++;
                ctx.collect(outline);
            }
        }

        @Override
        public void cancel() {
            isRunning = false;
        }
    }

}
