package org.example.dobs.demo;

import com.twitter.chill.protobuf.ProtobufSerializer;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.example.dobs.demo.proto.TestSampleOuterClass;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Base64;
import java.util.List;
import java.util.Properties;
import java.util.StringTokenizer;
import java.util.concurrent.TimeUnit;

/**
 * Demo entry point: builds a local Flink environment (with web UI),
 * registers a Kryo serializer for the generated protobuf type, wires the
 * proto-to-Kafka pipeline, and runs the job.
 */
public class LocalProtoFileDemo {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment see = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration());
        // Checkpoint every minute so the Kafka sink can participate in fault tolerance.
        see.enableCheckpointing(Time.minutes(1).toMilliseconds());

        // Protobuf messages are not Flink POJOs; serialize them via chill-protobuf's Kryo adapter.
        see.getConfig().registerTypeWithKryoSerializer(TestSampleOuterClass.TestSample.class, ProtobufSerializer.class);

        // Bug fix: the original main configured the environment but never built a
        // pipeline nor called execute(), so the program did nothing.
        proto2kafka(see);
        see.execute("local-proto-file-demo");
    }

    /**
     * Builds the demo pipeline: a custom source emitting serialized samples,
     * sunk into the Kafka topic configured in {@code local_proto_file_demo.conf}.
     *
     * @param env the execution environment to attach the pipeline to
     */
    public static void proto2kafka(StreamExecutionEnvironment env) {
        // Attach the custom data generator as the pipeline source.
        DataStream<String> sampleStream = env.addSource(new SourceDataGenerator());

        // Read Kafka connection settings from the bundled Typesafe config.
        Config conf = ConfigFactory.load("local_proto_file_demo.conf");
        Properties kafkaProps = new Properties();
        kafkaProps.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, conf.getString("kafka.bootstrap-servers"));
        String targetTopic = conf.getString("kafka.topic");

        // Write each generated record to Kafka as a plain string.
        FlinkKafkaProducer<String> kafkaSink = new FlinkKafkaProducer<>(targetTopic, new SimpleStringSchema(), kafkaProps);
        sampleStream.addSink(kafkaSink);
    }

    /**
     * Custom data source that emits one Base64-encoded, protobuf-serialized
     * {@code TestSample} per second until cancelled.
     *
     * <p>NOTE(review): {@code SourceFunction} is deprecated in recent Flink
     * versions in favor of the unified Source API; kept here for demo simplicity.
     */
    public static class SourceDataGenerator implements SourceFunction<String> {
        // volatile so a cancel() from the job-manager thread is visible to run().
        private volatile boolean isRunning = true;
        // Monotonic id assigned to each emitted sample.
        private int cnt = 0;

        // Bug fix: the original static initializer built this list as a local
        // variable and then discarded it — pure dead work at class-load time.
        // It is now retained in a field so it is actually available for use
        // (e.g. randomized payloads). Loaded once per JVM, best-effort.
        private static final List<String> WORD_LIST = loadWordList();

        /**
         * Reads whitespace-separated tokens from {@code HarryPotter1-7.txt},
         * stopping once more than 10,000 words have been collected.
         * Returns an empty list if the file cannot be read.
         */
        private static List<String> loadWordList() {
            List<String> words = new ArrayList<>();
            try (BufferedReader br = new BufferedReader(new FileReader("HarryPotter1-7.txt"))) {
                String line;
                while ((line = br.readLine()) != null) {
                    StringTokenizer tokenizer = new StringTokenizer(line.trim());
                    while (tokenizer.hasMoreTokens()) {
                        words.add(tokenizer.nextToken());
                    }
                    if (words.size() > 10000) {
                        break;
                    }
                }
            } catch (IOException e) {
                // Best-effort: the demo still runs with an empty word list.
                e.printStackTrace();
            }
            return words;
        }

        @Override
        public void run(SourceContext<String> ctx) throws Exception {
            while (isRunning) {
                cnt += 1;
                // Build a test protobuf message.
                TestSampleOuterClass.TestSample.Builder personBuilder = TestSampleOuterClass.TestSample.newBuilder();
                personBuilder.setName("Alice");
                personBuilder.setId(cnt);
                personBuilder.setEmail("alice@example.com");
                TestSampleOuterClass.TestSample person = personBuilder.build();

                // Bug fix: byte[].toString() yields "[B@<hash>" (array identity),
                // not the serialized bytes. Base64-encode so the payload survives
                // the String round-trip and is recoverable by consumers.
                String data = Base64.getEncoder().encodeToString(person.toByteArray());

                ctx.collect(data);
                // Emit one record per second; adjust the interval as needed.
                TimeUnit.SECONDS.sleep(1);
            }
        }

        @Override
        public void cancel() {
            isRunning = false;
        }
    }
}
