package com.xqianli.bigdata.project;

import com.xqianli.bigdata.flink.utils.SensorReading;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.time.LocalDate;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.Random;
import java.util.concurrent.TimeUnit;

/**
 * Flink job that generates simulated sensor temperature readings and fans them
 * out to three sinks: a Kafka topic ({@code sensor}), a daily-rotated log file
 * under {@code /home/max/Logs/flink/flink-producer/}, and stdout.
 *
 * <p>Each emitted record is a CSV line: {@code sensorId,timestampMillis,temperature}.
 */
public class DataProducer {
    public static void main(String[] args) throws Exception {
        // Create the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Enable periodic checkpoints every 10 s, persisted to the local filesystem.
        env.enableCheckpointing(TimeUnit.SECONDS.toMillis(10));
        env.setStateBackend(new FsStateBackend("file:///home/max/Logs/flink"));
        // Parallelism 1 keeps per-sensor record order deterministic.
        env.setParallelism(1);

        // Generate the readings from the custom source below.
        DataStream<String> dataStream = env.addSource(new SensorProducer());

        // Kafka PRODUCER configuration. The original code set consumer-only
        // options (group.id, key/value *deserializers*, auto.offset.reset)
        // that a producer never reads — and then never passed the Properties
        // to the producer at all. Only the broker address is required here.
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "localhost:9092");

        // Sink 1: publish every reading to the "sensor" Kafka topic,
        // this time actually wiring in the producer configuration.
        dataStream.addSink(new FlinkKafkaProducer<String>("sensor", new SimpleStringSchema(), properties));
        // Sink 2: append every reading to a daily log file.
        dataStream.addSink(new SensorSink());
        // Sink 3: echo to stdout for debugging.
        dataStream.print();

        env.execute();
    }

    /**
     * Custom source emitting one CSV reading per sensor per round,
     * with a random 300–800 ms pause between readings.
     */
    public static class SensorProducer implements SourceFunction<String> {
        // Cancellation flag; volatile because cancel() runs on a different thread.
        private volatile boolean running = true;

        @Override
        public void run(SourceContext<String> sourceContext) throws Exception {
            Random random = new Random();

            // Seed 30 sensors with an initial temperature around 60, sd 20.
            // (The original comment claimed 10 sensors; the loop creates 30.)
            HashMap<String, Double> sensorTempMap = new HashMap<>();
            for (int i = 0; i < 30; ++i) {
                sensorTempMap.put("sensor_" + (i + 1), 60 + random.nextGaussian() * 20);
            }

            while (running) {
                // Iterate entries to avoid a second map lookup per sensor;
                // setValue() is not a structural modification, so this is safe.
                for (Map.Entry<String, Double> entry : sensorTempMap.entrySet()) {
                    // Random-walk the temperature around its previous value.
                    double newTemp = entry.getValue() + random.nextGaussian();
                    entry.setValue(newTemp);
                    sourceContext.collect(entry.getKey() + "," + System.currentTimeMillis() + "," + newTemp);
                    try {
                        Thread.sleep(300 + random.nextInt(500));
                    } catch (InterruptedException ie) {
                        // Restore the interrupt status and stop emitting.
                        Thread.currentThread().interrupt();
                        return;
                    }
                }
            }
        }

        @Override
        public void cancel() {
            this.running = false;
        }
    }

    /**
     * Best-effort file sink: appends each record to a per-day log file named
     * {@code yyyy-MM-dd.log}. I/O failures are logged and swallowed so a bad
     * append does not fail the whole job.
     */
    public static class SensorSink extends RichSinkFunction<String> {
        @Override
        public void invoke(String value, Context context) throws Exception {
            // LocalDate.toString() is ISO yyyy-MM-dd — identical to the old
            // SimpleDateFormat pattern, without the legacy java.util.Date API.
            File file = new File("/home/max/Logs/flink/flink-producer/" + LocalDate.now() + ".log");

            // Create the target directory on first use; the original called
            // createNewFile() which throws if the directory is missing.
            // FileWriter creates the file itself, so no explicit create needed.
            File dir = file.getParentFile();
            if (dir != null && !dir.exists()) {
                dir.mkdirs();
            }

            // try-with-resources guarantees the writer is closed even when
            // write() throws — the original leaked the FileWriter on failure.
            try (BufferedWriter bw = new BufferedWriter(new FileWriter(file, true))) {
                bw.write(value);
                bw.newLine();
            } catch (IOException e) {
                // Deliberately best-effort; keep the job alive on append failure.
                // TODO(review): route through SLF4J instead of printStackTrace.
                e.printStackTrace();
            }
        }
    }
}
