package com.iot.app.flink;

import com.iot.app.flink.Source.RichSource;
import com.iot.app.flink.sink.RichSink;
import com.iot.app.flink.vo.CassandraProp;
import com.iot.app.flink.vo.KafkaProp;
import com.iot.app.flink.processor.IoTDataProcessor;
import com.iot.app.flink.util.PropertyFileReader;
import com.iot.app.flink.vo.RedisProp;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.runtime.state.memory.MemoryStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.log4j.Logger;

import java.util.Properties;

/**
 * This class IotTrafficMonitoringApp for processing the IoT data.
 *
 * @author Ryo Yang
 *
 */
public class IotTrafficMonitoringApp {

    private static final Logger logger = Logger.getLogger(IotTrafficMonitoringApp.class);

    /**
     * Reads the Kafka consumer settings from the application property file and
     * bundles them into a {@link KafkaProp} value object.
     *
     * @return Kafka configuration: parallelism (partition count), topic, group id,
     *         auto-commit flag, offset-reset policy, and bootstrap servers
     * @throws Exception if the property file cannot be read or the partition
     *         count is not a valid integer
     */
    private static KafkaProp buildKafkaConfiguration() throws Exception {

        Properties properties = PropertyFileReader.readPropertyFile();

        // Integer.parseInt avoids the deprecated new Integer(String) constructor
        // and the unnecessary boxing it implies.
        int parallel = Integer.parseInt(properties.getProperty("com.iot.app.kafka.partition"));
        String topic = properties.getProperty("com.iot.app.kafka.topic");
        String groupId = properties.getProperty("com.iot.app.kafka.group.id");
        String commit = properties.getProperty("com.iot.app.kafka.enable.auto.commit");
        String reset = properties.getProperty("com.iot.app.kafka.auto.offset.reset");
        String bootstrapServers = properties.getProperty("com.iot.app.kafka.bootstrap.servers");

        return new KafkaProp(parallel, topic, groupId, commit, reset, bootstrapServers);
    }

    /**
     * Builds and configures the Flink {@link StreamExecutionEnvironment}:
     * state backend (in-memory for debug, filesystem otherwise), checkpointing,
     * parallelism, event-time semantics, and the Cassandra sink / Redis source
     * configuration used by the job.
     *
     * @return a fully configured streaming execution environment
     * @throws Exception if the property file cannot be read or a property value
     *         is malformed
     */
    private static StreamExecutionEnvironment buildExecutionContext() throws Exception {

        Properties properties = PropertyFileReader.readPropertyFile();

        StreamExecutionEnvironment env;

        // Constant-first equals() is null-safe: a missing
        // "com.iot.app.flink.Configuration" property no longer throws NPE and
        // simply falls through to the non-debug (cluster) branch.
        if ("debug".equals(properties.getProperty("com.iot.app.flink.Configuration"))) {
            env = StreamExecutionEnvironment.createLocalEnvironment();
            // Debug mode: keep checkpoint state in memory (no async snapshots).
            env.setStateBackend(new MemoryStateBackend(false));
        }
        else {
            env = StreamExecutionEnvironment.getExecutionEnvironment();
            // Cluster mode: persist checkpoints to the configured filesystem directory.
            env.setStateBackend(new FsStateBackend(properties.getProperty("com.iot.app.flink.checkpoint.dir")));
        }

        int parallel = Integer.parseInt(properties.getProperty("com.iot.app.kafka.partition"));

        // TODO(review): consider switching the state backend to RocksDB so that
        // checkpoint state is stored in RocksDB (translated from original note).
        env.enableCheckpointing(10000);
        env.setParallelism(parallel);
        env.getCheckpointConfig().setCheckpointTimeout(30000);
        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(50000);
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(1);
        // Keep externalized checkpoints when the job is cancelled so it can be
        // restarted from the last checkpoint.
        env.getCheckpointConfig().enableExternalizedCheckpoints(
                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);

        // Use event time for windowing and watermarking.
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
        // Emit watermarks every 1000 ms.
        env.getConfig().setAutoWatermarkInterval(1000);

        // Initialize the Cassandra sink configuration.
        RichSink.initSink(new CassandraProp(properties.getProperty("com.iot.app.cassandra.host"),
                                            properties.getProperty("com.iot.app.cassandra.port")));

        // Initialize the Redis source configuration.
        RichSource.initSource(new RedisProp(properties.getProperty("com.iot.app.redis.host"),
                                            Integer.valueOf(properties.getProperty("com.iot.app.redis.port")),
                                            Integer.valueOf(properties.getProperty("com.iot.app.redis.interval_ms"))));

        return env;
    }

    /**
     * Application entry point: wires the execution environment and Kafka
     * configuration into the IoT data processor and starts the job.
     *
     * @param args ignored
     */
    public static void main(String[] args) {
        try {
            IoTDataProcessor.process(buildExecutionContext(), buildKafkaConfiguration());
        } catch (Exception e) {
            // Log a message alongside the throwable so the full stack trace is
            // preserved (logger.error(e) alone loses context).
            logger.error("IoT traffic monitoring job failed to start", e);
        }
    }

}
