package com.bigdata.iotdb;

import org.apache.flink.shaded.guava18.com.google.common.collect.Lists;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.ParallelSourceFunction;
import org.apache.iotdb.flink.DefaultIoTSerializationSchema;
import org.apache.iotdb.flink.IoTDBOptions;
import org.apache.iotdb.flink.IoTDBSink;
import org.apache.iotdb.flink.IoTSerializationSchema;
import org.apache.iotdb.tsfile.file.metadata.enums.CompressionType;
import org.apache.iotdb.tsfile.file.metadata.enums.TSDataType;
import org.apache.iotdb.tsfile.file.metadata.enums.TSEncoding;

import java.util.HashMap;
import java.util.Map;
import java.util.Random;

/**
 * 利用flink框架 sink数据到iotdb
 */
/**
 * Flink streaming job that continuously generates random sensor readings and
 * sinks them into Apache IoTDB via the IoTDB Flink connector.
 *
 * <p>Expects the IoTDB host as the first program argument; connects on port 6667
 * with user/password {@code root/root} and writes into storage group
 * {@code root.wf} (time series {@code root.wf.d1.s1} DOUBLE and
 * {@code root.wf.d1.s2} INT64).
 */
public class SinkToIoTDB {

    public static void main(String[] args) throws Exception {
        // Fail fast with a clear message instead of an ArrayIndexOutOfBoundsException.
        if (args.length < 1) {
            throw new IllegalArgumentException("Usage: SinkToIoTDB <iotdb-host>");
        }
        String host = args[0];

        // Stream-processing execution environment.
        StreamExecutionEnvironment senv = StreamExecutionEnvironment.getExecutionEnvironment();

        IoTDBOptions options = new IoTDBOptions();
        options.setHost(host);
        options.setPort(6667);
        options.setUser("root");
        options.setPassword("root");
        options.setStorageGroup("root.wf");

        // Declare the two target time series: s1 (DOUBLE) and s2 (INT64),
        // both RLE-encoded and SNAPPY-compressed.
        options.setTimeseriesOptionList(Lists.newArrayList(
                new IoTDBOptions.TimeseriesOption("root.wf.d1.s1", TSDataType.DOUBLE,
                        TSEncoding.RLE, CompressionType.SNAPPY),
                new IoTDBOptions.TimeseriesOption("root.wf.d1.s2", TSDataType.INT64,
                        TSEncoding.RLE, CompressionType.SNAPPY)));

        IoTSerializationSchema serializationSchema = new DefaultIoTSerializationSchema();
        IoTDBSink ioTDBSink = new IoTDBSink(options, serializationSchema);
        // enable batching
        //.withBatchSize(10);

        senv.addSource(new IoTDBRandomSource())
                .name("iotdb-source")
                .setParallelism(1)
                .addSink(ioTDBSink)
                .name("iotdb-sink")
                .setParallelism(1);

        senv.execute("SinkToIoTDB");
    }

    /**
     * Source that emits one random DOUBLE reading (s1) and one random INT64
     * reading (s2) for device {@code root.wf.d1} every second, as the
     * string-keyed maps expected by {@link DefaultIoTSerializationSchema}.
     */
    private static class IoTDBRandomSource implements ParallelSourceFunction<Map<String, String>> {

        // volatile: cancel() is invoked from a different thread than run(),
        // so the flag must be visible across threads for cancellation to work.
        private volatile boolean isRunning = true;

        @Override
        public void run(SourceContext<Map<String, String>> ctx) throws Exception {
            Random random = new Random();
            while (isRunning) {
                ctx.collect(reading("s1", "DOUBLE", String.valueOf(random.nextDouble())));
                ctx.collect(reading("s2", "INT64", String.valueOf(random.nextLong())));
                Thread.sleep(1000);
            }
        }

        @Override
        public void cancel() {
            isRunning = false;
        }

        /** Builds one record map for device root.wf.d1 with the given measurement, type, and value. */
        private static Map<String, String> reading(String measurement, String type, String value) {
            Map<String, String> record = new HashMap<>();
            record.put("device", "root.wf.d1");
            record.put("timestamp", String.valueOf(System.currentTimeMillis()));
            record.put("measurements", measurement);
            record.put("types", type);
            record.put("values", value);
            return record;
        }
    }
}
