package com.yc.bigdata.flink.demo;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

/**
 * <p></p>
 *
 * @author: YuanChilde
 * @date: 2020-02-10 9:33
 * @version: 1.0
 * Modification History:
 * Date    Author      Version     Description
 * -----------------------------------------------------------------
 * 2020-02-10 9:33    YuanChilde     1.0        Initial creation
 */
/**
 * Demo job: consumes string records from a Kafka topic (with event-time
 * watermarks via {@code MessageWaterEmitter}) and is intended to eventually
 * write results to Redis (sink still disabled, see TODO below).
 */
public class DataStreamToRedis {

    /**
     * Entry point. Expects {@code --input-topic} and {@code --bootstrap.servers}
     * on the command line; prints usage and exits when either is missing.
     *
     * @param args Flink {@link ParameterTool}-style arguments
     * @throws Exception propagated from {@code env.execute(...)}
     */
    public static void main(String[] args) throws Exception {

        // BUG FIX: the original parsed a hard-coded debug array
        // {"--input-topic", ""} instead of the real command-line arguments.
        final ParameterTool params = ParameterTool.fromArgs(args);
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Both Kafka parameters are mandatory; without them the job cannot
        // connect, so print usage and stop (the original printed a duplicated
        // hint line and fell through).
        if (!params.has("bootstrap.servers") || !params.has("input-topic")) {
            System.out.println("Use --input-topic to specify the Kafka source topic.");
            System.out.println("Use --bootstrap.servers to specify the Kafka broker list.");
            return;
        }

        params.getProperties().put("group.id", "flink-group");
        env.getConfig().setGlobalJobParameters(params);
        // Checkpoint every 5 s so Kafka offsets are committed consistently.
        env.enableCheckpointing(5000);
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        // Typed consumer (the original declared a raw FlinkKafkaConsumer).
        FlinkKafkaConsumer<String> consumer = new FlinkKafkaConsumer<>(
                params.get("input-topic"), new SimpleStringSchema(), params.getProperties());
        consumer.assignTimestampsAndWatermarks(new MessageWaterEmitter());

        // Attach the source so the topology is non-empty; without this (and
        // execute() below) the original program built nothing and exited.
        env.addSource(consumer);

        // TODO: re-enable the Redis sink once the flink-connector-redis
        // dependency is available:
/*        FlinkJedisPoolConfig conf = new FlinkJedisPoolConfig.Builder().setHost("127.0.0.1").setPort(6379).setPassword("kMn9tdlTnB8dQsMw").build();
        counts.addSink(new RedisSink<Tuple2<String, Integer>>(conf,new RedisExampleMapper()));*/

        // BUG FIX: execute() was missing, so the job was never submitted.
        env.execute("DataStreamToRedis");
    }

/*    public static class RedisExampleMapper implements RedisMapper<Tuple2<String, String>>{

        @Override
        public RedisCommandDescription getCommandDescription() {
            return new RedisCommandDescription(RedisCommand.HSET, "HASH_NAME");
        }

        @Override
        public String getKeyFromData(Tuple2<String, String> data) {
            return data.f0;
        }

        @Override
        public String getValueFromData(Tuple2<String, String> data) {
            return data.f1;
        }
    }*/

}
