package com.taimanetworks.kafka;


import com.taimanetworks.kafka.parseTsp.GpsData;
import com.taimanetworks.kafka.parseTsp.Sourcedata;
import com.taimanetworks.kafka.parseTsp.StructureBeanUtil;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.redis.RedisSink;
import org.apache.flink.streaming.connectors.redis.common.config.FlinkJedisPoolConfig;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.text.SimpleDateFormat;
import java.util.Objects;
import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;

/**
 * Flink streaming job that consumes raw records from Kafka, parses each one into a
 * {@link GpsData} bean, and writes the successfully parsed records to Redis via
 * {@code RedisSinkFunction}. Source/checkpoint wiring is inherited from
 * {@link Kafka2TableApp#initAndStart}.
 */
public class Kafka2Redis extends Kafka2TableApp {
    private static final String HADOOP_USER_NAME = "hadoop";
    private static final String CHECKPOINT_NAME = "kafka2hive";
    private static final String KAFKA_SERVERS = "172.22.2.80:9092,172.22.2.81:9092,172.22.2.82:9092";
    private static final String KAFKA_TOPICS = "gac";
    private static final String KAFKA_GROUP_ID = "kafka2redis";
    private static final String CHECKPOINT_STORAGE = "hdfs://myha01/flink/redispro/checkpoint/";

    // Alphabet used by getRandomString: 26 lower + 26 upper + 10 digits = 62 chars.
    private static final String RANDOM_ALPHABET =
            "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";

    /**
     * Callback invoked by the base class once the Kafka source stream and execution
     * environment are initialized; delegates to the Redis sink pipeline.
     *
     * @param tableEnv     table environment created by the base class (not used by this pipeline)
     * @param sourceStream raw string records consumed from Kafka
     * @param env          streaming environment used to launch the job
     * @throws Exception if the Flink job fails to start
     */
    @Override
    public void handler(StreamTableEnvironment tableEnv, DataStreamSource<String> sourceStream, StreamExecutionEnvironment env) throws Exception {
        // Fixed copy-pasted log text: this job reads from Kafka and writes to Redis,
        // not "from rocketmq ... sink to clickhouse".
        System.out.println("from kafka... sink to redis...");
        sink2Redis(tableEnv, sourceStream, env);
    }

    /**
     * Parses each raw record into a {@link GpsData} bean, drops records that fail to
     * parse, and sinks the rest to Redis. (Renamed from the misleading
     * {@code sink2clickhouse} — the method is private, so callers are unaffected.)
     */
    private void sink2Redis(StreamTableEnvironment tableEnv, DataStreamSource<String> sourceStream, StreamExecutionEnvironment env) throws Exception {
        System.out.println("save2redis...");

        System.out.println("解析字段,封装样例类...");
        // Parse fields and wrap them in the bean; getArea presumably returns null for
        // unparsable records — TODO(review) confirm — so nulls are filtered before sinking.
        SingleOutputStreamOperator<GpsData> resultBean = sourceStream
                .map(StructureBeanUtil::getArea)
                .filter(Objects::nonNull);

        // Removed a commented-out FlinkJedisPoolConfig block that embedded a plaintext
        // Redis password in source control. Connection configuration is expected to
        // live inside RedisSinkFunction — NOTE(review): confirm host/port/auth there.
        resultBean.addSink(new RedisSinkFunction());

        System.out.println("数据插入redis。。。");
        env.execute("redis sink");
    }

    /**
     * CLI entry point. Every setting can be overridden with a {@code --key value}
     * argument; otherwise the class-level defaults apply.
     */
    public static void main(String[] args) throws Exception {
        Kafka2Redis app = new Kafka2Redis();
        ParameterTool parameterTool = ParameterTool.fromArgs(args);
        String servers = parameterTool.get("servers", KAFKA_SERVERS);
        String topic = parameterTool.get("topic", KAFKA_TOPICS);
        String group = parameterTool.get("group", KAFKA_GROUP_ID);
        String checkpointStorage = parameterTool.get("checkpointStorage", CHECKPOINT_STORAGE);
        String checkpointName = parameterTool.get("checkpointName", CHECKPOINT_NAME);
        String hadoopUser = parameterTool.get("hadoopUser", HADOOP_USER_NAME);
        app.initAndStart(servers, topic, group, checkpointStorage, checkpointName, hadoopUser);
    }

    /**
     * Generates a random alphanumeric string.
     *
     * @param length number of characters to produce; {@code length <= 0} yields {@code ""}
     * @return a string of {@code length} random characters drawn from [a-zA-Z0-9]
     */
    public static String getRandomString(int length) {
        // StringBuilder instead of StringBuffer (no synchronization needed) and
        // ThreadLocalRandom instead of allocating a new Random on every call; the
        // bound is derived from the alphabet instead of the magic number 62.
        StringBuilder sb = new StringBuilder(Math.max(length, 0));
        for (int i = 0; i < length; i++) {
            sb.append(RANDOM_ALPHABET.charAt(ThreadLocalRandom.current().nextInt(RANDOM_ALPHABET.length())));
        }
        return sb.toString();
    }
}
