package com.uptown.matrix.stream;

import java.util.List;
import java.util.Map;

import com.alibaba.fastjson.JSONObject;
import com.uptown.matrix.common.task.JobMeta;
import com.uptown.matrix.common.task.KafkaReaderMeta;
import com.uptown.matrix.stream.builder.RedisTaskBuilder;
import com.uptown.matrix.stream.builder.kafkaTaskBuilder;
import com.uptown.matrix.stream.sink.MatrixStreamSink;
import io.lettuce.core.RedisClient;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.redis.RedisSink;
import org.apache.flink.util.Collector;

/**
 * Flink application entry point.
 *
 * Parses the startup argument: {@code java -jar *.jar} followed by the job
 * configuration as a single JSON string, for example:
 *
 * <pre>
 * {
 *   "name": "flink_test",
 *   "reader": {
 *     "topic": "uptown_topic",
 *     "boot_server": "192.168.1.51:9092",
 *     "group_id": "uptown_group"
 *   },
 *   "writer": {
 *     "host": "192.168.1.167",
 *     "key": "uptown",
 *     "port": "6679",
 *     "password": "h1iz6i2dp4redis"
 *   }
 * }
 * </pre>
 */
@Slf4j
public class StreamServerApp {

    /**
     * Entry point: parses the job configuration passed as the first program
     * argument, wires a Kafka source through a JSON-deserializing flatMap to a
     * Redis sink, and submits the Flink job.
     *
     * @param args {@code args[0]} must be the JSON job configuration
     *             (see the class Javadoc for the expected shape)
     * @throws IllegalArgumentException if no configuration argument is supplied
     * @throws Exception                if job construction or execution fails
     */
    public static void main(String[] args) throws Exception {

        // 1. Parse the task configuration from the command line.
        //    Fail fast with a usable message instead of an
        //    ArrayIndexOutOfBoundsException when the argument is missing.
        if (args.length == 0 || args[0] == null || args[0].isBlank()) {
            throw new IllegalArgumentException(
                    "Missing job configuration: pass the JSON job meta as the first program argument");
        }
        JobMeta jobMeta = JSONObject.parseObject(args[0], JobMeta.class);

        // 2. Build the Flink execution environment; enable checkpointing with a
        //    1000 ms interval so source offsets are committed consistently.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.enableCheckpointing(1000);

        // 2.1 Kafka source built from the "reader" section of the job meta.
        //     NOTE(review): kafkaTaskBuilder should be renamed KafkaTaskBuilder
        //     to follow Java class-naming conventions (fix at its declaration).
        KafkaSource<String> kafkaReaderSource = kafkaTaskBuilder.buildKafkaSource(jobMeta.getReader());
        DataStreamSource<String> kafkaSource =
                env.fromSource(kafkaReaderSource, WatermarkStrategy.noWatermarks(), "kafka-reader");

        // 2.2 Deserialize each Kafka record (a JSON string) into a Map.
        //     An anonymous class (rather than a lambda) lets Flink extract the
        //     output type without an explicit returns(...) hint.
        //     NOTE(review): raw Map should become Map<String, Object> once
        //     MatrixStreamSink's declared element type is confirmed.
        SingleOutputStreamOperator<Map> dataStream = kafkaSource.flatMap(new FlatMapFunction<String, Map>() {
            @Override
            public void flatMap(String value, Collector<Map> out) throws Exception {
                // A record that is not valid JSON will fail the job here;
                // assumes upstream guarantees well-formed JSON — TODO confirm.
                out.collect(JSONObject.parseObject(value, Map.class));
            }
        });

        // 2.3 Redis sink built from the "writer" section of the job meta.
        //     The RedisClient is handed to the sink; presumably the sink closes
        //     it on shutdown — verify in MatrixStreamSink.
        RedisClient redisClient = RedisTaskBuilder.buildRedisSink(jobMeta.getWriter());
        dataStream.addSink(new MatrixStreamSink(redisClient)).name("reader-writer");

        // 3. Submit and run the job under the configured name.
        env.execute(jobMeta.getName());
    }
}
