package com.jxstjh.conch.stream;

import com.jxstjh.conch.common.domain.AnchorInfo;
import com.jxstjh.conch.stream.constants.ConchStreamConstant;
import com.jxstjh.conch.stream.function.ConchFlatMapFunction;
import com.jxstjh.conch.stream.sink.ConchSink;
import com.jxstjh.conch.stream.utils.MessageQueueUtil;
import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

@Slf4j
public class ConchStreamApplication {

    /**
     * Entry point: wires a Kafka source through a flat-map transformation into the Conch sink
     * and launches the Flink streaming job.
     *
     * @param args command-line arguments (unused)
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Consume raw messages from Kafka; event-time watermarks are not used by this job.
        KafkaSource<String> consumer = MessageQueueUtil.getKafkaConsumer(
                ConchStreamConstant.TOPIC_NAME,
                ConchStreamConstant.GROUP_ID,
                ConchStreamConstant.BROKER);
        DataStreamSource<String> source =
                env.fromSource(consumer, WatermarkStrategy.noWatermarks(), ConchStreamConstant.SOURCE_NAME);

        // Turn each raw Kafka message into zero or more AnchorInfo records.
        SingleOutputStreamOperator<AnchorInfo> anchorStream =
                source.flatMap(new ConchFlatMapFunction()).name(ConchStreamConstant.FUNCTION_NAME);

        // Real-time multidimensional data goes to Redis; offline data goes to Hive.
        anchorStream.addSink(new ConchSink()).name(ConchStreamConstant.SINK_NAME);

        env.execute(ConchStreamConstant.JOB_NAME);
    }
}