package com.zhengbo.flink.demo.mq.cardata;

import com.zhengbo.flink.demo.base.kafka.KafkaConfig;
import com.zhengbo.flink.demo.mq.bean.CarStatus;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;

import java.util.Arrays;
import java.util.List;

/**
 * Entry point for the car-data streaming job (analogous to a Storm topology):
 * consumes raw car-status messages from Kafka, parses them with
 * {@link CarDataMapFunction}, partitions the stream by driver number, and
 * writes results out through {@link CarDataSinkFunction}.
 *
 * @author zhengbo
 * @date 2019/8/15
 */
public class CarDataRecieve {

    /** Default Kafka broker list; overridable via {@code args[0]}. */
    private static final String DEFAULT_BROKER_LIST =
            "10.0.10.21:15386,10.0.10.22:15386,10.0.10.23:15386";

    /** Default Kafka consumer group id; overridable via {@code args[1]}. */
    private static final String DEFAULT_GROUP_ID = "flink_test_group";

    /** Default Kafka topic to consume; overridable via {@code args[2]}. */
    private static final String DEFAULT_TOPIC = "NEU_CAR";

    /**
     * Builds and runs the streaming topology.
     *
     * @param args optional overrides: {@code args[0]} = broker list,
     *             {@code args[1]} = consumer group id, {@code args[2]} = topic.
     *             Missing arguments fall back to the hard-coded defaults, so
     *             running with no arguments behaves exactly as before.
     * @throws Exception if job construction or execution fails
     */
    public static void main(String[] args) throws Exception {

        String brokerList = args.length > 0 ? args[0] : DEFAULT_BROKER_LIST;
        String groupId = args.length > 1 ? args[1] : DEFAULT_GROUP_ID;
        List<String> topicList = Arrays.asList(args.length > 2 ? args[2] : DEFAULT_TOPIC);

        final StreamExecutionEnvironment executionEnvironment =
                StreamExecutionEnvironment.getExecutionEnvironment();
        executionEnvironment.setParallelism(4);

        // Kafka message source; start from the committed group offsets so a
        // restarted job resumes where the previous run left off.
        FlinkKafkaConsumer011<String> msgDataSource = new FlinkKafkaConsumer011<>(topicList,
                new SimpleStringSchema(),
                KafkaConfig.kafkaConsumerProperties(brokerList, groupId));
        msgDataSource.setStartFromGroupOffsets();

        // Attach the source to the execution environment.
        DataStreamSource<String> dataStreamSource = executionEnvironment.addSource(msgDataSource);

        dataStreamSource
                .flatMap(new CarDataMapFunction())
                // Partition by driver number so all events for one driver are
                // handled by the same sink subtask.
                .keyBy((KeySelector<CarStatus, Object>) CarStatus::getDriverNo)
                .addSink(new CarDataSinkFunction()).setParallelism(10);

        executionEnvironment.execute("CarDataRecieve job");
    }
}
