package com.aurora;


import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.UUID;

/**
 * Minimal Flink streaming demo: converts a small in-memory collection into a
 * data stream, appends a random UUID to each record in a flatMap stage, and
 * logs every result in a sink.
 *
 * @author 浅夏的猫
 * @date 22:46 2024/1/13
 */
public class Application {

    private static final Logger logger = LoggerFactory.getLogger(Application.class);

    /**
     * Entry point: builds the streaming topology and submits the job.
     *
     * @param args command-line arguments (unused)
     * @throws Exception if job construction or execution fails
     */
    public static void main(String[] args) throws Exception {

        // 1. Create the environment the Flink program runs in.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // 2. Build a small, bounded data set. Declared as List (program to the
        //    interface) rather than the concrete ArrayList.
        List<String> list = new ArrayList<>();
        list.add("001");
        list.add("002");
        list.add("003");

        // 3. Turn the bounded collection into a source. Collection sources run
        //    with parallelism 1; stated explicitly here for clarity.
        DataStreamSource<String> dataStreamSource = env.fromCollection(list).setParallelism(1);

        // 4. flatMap stage: append a random UUID to each record.
        SingleOutputStreamOperator<String> flatMap = dataStreamSource.flatMap(new FlatMapFunction<String, String>() {
            @Override
            public void flatMap(String record, Collector<String> collector) throws Exception {
                // String concatenation invokes toString() implicitly, so the
                // explicit toString() call on the UUID is unnecessary.
                String uuidRecord = record + UUID.randomUUID();
                // Hand the transformed record to the next operator.
                collector.collect(uuidRecord);
            }
        });

        // 5. Sink: log each processed record. Parallelism 1 keeps the log
        //    output coming from a single task.
        flatMap.addSink(new SinkFunction<String>() {
            @Override
            public void invoke(String value) throws Exception {
                logger.info("当前正在处理的数据:{}",value);
            }
        }).setParallelism(1);

        // 6. Submit the job. An explicit job name makes it identifiable in the
        //    Flink web UI (execute() without a name is backward-equivalent).
        env.execute("uuid-append-demo");
    }
}
