package com.atuigu.streampark;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.streampark.flink.connector.kafka.bean.KafkaRecord;
import org.apache.streampark.flink.connector.kafka.source.KafkaJavaSource;
import org.apache.streampark.flink.core.StreamEnvConfig;
import org.apache.streampark.flink.core.StreamEnvConfigFunction;
import org.apache.streampark.flink.core.scala.StreamingContext;

/**
 * Minimal StreamPark demo job: consumes records from Kafka via
 * {@code KafkaJavaSource}, extracts each record's string payload,
 * and prints it to stdout.
 *
 * @author lzc
 * @since 2023/2/28
 */
public class KafkaDemo {
    public static void main(String[] args) {
        // Environment configuration. The callback lets callers tune the
        // StreamExecutionEnvironment; this demo needs no extra tuning, so the
        // lambda body is intentionally empty.
        StreamEnvConfig javaConfig = new StreamEnvConfig(args, (env, parameterTool) -> {
            // no additional environment configuration required
        });

        // StreamingContext is StreamPark's core entry point wrapping Flink's
        // execution environment.
        StreamingContext ctx = new StreamingContext(javaConfig);

        SingleOutputStreamOperator<String> stream = new KafkaJavaSource<String>(ctx)
            .getDataStream()
            // NOTE: kept as an anonymous class rather than a lambda on purpose —
            // Flink's type extraction relies on the reified generic parameters of
            // MapFunction; a lambda here can fail with a type-erasure error
            // unless a .returns(...) hint is added.
            .map(new MapFunction<KafkaRecord<String>, String>() {
                @Override
                public String map(KafkaRecord<String> record) throws Exception {
                    return record.value();
                }
            });

        stream.print();

        // Submit and run the job.
        ctx.start();
    }
}
