package com.zhang;

import com.streamxhub.streamx.flink.connector.kafka.bean.KafkaRecord;
import com.streamxhub.streamx.flink.connector.kafka.source.KafkaJavaSource;
import com.streamxhub.streamx.flink.core.StreamEnvConfig;
import com.streamxhub.streamx.flink.core.scala.StreamingContext;
import org.apache.flink.api.common.functions.MapFunction;

/**
 * Demo: consumes string records from a Kafka topic via StreamX's
 * {@code KafkaJavaSource}, extracts each record's value, and prints it.
 *
 * @author zhangyifan
 * @since 2022/6/25
 */
public class Demo02 {

    public static void main(String[] args) {
        // Build the StreamX environment config from the CLI arguments
        // (no extra parallelism/config customizer, hence the null).
        StreamEnvConfig envConfig = new StreamEnvConfig(args, null);
        StreamingContext context = new StreamingContext(envConfig);

        // Unwraps each consumed Kafka record down to its string payload.
        // NOTE: an anonymous class (not a lambda) is used on purpose so Flink
        // can recover the generic output type despite erasure.
        MapFunction<KafkaRecord<String>, String> extractValue =
                new MapFunction<KafkaRecord<String>, String>() {
                    @Override
                    public String map(KafkaRecord<String> record) throws Exception {
                        return record.value();
                    }
                };

        // Source -> map -> print pipeline; the source runs single-parallel.
        new KafkaJavaSource<String>(context)
                .getDataStream()
                .setParallelism(1)
                .map(extractValue)
                .print();

        // Submit and run the Flink job.
        context.start();
    }
}
