package com.qf.flinketl.job;

import com.qf.flinketl.mapper.MessageRow;
import com.qf.flinketl.mapper.UserMapper;
import com.qf.flinketl.sink.CustomSink;
import com.qf.flinketl.source.KafkaCommonSource;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * @Description: @Author: KyrieFc @Date: 2023/10/22
 */
/**
 * Flink ETL job entry point: consumes records from Kafka via {@link KafkaCommonSource},
 * flat-maps them into {@link MessageRow}s with {@link UserMapper}, and writes the results
 * out through {@link CustomSink} (named "redis-sink").
 *
 * @author KyrieFc
 * @since 2023/10/22
 */
public class FlinkJob {

  /** Parallelism of the Kafka source operator. */
  private static final int SOURCE_PARALLELISM = 16;

  /** Parallelism of the mapper and sink operators. */
  private static final int PIPELINE_PARALLELISM = 100;

  /**
   * Builds and submits the streaming pipeline.
   *
   * <p>Declares {@code throws Exception} so that a failed {@code env.execute} propagates and
   * the process exits non-zero — the previous version caught the exception and only called
   * {@code printStackTrace()}, silently swallowing job failures.
   *
   * @param args unused command-line arguments
   * @throws Exception if the Flink job fails to submit or execute
   */
  public static void main(String[] args) throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    DataStream<MessageRow> sourceStream =
        env.addSource(new KafkaCommonSource())
            .name("kafka-source")
            .setParallelism(SOURCE_PARALLELISM)
            .flatMap(new UserMapper())
            .name("user-mapper")
            .setParallelism(PIPELINE_PARALLELISM);

    sourceStream.addSink(new CustomSink()).setParallelism(PIPELINE_PARALLELISM).name("redis-sink");

    env.execute("flink-etl");
  }
}
