package com.zhang;

import com.streamxhub.streamx.flink.connector.function.TransformFunction;
import com.streamxhub.streamx.flink.connector.jdbc.sink.JdbcJavaSink;
import com.streamxhub.streamx.flink.connector.kafka.bean.KafkaRecord;
import com.streamxhub.streamx.flink.connector.kafka.source.KafkaJavaSource;
import com.streamxhub.streamx.flink.core.StreamEnvConfig;
import com.streamxhub.streamx.flink.core.scala.StreamingContext;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;

/**
 * StreamX/Flink job: consumes string records from Kafka, parses each
 * comma-separated value into a {@code User}, prints the stream, and writes
 * each record through a JDBC sink using the SQL produced by {@code User#toSql()}.
 *
 * @author zhangyifan
 * @since 2022/6/25
 */
public class StreamXKafkaToMysql {

    /**
     * Job entry point: wires Kafka source -> parse map -> print -> JDBC sink,
     * then starts the StreamX streaming context.
     *
     * @param args command-line arguments forwarded to {@link StreamEnvConfig}
     */
    public static void main(String[] args) {
        // Build the streaming context from the command-line configuration.
        StreamEnvConfig envConfig = new StreamEnvConfig(args, null);
        StreamingContext context = new StreamingContext(envConfig);

        // Consume Kafka records and parse each value into a User.
        // NOTE(review): assumes each value has exactly three comma-separated
        // fields (int, String, double) — a malformed record will throw and
        // fail the task; confirm the upstream message schema.
        SingleOutputStreamOperator<User> userStream =
                new KafkaJavaSource<String>(context)
                        .getDataStream()
                        .map(new MapFunction<KafkaRecord<String>, User>() {
                            @Override
                            public User map(KafkaRecord<String> record) throws Exception {
                                String[] parts = record.value().split(",");
                                int id = Integer.parseInt(parts[0]);
                                String name = parts[1];
                                double score = Double.parseDouble(parts[2]);
                                return new User(id, name, score);
                            }
                        });

        // Print parsed records for debugging/inspection.
        userStream.print();

        // Write each User through the JDBC sink; the statement text is
        // produced by User#toSql() (target presumably MySQL, per class name).
        new JdbcJavaSink<User>(context)
                .sql(new TransformFunction<User, String>() {
                    @Override
                    public String transform(User user) {
                        return user.toSql();
                    }
                })
                .sink(userStream);

        // Launch the job.
        context.start();
    }
}
