package com.atguigu.flink;

import com.streamxhub.streamx.flink.core.StreamEnvConfig;
import com.streamxhub.streamx.flink.core.java.function.SQLFromFunction;
import com.streamxhub.streamx.flink.core.java.sink.JdbcSink;
import com.streamxhub.streamx.flink.core.java.source.KafkaSource;
import com.streamxhub.streamx.flink.core.scala.StreamingContext;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;

/**
 * @Author lzc
 * @Date 2022/3/30 15:08
 */
/**
 * Reads comma-separated sensor records from Kafka (alias {@code k1}),
 * parses them into {@link WaterSensor} beans, and writes each bean into
 * the MySQL table {@code sensor} via the StreamX {@code JdbcSink}.
 *
 * <p>Connection details (Kafka brokers/topic, JDBC URL, credentials) come
 * from the {@code application.yml} referenced by the program arguments.
 */
public class StreamXJdbc {
    public static void main(String[] args) {
        // args carry the location of application.yml for the StreamX environment.
        StreamEnvConfig config = new StreamEnvConfig(args, null);
        StreamingContext ctx = new StreamingContext(config);

        // Read from Kafka and map each "id,ts,vc" record into a WaterSensor bean.
        // NOTE(review): record.value() can be null for Kafka tombstone records and
        // would NPE here — confirm upstream guarantees non-null values.
        SingleOutputStreamOperator<WaterSensor> stream = new KafkaSource<String>(ctx)
            .alias("k1")
            .getDataStream()
            .map(record -> {
                String[] data = record.value().split(",");
                return new WaterSensor(data[0], Long.valueOf(data[1]), Integer.valueOf(data[2]));
            });

        // Sink every bean into MySQL by generating a per-record INSERT statement.
        new JdbcSink<WaterSensor>(ctx)
            .sql(new SQLFromFunction<WaterSensor>() {
                @Override
                public String from(WaterSensor bean) {
                    // Build: insert into sensor(id, ts, vc) values('<id>', <ts>, <vc>)
                    // The id is embedded in a quoted SQL literal, so single quotes
                    // must be doubled — otherwise an id containing ' yields broken
                    // SQL (and is an injection vector, as the data comes from Kafka).
                    String safeId = bean.getId().replace("'", "''");
                    return String
                        .format("insert into sensor(id, ts, vc)values('%s',%d,%d)",
                                safeId, bean.getTs(), bean.getVc()
                        );
                }
            })
            .sink(stream);

        // Launch the streaming job.
        ctx.start();
    }
}
