package com.wang.streamx;

import com.streamxhub.streamx.flink.core.StreamEnvConfig;
import com.streamxhub.streamx.flink.core.java.function.SQLFromFunction;
import com.streamxhub.streamx.flink.core.java.function.StreamEnvConfigFunction;
import com.streamxhub.streamx.flink.core.java.sink.JdbcSink;
import com.streamxhub.streamx.flink.core.java.source.KafkaSource;
import com.streamxhub.streamx.flink.core.scala.StreamingContext;
import com.streamxhub.streamx.flink.core.scala.source.KafkaRecord;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;

/**
 * Demo streaming job: consumes CSV records ("id,ts,vc") from the Kafka source
 * aliased "kafka1", parses each into a {@code WaterSensor}, and writes every
 * element to MySQL through a {@code JdbcSink} using the SQL produced by
 * {@code WaterSensor#toSql()}.
 */
public class StreamMySqlSinkDemo {

    public static void main(String[] args) {

        // Build the streaming context straight from the CLI args;
        // no extra environment customization function is supplied.
        StreamingContext context = new StreamingContext(new StreamEnvConfig(args, null));

        // Parses one Kafka record's CSV payload into a WaterSensor.
        // Kept as an anonymous class (not a lambda) so Flink can extract
        // the generic types without an explicit .returns(...) hint.
        MapFunction<KafkaRecord<String>, WaterSensor> toWaterSensor =
                new MapFunction<KafkaRecord<String>, WaterSensor>() {
                    @Override
                    public WaterSensor map(KafkaRecord<String> record) throws Exception {
                        String[] fields = record.value().split(",");
                        return new WaterSensor(fields[0], Long.valueOf(fields[1]), Integer.valueOf(fields[2]));
                    }
                };

        SingleOutputStreamOperator<WaterSensor> sensorStream =
                new KafkaSource<String>(context)
                        .alias("kafka1")
                        .getDataStream()
                        .map(toWaterSensor);

        // Each element renders its own INSERT statement via WaterSensor#toSql().
        new JdbcSink<WaterSensor>(context)
                .sql(new SQLFromFunction<WaterSensor>() {
                    @Override
                    public String from(WaterSensor sensor) {
                        return sensor.toSql();
                    }
                })
                .sink(sensorStream);

        // Launch the job (equivalent to StreamExecutionEnvironment#execute).
        context.start();
    }
}


















