package com.atguigu.day04;

import com.atguigu.bean.Event;
import com.mysql.jdbc.Driver;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.connector.jdbc.JdbcConnectionOptions;
import org.apache.flink.connector.jdbc.JdbcExecutionOptions;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.connector.jdbc.JdbcStatementBuilder;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;

import java.sql.PreparedStatement;
import java.sql.SQLException;

/**
 * Flink job that reads "user,url,timestamp" CSV lines from a local socket,
 * parses them into {@code Event} records, and writes (user, url) pairs to a
 * MySQL table named {@code clicks} through the Flink JDBC sink.
 *
 * <p>Run {@code nc -lk 9999} before starting the job to provide input.
 */
public class Flink03_Sink_Mysql {
    public static void main(String[] args) throws Exception {
        // 1. Set up the stream execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Single parallelism keeps output ordering simple for the demo.
        env.setParallelism(1);

        // 2. Read raw text lines from a socket. (For a bounded test source,
        //    env.fromElements(new Event(...), ...) works as a drop-in replacement.)
        DataStreamSource<String> streamSource = env.socketTextStream("localhost", 9999);

        // Parse each CSV line into an Event. Validate the field count first so a
        // malformed line fails with a descriptive message instead of killing the
        // job with an opaque ArrayIndexOutOfBoundsException.
        SingleOutputStreamOperator<Event> map = streamSource.map(new MapFunction<String, Event>() {
            @Override
            public Event map(String value) throws Exception {
                String[] split = value.split(",");
                if (split.length < 3) {
                    throw new IllegalArgumentException(
                            "Expected input of the form 'user,url,timestamp' but got: " + value);
                }
                return new Event(split[0], split[1], Long.parseLong(split[2]));
            }
        });

        // 3. Write the events to MySQL via the JDBC sink.
        //    NOTE(review): credentials are hard-coded for classroom use only —
        //    in production load them from configuration or a secrets manager.
        SinkFunction<Event> sink = JdbcSink.sink(
                "insert into clicks values(?,?)",
                new JdbcStatementBuilder<Event>() {
                    @Override
                    public void accept(PreparedStatement preparedStatement, Event event) throws SQLException {
                        // Bind positional parameters in table-column order.
                        preparedStatement.setString(1, event.user);
                        preparedStatement.setString(2, event.url);
                    }
                },
                JdbcExecutionOptions.builder()
                        // Flush after every single record. A bounded stream flushes any
                        // remainder when the job finishes, but an unbounded stream only
                        // writes once the batch fills up. Batch size 1 is purely for
                        // demo convenience — never use it in production, as it greatly
                        // increases I/O pressure on the database.
                        .withBatchSize(1)
                        .build(),
                new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
                        .withUrl("jdbc:mysql://hadoop102:3306/test")
                        .withUsername("root")
                        .withPassword("123456")
                        .withDriverName(Driver.class.getName())
                        .build()
        );
        map.addSink(sink);

        // Submit the job; blocks until the (unbounded) job is cancelled or fails.
        env.execute();
    }
}
