package cn.itcast.flink.sink;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.connector.jdbc.JdbcConnectionOptions;
import org.apache.flink.connector.jdbc.JdbcExecutionOptions;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.connector.jdbc.JdbcStatementBuilder;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

import java.sql.PreparedStatement;
import java.sql.SQLException;

/**
 * Author itcast
 * Date 2022/1/13 11:27
 * Reads lines from a socket source, performs a streaming word count, and writes
 * the running counts to MySQL through the Flink JDBC connector.
 * Target table DDL:
 * CREATE TABLE `t_wordcount` (
 *                                `word` varchar(255) NOT NULL,
 *                                `counts` int(11) DEFAULT '0',
 *                                PRIMARY KEY (`word`)
 * ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
 */
public class SinkJdbcDemo {
    /**
     * Entry point: socket source -> word count -> JDBC upsert sink.
     * Blocks until the streaming job is cancelled.
     *
     * @param args unused
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {
        // Build the streaming environment; parallelism 1 keeps output ordered
        // and sends all records through a single JDBC sink instance.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Raw text lines from a socket (start the feeder with: nc -lk 9999 on node1).
        DataStreamSource<String> source = env.socketTextStream(
                "node1",
                9999
        );

        // Tokenize each line and maintain a running count per word.
        SingleOutputStreamOperator<Tuple2<String, Integer>> result = source.flatMap(
                new FlatMapFunction<String, Tuple2<String, Integer>>() {
                    @Override
                    public void flatMap(String value, Collector<Tuple2<String, Integer>> out) throws Exception {
                        // Split on runs of whitespace and skip empty tokens so that
                        // leading/trailing/consecutive spaces never emit "" as a word
                        // (an empty word would otherwise become a MySQL primary key).
                        String[] words = value.split("\\s+");
                        for (String word : words) {
                            if (!word.isEmpty()) {
                                out.collect(Tuple2.of(word, 1));
                            }
                        }
                    }
                }
        ).keyBy(t -> t.f0)
                .sum(1);
        result.print();

        // Upsert each updated count into MySQL: insert a new row, or overwrite the
        // stored total when the word (primary key) already exists. Overwriting is
        // correct because sum(1) already emits the accumulated count.
        result.addSink(
                JdbcSink.sink(
                        "insert into t_wordcount(word,counts) values (?,?) on duplicate key update counts=?",
                        new JdbcStatementBuilder<Tuple2<String, Integer>>() {
                            @Override
                            public void accept(PreparedStatement ps, Tuple2<String, Integer> v) throws SQLException {
                                ps.setString(1, v.f0); // word (primary key)
                                ps.setInt(2, v.f1);    // counts on insert
                                ps.setInt(3, v.f1);    // counts on duplicate-key update
                            }
                        },
                        new JdbcExecutionOptions.Builder()
                                .withMaxRetries(3)
                                // Batch size 1 flushes every record immediately — fine for a
                                // demo; raise it for throughput in real jobs.
                                .withBatchSize(1)
                                .withBatchIntervalMs(1000)
                                .build(),
                        new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
                                // NOTE(review): "com.mysql.jdbc.Driver" is the Connector/J 5.x
                                // class; 8.x renamed it to "com.mysql.cj.jdbc.Driver" — confirm
                                // which driver jar is on the classpath.
                                .withDriverName("com.mysql.jdbc.Driver")
                                .withUrl("jdbc:mysql://node1:3306/flink?useSSL=false")
                                .withUsername("root")
                                .withPassword("123456")
                                .build()
                )
        );

        // Launch the job (blocks until termination).
        env.execute();
    }
}
