package com.itcast.flink.connectors.jdbc;

import org.apache.flink.connector.jdbc.JdbcConnectionOptions;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.util.ArrayList;
import java.util.List;

/**
 * @program: flink-app
 * @description: 将集合数据写入数据库中
 * @author: zhanghz001
 * @create: 2021-07-23 08:52
 **/
/**
 * Flink job that writes an in-memory collection of tab-separated access-log
 * records into a MySQL table through the Flink JDBC sink connector.
 *
 * <p>Each record has the form {@code ip \t time \t type \t api}, matching the
 * four {@code ?} placeholders of the INSERT statement.
 */
public class ZhzJdbcConnectorsApplication {

    /** Tab-separated fields per record — must equal the number of '?' placeholders in the SQL. */
    private static final int FIELD_COUNT = 4;

    public static void main(String[] args) throws Exception {
        // Create the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Sample data: ip \t timestamp \t event-type \t api-id
        List<String> arrs = new ArrayList<>();
        arrs.add("10.10.20.107\t1603847899847\tview\t10005");
        arrs.add("10.10.20.104\t1603847900847\tview\t10002");

        // Parameterized INSERT: one placeholder per record field.
        String sql = "insert into t_access_log ( ip, time, type, api)\n" +
                "values (?,?,?,?)";

        env.fromCollection(arrs).addSink(JdbcSink.sink(sql,
                (ps, value) ->
                {
                    System.out.println("receive==> " + value);
                    // Split with limit -1 so trailing empty fields are preserved,
                    // then fail fast on malformed records instead of letting the
                    // driver raise a cryptic error about unbound parameters later.
                    String[] arrValue = String.valueOf(value).split("\t", -1);
                    if (arrValue.length != FIELD_COUNT) {
                        throw new IllegalArgumentException(
                                "Expected " + FIELD_COUNT + " tab-separated fields but got "
                                        + arrValue.length + " in record: " + value);
                    }
                    for (int i = 0; i < FIELD_COUNT; i++) {
                        // JDBC parameter indexes are 1-based.
                        ps.setString(i + 1, arrValue[i]);
                    }
                },
                // JDBC connection configuration.
                // NOTE(review): "com.mysql.jdbc.Driver" is the legacy Connector/J 5.x
                // class name; Connector/J 8+ expects "com.mysql.cj.jdbc.Driver" —
                // confirm which driver version is on the classpath.
                // NOTE(review): credentials are hard-coded for this demo; externalize
                // them (config/env) before any real deployment.
                new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
                        .withUrl("jdbc:mysql://192.168.23.140:3306/flink?useSSL=false")
                        .withDriverName("com.mysql.jdbc.Driver")
                        .withUsername("root")
                        .withPassword("654321")
                        .build()));

        // Submit and run the job.
        env.execute("flink jdbc sink job");
    }
}
