package cn.itcast.flink.sink.connector;

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

/**
 * Reads order records from Kafka and writes them into MySQL.
 * Two tables are registered in the table environment — one mapped to the
 * Kafka topic and one mapped to the MySQL table — and an INSERT statement
 * pipes data from the former into the latter.
 */
public class MysqlSinkTest {

    /**
     * Streams JSON order events from the Kafka topic {@code orderjsons} into
     * the MySQL table {@code order_test} using Flink SQL connectors, while
     * also printing the source rows for debugging.
     *
     * @param args unused command-line arguments
     * @throws Exception if the streaming job cannot be submitted or fails at
     *     runtime — propagated instead of being swallowed so the failure is
     *     visible to the caller/launcher
     */
    public static void main(String[] args) throws Exception {
        // **********************
        // 01. Set up the Blink streaming table environment.
        // **********************
        StreamExecutionEnvironment bsEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        EnvironmentSettings bsSettings =
                EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build();
        StreamTableEnvironment bsTableEnv = StreamTableEnvironment.create(bsEnv, bsSettings);

        // 02. Define the Kafka source table.
        // The topic 'orderjsons' must already exist; records are JSON objects
        // matching the column list below.
        String sourceTable =
                "create table kafkaInputTable (" +
                " id varchar," +
                " `timestamp` varchar," +
                " category varchar," +
                " areaName varchar," +
                " money double" +
                " ) with (" +
                " 'connector' = 'kafka'," +
                " 'topic' = 'orderjsons'," +
                " 'properties.bootstrap.servers'='node1:9092,node2:9092,node3:9092'," +
                " 'scan.startup.mode' = 'earliest-offset'," +
                " 'properties.group.id' = '_consumer_order_'," +
                " 'format' = 'json'" +
                " )";
        bsTableEnv.executeSql(sourceTable);

        Table table = bsTableEnv.from("kafkaInputTable");
        // 03. Convert the Kafka table to an append stream and print it for
        // debugging. NOTE(review): toAppendStream is deprecated in newer Flink
        // versions in favor of toDataStream — confirm the project's Flink version.
        DataStream<Row> value = bsTableEnv.toAppendStream(table, Row.class);
        value.print();

        // 04. Define the MySQL sink table via the JDBC connector.
        // NOTE(review): credentials are hard-coded for this demo; externalize
        // them (config/env) before any real deployment.
        String sinkTable = "create table order_test (" +
                "                  id varchar," +
                "                  `timestamp` varchar," +
                "                  category varchar," +
                "                  areaName varchar," +
                "                  money double" +
                ") with (" +
                "  'connector' = 'jdbc'," +
                "  'url' = 'jdbc:mysql://node1:3306/test?characterEncoding=utf-8&useSSL=false'," +
                "  'table-name' = 'order_test'," +
                "  'driver'='com.mysql.jdbc.Driver'," +
                "  'username' = 'root'," +
                "  'password' = '123456'," +
                "  'sink.buffer-flush.interval'='1s'," +
                "  'sink.buffer-flush.max-rows'='1'," +
                "  'sink.max-retries' = '5'" +
                ")";

        // 05. Insert every Kafka row into the MySQL table. executeSql submits
        // the INSERT job itself; bsEnv.execute() runs the DataStream print job.
        String insert = "INSERT INTO order_test SELECT  * FROM kafkaInputTable";

        bsTableEnv.executeSql(sinkTable);
        bsTableEnv.executeSql(insert);

        bsEnv.execute();
    }
}