package cn._51doit.flink.sql;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Flink SQL job that streams CSV user records (name,score) from a Kafka topic
 * into a StarRocks table.
 */
public class Kafka2StarRocks {

    /**
     * Entry point: builds a local Flink environment (with web UI), registers a
     * Kafka-backed CSV source table and a StarRocks-backed sink table, then
     * submits a continuous {@code INSERT INTO ... SELECT} job.
     *
     * <p>Sample input line on the Kafka topic: {@code laozhao,99.9}
     *
     * @param args unused
     * @throws Exception if job submission or execution fails
     */
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration());

        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);

        // Source table backed by Kafka, CSV format. Malformed lines are skipped
        // rather than failing the job ('csv.ignore-parse-errors' = 'true').
        tEnv.executeSql(
                "CREATE TABLE kafka_user (\n" +
                        "  `name` VARCHAR,\n" +
                        "  `score` DOUBLE" +
                        ") WITH (\n" +
                        "  'connector' = 'kafka',\n" +
                        "  'topic' = 'kafka-user',\n" +
                        "  'properties.bootstrap.servers' = 'node-1.51doit.cn:9092,node-2.51doit.cn:9092,node-3.51doit.cn:9092',\n" +
                        "  'properties.group.id' = 'test666',\n" +
                        "  'scan.startup.mode' = 'latest-offset',\n" +
                        "  'csv.ignore-parse-errors' = 'true', \n" +
                        "  'format' = 'csv'\n" +
                        ")"
        );

        // Sink table backed by the StarRocks connector (stream load via 'load-url').
        // The non-printable separators \x01/\x02 avoid collisions with commas or
        // newlines inside field values during stream load.
        tEnv.executeSql(
                "CREATE TABLE result_sink(" +
                        "  `name` VARCHAR," +
                        "  `score` DOUBLE" +
                        ") WITH ( " +
                        "  'connector' = 'starrocks'," +
                        "  'jdbc-url'='jdbc:mysql://node-1.51doit.cn:9030?characterEncoding=utf-8'," +
                        "  'load-url'='node-1.51doit.cn:8030'," +
                        "  'database-name' = 'doit'," +
                        "  'table-name' = 'tb_user'," +
                        "  'username' = 'root'," +
                        "  'password' = ''," +
                        "  'sink.buffer-flush.max-rows' = '1000000'," +
                        "  'sink.buffer-flush.max-bytes' = '300000000'," +
                        "  'sink.buffer-flush.interval-ms' = '30000'," +
                        "  'sink.properties.column_separator' = '\\x01'," +
                        "  'sink.properties.row_delimiter' = '\\x02'," +
                        "  'sink.max-retries' = '3'" +
                        ")"
        );

        // executeSql() on an INSERT statement submits the streaming job immediately.
        TableResult tableResult = tEnv.executeSql("INSERT INTO result_sink SELECT name, score FROM kafka_user");

        tableResult.print();

        // BUG FIX: the original called env.execute() here. Since this pipeline is
        // built entirely through the Table API, the StreamExecutionEnvironment has
        // no operators registered and execute() throws IllegalStateException
        // ("No operators defined in streaming topology"). Instead, block on the
        // already-submitted Table job so the client process stays alive.
        tableResult.await();
    }
}
