package cn._51doit.flink.day12;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * 从Kafka中读取数据，获取kafka中的元数据信息，然后将字段拼接
 */
public class KafkaConnectorMateDemo1 {

    /**
     * Reads order records from a Kafka topic, exposes the Kafka record metadata
     * (timestamp, topic, partition, offset) as table columns, concatenates them
     * into a single id field, and prints the result via the print connector.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to start
     */
    public static void main(String[] args) throws Exception {

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Checkpoint every 50 seconds so the Kafka source can commit progress.
        env.enableCheckpointing(50000);

        final StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Source table backed by the Kafka connector. The METADATA columns pull
        // per-record metadata from Kafka; VIRTUAL columns are read-only and
        // excluded when writing back.
        final String sourceDdl =
                "CREATE TABLE tb_user_order (\n"
                        + "  `oid` STRING,  \n"
                        + "  `cid` STRING,  \n"
                        + "  `money` DOUBLE,\n"
                        + "  `ts` TIMESTAMP(3) METADATA FROM 'timestamp' ,\n"
                        + "  `topic` STRING METADATA VIRTUAL, \n"
                        + "  `partition` BIGINT METADATA VIRTUAL, \n"
                        + "  `offset` BIGINT METADATA VIRTUAL \n"
                        + ") WITH (\n"
                        + "  'connector' = 'kafka',\n"
                        + "  'topic' = 'order-json',\n"
                        + "  'properties.bootstrap.servers' = 'node-1.51doit.cn:9092,node-2.51doit.cn:9092,node-3.51doit.cn:9092',\n"
                        + "  'properties.group.id' = 'testGroup',\n"
                        + "  'scan.startup.mode' = 'earliest-offset',\n"
                        + "  'format' = 'json', \n"
                        + "  'json.ignore-parse-errors' = 'true'"
                        + ")";
        tableEnv.executeSql(sourceDdl);

        // Sink table defined in SQL; the print connector writes rows to stdout.
        final String sinkDdl =
                "CREATE TABLE tb_print_table (\n"
                        + "  `id`  STRING,  \n"
                        + "  `oid` STRING,  \n"
                        + "  `cid` STRING,  \n"
                        + "  `money` DOUBLE \n"
                        + ") WITH (\n"
                        + "  'connector' = 'print'\n"
                        + ")";
        tableEnv.executeSql(sinkDdl);

        // Build the id column from topic-partition-offset-date and stream into the sink.
        tableEnv.executeSql("INSERT INTO tb_print_table SELECT concat_ws('-', topic, CAST(`partition` AS VARCHAR), CAST(`offset` AS VARCHAR), DATE_FORMAT(ts, 'yyyyMMdd')) id, oid, cid, money FROM tb_user_order");
    }
}
