package com._51doit.flinksql;

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

/**
 * Reads CSV-formatted events from a Kafka topic via Flink SQL and prints them to stdout.
 *
 * Requires the Flink Kafka connector dependency:
 *         <dependency>
 *             <groupId>org.apache.flink</groupId>
 *             <artifactId>flink-connector-kafka_${scala.binary.version}</artifactId>
 *             <version>${flink.version}</version>
 *         </dependency>
 * Setup: create a topic named "kafka-csv" and open a console producer for it.
 * Start ZooKeeper first, then Kafka.
 *
 * Flow: CSV rows (e.g. {@code u001,i1000,view}) are produced into Kafka, exposed as a
 * temporary source table via the DDL below, queried with SQL, and printed as a DataStream.
 */
public class SQLKafkaTable {
    public static void main(String[] args) throws Exception{
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);

        // Sample input line: u001,i1000,view
        // Register a source table backed by the Kafka topic.
        tEnv.executeSql(
                "CREATE TABLE KafkaTable (\n" +
                        "  `user_id` STRING,\n" +
                        "  `item_id` STRING,\n" +
                        "  `behavior` STRING,\n" +
                        // Virtual column populated from the Kafka record's timestamp metadata.
                        "  `ts` TIMESTAMP(3) METADATA FROM 'timestamp'\n" +
                        ") WITH (\n" +
                        "  'connector' = 'kafka',\n" +
                        "  'topic' = 'kafka-csv',\n" + // Kafka topic to read from
                        "  'properties.bootstrap.servers' = 'doit01:9092,doit02:9092,doit03:9092',\n" +
                        "  'properties.group.id' = 'testGroup',\n" +
                        "  'scan.startup.mode' = 'earliest-offset',\n" +
                        "  'format' = 'csv',\n" + // wire format of the records
                        "  'csv.ignore-parse-errors' = 'true'\n" + // drop malformed rows instead of failing the job
                        ")"
        );

        Table table = tEnv.sqlQuery("SELECT * FROM KafkaTable "); // e.g. append: where ts >= 1635585660000

        // toDataStream(table) replaces the deprecated toAppendStream(table, Row.class)
        // bridge API (deprecated since Flink 1.14); the default result type is Row.
        DataStream<Row> appendStream = tEnv.toDataStream(table);

        appendStream.print();
        env.execute();
    }
}
