package com.allen.flink.batch.sql;

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.typeutils.ObjectArrayTypeInfo;
import org.apache.flink.api.scala.typeutils.Types;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.java.StreamTableEnvironment;
import org.apache.flink.table.descriptors.Json;
import org.apache.flink.table.descriptors.Kafka;
import org.apache.flink.table.descriptors.Schema;
import org.apache.flink.types.Row;

/**
 * Reads JSON records from a Kafka topic as a streaming table source
 * and queries them with Flink SQL.
 *
 * @date: 2020-03-25 16:20
 * @author: Allen
 * @version: 0.0.4-snapshot
 * @Email: allenZyhang@163.com
 * @since: JDK 1.8
 **/
public class KafkaAndJsonSource {

    public static void main(String[] args) throws Exception {
        // Obtain the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Create a TableEnvironment on top of the streaming environment.
        StreamTableEnvironment streamTabEnv = StreamTableEnvironment.create(env);

        // Register a Kafka-backed table source named "user_log".
        // FIX: the Kafka descriptor version must be one of
        // "0.8"/"0.9"/"0.10"/"0.11"/"universal". The original "2.11" is a
        // Scala build suffix, not a connector version, and fails descriptor
        // validation when the job is submitted. "universal" is the
        // version-agnostic connector for modern brokers.
        streamTabEnv
            .connect(new Kafka()
                .version("universal")
                .topic("test")
                .startFromLatest()
                .property("group.id", "group1")
                .property("bootstrap.servers", "master:9092"))
            .withFormat(new Json()
                .failOnMissingField(false) // tolerate records lacking declared fields
                .deriveSchema())           // parse JSON using the table schema below
            .withSchema(new Schema()
                .field("userId", Types.LONG())
                .field("day", Types.STRING())
                .field("beginTime", Types.LONG())
                .field("endTime", Types.LONG())
                // "data" is a JSON array of {package, activeTime} objects,
                // mapped to an array of two-field rows.
                .field("data", ObjectArrayTypeInfo.getInfoFor(Row[].class,
                    Types.ROW(new String[] {"package", "activeTime"},
                              new TypeInformation[] {Types.STRING(), Types.LONG()}))))
            .inAppendMode()
            .registerTableSource("user_log");

        // Query the registered source with SQL.
        Table result = streamTabEnv.sqlQuery("select userId from user_log");

        // Convert the append-only result table back to a DataStream and print it.
        DataStream<Row> rowDs = streamTabEnv.toAppendStream(result, Row.class);
        rowDs.print();

        env.execute(" kafka and json");
    }
}
