package cn.doitedu.sql;

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

import java.util.Map;

/**
 * Demo: bridging from the Table/SQL API back to the DataStream API.
 *
 * <p>Registers a Kafka-backed table via SQL DDL, obtains it as a {@link Table},
 * converts it to a {@code DataStream<Row>}, maps each row into a typed POJO,
 * and prints the resulting stream.
 */
public class Demo16_TableToStream {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointStorage("file:///d:/ckpt");
        env.setParallelism(1);

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Register a Kafka-backed source table through SQL DDL.
        String ddl =
                "create table user_events_kafka(  \n" +
                        "     uid bigint,     \n" +
                        "     event_id string,\n" +
                        "     properties map<string,string>,\n" +
                        "     action_time bigint\n" +
                        ") with (\n" +
                        "  'connector' = 'kafka',\n" +
                        "  'topic' = 'tpc-a',\n" +
                        "  'properties.bootstrap.servers' = 'doitedu:9092',\n" +
                        "  'properties.group.id' = 'doit47-g2',\n" +
                        "  'scan.startup.mode' = 'latest-offset',\n" +
                        "  'value.format' = 'json',\n" +
                        "  'value.fields-include' = 'EXCEPT_KEY'\n" +
                        ")";
        tableEnv.executeSql(ddl);

        // Look up the SQL-registered table as a Table object,
        // then bridge it into the DataStream API as a stream of Rows.
        Table sourceTable = tableEnv.from("user_events_kafka");
        DataStream<Row> rowStream = tableEnv.toDataStream(sourceTable);

        // Convert each generic Row into the strongly typed UserEvent POJO.
        SingleOutputStreamOperator<UserEvent> userEvents = rowStream.map(row -> new UserEvent(
                row.getFieldAs("uid"),
                row.getFieldAs("event_id"),
                row.getFieldAs("action_time"),
                row.getFieldAs("properties")));

        userEvents.print();

        env.execute();
    }

    /**
     * POJO carrying one user event; field names mirror the SQL table columns
     * (uid, event_id, action_time, properties).
     */
    @Data
    @NoArgsConstructor
    @AllArgsConstructor
    public static class UserEvent {
        private Long uid;
        private String event_id;
        private Long action_time;
        private Map<String, String> properties;
    }

}
