package cn.doitedu.demo.stream_table;

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;
import org.apache.flink.util.Collector;

/**
 * Demo: converting an append-only table (here, a filtered view over a Kafka-backed
 * table) into an append-only {@code DataStream}.
 *
 * <p>Flow: map Kafka topic {@code a-1} to SQL table {@code t1} → define an
 * append-only view {@code t2} via a plain filter → convert {@code t2} back to a
 * DataStream (both as generic {@link Row} and as a custom POJO) → print each
 * record as a CSV line.
 */
public class appendonly表转appendonly流 {
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.enableCheckpointing(2000, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointStorage("file:///d:/ckpt");
        env.setParallelism(1);

        StreamTableEnvironment tenv = StreamTableEnvironment.create(env);

        // Create table t1 mapped onto Kafka topic 'a-1' (JSON-encoded values).
        tenv.executeSql(
                "CREATE TABLE t1                  (        \n" +
                "   uid BIGINT                                \n" +
                "  ,event_id STRING                           \n" +
                "  ,action_time BIGINT                        \n" +
                ") WITH (                                     \n" +
                "  'connector' = 'kafka',                     \n" +
                "  'topic' = 'a-1',                           \n" +
                "  'properties.bootstrap.servers' = 'doitedu:9092',\n" +
                "  'properties.group.id' = 'doit44_g1',      \n" +
                "  'scan.startup.mode' = 'latest-offset',    \n" +
                "  'value.format' = 'json',                  \n" +
                "  'value.fields-include' = 'EXCEPT_KEY'     \n" +
                ")");

        // t2 is a plain filter over t1, so it stays append-only (no retractions).
        tenv.executeSql("create temporary view t2 as  select * from t1 where event_id ='page_load' ");

        // Convert the query result (view t2) into a DataStream.
        // Step 1: resolve the SQL view name into a Table-API Table object.
        Table tableT2 = tenv.from("t2");
        // Step 2a: default conversion — each record arrives as a generic Row.
        DataStream<Row> dataStream = tenv.toDataStream(tableT2);
        // Step 2b: conversion with an explicit target type — records are mapped
        // into the given JavaBean instead of Row.
        // NOTE(review): dataStream1 exists purely to demonstrate the typed
        // conversion API; it is never consumed. Drop it in non-demo code.
        DataStream<UserEvent> dataStream1 = tenv.toDataStream(tableT2, UserEvent.class);

        SingleOutputStreamOperator<String> resultStream
                = dataStream.process(new ProcessFunction<Row, String>() {
            @Override
            public void processElement(Row row, ProcessFunction<Row, String>.Context context, Collector<String> collector) throws Exception {

                // Row fields can be read by position (0-based) ...
                Long uid = row.getFieldAs(0);
                // ... or by column name.
                String eventId = row.getFieldAs("event_id");
                Long actionTime = row.getFieldAs("action_time");

                collector.collect(uid + "," + eventId + "," + actionTime);
            }
        });
        resultStream.print();
        env.execute();
    }

    /**
     * Target bean for the typed table-to-stream conversion.
     *
     * <p>Field names deliberately mirror the column names of table {@code t1}
     * ({@code uid}, {@code event_id}, {@code action_time}) — presumably so Flink
     * can map columns to fields by name during {@code toDataStream(table, class)};
     * that is why {@code event_id}/{@code action_time} keep snake_case despite
     * Java conventions. TODO(review): confirm against the Flink version in use.
     */
    @Data
    @AllArgsConstructor
    @NoArgsConstructor
    public static class UserEvent{
        private Long uid;
        private String event_id;
        private Long action_time;
    }

}
