package cn.itcast.flink.sink.connector;

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.java.io.TextInputFormat;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.FileProcessingMode;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

import static org.apache.flink.table.api.Expressions.$;

/**
 * Demo: reads order records from a CSV file, filters the Beijing ("北京")
 * orders twice — once with the Table DSL and once with SQL — prints both
 * result streams, and writes the SQL result to a filesystem table via the
 * CSV connector.
 */
public class OrderSinkFsDemo {
    /**
     * Pipeline entry point: read order.csv, filter rows whose areaName is
     * "北京" (Table DSL and SQL variants), print both results, and write the
     * SQL result into a filesystem-connector table.
     *
     * @param args unused
     * @throws Exception if job submission or execution fails
     */
    public static void main(String[] args) throws Exception {
        // Stream execution environment; parallelism 1 keeps demo output in a
        // single file / console stream.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        // Table environment settings: Blink planner in streaming mode.
        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .inStreamingMode()
                .useBlinkPlanner()
                .build();
        // Restart strategy: at most 3 attempts, 3-second delay between them.
        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 3000));
        // Read order.csv once. The 20 s interval only applies to
        // PROCESS_CONTINUOUSLY, but the API requires a value.
        DataStreamSource<String> source = env.readFile(new TextInputFormat(null),
                "D:\\project\\flinkbase26\\data\\order.csv",
                FileProcessingMode.PROCESS_ONCE,
                20000L
        );
        // Parse each CSV line into an OrderInfo POJO.
        // NOTE(review): assumes exactly 5 well-formed fields per line; a
        // malformed line throws and the restart strategy retries the job.
        SingleOutputStreamOperator<OrderInfo> mapStream = source.map(
                line -> {
                    String[] fields = line.split(",");
                    return new OrderInfo(
                            fields[0],
                            Long.parseLong(fields[1]),
                            fields[2],
                            Double.parseDouble(fields[3]),
                            fields[4]
                    );
                }
        ).returns(OrderInfo.class);
        // Stream table environment bridging DataStream and Table API.
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env, settings);
        // Convert the DataStream to a Table with an explicit column order.
        Table table = tEnv.fromDataStream(mapStream,
                $("uid"),
                $("tms"),
                $("category"),
                $("price"),
                $("areaName")
        );
        // Table DSL: select all columns for the Beijing orders.
        Table dslResult = table.where($("areaName").isEqual("北京"))
                .select(
                        $("uid"),
                        $("tms"),
                        $("category"),
                        $("price"),
                        $("areaName")
                );

        // Register a temporary view so the same filter can be run in SQL.
        tEnv.createTemporaryView("t_order", table);
        Table sqlResult = tEnv.sqlQuery("select * from t_order where areaName='北京'");

        // Convert both result Tables back to DataStreams so they can be
        // printed by the DataStream job below.
        DataStream<Row> dslStream = tEnv.toDataStream(dslResult);
        DataStream<OrderInfo> sqlStream = tEnv.toAppendStream(sqlResult, OrderInfo.class);

        // The sink table must be registered through the table environment
        // with executeSql(ddl); mind the spaces when concatenating the DDL.
        String sql = "CREATE TABLE if not exists t_order_result ( " +
                " uid string, " +
                " tms bigint, " +
                " category string, " +
                " price double, " +
                " areaName string" +
                " ) WITH ( " +
                "  'connector' = 'filesystem', " +
                "  'path' = 'file:///d:/order', " +
                "  'format' = 'csv' " +
                ")";
        System.out.println(sql);
        tEnv.executeSql(sql);

        // FIX: executeInsert submits a separate, asynchronous job; await()
        // blocks until the bounded (PROCESS_ONCE) write finishes so main does
        // not return while the filesystem sink is still running.
        sqlResult.executeInsert("t_order_result").await();
        // FIX: the prints were commented out, leaving the DataStream topology
        // without any sink — env.execute() would then fail with "No operators
        // defined in streaming topology". Printing restores the intended
        // output and gives execute() something to run.
        dslStream.print();
        sqlStream.print();
        // Execute the DataStream job (source → map → prints).
        env.execute();
    }

    /**
     * POJO for one CSV order row: uid, timestamp (ms), category, price,
     * area name. Lombok generates the no-arg/all-args constructors and
     * getters/setters that Flink's POJO type extraction requires.
     */
    @AllArgsConstructor
    @NoArgsConstructor
    @Data
    public static class OrderInfo {
        private String uid;
        private Long tms;
        private String category;
        private Double price;
        private String areaName;
    }
}