package cn.itcast.flink.sink.connector;

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.java.io.TextInputFormat;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.FileProcessingMode;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import static org.apache.flink.table.api.Expressions.$;

/**
 * Author itcast
 * Date 2022/1/18 11:23
 * Desc Reads order records from a CSV file, filters them to the 北京 area with
 *      both the Table DSL and SQL, and sinks the SQL result to a Kafka topic.
 */
public class OrderSinkKafkaDemo {
    public static void main(String[] args) throws Exception {
        // Create the stream execution environment; parallelism 1 keeps demo output ordered.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        // Table API settings: Blink planner in streaming mode.
        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .useBlinkPlanner()
                .inStreamingMode()
                .build();
        // Restart up to 3 times with a 3000 ms delay between attempts.
        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3,3000));
        // Continuously monitor order.csv, re-scanning for new data every 50 s.
        DataStreamSource<String> source = env.readFile(
                new TextInputFormat(null),
                "D:\\project\\flinkbase27\\data\\order.csv",
                FileProcessingMode.PROCESS_CONTINUOUSLY,
                50000
        );
        // Parse each CSV line (uid,tms,category,price,areaName) into an OrderInfo.
        SingleOutputStreamOperator<OrderInfo> mapStream = source.map(new MapFunction<String, OrderInfo>() {
            @Override
            public OrderInfo map(String value) throws Exception {
                String[] arr = value.split(",");
                return new OrderInfo(
                        arr[0],
                        Long.parseLong(arr[1]),
                        arr[2],
                        Double.parseDouble(arr[3]),
                        arr[4]
                );
            }
        });
        // Create the stream Table environment bridging DataStream and Table API.
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env, settings);
        // Convert the DataStream into a Table with explicitly named columns.
        Table table = tEnv.fromDataStream(mapStream
                , $("uid"), $("tms"), $("category"), $("price"), $("areaName"));
        // DSL style: keep only rows whose areaName equals 北京.
        Table result = table.filter($("areaName").isEqual("北京"))
                .select($("uid"), $("tms"), $("category"), $("price"), $("areaName"));
        // Register the source Table as a temporary view for SQL queries.
        tEnv.createTemporaryView("t_order",table);
        // SQL style: the same 北京 filter expressed as a query over the view.
        Table result2 = tEnv.sqlQuery(
                "select * " +
                        "from t_order " +
                        "where areaName='北京'"
        );
        // Convert the result Tables back to DataStreams (retract / append semantics).
        DataStream<Tuple2<Boolean, OrderInfo>> dataStream = tEnv.toRetractStream(result, OrderInfo.class);
        DataStream<OrderInfo> dataStream2 = tEnv.toAppendStream(result2, OrderInfo.class);
        //dataStream2.print();
        // Define the Kafka sink table.
        // NOTE: it must NOT be named "t_order" — that name is already taken by the
        // temporary view registered above, and temporary views shadow catalog tables,
        // so executeInsert("t_order") would resolve to the view instead of the Kafka
        // sink. Source-only options (scan.startup.mode, properties.group.id) were
        // dropped because this table is only ever written to.
        tEnv.executeSql(
                "create table if not exists t_order_kafka(" +
                        "uid string," +
                        "tms bigint," +
                        "category string," +
                        "price double," +
                        "areaName string )" +
                        "WITH(" +
                        "'connector'='kafka'," +
                        "'topic'='order'," +
                        "'format'='csv'," +
                        "'properties.bootstrap.servers'='node1:9092,node2:9092,node3:9092')"
        );
        // Stream the SQL result continuously into the Kafka sink table.
        result2.executeInsert("t_order_kafka");
        // Execute the remaining DataStream topology (the to*Stream conversions above).
        env.execute();
    }

    /** POJO holding one parsed order record from order.csv. */
    @AllArgsConstructor
    @NoArgsConstructor
    @Data
    public static class OrderInfo {
        // user id
        private String uid;
        // order timestamp (presumably epoch millis — TODO confirm against order.csv)
        private Long tms;
        // product category
        private String category;
        // price
        private Double price;
        // area name, e.g. 北京
        private String areaName;
    }
}
