package com.bw.app.dws;


import com.bw.utils.MyKafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

public class Dws_money {
    /**
     * DWS-layer Flink SQL job: per-10-second payment aggregates.
     *
     * <p>Reads pre-processed order detail records from the Kafka topic
     * {@code dwd_order_pre}, keeps only rows whose {@code order_status} is
     * 1002–1005 (paid-or-later states), and prints, for each 10-second tumbling
     * event-time window, the distinct order count and the summed payment amount
     * ({@code order_price * sku_num + feight_fee}).
     *
     * @param args command-line arguments (unused)
     * @throws Exception if the Flink job cannot be started
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        // Single parallel instance keeps the printed window results in order for
        // this demo-style job.
        env.setParallelism(1);

        String topic = "dwd_order_pre";
        String groupId = "dws_payment_buyer_count";

        // Kafka source table. row_op_ts arrives as a "yyyy-MM-dd HH:mm:ss" string;
        // it is parsed into a TIMESTAMP_LTZ(3) event-time column (time_ltz) with a
        // 5-second out-of-orderness allowance on the watermark.
        tableEnv.executeSql("CREATE TABLE order_source (" +
                "  id STRING," +
                "  order_id STRING," +
                "  sku_id STRING," +
                "  sku_name STRING," +
                "  order_price DECIMAL(10, 2)," +
                "  sku_num INT," +
                "  create_time STRING," +
                "  source_type_name STRING," +
                "  total_amount DECIMAL(10, 2)," +
                "  order_status STRING," +
                "  feight_fee DECIMAL(10, 2)," +
                "  row_op_ts STRING," +
                "  `time_ltz` AS TO_TIMESTAMP_LTZ(" +
                "    UNIX_TIMESTAMP(REPLACE(row_op_ts, ' ', 'T'), 'yyyy-MM-dd''T''HH:mm:ss') * 1000," +
                "    3" +
                "  )," +
                "  WATERMARK FOR time_ltz AS time_ltz - INTERVAL '5' SECOND" +
                ")" + MyKafkaUtil.getKafkaDDL(topic, groupId));

        // Windowed aggregation over paid orders (status 1002/1003/1004/1005) using
        // the TUMBLE table-valued function. executeSql(...) submits this query as
        // its own Flink job, and print() blocks while streaming its results to
        // stdout — no separate env.execute() is needed (calling it would fail with
        // "No operators defined in streaming topology", since no DataStream
        // transformations were added to env).
        tableEnv.executeSql("with  t1  as( " +
                        "select   *   from order_source where order_status='1002' or order_status='1003' or order_status ='1004' or order_status='1005'  " +
                        ") SELECT " +
                        "  window_start, " +
                        "  window_end, " +
                        "  COUNT(DISTINCT order_id) AS order_count, " +
                        "  SUM(order_price * sku_num + feight_fee) AS total_payment_amount " +
                        "FROM TABLE(" +
                        "  TUMBLE(TABLE t1, DESCRIPTOR(`time_ltz`), INTERVAL '10' SECOND)" +
                        ") " +
                        "  " +
                        "GROUP BY window_start, window_end")
                .print();
    }
}
