package com.bw.gmall.realtime.app.dwd;


import com.alibaba.fastjson.JSONObject;
import com.bw.gmall.realtime.app.func.DimAsyncFunction;
import com.bw.gmall.realtime.utils.DateFormatUtil;
import com.bw.gmall.realtime.utils.MyKafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.Duration;
import java.util.concurrent.TimeUnit;

// Upstream view: select * from the five joined source tables where type = 'insert'
public class DwdTradeOrderDetail_shopTable {

    /**
     * Flink SQL job: reads the {@code dwd_trade_order_detail_with_shop} Kafka topic as a
     * dynamic table, aggregates order amount per (order date, user, shop), and writes the
     * aggregate to an upsert-kafka sink keyed by (date_id, user_id, shop).
     *
     * <p>This is a pure Table-API/SQL pipeline: {@code executeSql(INSERT ...)} submits the
     * streaming job itself, so {@code env.execute()} must NOT be called (there are no
     * DataStream transformations registered, and calling it would fail with
     * "No operators defined").
     *
     * @param args unused
     * @throws Exception propagated from the Flink runtime on submission failure
     */
    public static void main(String[] args) throws Exception {
        //TODO 1. Set up the execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1); // In production, set this to the Kafka topic's partition count.
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        // Map the Kafka topic dwd_trade_order_detail_with_shop onto a Flink source table.
        tableEnv.executeSql("" +
                "create table dwd_trade_order_detail_with_shop( " +
                "    `id` string, " +
                "    `order_id` string, " +
                "    `sku_id` string, " +
                "    `shop` string, " +
                "    `sku_name` string, " +
                "    `order_price` string, " +
                "    `sku_num` string, " +
                "    `create_time` string, " +
                "    `source_type_id` string, " +
                "    `source_type_name` string, " +
                "    `source_id` string, " +
                "    `split_total_amount` string, " +
                "    `split_activity_amount` string, " +
                "    `split_coupon_amount` string, " +
                "    `consignee` string, " +
                "    `consignee_tel` string, " +
                "    `total_amount` string, " +
                "    `order_status` string, " +
                "    `user_id` string, " +
                "    row_op_ts TIMESTAMP_LTZ(3) "+
                ")" + MyKafkaUtil.getKafkaDDL("dwd_trade_order_detail_with_shop", "dwd_trade_order_detail_with_shop"));
        // Aggregate split_total_amount per (order date, user, shop); rows without a shop
        // are excluded. NOTE(review): this is an unbounded (non-windowed) GROUP BY, so the
        // sink receives continuous retract/upsert updates — hence the upsert-kafka sink below.
        Table table = tableEnv.sqlQuery("" +
                "SELECT \n" +
                "  DATE_FORMAT(TO_TIMESTAMP(create_time), 'yyyy-MM-dd') AS date_id,\n" +
                "  user_id,\n" +
                "  shop,\n" +
                "  SUM(CAST(split_total_amount AS DOUBLE)) AS total_amount,\n" +
                "  TO_TIMESTAMP(MAX(create_time)) AS ts\n" +
                "FROM dwd_trade_order_detail_with_shop\n" +
                "WHERE shop IS NOT NULL\n" +
                "GROUP BY \n" +
                "  DATE_FORMAT(TO_TIMESTAMP(create_time), 'yyyy-MM-dd'),\n" +
                "  user_id,\n" +
                "  shop ");
        tableEnv.createTemporaryView("dws_trade_shop_user_order_amount", table);
        // Upsert-kafka sink; the primary key matches the GROUP BY key so updates to the
        // running SUM overwrite prior values per (date_id, user_id, shop).
        tableEnv.executeSql("CREATE TABLE dws_trade_shop_user_order (\n" +
                "  date_id STRING,\n" +
                "  user_id STRING,\n" +
                "  shop STRING,\n" +
                "  total_amount DOUBLE,\n" +
                "  ts TIMESTAMP(3),\n" +
                "  PRIMARY KEY (date_id, user_id, shop) NOT ENFORCED\n" +
                ")"+MyKafkaUtil.getUpsertKafkaDDL("dws_trade_shop_user_order_amount"));
        // Submits the streaming job; the client returns immediately (detached) while the
        // job keeps running on the cluster.
        tableEnv.executeSql("INSERT INTO dws_trade_shop_user_order SELECT * FROM dws_trade_shop_user_order_amount");
    }

}