package com.bw.gmall.app.dws;

import com.bw.gmall.bean.DiYiZgiBao;
import com.bw.gmall.utils.MyClickHouseUtil;
import com.bw.gmall.utils.MyKafkaUtil;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

/**
 * DWS-layer Flink job ("first metric" / di-yi-zhi-biao wide table).
 *
 * <p>Reads three Kafka topics via Table-API DDL:
 * <ul>
 *   <li>{@code dwd_trade_order_pre_process} — pre-processed order detail rows</li>
 *   <li>{@code dwd_cart_add_count}          — cart-add events (with event-time watermark)</li>
 *   <li>{@code dwd_fang_log_count}          — page-view/visit ("fang") log events</li>
 * </ul>
 * joins them per (shop_id, sku_id, day), and sinks the aggregated row into the
 * ClickHouse table {@code dwd_di_yi_zhou}.
 *
 * <p>NOTE(review): the field order of {@link DiYiZgiBao} is assumed to match the
 * 11 {@code ?} placeholders of the insert statement — confirm against the bean.
 */
public class DwsDiYiZhiBIao {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1: keeps ordering simple for this job; raise with care,
        // the unbounded COUNT(DISTINCT) aggregations below keep state per key.
        env.setParallelism(1);

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Kafka source: order pre-process facts (processing-time only; the join
        // below buckets by the string create_time column, not a watermark).
        tableEnv.executeSql("CREATE TABLE dwd_trade_order_pre_process ( " +
                "    `id` string, " +
                "    `shop_id` string, " +
                "    `order_id` string, " +
                "    `sku_id` string, " +
                "    `sku_name` string, " +
                "    `order_price` string, " +
                "    `sku_num` string, " +
                "    `create_time` string, " +
                "    `source_type_id` string, " +
                "    `source_type_name` string, " +
                "    `source_id` string, " +
                "    `split_total_amount` decimal(16,2), " +
                "    `split_activity_amount` decimal(16,2), " +
                "    `split_coupon_amount` decimal(16,2), " +
                "    `consignee` string, " +
                "    `consignee_tel` string, " +
                "    `total_amount` string, " +
                "    `order_status` string, " +
                "    `user_id` string, " +
                "    `payment_way` string, " +
                "    `delivery_address` string, " +
                "    `order_comment` string, " +
                "    `out_trade_no` string, " +
                "    `trade_body` string, " +
                "    `operate_time` string, " +
                "    `expire_time` string, " +
                "    `process_status` string, " +
                "    `tracking_no` string, " +
                "    `parent_order_id` string, " +
                "    `province_id` string, " +
                "    `activity_reduce_amount` decimal(16,2), " +
                "    `coupon_reduce_amount` decimal(16,2), " +
                "    `original_total_amount` decimal(16,2), " +
                "    `feight_fee` decimal(16,2), " +
                "    `feight_fee_reduce` decimal(16,2), " +
                "    `refundable_time` string, " +
                "    `order_detail_activity_id` string, " +
                "    `activity_id` string, " +
                "    `activity_rule_id` string, " +
                "    `order_detail_coupon_id` string, " +
                "    `coupon_id` string, " +
                "    `coupon_use_id` string, " +
                "    `type` string, " +
                "    `old` map<string,string>, " +
                "    row_op_ts TIMESTAMP_LTZ(3) "+
                ") " + MyKafkaUtil.getKafkaDDL("dwd_trade_order_pre_process", "dws_order_pre_process_12563"));

        // Kafka source: cart-add events; rt is derived from the epoch-millis ts
        // with a 2-second out-of-orderness watermark.
        tableEnv.executeSql("create table dwd_cart_add_count(" +
                "shop_id string, " +
                "uid string, " +
                "sku_id string," +
                "ts bigint," +
                "    `rt` as TO_TIMESTAMP(FROM_UNIXTIME(ts/1000)), " +
                "    WATERMARK FOR rt AS rt - INTERVAL '2' SECOND " +
                " ) "+ MyKafkaUtil.getKafkaDDL("dwd_cart_add_count", "dwd_cart_add_count_2365"));

        // Kafka source: visit ("fang") log events, same rt/watermark derivation.
        tableEnv.executeSql("create table dwd_fang_log_count(" +
                "shop_id string, " +
                "uid string, " +
                "during_time int, " +
                "sku_id string," +
                "ts bigint," +
                "    `rt` as TO_TIMESTAMP(FROM_UNIXTIME(ts/1000)), " +
                "    WATERMARK FOR rt AS rt - INTERVAL '2' SECOND " +
                " ) "+ MyKafkaUtil.getKafkaDDL("dwd_fang_log_count", "dwd_fang_log_count_24563"));

        // Per (shop, sku, day) metrics:
        //   a1 — paid amount & distinct paying users (order statuses 1002/1004/1005/1006)
        //   a2 — distinct users adding the sku to cart
        //   a3 — visit count, distinct visitors, total dwell time
        // pay_rate = paying users / visitors. BUGFIX: multiply by 1.0 BEFORE
        // dividing — pay_ren/fang_ren_count is integer division and truncated
        // the ratio to 0 in the original query.
        Table table = tableEnv.sqlQuery("" +
                "select " +
                "a1.sku_id, " +
                "a1.sku_name, " +
                "a1.shop_id, " +
                "COALESCE(a1.pay_amount,0.0) as pay_amount, " +
                "COALESCE(a1.pay_ren,0) as pay_ren, " +
                "COALESCE(a2.jia_count,0) as jia_count, " +
                "COALESCE(a3.fang_count,0) as fang_count, " +
                "COALESCE(a3.fang_ren_count,0) as fang_ren_count, " +
                "COALESCE(a3.during_time,0) as during_time, " +
                "COALESCE(a1.pay_ren*1.0/a3.fang_ren_count,0) as pay_rate," +
                "DATE_FORMAT(TO_TIMESTAMP(a1.create_time, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd') as create_time " +
                "from (" +
                "select " +
                "shop_id,sku_id,sku_name,create_time," +
                "sum(cast(split_total_amount1 as decimal(16,2))) as pay_amount," +
                "count(distinct user_id) pay_ren " +
                "from " +
                "(" +
                "select " +
                "shop_id,sku_id,sku_name,user_id," +
                "DATE_FORMAT(TO_TIMESTAMP(create_time, 'yyyy-MM-dd HH:mm:ss'), 'yyyy-MM-dd') as create_time," +
                "cast((`split_total_amount` + `feight_fee`) as decimal(16,2)) as split_total_amount1 " +
                "from dwd_trade_order_pre_process " +
                "where order_status in (1002,1004,1005,1006)" +
                " ) " +
                "group by create_time,shop_id,sku_id,sku_name) a1 " +
                "join (select " +
                "shop_id,sku_id," +
                "count(distinct uid) as jia_count," +
                "DATE_FORMAT(rt, 'yyyy-MM-dd') as create_time " +
                "from dwd_cart_add_count " +
                "group by shop_id,sku_id,DATE_FORMAT(rt, 'yyyy-MM-dd')" +
                ") a2 on a1.shop_id=a2.shop_id and a1.sku_id=a2.sku_id  " +
                "and a1.create_time=a2.create_time " +
                "join (select " +
                "shop_id,sku_id," +
                "count(*) as fang_count, " +
                "count(distinct uid) as fang_ren_count," +
                "sum(cast(during_time as int)) as during_time," +
                "DATE_FORMAT(rt, 'yyyy-MM-dd') as create_time " +
                "from dwd_fang_log_count " +
                "group by shop_id,sku_id,DATE_FORMAT(rt, 'yyyy-MM-dd')" +
                ") a3 on a1.shop_id=a3.shop_id and a1.sku_id=a3.sku_id " +
                "and a1.create_time=a3.create_time ");

        // toRetractStream emits (isAdd, row): f0=false flags retraction of a
        // previously emitted aggregate. BUGFIX: only forward add/update records
        // (f0=true) — the original sank retractions too, re-inserting stale
        // rows into ClickHouse as if they were data.
        DataStream<Tuple2<Boolean, DiYiZgiBao>> tuple2DataStream = tableEnv.toRetractStream(table, DiYiZgiBao.class);
        tuple2DataStream.print();
        tuple2DataStream
                .filter(a -> a.f0)
                .map(a -> a.f1)
                // "insert into table" is non-standard for ClickHouse JDBC;
                // use plain "insert into <table>". Placeholders: 11 columns.
                .addSink(MyClickHouseUtil
                        .getSinkFunction("insert into dwd_di_yi_zhou values(?,?,?,?,?,?,?,?,?,?,?)"));

        env.execute();
    }
}
