package com.bw.app.dws;

import com.bw.utils.MyKafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

// NOTE(review): class name should be UpperCamelCase (e.g. ShopMetricsJob), but renaming
// requires renaming the file and updating any job submission scripts — left as-is.
public class zhibiao {

    /**
     * Flink SQL job: reads the {@code dwd_wide} Kafka topic as a dynamic table and
     * computes (1) per-shop order metrics and (2) the share of "移动端" (mobile) traffic
     * within each {@code mid_type} group, printing both as changelog streams.
     *
     * <p>Fixes versus the previous revision:
     * <ul>
     *   <li>Result tables are now wired to print sinks. Previously every sink was
     *       commented out, so {@code env.execute()} failed with
     *       "No operators defined in streaming topology".</li>
     *   <li>The mobile-traffic ratio casts the numerator to DOUBLE before dividing.
     *       BIGINT / BIGINT is integer division in Flink SQL, which truncated the
     *       intended fraction to 0 or 1.</li>
     * </ul>
     *
     * @param args unused
     * @throws Exception if job submission or execution fails
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Single parallelism keeps printed output ordered; raise for production throughput.
        env.setParallelism(1);
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);

        // Source DDL: the pre-joined DWD wide table from Kafka (JSON, read from earliest
        // offset), with a 5-second bounded-out-of-orderness watermark on row_op_ts.
        tEnv.executeSql("create table dwd_wide( " +
                "`uid` string, " +
                "`mid_type` string, " +
                "`md` string, " +
                "`mid` string, " +
                "`page_id` string, " +
                "`shop_id` string, " +
                "`id` string, " +
                "`order_id` string, " +
                "`sku_id` string, " +
                "`sku_name` string, " +
                "`order_price` string, " +
                "`sku_num` string, " +
                "`create_time` string, " +
                "`source_type_id` string, " +
                "`source_type_name` string, " +
                "`source_id` string, " +
                "`split_total_amount` string, " +
                "`split_activity_amount` string, " +
                "`split_coupon_amount` string, " +
                "`consignee` string, " +
                "`consignee_tel` string, " +
                "`total_amount` string, " +
                "`order_status` string, " +
                "`user_id` string, " +
                "`payment_way` string, " +
                "`delivery_address` string, " +
                "`order_comment` string, " +
                "`out_trade_no` string, " +
                "`trade_body` string, " +
                "`operate_time` string, " +
                "`expire_time` string, " +
                "`process_status` string, " +
                "`tracking_no` string, " +
                "`parent_order_id` string, " +
                "`province_id` string, " +
                "`activity_reduce_amount` string, " +
                "`coupon_reduce_amount` string, " +
                "`original_total_amount` string, " +
                "`feight_fee` string, " +
                "`feight_fee_reduce` string, " +
                "`refundable_time` string, " +
                "`order_detail_activity_id` string, " +
                "`activity_id` string, " +
                "`activity_rule_id` string, " +
                "`order_detail_coupon_id` string, " +
                "`coupon_id` string, " +
                "`coupon_use_id` string, " +
                "`type` string, " +
                "row_op_ts TIMESTAMP_LTZ(3), " +
                "WATERMARK FOR row_op_ts AS row_op_ts - INTERVAL '5' SECOND " +
                ") WITH ( " +
                "'connector' = 'kafka', " +
                "'topic' = 'dwd_wide', " +
                "'properties.bootstrap.servers' = 'hadoop102:9092', " +
                "'properties.group.id' = 'testGroup', " +
                "'scan.startup.mode' = 'earliest-offset', " +
                "'format' = 'json')");

        // Per-shop metrics: unique visitors, raw event count, GMV, distinct orders,
        // and rows whose order_status is '1002' (presumably "paid" — verify against
        // the status dictionary upstream).
        Table result1 = tEnv.sqlQuery(
                "select shop_id, " +
                "count(distinct uid) as uv, " +
                "count(*) as pv, " +
                "sum(cast(total_amount as double)) as gmv, " +
                "count(distinct order_id) as order_ct, " +
                "sum(if(`order_status`='1002',1,0)) as status_1002_ct " +
                "from dwd_wide group by shop_id");

        // Share of mobile ("移动端") rows relative to each mid_type group's row count.
        // CAST to DOUBLE is required: without it Flink evaluates BIGINT/BIGINT as
        // integer division and the ratio truncates to 0 or 1.
        // NOTE(review): an uncorrelated scalar subquery inside a streaming GROUP BY
        // may not be supported by every planner version — confirm against the
        // deployed Flink release.
        Table result2 = tEnv.sqlQuery(
                "select mid_type, " +
                "cast((select count(*) from dwd_wide where mid_type = '移动端') as double) / count(*) as mobile_ratio " +
                "from dwd_wide group by mid_type");

        // Both aggregates are updating (retracting) results, so emit them as
        // changelog streams. Without at least one sink, execute() has nothing to run.
        tEnv.toChangelogStream(result1).print("shop_metrics");
        tEnv.toChangelogStream(result2).print("mid_type_ratio");

        env.execute("dwd_wide metrics");
    }
}
