package cn.doitedu.c_topn;

import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Streaming job: computes the top-N most-favorited products per product category.
 *
 * <p>Pipeline: read DWD-layer behavior events from Kafka, filter 'favorite' events,
 * look up each item's category via a processing-time temporal join against an HBase
 * dimension table, count favorites per (category, item), rank with ROW_NUMBER per
 * category, and keep the top {@link #TOP_N} rows. Results are continuously printed
 * to stdout (this is a retracting top-N: ranks are updated as counts change).
 */
public class 各品类商品收藏topn {

    /** Number of top-ranked products to retain per category. */
    private static final int TOP_N = 10;

    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Exactly-once checkpoints every 5s; checkpoint state written to a local dir
        // (Windows path — dev/demo setup, not suitable for a distributed cluster).
        env.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointStorage("file:///d:/ckpt");

        // Single parallelism keeps the printed output ordered and easy to read in a demo.
        env.setParallelism(1);

        StreamTableEnvironment tenv = StreamTableEnvironment.create(env);

        createDwdEventsKafkaTable(tenv);
        createProductDimHbaseTable(tenv);

        // Run the per-category top-N query; print() blocks and streams results to stdout.
        tenv.executeSql(buildCategoryTopNQuery(TOP_N)).print();
    }

    /**
     * Creates the table mapping the DWD-layer behavior-log detail topic in Kafka.
     * Adds a computed item_id column (extracted from the properties map) and a
     * processing-time attribute 'pt' used by the temporal lookup join.
     */
    private static void createDwdEventsKafkaTable(StreamTableEnvironment tenv) {
        tenv.executeSql(
                "create table dwd_events_kafka(                                \n" +
                        "     user_id           string                         \n" +
                        "    ,session_id         string                        \n" +
                        "    ,event_id           string                        \n" +
                        "    ,action_time        bigint                        \n" +
                        "    ,properties         map<string,string>            \n" +
                        "    ,item_id as cast(properties['item_id'] as bigint) \n"  +
                        "    ,pt as proctime()    \n" +
                        ") WITH (\n" +
                        "    'connector' = 'kafka',\n" +
                        "    'topic' = 'dwd-events',\n" +
                        "    'properties.bootstrap.servers' = 'doitedu:9092',\n" +
                        "    'properties.group.id' = 'doit44_g1',\n" +
                        "    'scan.startup.mode' = 'latest-offset',\n" +
                        "    'value.format' = 'json',               \n" +
                        "    'value.fields-include' = 'EXCEPT_KEY'\n" +
                        ")                                                    ");
    }

    /**
     * Creates the table mapping the HBase dimension table dim_product_info.
     * Column family 'f' carries brand / category / price; product_id is the rowkey
     * (declared primary key, not enforced — required for lookup joins).
     */
    private static void createProductDimHbaseTable(StreamTableEnvironment tenv) {
        tenv.executeSql(
                " create table dim_product_info_hbase(                         "+
                        "    product_id  bigint,                               "+
                        "    f row<brand_id bigint,product_category_id bigint,price decimal(10,2)>, "+
                        "    primary key(product_id) not enforced              "+
                        " ) WITH (                                             "+
                        "  'connector' = 'hbase-2.2',                          "+
                        "  'table-name' = 'dim_product_info',                  "+
                        "  'zookeeper.quorum' = 'doitedu:2181'                 "+
                        " )                                                    "
        );
    }

    /**
     * Builds the SQL computing the top-{@code topN} favorited products per category.
     *
     * @param topN number of ranked rows to keep per category (must be positive)
     * @return the complete query string
     */
    private static String buildCategoryTopNQuery(int topN) {
        return
                // 1. Filter down to 'favorite' events only.
                "with tmp as (                                            \n" +
                "select  * from dwd_events_kafka where event_id='favorite'\n" +
                ")                                                        \n" +
                // 2. Attach the product category via a processing-time temporal
                //    lookup join against the HBase dimension table.
                ",tmp2 as (\n" +
                "SELECT\n" +
                "    user_id,\n" +
                "    item_id,\n" +
                "    product_category_id as cid\n" +
                "from tmp e \n" +
                "left join dim_product_info_hbase for system_time as of e.pt as p \n" +
                "on e.item_id = p.product_id )\n" +
                "\n" +
                // 3. Compute the per-category top-N, e.g. given events:
                //  1,item01,cat1
                //  2,item02,cat1
                //  3,item03,cat2
                //  6,item03,cat2
                //  4,item01,cat1
                //  5,item04,cat2
                // 3.1 Count favorites of each item within each category.
                ",tmp3 as (\n" +
                "select\n" +
                "    cid,\n" +
                "    item_id,\n" +
                "    count(1) as cnt\n" +
                "from tmp2\n" +
                "group by cid,item_id  )\n" +
                "\n" +
                "\n" +
                // 3.2 Partition by category, order by count desc, assign row numbers.
                ",tmp4 as (\n" +
                "select\n" +
                "    cid,\n" +
                "\titem_id,\n" +
                "\tcnt,\n" +
                "\trow_number() over(partition by cid order by cnt desc ) as rn \n" +
                "from tmp3  )\n" +
                "\n" +
                "\n" +
                // 3.3 Keep only rows ranked within the top-N.
                "select\n" +
                "    *\n" +
                "from tmp4 \n" +
                "where rn<=" + topN;
    }

}
