package app.dwd;

import com.bw.gmall.utils.MyKafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Flink SQL job: reads page-view events from the {@code dwd_traffic_page_log}
 * Kafka topic, keeps only sku detail views ({@code page['item_type'] = 'sku_id'}),
 * tags each row with a random shop id ({@code shop_1}..{@code shop_10}), and
 * writes the result to the {@code dwd_fang_log_count} upsert-kafka topic.
 *
 * <p>No {@code env.execute()} is needed: the final {@code executeSql(INSERT ...)}
 * submits the streaming job itself.
 */
public class DwdDiYiZhiBiao_fang {
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Source table over the page-log topic. Event time `rt` is derived from the
        // epoch-millis `ts` column, with a 2-second out-of-orderness watermark.
        tableEnv.executeSql("create table page_log( " +
                "    `common` map<string,string>, " +
                "    `page` map<string,string>, " +
                "    `ts` bigint, " +
                "    `rt` as TO_TIMESTAMP(FROM_UNIXTIME(ts/1000)), " +
                "    WATERMARK FOR rt AS rt - INTERVAL '2' SECOND " +
                " ) " + MyKafkaUtil.getKafkaDDL("dwd_traffic_page_log", "dwd_traffic_page_log_21522"));

        // Keep sku detail views only and assign a random shop id 'shop_1'..'shop_10'.
        Table table2 = tableEnv.sqlQuery("select " +
                "CONCAT('shop_', CAST(CAST(FLOOR(RAND() * 10) + 1 AS INT) AS STRING)) AS shop_id, " +
                "`common`['uid'] as uid, " +
                "`common`['during_time'] as during_time, " +
                "`page`['item'] as sku_id," +
                // BUG FIX: the original concatenation produced "`ts`from page_log" with
                // no whitespace between the projection list and the FROM keyword,
                // relying on parser luck; the trailing space makes the SQL well-formed.
                "`ts` " +
                "from page_log " +
                "where `page`['item_type'] = 'sku_id'");

        tableEnv.createTemporaryView("fang_log", table2);

        // Sink: upsert-kafka table keyed on shop_id.
        // NOTE(review): shop_id takes only 10 distinct values, so upsert semantics
        // retain just the latest row per shop — confirm shop_id alone is the
        // intended primary key (a composite key such as (shop_id, uid, ts) would
        // preserve individual events).
        tableEnv.executeSql("create table dwd_fang_log_count(" +
                "shop_id string, " +
                "uid string, " +
                "during_time string, " +
                "sku_id string," +
                "ts BIGINT," +
                "primary key(shop_id) not enforced" +
                ")" + MyKafkaUtil.getUpsertKafkaDDL("dwd_fang_log_count"));

        // Explicit column list instead of SELECT * so the insert does not silently
        // misalign if either schema's column order changes later.
        tableEnv.executeSql("insert into dwd_fang_log_count " +
                "select shop_id, uid, during_time, sku_id, ts from fang_log");
    }
}
