package com.bw.app.ads;

import com.bw.util.MyKafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Flink streaming job: reads wide user/policy records from the Kafka topic
 * {@code dwm_info_wide} and continuously counts policy records per
 * (order_id, gender, zodiac tag, education tag, VIP tag, marital tag)
 * combination, upserting the counts into the MySQL table {@code tmp_result}.
 */
public class zhibiao3 {
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
        env.setParallelism(1);

        String groupId = "AllInfoWide";
        String topic = "dwm_info_wide";

        // Distribution of policy counts over the user's gender, zodiac sign,
        // education, VIP status and marital status.
        // Kafka source table: event-time column `ts` is derived from the order
        // timestamp string, with a 5-second watermark delay for late events.
        tEnv.executeSql("CREATE TABLE table2 (\n" +
                "  `order_id` Bigint,\n" +
                "  `order_userid` String,\n" +
                "  `sex` String,\n" +
                "  `start_tagname` String,\n" +
                "  `xueli_tagname` String,\n" +
                "  `vip_tagname` String,\n" +
                "  `marry_tagname` String,\n" +
                "  `order_orderTime` String,\n" +
                "  `ts` AS TO_TIMESTAMP(order_orderTime),\n" +
                "  WATERMARK FOR `ts` AS `ts` - INTERVAL '5' SECOND\n" +
                ") WITH ("+ MyKafkaUtil.getKafkaDDL(topic,groupId)+")");

        // JDBC upsert sink. Flink SQL only accepts PRIMARY KEY ... NOT ENFORCED
        // (a bare PRIMARY KEY fails table validation), and the key is required
        // for the JDBC connector to upsert the retractable group-by results.
        // NOTE(review): credentials are hard-coded — move to external config.
        tEnv.executeSql("CREATE TABLE tmp_result (\n" +
                "                order_id BIGINT PRIMARY KEY NOT ENFORCED,\n" +
                "                sex STRING NOT NULL,\n" +
                "                start_tagname STRING NOT NULL,\n" +
                "                xueli_tagname STRING NOT NULL,\n" +
                "                vip_tagname STRING NOT NULL,\n" +
                "                marry_tagname STRING NOT NULL,\n" +
                "                cnt1 BIGINT NOT NULL\n" +
                "            ) WITH (\n" +
                "                'connector' = 'jdbc',\n" +
                "                'url' = 'jdbc:mysql://hadoop101:3306/mydb',\n" +
                "                'driver' = 'com.mysql.cj.jdbc.Driver',\n" +
                "                'table-name' = 'tmp_result',\n" +
                "                'username' = 'root',\n" +
                "                'password' = '123456'\n" +
                "            )");

        // Continuous aggregation: count rows per attribute combination and
        // upsert into the JDBC sink. executeSql submits the streaming job;
        // it keeps running after main() returns.
        // (A duplicate tEnv.sqlQuery(...) of this same aggregation whose
        // result Table was discarded has been removed — it was dead code.)
        tEnv.executeSql("insert into tmp_result select order_id,sex,start_tagname,xueli_tagname,vip_tagname,marry_tagname,count(*) cnt1 from table2 group by order_id,sex,start_tagname,xueli_tagname,vip_tagname,marry_tagname");

    }
}
/*
CREATE TEMPORARY TABLE result (
                id BIGINT PRIMARY KEY,
                sex STRING NOT NULL,
                start_tagname STRING NOT NULL,
                xueli_tagname STRING NOT NULL,
                vip_tagname STRING NOT NULL,
                marry_tagname STRING NOT NULL,
                cnt1 INT NOT NULL
            ) WITH (
                'connector' = 'jdbc',
                'url' = 'jdbc:mysql://hadoop101:3306/mydb',
                'driver' = 'com.mysql.cj.jdbc.Driver',
                'table-name' = 'result',
                'username' = 'root',
                'password' = '12345678'
            )
 */
/*
select sex,start_tagname,xueli_tagname,vip_tagname,marry_tagname,count(*) cnt1
from table2 group by sex,start_tagname,xueli_tagname,vip_tagname,marry_tagname;
 */
/*
CREATE TABLE table2 (
  `id` Bigint,
  `order_userid` String,
  `sex` String,
  `start_tagname` String,
  `xueli_tagname` String,
  `vip_tagname` String,
  `marry_tagname` String,
  `order_orderTime` String,
  `ts` AS TO_TIMESTAMP(order_orderTime),
  WATERMARK FOR `ts` AS `ts` - INTERVAL '5' SECOND
) WITH ("+ MyKafkaUtil.getKafkaDDL(topic,groupId)+");
 */