package com.bw.app.ads;

import com.bw.util.MyKafkaUtil;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Daily insurance-policy metrics job (ADS layer).
 *
 * <p>Reads the DWM wide table {@code dwm_info_wide} from Kafka, deduplicates
 * rows per user (latest record wins), then aggregates per province:
 * new-policy count, distinct-user count and total policy amount, printing the
 * (continuously updated) result to stdout.
 */
public class zhibiao1 {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
        // Single parallelism keeps the printed output ordered and easy to read
        // during development; raise for production throughput.
        env.setParallelism(1);

        String groupId = "AllInfoWide";
        String topic = "dwm_info_wide";

        // Kafka-backed source table. `ts` is derived from the event-time string
        // and carries a 5 s out-of-orderness watermark.
        // NOTE(review): TO_TIMESTAMP expects 'yyyy-MM-dd HH:mm:ss' formatted
        // strings; rows with a different format yield NULL timestamps — confirm
        // the upstream DWM format.
        tEnv.executeSql("CREATE TABLE table1 (\n" +
                "  `order_id` STRING,\n" +
                "  `order_polno` STRING,\n" +
                "  `order_userid` STRING,\n" +
                "  `gj` BIGINT,\n" +
                "  `province` STRING,\n" +
                "  `order_orderTime` STRING,\n" +
                "  `ts` AS TO_TIMESTAMP(order_orderTime),\n" +
                "  WATERMARK FOR `ts` AS `ts` - INTERVAL '5' SECOND\n" +
                ") WITH (" + MyKafkaUtil.getKafkaDDL(topic, groupId) + ")");

        // Daily new-policy count, user count and total policy amount.
        // Inner query keeps only each user's latest record (rn = 1); the outer
        // query aggregates per province. print() submits the job and blocks,
        // consuming the unbounded Kafka stream.
        tEnv.sqlQuery("SELECT\n" +
                "     sdt,\n" +
                "     edt,\n" +
                "     province,\n" +
                "     count(order_polno) p_ct,\n" +
                "     count(DISTINCT order_userid) user_ct,\n" +
                "     sum(gj) p_total_amount,\n" +
                "     order_orderTime\n" +
                "FROM (\n" +
                "select * from\n" +
                "(select\n" +
                "   DATE_FORMAT(order_orderTime, 'yyyy-MM-dd HH:mm:ss') AS sdt,\n" +
                "   DATE_FORMAT(order_orderTime, 'yyyy-MM-dd HH:mm:ss') AS edt,\n" +
                "   order_userid,\n" +
                "   order_polno,\n" +
                "   province,\n" +
                "   gj,\n" +
                "   order_orderTime,\n" +
                "   ts,\n" +
                "   row_number() over(partition by order_userid order by order_orderTime desc) rn\n" +
                "from table1) where rn=1 )\n" +
                "GROUP BY sdt,edt,\n" +
                "province,order_orderTime").execute().print();

        // No env.execute() here: the job is submitted by execute().print()
        // above. Calling env.execute() with no DataStream operators defined
        // would throw "No operators defined in streaming topology".
    }
}
/*
;

select b.*
from (select
    a.province,
    count(a.order_id) ct1,
    sum(a.gj) s1,
    a.order_orderTime,
    row_number() over (partition by a.province
    order by count(a.order_id), sum(a.gj) desc) cnt
from (select province,
    order_id,
    gj,
    order_orderTime
    from table1 where order_orderTime=CURRENT_TIME) a
group by province,order_orderTime) b where b.cnt<=5 and order_orderTime=CURRENT_TIME
 */
/*
SELECT
     sdt,
     edt,
     province,
     count(order_polno) p_ct,
     count(DISTINCT order_userid) user_ct,
     sum(gj) p_total_amount,
     order_orderTime
FROM (
select * from
(select
   DATE_FORMAT(order_orderTime, 'yyyy-MM-dd HH:mm:ss') AS sdt,
   DATE_FORMAT(order_orderTime, 'yyyy-MM-dd HH:mm:ss') AS edt,
   order_userid,
   order_polno,
   province,
   gj,
   order_orderTime,
   ts,
   row_number() over(partition by order_userid order by order_orderTime desc) rn
from table1) where rn=1 )
GROUP BY sdt,edt,
province,order_orderTime



 */
/*
CREATE TABLE table1 (
  `order_id` STRING,
  `order_polno` STRING,
  `order_userid` STRING,
  `gj` BIGINT,
  `province` STRING,
  `order_orderTime` STRING,
  `ts` AS TO_TIMESTAMP(order_orderTime),
  WATERMARK FOR `ts` AS `ts` - INTERVAL '5' SECOND
) WITH (MyKafkaUtil.getKafkaDDL(topic,groupId));
 */