package com.bw.yk05;

import com.alibaba.fastjson.JSON;
import com.bw.yk02.bean.SearchKeyWord;
import com.bw.yk08.MyKafkaUtil;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;


/**
 * Flink streaming job: per-day, per-city courier-status ratio report.
 *
 * <p>Reads order-detail events from the Kafka topic {@code dwd-order-detail_all},
 * tumbles them into 1-day event-time windows keyed by {@code sender_city_id},
 * computes the share of each status (pickup / shipping / transit / delivery)
 * within the window, keeps only the windows where the transit count is more
 * than three times the delivery count, and writes the rows as JSON to the
 * Kafka topic {@code 3ps_zz}.
 */
public class Mn5TM4_2 {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);

        // Kafka source table. `times` is derived from the string column
        // `create_time` and drives the event-time watermark (zero lateness).
        tEnv.executeSql("create table dwd_order_detail (" +
                "    id int," +
                "    sender_city_id int," +
                "    status string," +
                "    create_time string," +
                "    times as to_timestamp(create_time)," +
                "    WATERMARK FOR times AS times - INTERVAL '0' SECOND" +
                ")with(" +
                "    'connector' = 'kafka'," +
                "    'topic' = 'dwd-order-detail_all'," +
                "    'properties.bootstrap.servers' = 'hadoop-single:9092'," +
                "    'properties.group.id' = 'group1'," +
                "    'scan.startup.mode' = 'earliest-offset'," +
                "    'format' = 'json'" +
                ")");

        //按日期统计各城市快递状态(揽收、运输、中转、派送)占比，若中转量是派送量3倍以上，发送kafka队列；
        // Status codes (from upstream dictionary):
        //60030	揽收 (pickup)
        //60050	发单 运输 (shipping)
        //60060	转运完成 中转 (transit)
        //60070	派送成功 (delivery)
        //
        // Fixes vs. the original query:
        //  - select list: second column was `wsStart` twice; now `wsEnd`.
        //  - missing commas after the `wsStart` alias and after `count(*) ct`.
        //  - dangling comma before `from` removed (was a syntax error).
        //  - dropped the stray `where rank<=5` — `rank` is not a column of
        //    dwd_order_detail and no ranking is computed anywhere.
        Table table = tEnv.sqlQuery("select wsStart,wsEnd,sender_city_id," +
                "ls/cast(ct as double) lszb," +
                "ys/cast(ct as double) yszb," +
                "zz/cast(ct as double) zzzb," +
                "ps/cast(ct as double) pszb " +
                "from " +
                "(" +
                "select" +
                " TUMBLE_START(times,INTERVAL '1' day) as wsStart," +
                " TUMBLE_END(times,INTERVAL '1' day) as wsEnd," +
                " sender_city_id," +
                " count(*) ct," +
                " SUM(IF(STATUS='60030',1,0)) AS ls," +
                " SUM(IF(STATUS='60050',1,0)) AS ys," +
                " SUM(IF(STATUS='60060',1,0)) AS zz," +
                " SUM(IF(STATUS='60070',1,0)) AS ps" +
                " from dwd_order_detail" +
                " group by TUMBLE(times,INTERVAL '1' day),sender_city_id" +
                // keep windows where transit count > 3x delivery count
                ")where ps*3<zz"
        );

        // Tumbling-window aggregation is append-only, so toAppendStream is safe here.
        DataStream<Row> resultStream = tEnv.toAppendStream(table, Row.class);

        // Serialize each result row as JSON and publish to the alert topic.
        resultStream.map(JSON::toJSONString).addSink(MyKafkaUtil.getFlinkKafkaProducer("3ps_zz"));

        // Required: without execute() the DataStream sink pipeline never runs.
        env.execute("Mn5TM4_2");
    }

}














