package com.qingyunge.app.dws;

import com.qingyunge.app.func.SplitConfFuncton;
import com.qingyunge.app.func.SplitFunction;
import com.qingyunge.bean.KeywordSkuClassBean;
import com.qingyunge.bean.KeywordSkuId;
import com.qingyunge.util.MyClickHouseUtil;
import com.qingyunge.util.MyKafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.util.Collector;

import java.time.Duration;

/**
 * DWS job: counts product-configuration ("proconf") keywords per SKU class.
 *
 * <p>Pipeline: Kafka source table ({@code sku_eval_topic}) -> UDTF splits the
 * {@code proconf} column into keywords -> DataStream of (skuClass, keyword, 1, ts)
 * -> 2s bounded-out-of-orderness watermarks -> keyBy(skuClass, keyword) ->
 * 10-second event-time tumbling window sum -> ClickHouse sink
 * ({@code dws_sku_conf_count_table}).
 */
public class Dws_proconf_keyword_num_window {
    public static void main(String[] args) throws Exception {
        // Parallelism 1: single-task job, keeps window output ordering simple.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment().setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        String topic = "sku_eval_topic";
        String groupId = "Dws_proconf_keyword_num"; // Kafka consumer group id (typo "gourpid" fixed)
        // TODO Create the Kafka-backed source table
        tableEnv.executeSql("create table dws_shiyun_get_num_table( " +
                " `id` bigint, " +
                "    `skuId` string, " +
                "    `name` string, " +
                "    `price` double, " +
                "    `imgUrl` string, " +
                "    `evalType` string, " +
                "    `evalNum` bigint, " +
                "    `porView` int, " +
                "    `evaluate` string, " +
                "    `proconf` string, " +
                "    `date` string, " +
                "    `location` string, " +
                "    `ts` bigint " +
                "    )"+ MyKafkaUtil.getKafkaDDL(topic,groupId));
        // Register the table function that splits the proconf string into keyword rows.
        // NOTE(review): class name "SplitConfFuncton" is misspelled in the project; kept as-is.
        tableEnv.createTemporaryFunction("SplitConfFuncton", SplitConfFuncton.class);
        // LATERAL TABLE cross-joins each source row with its split-out keywords.
        Table keyWordTable = tableEnv.sqlQuery("select  " +
                "    word AS keyword, " +
                "    skuId, " +
                "    ts " +
                "  from dws_shiyun_get_num_table, " +
                "LATERAL TABLE(SplitConfFuncton(proconf))");
        DataStream<KeywordSkuId> keywordSkuIdDS = tableEnv.toAppendStream(keyWordTable, KeywordSkuId.class);
        // Map each (keyword, skuId, ts) row to a countable bean keyed by SKU class.
        SingleOutputStreamOperator<KeywordSkuClassBean> keywordSkuClassBeanFM = keywordSkuIdDS.flatMap(new FlatMapFunction<KeywordSkuId, KeywordSkuClassBean>() {
            @Override
            public void flatMap(KeywordSkuId keywordSkuId, Collector<KeywordSkuClassBean> collector) throws Exception {
                String skuId = keywordSkuId.getSkuId();
                // Explicit guard instead of relying on NPE/StringIndexOutOfBounds:
                // drop records whose skuId cannot yield a class prefix.
                if (skuId == null || skuId.isEmpty()) {
                    return;
                }
                try {
                    // First character of skuId encodes the SKU class — TODO confirm against upstream schema.
                    String skuClass = skuId.substring(0, 1);
                    String keyword = keywordSkuId.getKeyword();
                    Long ts = keywordSkuId.getTs();
                    collector.collect(new KeywordSkuClassBean(skuClass, keyword, 1L, ts));
                } catch (Exception e) {
                    // Best-effort: skip malformed records rather than failing the job.
                    e.printStackTrace();
                }
            }
        });
        // Event-time watermarks tolerating up to 2 seconds of out-of-order records;
        // the bean's ts field supplies the event timestamp.
        SingleOutputStreamOperator<KeywordSkuClassBean> keyWaterMark = keywordSkuClassBeanFM.assignTimestampsAndWatermarks(WatermarkStrategy.<KeywordSkuClassBean>forBoundedOutOfOrderness(Duration.ofSeconds(2)).withTimestampAssigner(new SerializableTimestampAssigner<KeywordSkuClassBean>() {
            @Override
            public long extractTimestamp(KeywordSkuClassBean keywordSkuClassBean, long l) {
                return keywordSkuClassBean.getTs();
            }
        }));

        // TODO Key by (skuClass, keyword), open a 10s tumbling event-time window, sum the counts
        SingleOutputStreamOperator<KeywordSkuClassBean> reduceDS = keyWaterMark.keyBy(new KeySelector<KeywordSkuClassBean, Tuple2<String, String>>() {
            @Override
            public Tuple2<String, String> getKey(KeywordSkuClassBean value) throws Exception {
                return Tuple2.of(value.getSkuClass(), value.getKeyword());
            }
        }).window(TumblingEventTimeWindows.of(Time.seconds(10)))
                .reduce(new ReduceFunction<KeywordSkuClassBean>() {
                    @Override
                    public KeywordSkuClassBean reduce(KeywordSkuClassBean keywordSkuClassBean, KeywordSkuClassBean t1) throws Exception {
                        // Accumulate into the left element; other fields keep the first record's values.
                        keywordSkuClassBean.setCount(keywordSkuClassBean.getCount() + t1.getCount());
                        return keywordSkuClassBean;
                    }
                });
        reduceDS.print("===>");
        // TODO Write the window aggregates to ClickHouse
        reduceDS.addSink(MyClickHouseUtil.getSinkFunction("insert into  dws_sku_conf_count_table values(?,?,?,?)"));
        env.execute("dws_sku_conf_count");

        // TODO positive-review rate: average positive reviews per brand
    }
}
