package cn.doitedu.olap_agg;

import cn.doitedu.beans.SearchAggBean;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.contrib.streaming.state.EmbeddedRocksDBStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.async.ResultFuture;
import org.apache.flink.streaming.api.functions.async.RichAsyncFunction;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.util.Collector;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.util.EntityUtils;

import java.io.IOException;
import java.util.Collections;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

/**
 * @Author: 深似海 (ShenSiHai)
 * @Site: <a href="www.51doit.com">Duoyi Education</a>
 * @QQ: 657270652
 * @Date: 2024/5/9
 * @Desc: Lightly-aggregated OLAP table for the "search behavior" subject area:
 * windowed aggregation of search events, then async HTTP enrichment of keywords.
 **/
public class Job04_SearchOlapAggregateAsync {
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointStorage("file:///d:/ckpt");

        env.setParallelism(1);

        env.setStateBackend(new EmbeddedRocksDBStateBackend(true));

        StreamTableEnvironment tenv = StreamTableEnvironment.create(env);
        // Advance watermarks past idle Kafka partitions almost immediately (dev setting).
        tenv.getConfig().set("table.exec.source.idle-timeout", "1 ms");

        // 1. Kafka source table: wide table of detailed user-action events.
        tenv.executeSql(
                "create table dwd_events_kafka(     " +
                        "     user_id bigint             " +
                        "    ,event_id           string             " +
                        "    ,action_time        bigint             " +
                        "    ,properties         map<string,string> " +
                        "    ,keyword as properties['keyword'] " +
                        "    ,res_cnt as cast(properties['res_cnt'] as bigint) " +
                        "    ,search_id as properties['search_id'] " +
                        "    ,province   string        " +
                        "    ,city       string        " +
                        "    ,region     string        " +
                        "    ,rt as to_timestamp_ltz(action_time,3)   " +
                        "    ,watermark for rt as rt - interval '0' second   " +
                        ") WITH (                      " +
                        "    'connector' = 'kafka',    " +
                        "    'topic' = 'dwd-user-action-log',  " +
                        "    'properties.bootstrap.servers' = 'doitedu:9092',  " +
                        "    'properties.group.id' = 'doit44_g1',              " +
                        "    'scan.startup.mode' = 'latest-offset',            " +
                        "    'value.format' = 'json',                          " +
                        "    'value.fields-include' = 'EXCEPT_KEY'             " +
                        ")                                                     ");

        // 2. Per-minute tumbling-window aggregation of search sessions.
        //    similar_word / split_word are left blank here and filled in by the
        //    async HTTP enrichment step below.
        Table table = tenv.sqlQuery("with tmp1 as (\n" +
                "    select\n" +
                "    *\n" +
                "    from dwd_events_kafka\n" +
                "    where event_id in ('search','search_return','search_click')\n" +
                ")" +
                "select   \n" +
                "    window_start,\n" +
                "    window_end,\n" +
                "    province,  \n" +
                "    city,      \n" +
                "    region,    \n" +
                "    user_id,   \n" +
                "    search_id, \n" +
                "    keyword,   \n" +
                "    min(action_time) as search_start_time,                       \n" +
                "    max(action_time) filter(where event_id = 'search_return') as search_return_time,      \n" +
                "    max(res_cnt) as search_return_cnt,                                     \n" +
                "    count(event_id) filter(where event_id='search_click' ) as search_click_cnt,  \n" +
                "    '' as similar_word,\n" +
                "    '' as split_word  \n" +
                "from table(                                                      \n" +
                "    tumble(table tmp1, descriptor(rt) ,interval '1' minute)      \n" +
                ")                \n" +
                "group by         \n" +
                "    window_start,\n" +
                "    window_end,  \n" +
                "    province,    \n" +
                "    city,        \n" +
                "    region,      \n" +
                "    user_id,     \n" +
                "    search_id,   \n" +
                "    keyword");

        // 3. Table -> DataStream, then enrich each record via async HTTP lookup.
        DataStream<SearchAggBean> dataStream = tenv.toDataStream(table, SearchAggBean.class);

        KeyedStream<SearchAggBean, String> keyedStream = dataStream.keyBy(SearchAggBean::getKeyword);

        SingleOutputStreamOperator<SearchAggBean> resultStream =
                AsyncDataStream.unorderedWait(keyedStream, new HttpRequestAsyncFunction(), 1000, TimeUnit.SECONDS);

        // 4. DataStream -> table, print results (dev sink).
        tenv.createTemporaryView("res", resultStream);

        tenv.executeSql("select * from res").print();
    }

    /**
     * Async enrichment function: for each aggregated search record, looks up the
     * keyword's synonym and word segmentation from an HTTP service, with a small
     * in-memory Guava cache in front of the service.
     *
     * <p>The HTTP call runs on a private thread pool so {@code asyncInvoke} never
     * blocks the operator thread (a blocking call there would serialize all
     * requests and defeat {@link AsyncDataStream}).
     */
    public static class HttpRequestAsyncFunction extends RichAsyncFunction<SearchAggBean, SearchAggBean> {

        private static final String SERVICE_URL = "http://doitedu:8081/api/post/simwords";
        // Unit separator between the cached split_word and similar_word values.
        private static final String CACHE_SEP = "\001";

        CloseableHttpClient client;
        Cache<String, String> cache;
        ExecutorService executor;

        @Override
        public void open(Configuration parameters) throws Exception {
            client = HttpClientBuilder.create().build();

            // Dedicated pool for the blocking HTTP round-trips.
            executor = Executors.newFixedThreadPool(4);

            cache = CacheBuilder.newBuilder()
                    .maximumSize(100)
                    .expireAfterAccess(5, TimeUnit.MINUTES)
                    .build();
        }

        @Override
        public void close() throws Exception {
            // Release the thread pool and HTTP connections when the task shuts down.
            if (executor != null) {
                executor.shutdown();
            }
            if (client != null) {
                client.close();
            }
        }

        @Override
        public void asyncInvoke(SearchAggBean bean, ResultFuture<SearchAggBean> resultFuture) throws Exception {
            final String keyword = bean.getKeyword();

            // Fast path: synonym/segmentation already cached for this keyword.
            String cacheResult = cache.getIfPresent(keyword);
            if (cacheResult != null) {
                // limit=-1 keeps trailing empty strings; guard anyway in case the
                // cached similar_word part is missing.
                String[] split = cacheResult.split(CACHE_SEP, -1);
                bean.setSplit_word(split[0]);
                bean.setSimilar_word(split.length > 1 ? split[1] : "");

                resultFuture.complete(Collections.singletonList(bean));
                return;
            }

            // Slow path: run the blocking HTTP request off the operator thread.
            // A fresh HttpPost per call avoids sharing mutable request state
            // between concurrent in-flight invocations.
            CompletableFuture
                    .supplyAsync(() -> {
                        HttpPost post = new HttpPost(SERVICE_URL);
                        post.addHeader("Content-Type", "application/json;utf-8");
                        post.addHeader("Accept", "application/json");
                        post.setEntity(new StringEntity("{\"origin\":\"" + keyword + "\"}", "utf-8"));
                        try (CloseableHttpResponse response = client.execute(post)) {
                            return EntityUtils.toString(response.getEntity(), "utf-8");
                        } catch (IOException e) {
                            throw new CompletionException("simwords request failed for keyword: " + keyword, e);
                        }
                    }, executor)
                    .whenComplete((resultJson, err) -> {
                        if (err != null) {
                            // Surface the failure to Flink instead of silently
                            // letting the record time out.
                            resultFuture.completeExceptionally(err);
                            return;
                        }
                        /*
                         * Expected response shape:
                         * {
                         *     "origin":"usb 移动固态",
                         *     "words":"usb|移动|固态|",
                         *     "similarWord":"移动固态硬盘"
                         * }
                         */
                        try {
                            JSONObject jsonObject = JSON.parseObject(resultJson);
                            String splitWord = jsonObject.getString("words");
                            String similarWord = jsonObject.getString("similarWord");

                            cache.put(keyword, splitWord + CACHE_SEP + similarWord);

                            bean.setSplit_word(splitWord);
                            bean.setSimilar_word(similarWord);

                            resultFuture.complete(Collections.singletonList(bean));
                        } catch (RuntimeException parseErr) {
                            resultFuture.completeExceptionally(parseErr);
                        }
                    });
        }
    }
}
