package cn.doitedu.olap_agg;

import cn.doitedu.beans.SearchAggBean;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.util.Collector;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.util.EntityUtils;

import java.util.concurrent.TimeUnit;

/**
 * @Author: 深似海
 * @Site: <a href="www.51doit.com">多易教育</a>
 * @QQ: 657270652
 * @Date: 2024/5/9
 * @Desc: 学大数据，上多易教育
 *   搜索行为主题olap分析轻度聚合表开发
 **/
public class Job04_SearchOlapAggregate {
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointStorage("file:///d:/ckpt");

        env.setParallelism(1);

        StreamTableEnvironment tenv = StreamTableEnvironment.create(env);
        // Let watermarks advance past idle Kafka partitions so windows still fire.
        tenv.getConfig().set("table.exec.source.idle-timeout","1 ms");

        // 1. Kafka source table: the DWD user-action-log detail wide table.
        //    keyword / res_cnt / search_id are computed columns pulled out of the
        //    properties map; rt is the event-time attribute with a 0s watermark delay.
        tenv.executeSql(
                "create table dwd_events_kafka(     " +
                        "     user_id bigint             " +
                        "    ,event_id           string             " +
                        "    ,action_time        bigint             " +
                        "    ,properties         map<string,string> " +
                        "    ,keyword as properties['keyword'] " +
                        "    ,res_cnt as cast(properties['res_cnt'] as bigint) " +
                        "    ,search_id as properties['search_id'] " +
                        "    ,province   string        " +
                        "    ,city       string        " +
                        "    ,region     string        " +
                        "    ,rt as to_timestamp_ltz(action_time,3)   " +
                        "    ,watermark for rt as rt - interval '0' second   " +
                        ") WITH (                      " +
                        "    'connector' = 'kafka',    " +
                        "    'topic' = 'dwd-user-action-log',  " +
                        "    'properties.bootstrap.servers' = 'doitedu:9092',  " +
                        "    'properties.group.id' = 'doit44_g1',              " +
                        "    'scan.startup.mode' = 'latest-offset',            " +
                        "    'value.format' = 'json',                          " +
                        "    'value.fields-include' = 'EXCEPT_KEY'             " +
                        ")                                                     ");

        // 2. Lightweight aggregation: 1-minute tumbling window per
        //    (location, user, search_id, keyword) over the three search events.
        //    similar_word / split_word are emitted empty here and enriched below.
        Table table = tenv.sqlQuery("with tmp1 as (\n" +
                "    select\n" +
                "    *\n" +
                "    from dwd_events_kafka\n" +
                "    where event_id in ('search','search_return','search_click')\n" +
                ")" +
                "select   \n" +
                "    window_start,\n" +
                "    window_end,\n" +
                "    province,  \n" +
                "    city,      \n" +
                "    region,    \n" +
                "    user_id,   \n" +
                "    search_id, \n" +
                "    keyword,   \n" +
                "    min(action_time) as search_start_time,                       \n" +
                "    max(action_time) filter(where event_id = 'search_return') as search_return_time,      \n" +
                "    max(res_cnt) as search_return_cnt,                                     \n" +
                "    count(event_id) filter(where event_id='search_click' ) as search_click_cnt,  \n" +
                "    '' as similar_word,\n" +
                "    '' as split_word  \n" +
                "from table(                                                      \n" +
                "    tumble(table tmp1, descriptor(rt) ,interval '1' minute)      \n" +
                ")                \n" +
                "group by         \n" +
                "    window_start,\n" +
                "    window_end,  \n" +
                "    province,    \n" +
                "    city,        \n" +
                "    region,      \n" +
                "    user_id,     \n" +
                "    search_id,   \n" +
                "    keyword");

        // 3. Table -> DataStream so we can enrich rows with an external HTTP call.
        DataStream<SearchAggBean> dataStream = tenv.toDataStream(table, SearchAggBean.class);

        // 4. Keyed by keyword: fetch word-split / similar-word from the word service,
        //    memoized in a small per-subtask Guava cache.
        DataStream<SearchAggBean> resStream = dataStream.keyBy(SearchAggBean::getKeyword)
                .process(new KeyedProcessFunction<String, SearchAggBean, SearchAggBean>() {
                    /** Separator packing (split_word, similar_word) into one cache value. */
                    static final String SEP = "\001";

                    CloseableHttpClient client;
                    HttpPost post;
                    Cache<String, String> cache;

                    @Override
                    public void open(Configuration parameters) throws Exception {

                        client = HttpClientBuilder.create().build();

                        // The request object is reused; only the entity changes per call.
                        post = new HttpPost("http://doitedu:8081/api/post/simwords");
                        post.addHeader("Content-Type","application/json;utf-8");
                        post.addHeader("Accept","application/json");

                        // Bounded cache so hot keywords skip the HTTP round-trip.
                        cache = CacheBuilder.newBuilder()
                                .maximumSize(100)
                                .expireAfterAccess(5, TimeUnit.MINUTES)
                                .build();
                    }

                    @Override
                    public void close() throws Exception {
                        // FIX: release the pooled HTTP client when the task shuts down
                        // (the original leaked it).
                        if (client != null) {
                            client.close();
                        }
                    }

                    @Override
                    public void processElement(SearchAggBean bean, Context ctx, Collector<SearchAggBean> out) throws Exception {

                        String keyword = bean.getKeyword();

                        // Cache hit: reuse the previously fetched enrichment.
                        String cached = cache.getIfPresent(keyword);
                        if (cached != null) {
                            // FIX: limit -1 keeps trailing empty parts, so an empty
                            // similar_word no longer throws ArrayIndexOutOfBoundsException.
                            String[] parts = cached.split(SEP, -1);
                            bean.setSplit_word(parts[0]);
                            bean.setSimilar_word(parts[1]);

                            out.collect(bean);
                            return;
                        }

                        // Cache miss: call the word service. Expected response shape:
                        // { "origin":"usb 移动固态", "words":"usb|移动|固态|", "similarWord":"移动固态硬盘" }
                        post.setEntity(new StringEntity("{\"origin\":\""+keyword+"\"}","utf-8"));
                        String resultJson;
                        // FIX: close the response — the original leaked it, which
                        // eventually exhausts the client's connection pool.
                        try (CloseableHttpResponse response = client.execute(post)) {
                            resultJson = EntityUtils.toString(response.getEntity(),"utf-8");
                        }

                        JSONObject jsonObject = JSON.parseObject(resultJson);

                        // FIX: null-safe defaults so the cached value always has two parts
                        // and we never store the literal string "null".
                        String splitWord = jsonObject.getString("words");
                        String similarWord = jsonObject.getString("similarWord");
                        if (splitWord == null) {
                            splitWord = "";
                        }
                        if (similarWord == null) {
                            similarWord = "";
                        }

                        cache.put(keyword, splitWord + SEP + similarWord);

                        bean.setSplit_word(splitWord);
                        bean.setSimilar_word(similarWord);

                        out.collect(bean);
                    }
                });

        // 5. DataStream -> table again; print() is a stand-in for the real OLAP sink.
        tenv.createTemporaryView("res",resStream);

        tenv.executeSql("select * from res").print();
    }
}
