package cn.doitedu.cn.doitedu.rtdw.etl;

import cn.doitedu.cn.doitedu.rtdw.ddl.CatalogDatabaseSwitch;
import cn.doitedu.cn.doitedu.rtdw.pojo.SearchOlapBean;
import cn.doitedu.cn.doitedu.rtdw.pojo.SearchOlapResultBean;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;
import org.apache.flink.util.Collector;
import org.apache.http.HttpEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.util.EntityUtils;

import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;

public class Job5_SearchAnalysisOlapSupport {

    /**
     * Streaming job: collapses the multiple events of one user search action
     * ('search', 'search_return', 'search_click', correlated by search_id) into a
     * single aggregated row per 5-minute tumbling window, enriches the searched
     * keyword with split words and a similar word obtained from an HTTP
     * word-analysis service, and writes the result to a Doris table.
     *
     * @param args unused
     * @throws Exception if job construction or execution fails
     */
    public static void main(String[] args) throws Exception {
        // Execution environment: exactly-once checkpointing every 5 seconds,
        // checkpoints stored on the local filesystem (dev setup), parallelism 1.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointStorage("file:/d:/ckpt");
        env.setParallelism(1);

        StreamTableEnvironment tenv = StreamTableEnvironment.create(env);

        // Switch the current catalog/database to the hive catalog.
        CatalogDatabaseSwitch.switchCatalogDatabase("doit40-hive","doit40_rtdw",tenv);


        // Aggregation: merge the several events of one search action into one row
        // (window TVF + group by search_id within a 5-minute tumbling window).
        Table table = tenv.sqlQuery(
                " WITH tmp AS (                                                     "+
                        "     SELECT                                                        "+
                        "         user_id,                                                  "+
                        "     	event_id,                                                   "+
                        "     	properties['keyword'] as keyword,                           "+
                        "     	properties['search_id'] as search_id,                       "+
                        "     	cast(properties['res_cnt'] as bigint) as res_cnt,           "+
                        "     	properties['item_seq'] as click_item_seq,                   "+
                        "     	event_time,                                                 "+
                        "     	rt                                                          "+
                        "     FROM dwd_kafka                                                "+
                        "     WHERE event_id in ('search','search_return','search_click')   "+
                        " )                                                                 "+
                        "                                                                   "+
                        " SELECT                                                            "+
                        " 	user_id,                                                        "+
                        " 	keyword,                                                        "+
                        " 	search_id,                                                      "+
                        " 	min(event_time) as search_start_time,                           "+
                        " 	max(res_cnt) as res_cnt,                                        "+
                        "     count(click_item_seq) as click_cnt                            "+
                        " FROM TABLE(                                                       "+
                        "     TUMBLE(TABLE tmp , DESCRIPTOR(rt), INTERVAL '5' MINUTE)       "+
                        " )                                                                 "+
                        " GROUP BY                                                          "+
                        "     window_start,                                                 "+
                        " 	window_end,                                                     "+
                        " 	user_id,                                                        "+
                        " 	keyword,                                                        "+
                        " 	search_id                                                       "
        );

        DataStream<SearchOlapBean> dataStream = tenv.toDataStream(table, SearchOlapBean.class);

        // Enrichment: for each aggregated search row, POST the original keyword to
        // the word-analysis service and attach the returned split/similar words.
        SingleOutputStreamOperator<SearchOlapResultBean> resultStream = dataStream.keyBy(SearchOlapBean::getSearch_id)
                .process(new KeyedProcessFunction<String, SearchOlapBean, SearchOlapResultBean>() {
                    CloseableHttpClient client;
                    HttpPost post;
                    Map<String, String> requestBody;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        // Build the HTTP client once per task lifetime.
                        client = HttpClientBuilder.create().build();

                        // Pre-build the POST request; only its entity changes per element.
                        post = new HttpPost("http://doitedu:8081/api/post/simwords");
                        post.addHeader("Content-type", "application/json; charset=utf-8");
                        post.addHeader("Accept", "application/json");

                        requestBody = new HashMap<String, String>();
                    }

                    @Override
                    public void close() throws Exception {
                        // FIX: release the HTTP client's pooled connections when the
                        // task shuts down (it was never closed before).
                        if (client != null) {
                            client.close();
                        }
                    }

                    @Override
                    public void processElement(SearchOlapBean bean, Context ctx, Collector<SearchOlapResultBean> out) throws Exception {

                        // The original keyword the user searched for.
                        String originWord = bean.getKeyword();

                        // Ask the word-analysis service for split words and a similar word.
                        requestBody.put("origin", originWord);
                        post.setEntity(new StringEntity(JSON.toJSONString(requestBody), StandardCharsets.UTF_8));

                        String similarWord;
                        String splitWords;
                        // FIX: close the response via try-with-resources; the original
                        // leaked the connection on every element.
                        try (CloseableHttpResponse response = client.execute(post)) {
                            HttpEntity responseEntity = response.getEntity();
                            // Response shape, e.g.:
                            // {"origin":"醇品 咖啡","words":"咖啡|","similarWord":"醇品黑咖啡"}
                            String responseJson = EntityUtils.toString(responseEntity, "utf-8");

                            JSONObject jsonObject = JSON.parseObject(responseJson);
                            similarWord = jsonObject.getString("similarWord");
                            splitWords = jsonObject.getString("words");
                        }

                        // FIX: emit a fresh bean per element. The original reused one
                        // mutable field instance, which is unsafe if Flink object reuse
                        // is enabled and lets later elements clobber earlier emissions.
                        SearchOlapResultBean result = new SearchOlapResultBean();
                        result.setData(bean, similarWord, splitWords);

                        out.collect(result);
                    }
                });


        // Doris sink table mapping (unique label prefix per job run).
        tenv.executeSql(
                " create table search_ana_agg_doris(    "
                        + "     user_id            BIGINT,       "
                        + "     search_id          VARCHAR(20),  "
                        + "     keyword            VARCHAR(60),  "
                        + "     split_words        VARCHAR(60),  "
                        + "     similar_word       VARCHAR(60),  "
                        + "     search_time        BIGINT,       "
                        + "     return_item_count  BIGINT,       "
                        + "     click_item_count   BIGINT        "
                        + " ) WITH (                             "
                        + "    'connector' = 'doris',            "
                        + "    'fenodes' = 'doitedu:8030',       "
                        + "    'table.identifier' = 'dws.search_ana_agg',  "
                        + "    'username' = 'root',                "
                        + "    'password' = 'root',                "
                        + "    'sink.label-prefix' = 'doris_tl" + System.currentTimeMillis()+"')");

        // Register the enriched stream and insert it into Doris.
        tenv.createTemporaryView("res",resultStream);


        tenv.executeSql(
                " INSERT INTO  search_ana_agg_doris	    "+
                        " SELECT                                "+
                        "     user_id,                          "+
                        " 	search_id,                          "+
                        " 	keyword,                            "+
                        " 	split_words,                        "+
                        " 	similar_word,                       "+
                        " 	search_start_time as search_time,   "+
                        " 	res_cnt as return_item_count,       "+
                        " 	click_cnt as click_item_count       "+
                        " FROM res                              "
        );



        env.execute();

    }


}
