package cn.doitedu.rtdw.dataetl;

import cn.doitedu.rtdw.beans.SearchAggBean;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;
import org.apache.flink.util.Collector;
import org.apache.http.HttpEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.util.EntityUtils;

/**
 * JOB05: search-behavior analysis — OLAP support layer.
 *
 * Pipeline:
 *   1. Map the DWD behavior-log detail wide table (Kafka topic {@code dwd-events})
 *      as a Flink SQL source table.
 *   2. Aggregate the search-related events ({@code search}, {@code search_return},
 *      {@code search_click}) into one row per search action, grouped by
 *      user_id / keyword / search_id inside a 1-minute tumbling event-time window.
 *   3. Enrich each aggregated row with the keyword's similar word and word
 *      segmentation, fetched from an external HTTP service.
 *   4. Sink the enriched rows into Doris table {@code dws.search_agg}.
 */
public class JOB05_搜索行为分析olap支撑 {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        env.enableCheckpointing(5000);
        // Local dev checkpoint path — point at a shared filesystem in production.
        env.getCheckpointConfig().setCheckpointStorage("file:///d:/ckpt");
        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);

        StreamTableEnvironment tenv = StreamTableEnvironment.create(env);

        // Source table: logical mapping of the Kafka topic that carries the
        // DWD-layer behavior-log detail wide table.
        tenv.executeSql(
                " create table dwd_events_kafka (                       "+
                        "     release_channel string                             "+
                        "     ,device_type string                                 "+
                        "     ,session_id  string                                 "+
                        "     ,lat         double                                 "+
                        "     ,lng         double                                 "+
                        "     ,username    string                                 "+
                        "     ,event_id     string                                "+
                        "     ,action_time  bigint                                "+
                        "     ,properties  map<string,string>                     "+
                        "     ,user_id  bigint                                    "+
                        " 	  ,member_level_id bigint                             "+
                        "     ,password string                                    "+
                        "     ,nickname string                                    "+
                        "     ,phone string                                       "+
                        "     ,status int                                         "+
                        "     ,create_time timestamp(3)                           "+
                        "     ,icon string                                        "+
                        "     ,gender int                                         "+
                        "     ,birthday date                                      "+
                        "     ,register_city string                               "+
                        "     ,job string                                         "+
                        "     ,personalized_signature string                      "+
                        "     ,source_type int                                    "+
                        "     ,integration int                                    "+
                        "     ,growth int                                         "+
                        "     ,lucky_count int                                    "+
                        "     ,history_integration int                            "+
                        "     ,modify_time timestamp(3)                           "+
                        "     ,province string                                    "+
                        "     ,city string                                        "+
                        "     ,region string                                      "+
                        "     ,page_type string                                   "+
                        "     ,page_service string                                "+
                        "     ,page_lanmu string                                  "+
                        "     ,page_pindao string                                 "+
                        "     ,pt AS proctime()                                   "+   // processing-time attribute
                        "     ,rt AS to_timestamp_ltz(action_time,3)              "+
                        "     ,watermark for rt as rt - interval '0' second       "+   // event-time attribute
                        " ) with (                                                "+
                        "     'connector' = 'kafka',                              "+
                        "     'topic' = 'dwd-events',                             "+
                        "     'properties.bootstrap.servers' = 'doitedu:9092',    "+
                        "     'properties.group.id' = 'goo2',                     "+
                        "     'scan.startup.mode' = 'latest-offset',              "+
                        "     'value.format'='json',                              "+
                        "     'value.json.fail-on-missing-field'='false',         "+
                        "     'value.fields-include' = 'EXCEPT_KEY'               "+
                        " )                                                       "
        );


        // Aggregation: one output row per search action. similar_word / split_words
        // are emitted empty here and filled in later by the HTTP enrichment step.
        Table table = tenv.sqlQuery(
                "WITH tmp AS (                                                           " +
                        "SELECT                                                                  " +
                        "   user_id,                                                            " +
                        "	event_id,                                                            " +
                        "	action_time,                                                         " +
                        "	properties['search_id'] as search_id,                                " +
                        "	properties['keyword'] as keyword,                                    " +
                        "	cast(properties['res_cnt'] as bigint) as res_cnt,                    " +
                        "	rt                                                                   " +
                        "FROM dwd_events_kafka                                                   " +
                        "WHERE event_id in ('search','search_return','search_click')             " +
                        ")                                                                       " +
                        "                                                                        " +
                        "select                                                                  " +
                        "    user_id,                                                            " +
                        "	 keyword,                                                            " +
                        "	 search_id,                                                          " +
                        "	 min(action_time) as search_time,                                    " +
                        "	 max(res_cnt) as res_cnt,                                            " +
                        "	 count(1) filter(where event_id = 'search_click')  as click_cnt,     " +
                        "	 '' as  similar_word,                                                " +
                        "	 '' as  split_words                                                  " +
                        "from TABLE(                                                             " +
                        "    TUMBLE(TABLE tmp,DESCRIPTOR(rt),INTERVAL '1' MINUTE)                " +
                        ")                                                                       " +
                        "group by                                                                " +
                        "   window_start,                                                        " +
                        "   window_end,                                                          " +
                        "   user_id,                                                             " +
                        "	keyword,                                                             " +
                        "	search_id                                                            "
        );


        // Table -> DataStream, so we can call the external HTTP service per record.
        DataStream<SearchAggBean> dataStream = tenv.toDataStream(table, SearchAggBean.class);

        SingleOutputStreamOperator<SearchAggBean> resultStream = dataStream.process(new ProcessFunction<SearchAggBean, SearchAggBean>() {

            // One HTTP client per task instance: created in open(), released in close().
            CloseableHttpClient client;
            HttpPost post;

            @Override
            public void open(Configuration parameters) throws Exception {
                client = HttpClientBuilder.create().build();
                post = new HttpPost("http://192.168.77.88:8081/api/post/simwords");
                post.setHeader("Content-Type", "application/json");
            }

            @Override
            public void close() throws Exception {
                // FIX: release the HTTP client when the task shuts down
                // (previously it was never closed — connection/thread leak).
                if (client != null) {
                    client.close();
                }
            }

            /**
             * Calls the "similar words" HTTP service for the record's keyword and
             * copies the returned segmentation and similar word onto the bean.
             */
            @Override
            public void processElement(SearchAggBean inputBean, ProcessFunction<SearchAggBean, SearchAggBean>.Context ctx, Collector<SearchAggBean> out) throws Exception {

                String keyword = inputBean.getKeyword();

                // FIX: build the request body with a JSON serializer instead of string
                // concatenation, so keywords containing quotes or backslashes cannot
                // produce malformed JSON.
                JSONObject reqBody = new JSONObject();
                reqBody.put("origin", keyword);
                post.setEntity(new StringEntity(reqBody.toJSONString(), "utf-8"));

                // FIX: close the response via try-with-resources — it was leaked
                // for every element before.
                String resJson;
                try (CloseableHttpResponse response = client.execute(post)) {
                    HttpEntity entity = response.getEntity();
                    // FIX: decode explicitly as UTF-8; without a charset EntityUtils
                    // falls back to ISO-8859-1 and garbles Chinese text.
                    resJson = EntityUtils.toString(entity, "utf-8");
                }

                // Parse the service response: {"words": "...", "similarWord": "..."}
                JSONObject jsonObject = JSON.parseObject(resJson);
                String splitWords = jsonObject.getString("words");
                String similarWord = jsonObject.getString("similarWord");

                // Fill in the fields that the SQL aggregation left empty.
                inputBean.setSplit_words(splitWords);
                inputBean.setSimilar_word(similarWord);

                out.collect(inputBean);
            }
        });


        // Register the enriched stream as a view so SQL can read it again.
        tenv.createTemporaryView("res",resultStream);

        // Sink table: logical mapping of the Doris target table dws.search_agg.
        tenv.executeSql(
                " create table search_agg_doris(                   "+
                        "   user_id    bigint,                               "+
                        " 	keyword varchar(100),                           "+
                        " 	similar_word varchar(100),                      "+
                        " 	split_words varchar(100),                       "+
                        " 	search_id varchar(20),                          "+
                        " 	search_time bigint  ,                           "+
                        " 	res_cnt bigint ,                                "+
                        " 	click_cnt bigint                                "+
                        " )  WITH (                                         "+
                        "        'connector' = 'doris',                     "+
                        "        'fenodes' = 'doitedu:8030',                "+
                        "        'table.identifier' = 'dws.search_agg',     "+
                        "        'username' = 'root',                       "+
                        "        'password' = 'root',                       "+
                        "        'sink.label-prefix' = 'doris_label_988'    "+
                        " );                                                "
        );


        // Submit the INSERT job; executeSql submits the whole pipeline on its own.
        tenv.executeSql("insert into search_agg_doris select * from res");

        // NOTE(review): the INSERT above already submits the full pipeline; on some
        // Flink versions env.execute() here fails with "no operators defined".
        // Kept for compatibility with the original — confirm whether it is needed.
        env.execute();

    }
}
