package com.es;

import com.baomidou.mybatisplus.core.toolkit.Sequence;
import com.hankcs.hanlp.HanLP;
import com.hankcs.hanlp.dictionary.py.Pinyin;
import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchSinkFunction;
import org.apache.flink.streaming.connectors.elasticsearch.RequestIndexer;
import org.apache.flink.streaming.connectors.elasticsearch7.ElasticsearchSink;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicPartition;
import org.apache.http.HttpHost;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.reindex.BulkByScrollResponse;
import org.elasticsearch.index.reindex.UpdateByQueryRequest;
import org.elasticsearch.script.Script;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.*;

/**
 * Consumes hotel suggestion terms from Kafka and syncs them into an
 * Elasticsearch suggestion index: existing terms get their weight bumped,
 * unseen terms are indexed with pinyin completion fields.
 */
public class HotelSuggestJob {

    /** Checkpoint interval in milliseconds. */
    private static final long CHECKPOINT_INTERVAL_MS = 30000;

    /** Kafka topic to consume suggestion terms from. */
    private static final String TOPIC_NAME = "hotel_suggest";

    /** Kafka bootstrap server. */
    private static final String BOOTSTRAP_SERVER = "192.168.204.117:9092";

    /** Kafka consumer group id. */
    private static final String GROUP_NAME = "HotelSuggest";

    /** Elasticsearch host and port. */
    private static final String ES_HOST = "192.168.219.115";
    private static final int ES_PORT = 9200;

    /** Flink job name. */
    private static final String ES_JOB_NAME = "HotelSuggestJob";

    /** Elasticsearch index holding the suggestion documents. */
    private static final String ES_INDEX_NAME = "t_hotel_suggest";

    /**
     * Painless script that increments the weight of all three completion
     * fields in lockstep. Built once instead of once per record.
     */
    private static final String WEIGHT_INCREMENT_SCRIPT =
            "def weight = ctx._source['chinese'].weight + 1;"
                    + "ctx._source['chinese'].weight = weight;"
                    + "ctx._source['full_pinyin'].weight = weight;"
                    + "ctx._source['head_pinyin'].weight = weight;";

    private static final Logger log = LoggerFactory.getLogger(HotelSuggestJob.class);

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Exactly-once checkpointing backed by the local filesystem.
        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointInterval(CHECKPOINT_INTERVAL_MS);
        env.setStateBackend(new FsStateBackend("file:///opt/flink-1.12.2/checkpoints"));

        Properties properties = new Properties();
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, BOOTSTRAP_SERVER);
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, GROUP_NAME);
        // Unique client id so concurrent consumer instances do not collide.
        properties.put(ConsumerConfig.CLIENT_ID_CONFIG, "cns-consumer-" + UUID.randomUUID().toString());
        // Typed consumer instead of the raw FlinkKafkaConsumer type.
        FlinkKafkaConsumer<String> consumer =
                new FlinkKafkaConsumer<>(TOPIC_NAME, new SimpleStringSchema(), properties);

        // Read every partition from offset 0 (topic has 3 partitions here —
        // adjust the loop bound if the partition count changes).
        Map<KafkaTopicPartition, Long> topicMap = new HashMap<>();
        for (int partition = 0; partition < 3; partition++) {
            topicMap.put(new KafkaTopicPartition(TOPIC_NAME, partition), 0L);
        }
        consumer.setStartFromSpecificOffsets(topicMap);

        DataStreamSource<String> source = env.addSource(consumer);

        ElasticsearchSink<String> esSink = new ElasticsearchSink.Builder<String>(
                Collections.singletonList(new HttpHost(ES_HOST, ES_PORT)),
                new ElasticsearchSinkFunction<String>() {

            // The sink function is serialized and shipped to task managers;
            // these resources are created per-task in open() and must be
            // transient so serialization never tries to carry them along.
            private transient Sequence sequence;
            private transient RestHighLevelClient client;

            @Override
            public void open() throws Exception {
                sequence = new Sequence();
                client = new RestHighLevelClient(RestClient.builder(new HttpHost(ES_HOST, ES_PORT)));
            }

            @Override
            public void process(String item, RuntimeContext runtimeContext, RequestIndexer requestIndexer) {
                // First try to bump the weight of an already-indexed term.
                UpdateByQueryRequest request = new UpdateByQueryRequest(ES_INDEX_NAME);
                request.setQuery(QueryBuilders.termQuery("chinese", item));
                request.setScript(new Script(WEIGHT_INCREMENT_SCRIPT));
                try {
                    BulkByScrollResponse response = client.updateByQuery(request, RequestOptions.DEFAULT);
                    if (response.getUpdated() == 0) {
                        // Nothing matched: index the term as a new suggestion.
                        requestIndexer.add(createRequest(item));
                    }
                } catch (IOException e) {
                    // Best effort: log and drop the record rather than fail the job.
                    log.error(e.getMessage(), e);
                }
            }

            /**
             * Builds an index request for a brand-new suggestion term with an
             * initial weight of 1.
             */
            private IndexRequest createRequest(String text) {
                long id = sequence.nextId();
                return new IndexRequest(ES_INDEX_NAME)
                        .id(String.valueOf(id))
                        .source(processBy(id, text, 1));
            }

            /**
             * Builds the document source: the Chinese text plus full-pinyin
             * and head-letter-pinyin completion fields, all sharing one weight.
             */
            private Map<String, Object> processBy(long id, String text, int weight) {
                // HanLP converts each Chinese character to its pinyin.
                List<Pinyin> pinyinList = HanLP.convertToPinyinList(text);
                // StringBuilder, not StringBuffer: no cross-thread sharing here.
                StringBuilder fullPinyin = new StringBuilder();
                StringBuilder headPinyin = new StringBuilder();
                for (Pinyin pinyin : pinyinList) {
                    fullPinyin.append(pinyin.getPinyinWithoutTone());
                    headPinyin.append(pinyin.getHead());
                }

                Map<String, Object> source = new HashMap<>();
                source.put("id", id);
                source.put("chinese", suggestField(text, weight));
                source.put("full_pinyin", suggestField(fullPinyin.toString(), weight));
                source.put("head_pinyin", suggestField(headPinyin.toString(), weight));
                return source;
            }

            /** One completion entry of the form {input, weight}. */
            private Map<String, Object> suggestField(String input, int weight) {
                Map<String, Object> field = new HashMap<>();
                field.put("input", input);
                field.put("weight", weight);
                return field;
            }

            @Override
            public void close() throws Exception {
                // Guard against open() having failed before the client existed.
                if (client != null) {
                    client.close();
                }
            }
        }).build();

        source.addSink(esSink);

        env.execute(ES_JOB_NAME);
    }
}
