package com.booter.ws.es;

import cn.hutool.core.lang.Snowflake;
import cn.hutool.core.util.IdUtil;
import cn.hutool.core.util.RandomUtil;
import co.elastic.clients.elasticsearch.ElasticsearchClient;
import co.elastic.clients.elasticsearch._types.FieldValue;
import co.elastic.clients.elasticsearch._types.Refresh;
import co.elastic.clients.elasticsearch._types.Result;
import co.elastic.clients.elasticsearch._types.SortOrder;
import co.elastic.clients.elasticsearch._types.SuggestMode;
import co.elastic.clients.elasticsearch._types.query_dsl.Query;
import co.elastic.clients.elasticsearch._types.query_dsl.RangeQuery;
import co.elastic.clients.elasticsearch.core.BulkRequest;
import co.elastic.clients.elasticsearch.core.BulkResponse;
import co.elastic.clients.elasticsearch.core.IndexRequest;
import co.elastic.clients.elasticsearch.core.IndexResponse;
import co.elastic.clients.elasticsearch.core.SearchRequest;
import co.elastic.clients.elasticsearch.core.SearchResponse;
import co.elastic.clients.elasticsearch.core.bulk.BulkResponseItem;
import co.elastic.clients.elasticsearch.core.search.CompletionSuggestOption;
import co.elastic.clients.elasticsearch.core.search.CompletionSuggester;
import co.elastic.clients.elasticsearch.core.search.DirectGenerator;
import co.elastic.clients.elasticsearch.core.search.Hit;
import co.elastic.clients.elasticsearch.core.search.PhraseSuggestOption;
import co.elastic.clients.elasticsearch.core.search.PhraseSuggester;
import co.elastic.clients.elasticsearch.core.search.SuggestSort;
import co.elastic.clients.elasticsearch.core.search.Suggester;
import co.elastic.clients.elasticsearch.core.search.Suggestion;
import co.elastic.clients.elasticsearch.core.search.TermSuggestOption;
import co.elastic.clients.elasticsearch.core.search.TermSuggester;
import co.elastic.clients.elasticsearch.indices.CreateIndexResponse;
import co.elastic.clients.elasticsearch.indices.GetIndexResponse;
import co.elastic.clients.json.JsonData;
import co.elastic.clients.util.ObjectBuilder;
import com.booter.ws.es.model.Accounts;
import com.booter.ws.es.model.BaseEsModel;
import com.google.common.collect.ImmutableList;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;

import javax.annotation.PostConstruct;
import javax.annotation.Resource;
import java.io.IOException;
import java.io.StringReader;
import java.time.LocalDateTime;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

@Slf4j
@Component
public class EsRestClient {

    /** Offset used when converting local wall-clock time to epoch millis (UTC+8 / Beijing). */
    private static final ZoneOffset ZONE_OFFSET = ZoneOffset.of("+8");

    /** Hour-granularity bucket format written into {@code dataTime}; DateTimeFormatter is thread-safe, so cache it. */
    private static final DateTimeFormatter DATA_TIME_FORMAT = DateTimeFormatter.ofPattern("yyyyMMddHH");

    @Resource
    private ElasticsearchClient client;

    /** Snowflake id generator; seeded in {@link #init()} with random worker/datacenter ids. */
    private Snowflake snowflake;

    /** Ad-hoc demo: prints a sample index-definition JSON body (settings + mappings + aliases). */
    public static void main(String[] args) {
        String input = "{\n" +
                "  \"settings\": {\n" +
                "    \"number_of_shards\": 2,\n" +
                "    \"number_of_replicas\": 3\n" +
                "  },\n" +
                "  \"mappings\": {\n" +
                "    \"properties\": {\n" +
                "      \"title\":{\n" +
                "        \"type\": \"text\",\n" +
                "        \"analyzer\": \"ik_max_word\"\n" +
                "      },\n" +
                "      \"publish\":{\n" +
                "        \"type\": \"keyword\"\n" +
                "      },\n" +
                "      \"date\":{\n" +
                "        \"type\": \"date\",\n" +
                "        \"format\": \"yyyy-MM-dd\"\n" +
                "      }\n" +
                "    }\n" +
                "  },\n" +
                "  \"aliases\": {\n" +
                "    \"my_blog\": {\n" +
                "      \"is_write_index\": false\n" +
                "    }\n" +
                "  }\n" +
                "}";

        System.out.println(input);
    }

    /** Seeds the snowflake generator with random worker/datacenter ids (Hutool randomLong is [0, 31)). */
    @PostConstruct
    public void init() {
        snowflake = IdUtil.getSnowflake(RandomUtil.randomLong(0, 31), RandomUtil.randomLong(0, 31));
    }

    /**
     * Creates an index from a raw JSON definition.
     *
     * @param idx        index name
     * @param properties full index definition as JSON, e.g.
     *                   {"settings":{"number_of_shards":2,"number_of_replicas":3},"mappings":{"properties":{"title":{"type":"text","analyzer":"ik_max_word"},"publish":{"type":"keyword"},"date":{"type":"date","format":"yyyy-MM-dd"}}},"aliases":{"my_blog":{"is_write_index":false}}}
     * @return the name of the created index
     * @throws RuntimeException wrapping any client/transport failure
     */
    public String createIndex(String idx, String properties) {
        StringReader input = new StringReader(properties);
        try {
            // withJson lets callers supply arbitrary settings/mappings/aliases without builder code.
            CreateIndexResponse response = client.indices().create(i -> i.index(idx).withJson(input));
            log.info("创建索引:{}", response.acknowledged());
            return response.index();
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }

    /**
     * @param idx index name
     * @return true if the index exists
     */
    public boolean exist(String idx) {
        try {
            return client.indices().exists(t -> t.index(idx)).value();
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }

    /**
     * Deletes an index if it exists.
     *
     * @param idx index name
     * @return true if the delete was acknowledged; false if the index did not exist
     */
    public boolean deleteIndex(String idx) {
        try {
            if (!exist(idx)) {
                return false;
            }
            return client.indices().delete(i -> i.index(idx)).acknowledged();
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }

    /**
     * Fetches mapping/settings/defaults metadata for an index.
     *
     * @param idx index name
     * @return map with keys "mapping", "setting", "default"; empty map if the index is absent
     */
    public Map<String, Object> detail(String idx) {
        try {
            if (!exist(idx)) {
                return Collections.emptyMap();
            }
            GetIndexResponse response = client.indices().get(builder -> builder.index(ImmutableList.of(idx)));
            Map<String, Object> map = new HashMap<>();
            map.put("mapping", response.result().get(idx).mappings().properties());
            map.put("setting", response.result().get(idx).settings());
            map.put("default", response.result().get(idx).defaults());
            return map;
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }

    /**
     * Drops the index (if present), then recreates it from the given JSON definition.
     * Both callees already wrap checked exceptions in RuntimeException, so no extra
     * try/catch is needed here.
     */
    public String rebuild(String idx, String properties) {
        deleteIndex(idx);
        return createIndex(idx, properties);
    }

    /**
     * Indexes a single document, assigning a snowflake id plus create/update/data-time stamps.
     *
     * @param idx   target index
     * @param model document to index; its id/createTime/dataTime/updateTime fields are overwritten
     * @return the generated document id
     * @throws RuntimeException wrapping any client/transport failure
     */
    public <T extends BaseEsModel> Long create(String idx, T model) {
        model.setId(snowflake.nextId());
        LocalDateTime now = LocalDateTime.now();
        long epochMilli = now.atOffset(ZONE_OFFSET).toInstant().toEpochMilli();
        model.setCreateTime(epochMilli);
        model.setDataTime(Integer.valueOf(now.format(DATA_TIME_FORMAT)));
        // BUGFIX: was model.setUpdateTime(model.getUpdateTime()) — a no-op self-assignment.
        // A freshly created document's update time should equal its create time.
        model.setUpdateTime(epochMilli);
        IndexRequest<T> request = IndexRequest.of(b -> b
                .index(idx)
                .id(model.getId().toString())
                .document(model)
                .refresh(Refresh.True));  // Make it visible for search immediately
        try {
            IndexResponse response = client.index(request);
            Result result = response.result();
            log.info("result,{}", result);
            return model.getId();
        } catch (Exception ex) {
            log.error("保存数据异常,{}", idx, ex);
            // Include the index in the message — the original threw with an empty message.
            throw new RuntimeException("index document into " + idx + " failed", ex);
        }
    }

    /**
     * Bulk-indexes a list of documents, logging (not throwing) per-item failures.
     * Assumes each element already carries its id — unlike {@link #create}, nothing
     * is generated here.
     *
     * @param index target index
     * @param list  documents to index; null/empty is a no-op
     */
    public <T extends BaseEsModel> void batchCreate(String index, List<T> list) {
        if (list == null || list.isEmpty()) {
            return; // nothing to do; avoids sending an invalid empty bulk request
        }
        BulkRequest.Builder br = new BulkRequest.Builder();
        for (T t : list) {
            br.operations(op -> op.index(idx -> idx.index(index).id(t.getId() + "").document(t)));
        }
        try {
            BulkResponse result = client.bulk(br.build());
            if (result.errors()) {
                // Partial failures do not fail the whole bulk call — surface each one in the log.
                for (BulkResponseItem item : result.items()) {
                    if (item.error() != null) {
                        log.error("批量插入文档异常{},{}", item.get(), item.error().reason());
                    }
                }
            }
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }


    /**
     * Completion-suggester search on the fixed "address" field, registered under the
     * suggester key "title_suggest".
     *
     * NOTE(review): this method returns search *hits*, not the suggest options — with no
     * query in the request the response carries the default top hits, so the suggester
     * output is effectively ignored. See
     * {@link #completionSuggesterSearch(String, String, String)} for a variant that reads
     * the suggestions; confirm which behaviour is intended here.
     *
     * @param idx   index name
     * @param value user input text passed to the suggester
     * @return sources of the returned hits
     * @throws IOException on transport failure
     */
    public List<? extends BaseEsModel> completionSuggesterSearch(String idx, String value) throws IOException {
        CompletionSuggester suggester = CompletionSuggester.of(builder -> {
            builder.skipDuplicates(true).size(10);
            builder.field("address");
            builder.analyzer("ik_max_word");
            return builder;
        });

        Suggester of = Suggester.of(builder -> {
            builder.text(value).suggesters("title_suggest", b -> {
                        b.completion(suggester);
                        return b;
                    }
            );
            return builder;
        });

        SearchRequest searchRequest = SearchRequest.of(s -> s
                .index(idx).suggest(of)
        );
        SearchResponse<Accounts> search = client.search(searchRequest, Accounts.class);
        return search.hits().hits().stream().map(Hit::source).collect(Collectors.toList());
    }

    /**
     * Exact term match on {@code firstname}, wrapped in a bool/must clause.
     *
     * @param idx   index name
     * @param value exact (not analysed) value to match
     * @return sources of the matching hits
     * @throws IOException on transport failure
     */
    public List<? extends BaseEsModel> search(String idx, String value) throws IOException {
        SearchRequest searchRequest = SearchRequest.of(s -> s
                .index(idx)
                .query(q -> q.bool(b -> b.must(m -> m.term(t -> t.field("firstname").value(FieldValue.of(value)))))

                ));
        SearchResponse<Accounts> search = client.search(searchRequest, Accounts.class);
        return search.hits().hits().stream().map(Hit::source).collect(Collectors.toList());
    }


    /**
     * Completion suggester on an arbitrary field with typo tolerance (fuzzy options),
     * returning the source documents of the suggest options.
     *
     * @param idx   index name
     * @param field completion-typed field to suggest on
     * @param value user input / prefix text
     * @return source documents of all suggest options
     * @throws IOException on transport failure
     */
    public List<? extends BaseEsModel> completionSuggesterSearch(String idx, String field, String value) throws IOException {
        SearchResponse<Accounts> search = client.search(request -> request.
                index(idx).
                suggest(suggest -> suggest.suggesters("completion_suggest", builder -> builder.text(value)
                                .completion(v -> v.field(field)
                                        .size(10) // number of suggestions to return (default 5)
                                        .skipDuplicates(true)  // may slow the search: more options must be visited to fill the top N
                                        .analyzer("ik_max_word")
                                        // fuzzy options tolerate spelling mistakes in the input, improving recall
                                        .fuzzy(fb -> {
                                            fb.minLength(2);   // minimum input length before fuzzy matching kicks in
                                            fb.prefixLength(2); // leading chars assumed correct (e.g. "blaw" still matches "block"); trades flexibility for speed
                                            fb.fuzziness("2");  // maximum edit distance allowed
                                            return fb;
                                        })
                                )
                        )
                        // restrict _source to the needed fields to shrink the response
                ).source(sc -> {
                    sc.filter(sfb -> {
                        sfb.includes(ImmutableList.of("email", "address"));
                        return sfb;
                    });
                    return sc;
                }), Accounts.class);
        List<Suggestion<Accounts>> suggestions = search.suggest().get("completion_suggest");
        List<Accounts> list = new ArrayList<>();
        for (Suggestion<Accounts> suggestion : suggestions) {
            list.addAll(suggestion.completion().options().stream().map(CompletionSuggestOption::source).collect(Collectors.toList()));
        }
        return list;
    }


    /**
     * Term suggester: spell-checking / correction for text fields.
     *
     * 1. Applies to text-typed fields; given a misspelled input, suggests the correct term.
     * 2. Suggestions are produced (in Missing mode) only when the term is absent from the index.
     * 3. The input is first run through the analyzer, then each resulting token is
     *    suggested for independently — relationships between tokens are not considered.
     *    It targets terms, not documents; its purpose is correction.
     *
     * @param idx   index name
     * @param field text field to draw candidate terms from
     * @param value user input text
     * @return suggested terms across all analysed input tokens
     * @throws IOException on transport failure
     */
    public List<String> termSuggesterSearch(String idx, String field, String value) throws IOException {
        TermSuggester suggester = TermSuggester.of(builder -> {
            builder.size(10);
            // sort: Score = score, then doc frequency, then term order;
            //       Frequency = doc frequency, then score, then term order
            builder.sort(SuggestSort.Frequency);
            builder.field(field); // field to fetch candidate terms from, based on the input text
            builder.analyzer("ik_max_word") // analyzer for the input text (defaults to the field's analyzer)
                    .suggestMode(SuggestMode.Missing);
            // suggest modes (how candidates are offered):
            //   1. missing — (default) suggest only for terms that are absent from the index
            //   2. popular — suggest only terms more frequent than the searched term
            //   3. always  — always suggest
            builder.maxEdits(1); // max edit distance; must be 1 or 2, anything else is a bad request (default 2)
            // prefixLength: minimum prefix chars a candidate must share with the input (default 1;
            //   raising it speeds up checking — typos rarely occur at the start of a term; old name "prefix_len" is deprecated)
            // minWordLength: minimum input-term length to spell-check (default 4; old name "min_word_len" is deprecated)
            return builder;
        });

        Suggester of = Suggester.of(builder -> {
            builder.suggesters("term-suggestion", b -> {
                        b.term(suggester).text(value);
                        return b;
                    }
            );
            return builder;
        });

        SearchResponse<String> search = client.search(request -> request.
                index(idx).
                suggest(of).source(sc -> {
                    sc.filter(sfb -> {
                        // no source fields included — only the suggest section is read below
                        return sfb;
                    });
                    return sc;
                }), String.class);
        List<Suggestion<String>> suggestions = search.suggest().get("term-suggestion");
        List<String> list = new ArrayList<>();
        for (Suggestion<String> suggestion : suggestions) {
            list.addAll(suggestion.term().options().stream().map(TermSuggestOption::text).collect(Collectors.toList()));
        }
        return list;
    }

    /**
     * Phrase suggester: corrects and completes whole phrases on a text field.
     *
     * 1. Given one (possibly misspelled) word, suggests the corrected full phrase.
     * 2. Builds on the term suggester, adding logic to select entire corrected phrases
     *    weighted by an n-gram language model rather than scoring single tokens.
     *
     * @param idx   index name
     * @param field text field to suggest on
     * @param value user input phrase
     * @return highlighted corrected phrases
     * @throws IOException on transport failure
     */
    public List<String> phraseSuggesterSearch(String idx, String field, String value) throws IOException {

        PhraseSuggester suggester = PhraseSuggester.of(pb -> {
            // text must NOT be set here:
            // [es/search] failed: [parsing_exception] suggester[phrase] doesn't support field [text]
            pb.field(field);
            pb.maxErrors(2.0).directGenerator(ImmutableList.of(DirectGenerator.of(builder -> {
                builder.field(field).suggestMode(SuggestMode.Always);
                return builder;
            })));
            pb.highlight(builder -> {
                builder.postTag("</em>").preTag("<em>");
                return builder;
            });

            // suggestMode: missing, popular, always
            // maxErrors:   maximum number of misspelled terms tolerated
            // confidence:  limits the number of returned results (default 1)

            pb.smoothing(builder -> {
                builder.laplace(lsb -> {
                    lsb.alpha(0.7);
                    return lsb;
                });

                // Stupid backoff: if the higher-order n-gram count is 0, fall back to the
                // lower-order model discounted by a constant factor (default 0.4). This is
                // Elasticsearch's default smoothing model.
                // NOTE(review): smoothing is a single-variant container — this call likely
                // replaces the laplace model configured above; confirm which is intended.
                builder.stupidBackoff(sob -> {
                    sob.discount(1.0);
                    return sob;
                });

                return builder;
            });
            return pb;
        });

        Suggester of = Suggester.of(builder -> {
            builder.suggesters("phrase-suggestion", b -> {
                        b.phrase(suggester).text(value);
                        return b;
                    }
            );
            return builder;
        });

        SearchResponse<String> response = client.search(request -> request.
                index(idx).
                suggest(of).source(sc -> {
                    sc.filter(sfb -> {
                        // no source fields included — only the suggest section is read below
                        return sfb;
                    });
                    return sc;
                }), String.class);


        List<Suggestion<String>> suggestions = response.suggest().get("phrase-suggestion");
        List<String> list = new ArrayList<>();
        for (Suggestion<String> suggestion : suggestions) {
            list.addAll(suggestion.phrase().options().stream().map(PhraseSuggestOption::highlighted).collect(Collectors.toList()));
        }
        return list;
    }


    /**
     * Context-suggester experiment — the context filter is not wired up yet (see the
     * commented .contexts(...) call). The collected options are currently discarded;
     * TODO(review): return or log the result, or remove the method.
     *
     * @param idx   index name
     * @param field completion-typed field to suggest on
     * @param value user input / prefix text
     * @throws IOException on transport failure
     */
    public void contextSuggester(String idx, String field, String value) throws IOException {
        SearchResponse<Accounts> response = client.search(request -> request.
                index(idx).
                suggest(suggest -> suggest.suggesters("context_suggest", builder -> builder.text(value)
                                        .completion(v -> v.field(field)
                                                        .size(10) // number of suggestions to return (default 5)
                                                        .skipDuplicates(true)  // may slow the search: more options must be visited to fill the top N
                                                        .analyzer("ik_max_word")
                                                        // fuzzy options tolerate spelling mistakes in the input
                                                        .fuzzy(fb -> {
                                                            fb.minLength(2);   // minimum input length before fuzzy matching kicks in
                                                            fb.prefixLength(2); // leading chars assumed correct; trades flexibility for speed
                                                            fb.fuzziness("2");  // maximum edit distance allowed
                                                            return fb;
                                                        })
//                                        .contexts() — context filter still to be supplied
                                        )
                        )
                        // restrict _source to the needed fields to shrink the response
                ).source(sc -> {
                    sc.filter(sfb -> {
                        sfb.includes(ImmutableList.of("email", "address"));
                        return sfb;
                    });
                    return sc;
                }), Accounts.class);
        List<Suggestion<Accounts>> suggestions = response.suggest().get("context_suggest");
        List<Accounts> list = new ArrayList<>();
        for (Suggestion<Accounts> suggestion : suggestions) {
            list.addAll(suggestion.completion().options().stream().map(CompletionSuggestOption::source).collect(Collectors.toList()));
        }
    }

    /**
     * Paged, sorted query: hard-coded range filter 38 <= age <= 39, sorted by age descending.
     *
     * @param idx  index name
     * @param page offset of the first hit (Elasticsearch "from" — an absolute offset, not a page number)
     * @param size number of hits to return
     * @return sources of the matching hits
     * @throws IOException on transport failure
     */
    public List<Accounts> page(String idx, int page, int size) throws IOException {
        SearchResponse<Accounts> search = client.search(s -> s
                        .index(idx).from(page).size(size)
                        // range filter on age; lambdas replace the original anonymous Function classes
                        .query(q -> q.range(r -> r.field("age").lte(JsonData.of(39)).gte(JsonData.of(38))))
                        .sort(so -> so // sort clause
                                .field(f -> f // field sort rule
                                        .field("age")
                                        .order(SortOrder.Desc)
                                )
                        ),
                Accounts.class
        );
        return search.hits().hits().stream().map(Hit::source).collect(Collectors.toList());
    }


}