// NOTE(review): this ENTIRE file is commented out — the EsUtil class below is
// disabled dead code kept for reference. If uncommented it will NOT compile:
// main() uses a variable `client` whose declaration (EsClient.client()) is
// itself commented out inside the method, and EsClient / EsServerEntity /
// IndexConfig are project-local types. Prefer deleting this file or moving the
// snippet to documentation; if it is revived, restore the `client` declaration
// and consider a try-with-resources / transport close for the ES client.
//package com.yuntsg.paperana.utils.esutils;
//
//import co.elastic.clients.elasticsearch.ElasticsearchClient;
//import co.elastic.clients.elasticsearch._types.query_dsl.*;
//import co.elastic.clients.elasticsearch.core.SearchResponse;
//import co.elastic.clients.elasticsearch.core.search.Hit;
//import co.elastic.clients.elasticsearch.indices.CreateIndexResponse;
//import lombok.SneakyThrows;
//
//import java.util.ArrayList;
//import java.util.Arrays;
//import java.util.List;
//import java.util.Map;
//
///**
// * Ad-hoc Elasticsearch utility: demo search with highlighting (main) and
// * index creation with ik_max_word analysis (createIndex).
// *
// * @Author: gcr
// * @Date: 2023/9/19 16:54
// */
//public class EsUtil {
//    @SneakyThrows
//    public static void main(String[] args) {
//        // Create the index — mainly used for ad-hoc multi-text comparison
////        createIndex("text_str_prod_test");
////        ElasticsearchClient client = EsClient.client();
//        Integer fid = 2;
//        Integer uid = 2;
//        String str = "adadad";
//        // Build: bool query requiring exact file_id + table_id match plus a
//        // fuzzy-ish match on `str` (at least 45% of analyzed terms must match).
//        BoolQuery.Builder allQueryBuilder = new BoolQuery.Builder();
//        Query fidsQuery = TermQuery.of(t -> t.field("file_id").value(fid))._toQuery();
//        Query tidsQuery = TermQuery.of(t -> t.field("table_id").value(uid))._toQuery();
//        List<Query> ls = new ArrayList<>();
//        ls.add(fidsQuery);
//        ls.add(tidsQuery);
//        allQueryBuilder.must(ls);
//        Query query = MatchQuery.of(t -> t.field("str").query(str).minimumShouldMatch("45%"))._toQuery();
//        allQueryBuilder.must(query);
//
//        // NOTE(review): `client` is undefined here — its declaration two lines
//        // above the query build is commented out. Restore it before reviving.
//        SearchResponse<EsServerEntity> response = client.search(s -> s
//                        .index(IndexConfig.UNIT_INDEXTEST)
//                        .fields(Arrays.asList(
//                                FieldAndFormat.of(builder -> builder.field("file_id")),
//                                FieldAndFormat.of(builder -> builder.field("sentence")),
//                                FieldAndFormat.of(builder -> builder.field("database_type")),
//                                FieldAndFormat.of(builder -> builder.field("str")),
//                                FieldAndFormat.of(builder -> builder.field("sub_project")),
//                                FieldAndFormat.of(builder -> builder.field("table_id"))))
//                        .from(0)
//                        .size(5)
//                        .highlight(h -> h.preTags("<span style=\"color:red\">").postTags("</span>").fields("str", highlightFieldBuilder -> highlightFieldBuilder))
//                        .query(allQueryBuilder.build()._toQuery()),
//                EsServerEntity.class
//        );
//        // Copy each hit's highlight fragments (joined with spaces) and ES id
//        // back onto the source entity.
//        List<Hit<EsServerEntity>> hits = response.hits().hits();
//        for (Hit<EsServerEntity> hit : hits) {
//            EsServerEntity source = hit.source();
//            Map<String, List<String>> highlight = hit.highlight();
//            StringBuffer news = new StringBuffer();
//            highlight.forEach((k, v) -> {
//                for (String s : v) {
//                    news.append(s+" ");
//                }
//            });
//            source.setESid(hit.id());
//            source.setStrMatchRed(news.toString());
//        }
//
//
//
//    }
//
//
//    // Create a new ES index: 3 shards, 0 replicas; all fields mapped as text,
//    // with `str` analyzed by ik_max_word (Chinese word segmentation) plus a
//    // lowercase keyword sub-field for exact matching.
//    @SneakyThrows
//    public static void createIndex(String index) {
//        // UNIT_INDEXTE
//        ElasticsearchClient client = EsClient.client();
//        CreateIndexResponse createIndexResponse =
//                client.indices().create(c -> c.index(index)// index name
//                        /* this is the index name; a plain string is accepted */
//                        .settings(indexSettingsBuilder -> indexSettingsBuilder.numberOfReplicas("0").numberOfShards("3"))
//                        .mappings(o -> o
//                                // Define field names and types — for now, text + tokenization is all we need
//                                .properties("file_id", i -> i.text(p -> p))
//                                .properties("sentence", i -> i.text(p -> p))
//                                .properties("database_type", i -> i.text(p -> p))
//                                .properties("sub_project", i -> i.text(p -> p))
//                                .properties("table_id", i -> i.text(p -> p))
//                                .properties("str", i -> i.text(p -> p
//                                        .analyzer("ik_max_word")
//                                        .searchAnalyzer("ik_max_word")
//                                        .fields("keyword", r -> r.keyword(m -> m.normalizer("lowercase")))))
//                        ));
//    }
//}
//
//
//
//
