package per.wxp.elasticsearchDemo.Service;


import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse;
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest;
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse;
import org.elasticsearch.action.bulk.BulkProcessor;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.get.MultiGetItemResponse;
import org.elasticsearch.action.get.MultiGetResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.*;
import org.elasticsearch.index.reindex.BulkByScrollResponse;
import org.elasticsearch.index.reindex.DeleteByQueryAction;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import per.wxp.elasticsearchDemo.entity.IndexInfo;

import javax.annotation.PostConstruct;
import java.io.IOException;
import java.io.StringReader;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.concurrent.ExecutionException;


@Service
public class ElasticSearchService {

    /** Spring-injected transport client; every index/type/document operation goes through it. */
    @Autowired
    private TransportClient client;

    /** Shared bulk processor, built once in {@link #init()} and reused by {@link #insertBulkByBulk(List)}. */
    private BulkProcessor bulkProcessor;

    /**
     * Configures the shared {@link BulkProcessor}: a batch is flushed after 100 actions,
     * 5 MB of payload, or 5 seconds — whichever comes first — with at most 2 concurrent
     * bulk requests in flight.
     */
    @PostConstruct
    public void init() {
        this.bulkProcessor = BulkProcessor.builder(client, new BulkProcessor.Listener() {
            @Override
            public void beforeBulk(long executionId, BulkRequest bulkRequest) {
                // no-op hook: invoked just before a batch is executed
            }

            @Override
            public void afterBulk(long executionId, BulkRequest bulkRequest, BulkResponse bulkResponse) {
                // no-op hook: invoked after a batch completes (inspect bulkResponse.hasFailures() here)
            }

            @Override
            public void afterBulk(long executionId, BulkRequest bulkRequest, Throwable throwable) {
                // no-op hook: invoked when the whole batch fails (e.g. connection loss)
            }
        })
                .setBulkActions(100)
                .setBulkSize(new ByteSizeValue(5, ByteSizeUnit.MB))
                .setFlushInterval(TimeValue.timeValueSeconds(5))
                .setConcurrentRequests(2)
                .build();
    }
/*
-------------------------- basic operations: index / type / mapping / document --------------------------
*/
    /**
     * Creates an index with default settings and prints the acknowledgement.
     *
     * @param indexName name of the index to create
     */
    public void createIndex(String indexName) {
        CreateIndexResponse indexResponse = client.admin().indices().prepareCreate(indexName).get();
        System.out.println("索引名字=" + indexResponse.index() + "---" + indexResponse.isShardsAcked());
    }

    /**
     * Creates an index together with a type and its field mapping.
     * <p>
     * NOTE(review): the {@code "string"} field type was deprecated in ES 5.x and removed
     * in 6.x (replaced by {@code "text"}/{@code "keyword"}); kept as-is to preserve
     * behavior — confirm against the target cluster version.
     *
     * @param indexName index to create
     * @param typeName  mapping type registered under the index
     * @throws IOException if building the mapping JSON fails
     */
    public void createIndexWithType(String indexName, String typeName) throws IOException {
        XContentBuilder mapping = XContentFactory.jsonBuilder()
                .startObject()
                .startObject("properties") // custom field definitions
                .startObject("author")
                .field("type", "string") // field data type
                .endObject()
                .startObject("title")
                .field("type", "string")
                .endObject()
                .startObject("content")
                .field("type", "string")
                .endObject()
                .startObject("price")
                .field("type", "string")
                .endObject()
                .startObject("view")
                .field("type", "string")
                .endObject()
                .startObject("tag")
                .field("type", "string")
                .endObject()
                .startObject("date")
                .field("type", "date") // date-typed field
                .field("format", "yyyy-MM-dd HH:mm:ss") // accepted date format
                .endObject()
                .endObject()
                .endObject();
        client.admin().indices().prepareCreate(indexName)
                .addMapping(typeName, mapping)
                .get();
    }

    /**
     * Adds a new type (with mapping) to an existing index via the PUT mapping API.
     * <p>
     * NOTE(review): same {@code "string"} type caveat as {@link #createIndexWithType}.
     *
     * @param indexName existing index
     * @param typeName  type to add
     * @throws IOException if building the mapping JSON fails
     */
    public void createTypeWithMapping(String indexName, String typeName) throws IOException {
        XContentBuilder mapping = XContentFactory.jsonBuilder()
                .startObject()
                .startObject("properties") // custom field definitions
                .startObject("author")
                .field("type", "string") // field data type
                .endObject()
                .startObject("title")
                .field("type", "string")
                .endObject()
                .startObject("content")
                .field("type", "string")
                .endObject()
                .startObject("view")
                .field("type", "string")
                .endObject()
                .startObject("tag")
                .field("type", "string")
                .endObject()
                .startObject("date")
                .field("type", "date") // date-typed field
                .field("format", "yyyy-MM-dd HH:mm:ss") // accepted date format
                .endObject()
                .endObject()
                .endObject();

        client.admin().indices().preparePutMapping(indexName)
                .setType(typeName)
                .setSource(mapping)
                .get();
    }

    /**
     * Checks whether an index exists.
     *
     * @param indexName index to test
     * @return {@code true} when the index exists
     */
    public boolean inExistsRequest(String indexName) {
        IndicesExistsResponse inExistsResponse = client
                .admin()
                .indices()
                .exists(new IndicesExistsRequest(indexName))
                .actionGet();
        System.out.println("是否存在:" + inExistsResponse.isExists());
        return inExistsResponse.isExists();
    }

    /**
     * Deletes an index if it exists.
     *
     * @param indexName index to delete
     * @return {@code true} when the delete was acknowledged; {@code false} when the
     *         index did not exist
     */
    public boolean deleteIndex(String indexName) {
        IndicesExistsResponse inExistsResponse = client
                .admin()
                .indices()
                .exists(new IndicesExistsRequest(indexName))
                .actionGet();
        if (!inExistsResponse.isExists()) {
            // FIX: the original fell through and dereferenced a null DeleteIndexResponse
            // here, throwing an NPE whenever the index was absent.
            return false;
        }
        DeleteIndexResponse dResponse = client.admin().indices().prepareDelete(indexName).execute().actionGet();
        System.out.println("是否删除成功:" + dResponse.isAcknowledged());
        return dResponse.isAcknowledged();
    }


/*
------------------------insert Document-------------------------------------------------------
*/

    /**
     * Indexes a single document under the given index/type/id and prints the result.
     *
     * @param indexName target index
     * @param typeName  target type
     * @param id        document id
     * @param source    document body
     * @throws IOException declared for caller compatibility
     */
    public void insertDocument(String indexName, String typeName, String id, XContentBuilder source) throws IOException {
        IndexResponse response = client.prepareIndex(indexName, typeName, id).setSource(source).get();
        System.out.println(response.status());
        System.out.println(response.getId());
    }

/*
------------------------update Document-------------------------------------------------------
*/
    /**
     * Applies a partial-document update to an existing document.
     *
     * @param indexName target index
     * @param typeName  target type
     * @param id        document id (must already exist)
     * @param source    fields to merge into the document
     */
    public void updateDocumentById(String indexName, String typeName, String id, XContentBuilder source) {
        UpdateResponse response = client
                .prepareUpdate(indexName, typeName, id)
                .setDoc(source)
                .get();
    }

    /**
     * Update-or-insert: updates the document when it exists, otherwise indexes
     * {@code source} as a new document.
     *
     * @param indexName target index
     * @param typeName  target type
     * @param id        document id
     * @param source    document body / partial update
     * @throws ExecutionException   if the update fails on the cluster side
     * @throws InterruptedException if the calling thread is interrupted while waiting
     * @throws IOException          declared for caller compatibility
     */
    public void upsertDocument(String indexName, String typeName, String id, XContentBuilder source) throws IOException, InterruptedException, ExecutionException {
        IndexRequest indexRequest = new IndexRequest(indexName, typeName, id)
                .source(source);
        UpdateRequest updateRequest = new UpdateRequest(indexName, typeName, id)
                .doc(source)
                .upsert(indexRequest); // indexRequest is used when the document is missing
        client.update(updateRequest).get();
    }

/*
----------------------------del Document-------------------------------------------------------
*/
    /**
     * Deletes a document by id and prints the response status.
     *
     * @param info carries index name, type name, and document id
     */
    public void deleteById(IndexInfo info) {
        DeleteResponse response = client.prepareDelete(info.getIndexName(), info.getTypeName(), info.getId())
                .get();
        System.out.println(response.toString());
        System.out.println("状态" + response.status());
    }

    /**
     * Delete-by-query demo: removes every document in the "persons" index whose
     * {@code gender} field matches "male", then prints the status and count.
     */
    public void deleteByQuery() {
        BulkByScrollResponse response =
                DeleteByQueryAction.INSTANCE.newRequestBuilder(client)
                        .filter(QueryBuilders.matchQuery("gender", "male"))
                        .source("persons") // index name
                        .get();
        // number of deleted documents (was computed but never reported before)
        long deleted = response.getDeleted();

        System.out.println(response.getStatus());
        System.out.println("deleted=" + deleted);
    }

/*
----------------------------view Document-------------------------------------------------------
*/

    /**
     * Fetches a document's source by id.
     *
     * @param info carries index name, type name, and document id
     * @return the document source, or {@code null} when the id is blank or the
     *         document does not exist
     */
    public Map<String, Object> getDocumentById(IndexInfo info) {
        // FIX: also guard against a null id — the original NPE'd on info.getId().isEmpty().
        if (info.getId() == null || info.getId().isEmpty()) {
            return null;
        }
        GetResponse response = client.prepareGet(info.getIndexName(), info.getTypeName(), info.getId())
                .setOperationThreaded(false) // execute on the calling thread
                .get();
        if (!response.isExists()) {
            System.out.println("查询为空");
            return null;
        }
        return response.getSource();
    }

    /**
     * Prints every document in an index (match-all query).
     *
     * @param indexName index to scan
     */
    public void getAllDocumentByIndex(String indexName) {
        QueryBuilder queryBuilders = QueryBuilders.matchAllQuery();
        SearchResponse response = client.prepareSearch(indexName).setQuery(queryBuilders).get();

        printlnResource(response);
    }

    /**
     * Prints every document of one type within an index.
     *
     * @param indexName index to scan
     * @param type      type to restrict to
     */
    public void getAllDocumentByType(String indexName, String type) {
        SearchResponse response = client.prepareSearch(indexName).setTypes(type).get();
        printlnResource(response);
    }

    /**
     * Bool-query demo: documents must match both a fixed title and author.
     *
     * @param indexName index to search
     * @param type      type to restrict to
     */
    public void queryDocument(String indexName, String type) {
        BoolQueryBuilder query = QueryBuilders.boolQuery();
        query.must(QueryBuilders.matchQuery("title", "spark机器学习"));
        query.must(QueryBuilders.matchQuery("author", "Nick Pentreath"));
        SearchResponse response = client.prepareSearch(indexName).setTypes(type).setQuery(query).get();
        printlnResource(response);
    }

    /**
     * Runs an arbitrary query against an index/type and prints the hits.
     * NOTE(review): method name misspells "Function" and is UpperCamelCase;
     * kept unchanged because it is public API.
     *
     * @param indexName    index to search
     * @param type         type to restrict to
     * @param queryBuilder query to execute
     */
    public void QueryBuilderFuction(String indexName, String type, QueryBuilder queryBuilder) {
        SearchResponse response = client.prepareSearch(indexName).setTypes(type).setQuery(queryBuilder).get();
        System.out.println(response.getHits().totalHits);
        printlnResource(response);
    }

    /**
     * Span-first query demo: matches documents where the term "spark" appears in
     * the "title" field within the first 10000 positions.
     *
     * @param indexName index to search
     */
    public void spanFirstQuery(String indexName) {
        QueryBuilder queryBuilder = QueryBuilders.spanFirstQuery(
                QueryBuilders.spanTermQuery("title", "spark"), // inner span query
                10000                                          // max end position of the match
        );
        SearchResponse response = client.prepareSearch(indexName).setQuery(queryBuilder).get();
        System.out.println(response.getHits().totalHits);
        printlnResource(response);
    }

    /**
     * Scroll-search demo against the hard-coded "twitter" index: DFS-then-fetch,
     * 60 s scroll keep-alive, page size 100; prints the first page of hits.
     *
     * @param queryBuilder query to execute
     */
    private void searchFunction(QueryBuilder queryBuilder) {
        SearchResponse response = client.prepareSearch("twitter")
                .setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
                .setScroll(new TimeValue(60000))
                .setQuery(queryBuilder)
                .setSize(100).execute().actionGet();
        printlnResource(response);
    }
   /**
     * --------------------------- analysis / tokenizers
     */

    /**
     * Runs the cluster's analyze API over a sample sentence and prints each token.
     * NOTE(review): "standard" is registered via addTokenFilter although it is usually a
     * tokenizer/analyzer name — confirm against the cluster's analysis modules; kept
     * as-is to preserve behavior. Method name kept despite clashing with the request
     * class, since it is public API.
     * <a href='https://www.elastic.co/guide/cn/elasticsearch/guide/current/standard-tokenizer.html'>
     *
     * @throws Exception if the analyze call fails
     */
    public void AnalyzeRequest() throws Exception {
        AnalyzeRequest analyzeRequest = new AnalyzeRequest();
        analyzeRequest.text("My œsophagus caused a débâcle");
        /*
         * whitespace: splits on whitespace only (spaces, tabs, newlines)
         * letter: splits on any non-letter character
         * standard: Unicode text segmentation algorithm
         */
        analyzeRequest.addTokenFilter("standard");
        analyzeRequest.addCharFilter("asciifolding");
        ActionFuture<AnalyzeResponse> analyzeResponseActionFuture = client.admin().indices().analyze(analyzeRequest);
        List<AnalyzeResponse.AnalyzeToken> analyzeTokens = analyzeResponseActionFuture.actionGet().getTokens();
        for (AnalyzeResponse.AnalyzeToken analyzeToken : analyzeTokens) {
            System.out.println(analyzeToken.getTerm());
        }
    }

    /**
     * ------------------------------- bulk operations --------------------------------
     */
    /**
     * Sends all index requests in one synchronous bulk call and prints whether
     * any item failed.
     *
     * @param list index requests to execute
     * @throws IOException declared for caller compatibility
     */
    public void insertBulk(List<IndexRequest> list) throws IOException {
        BulkRequestBuilder bulk = client.prepareBulk();
        for (IndexRequest request : list) { // for-each instead of index loop
            bulk.add(request);
        }
        BulkResponse bulkResponse = bulk.get();
        System.out.println(bulkResponse.hasFailures());
    }

    /**
     * Feeds index requests into the shared {@link BulkProcessor}; flushing happens
     * asynchronously per the thresholds configured in {@link #init()}.
     *
     * @param list index requests to enqueue
     * @throws IOException declared for caller compatibility
     */
    public void insertBulkByBulk(List<IndexRequest> list) throws IOException {
        for (IndexRequest request : list) {
            this.bulkProcessor.add(request);
        }
    }

    /**
     * Prints every hit of a search response: the raw hit, then each key/value
     * pair of its source document. (Renamed from the misspelled printlnResouce;
     * private, so no external callers are affected.)
     */
    private void printlnResource(SearchResponse response) {
        System.out.println("total=" + response.getHits().totalHits);
        for (SearchHit searchHit : response.getHits()) {
            System.out.println(searchHit.toString());
            Map<String, Object> source = searchHit.getSourceAsMap();
            for (Map.Entry<String, Object> entry : source.entrySet()) {
                System.out.println("Key = " + entry.getKey() + ", Value = " + entry.getValue());
            }
        }
    }

}