package com.elitel.common.elasticSearch;

import com.elitel.frame.main.entity.vo.DocumentESIndexVo;
import net.sf.json.JSONObject;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse;
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest;
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.action.bulk.BackoffPolicy;
import org.elasticsearch.action.bulk.BulkProcessor;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.Requests;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.DisMaxQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.reindex.BulkByScrollResponse;
import org.elasticsearch.index.reindex.DeleteByQueryAction;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
import org.elasticsearch.transport.client.PreBuiltTransportClient;

import java.io.IOException;
import java.lang.reflect.Field;
import java.net.InetAddress;
import java.util.*;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;

/**
 * elasticsearch工具类
 * created by guoyanfei on 2018/3/20
 */
/**
 * Elasticsearch utility client.
 * <p>
 * Wraps a {@link TransportClient} plus a shared {@link BulkProcessor} and exposes
 * index management, single/bulk document CRUD, and paged search (optionally with
 * keyword highlighting). Not safe against a failed constructor: if the connection
 * cannot be established, {@link #ESClient(String, Integer, String)} only logs and
 * leaves {@code client}/{@code bulkProcessor} null.
 * <p>
 * created by guoyanfei on 2018/3/20
 */
public class ESClient {
    /**
     * Transport client used for every request; remains null when construction failed.
     */
    private TransportClient client = null;

    /**
     * Shared bulk processor batching index/delete requests until a size, count,
     * or time threshold is reached (see constructor); remains null when
     * construction failed.
     */
    private BulkProcessor bulkProcessor = null;

    /**
     * Connects to a single Elasticsearch node and initializes the bulk processor.
     *
     * @param address     host name or IP of an Elasticsearch node
     * @param port        transport port of the node (typically 9300)
     * @param clustername cluster name the client must match
     */
    public ESClient(String address, Integer port, String clustername) {
        try {
            Settings settings = Settings.builder().put("cluster.name", clustername).build();
            // 1. initialize the transport client
            client = new PreBuiltTransportClient(settings)
                    .addTransportAddress(
                            new TransportAddress(InetAddress.getByName(address), port));
            // 2. initialize the bulk processor: flush at 10000 actions, 1 GB,
            //    or 1 hour, with up to 2 concurrent flushes and exponential
            //    backoff (100 ms base, 3 retries) on rejections
            bulkProcessor = BulkProcessor.builder(
                    client,
                    new BulkProcessor.Listener() {
                        @Override
                        public void beforeBulk(long executionId, BulkRequest bulkRequest) {
                            System.out.println("---尝试操作" + bulkRequest.numberOfActions() + "条数据---");
                        }

                        @Override
                        public void afterBulk(long executionId, BulkRequest bulkRequest, BulkResponse bulkResponse) {
                            System.out.println("---尝试操作" + bulkRequest.numberOfActions() + "条数据成功---");
                        }

                        @Override
                        public void afterBulk(long executionId, BulkRequest bulkRequest, Throwable throwable) {
                            System.out.println("---尝试操作" + bulkRequest.numberOfActions() + "条数据失败---");
                        }
                    })
                    .setBulkActions(10000)
                    .setBulkSize(new ByteSizeValue(1, ByteSizeUnit.GB))
                    .setFlushInterval(TimeValue.timeValueSeconds(3600))
                    .setConcurrentRequests(2)
                    .setBackoffPolicy(BackoffPolicy.exponentialBackoff(TimeValue.timeValueMillis(100), 3))
                    .build();

        } catch (Exception e) {
            e.printStackTrace();
            System.out.println("创建连接客户端失败!");
        }
    }

    /**
     * Closes the bulk processor (waiting up to 10 minutes for in-flight bulks)
     * and then the transport client. Safe to call even if construction failed.
     * <p>
     * created by guoyanfei on 2018/03/20
     */
    public void closeClient() {
        try {
            // close the bulk processor first so pending requests are flushed
            // (guard against a failed constructor leaving it null)
            if (null != bulkProcessor) {
                bulkProcessor.awaitClose(10, TimeUnit.MINUTES);
            }
            // then close the client itself
            if (null != client) {
                client.close();
            }
        } catch (InterruptedException e) {
            // restore the interrupt status so callers can observe it
            Thread.currentThread().interrupt();
            e.printStackTrace();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * show Immediately flushes the BulkProcessor's buffered requests.
     * created by guoyanfei on 2018/03/20
     */
    public void flushBulk() {
        bulkProcessor.flush();
    }

    /**
     * Creates an index and puts a type mapping whose text fields are analyzed
     * with the IK analyzer ({@code ik_max_word}).
     * <p>
     * NOTE(review): the mapping addresses fields of {@link DocumentESIndexVo} by
     * their position in {@code getDeclaredFields()}, whose order is NOT
     * guaranteed by the JVM — consider looking fields up by name instead.
     * TODO confirm the declaration order assumption still holds.
     *
     * @param indexName         name of the index to create
     * @param indexType         mapping type to create under the index
     * @param documentESIndexVo sample VO whose declared fields define the mapping
     */
    public void createIndexUseIK(String indexName, String indexType, DocumentESIndexVo documentESIndexVo) {
        ArrayList<String> fieldnames = new ArrayList<>();
        String analyzer = "ik_max_word";
        Class<?> cla = documentESIndexVo.getClass();
        Field[] fields = cla.getDeclaredFields();
        for (Field field : fields) {
            field.setAccessible(true);
            fieldnames.add(field.getName());
        }
        client.admin().indices().prepareCreate(indexName).execute().actionGet();

        try {
            XContentBuilder builder = XContentFactory.jsonBuilder()
                    .startObject()
                    .startObject(indexType)
                    .startObject("properties")
                    .startObject(fieldnames.get(2)).field("type", "text").field("analyzer", analyzer).endObject()//filename
                    .startObject(fieldnames.get(6)).field("type", "text").field("analyzer", analyzer).endObject()//modifytime
                    .startObject(fieldnames.get(1)).field("type", "text").field("analyzer", analyzer).endObject()//docSourceDescribe
                    .startObject(fieldnames.get(4)).field("type", "text").field("analyzer", analyzer).endObject()//filecontext
                    .startObject(fieldnames.get(5)).field("type", "long").endObject()//filesize
                    .startObject(fieldnames.get(3)).field("type", "text").field("analyzer", analyzer).endObject()//docpath
                    .startObject(fieldnames.get(0)).field("type", "long").endObject()//docSource
                    .endObject()
                    .endObject()
                    .endObject();

            PutMappingRequest mapping = Requests.putMappingRequest(indexName).type(indexType).source(builder);
            client.admin().indices().putMapping(mapping).get();
        } catch (InterruptedException e) {
            // restore the interrupt status so callers can observe it
            Thread.currentThread().interrupt();
            e.printStackTrace();
        } catch (IOException | ExecutionException e) {
            e.printStackTrace();
        }
    }

    /**
     * show Indexes one document (id auto-generated by Elasticsearch).
     * created by guoyanfei on 2018/03/20
     *
     * @param index index name
     * @param type  index type
     * @param json  document body as Map&lt;String,Object&gt;
     */
    public void insertData(String index, String type, Map<String, Object> json) throws Exception {
        client.prepareIndex(index, type).setSource(json).get();
    }

    /**
     * show Queues one document for bulk insertion; nothing is sent until a bulk
     * threshold is reached.<br/>
     * The caller must eventually invoke {@link #flushBulk()}.
     * created by guoyanfei on 2018/03/28
     *
     * @param index index name
     * @param type  index type
     * @param json  document body as Map&lt;String,Object&gt;
     */
    public void bulkInsertData(String index, String type, Map<String, Object> json) throws Exception {
        IndexRequest indexRequest = new IndexRequest(index, type)
                .source(json);
        bulkProcessor.add(indexRequest);
    }

    /**
     * Queues one full-document upsert (index with explicit id) for bulk processing.
     *
     * @param index index name
     * @param type  index type
     * @param id    document id to overwrite
     * @param json  replacement document body
     * @throws Exception on client errors
     */
    public void bulkUpdateData(String index, String type, String id, Map<String, Object> json) throws Exception {
        IndexRequest indexRequest = new IndexRequest(index, type, id)
                .source(json);
        bulkProcessor.add(indexRequest);
    }

    /**
     * show Partially updates one document by id.
     * created by guoyanfei on 2018/03/20
     *
     * @param index index name
     * @param type  index type
     * @param id    document id
     * @param json  fields to merge into the document
     */
    public void updateData(String index, String type, String id, Map<String, Object> json) throws Exception {
        client.prepareUpdate(index, type, id).setDoc(json).get();
    }

    /**
     * show Deletes one document by id.
     * created by guoyanfei on 2018/03/20
     *
     * @param index index name
     * @param type  index type
     * @param id    document id
     */
    public void deleteData(String index, String type, String id) throws Exception {
        client.prepareDelete(index, type, id).get();
    }

    /**
     * show Queues one delete for bulk processing; nothing is sent until a bulk
     * threshold is reached.<br/>
     * The caller must eventually invoke {@link #flushBulk()}.
     * created by guoyanfei on 2018/03/23
     *
     * @param index index name
     * @param type  index type
     * @param id    document id
     */
    public void bulkDeleteData(String index, String type, String id) throws Exception {
        DeleteRequest deleteRequest = new DeleteRequest(index, type, id);
        bulkProcessor.add(deleteRequest);
    }

    /**
     * show Asynchronously deletes every document matching all given
     * phrase conditions (delete-by-query).
     * created by guoyanfei on 2018/03/30
     * <p>
     * Method name misspelled ("delte") but kept for caller compatibility.
     *
     * @param index      index name
     * @param conditions conditions in the format {@code key:value,key:value};
     *                   all must match (bool/must of match_phrase queries)
     */
    public void delteByQuery(String index, String conditions) {
        BoolQueryBuilder qb = QueryBuilders.boolQuery();
        String[] con = conditions.split(",");
        for (String str : con) {
            String[] keyvalue = str.split(":");
            QueryBuilder queryBuilder = QueryBuilders.matchPhraseQuery(keyvalue[0], keyvalue[1]);
            qb.must(queryBuilder);
        }
        DeleteByQueryAction.INSTANCE.newRequestBuilder(client)
                .filter(qb)
                .source(index)
                .execute(new ActionListener<BulkByScrollResponse>() {
                    @Override
                    public void onResponse(BulkByScrollResponse response) {
                        System.out.println("共删除" + response.getDeleted() + "条记录!");
                    }

                    @Override
                    public void onFailure(Exception e) {
                        System.out.println("删除失败!");
                        // surface the cause instead of discarding it
                        e.printStackTrace();
                    }
                });
    }

    /**
     * show Checks whether an index exists.
     * created by guoyanfei on 2018/03/30
     *
     * @param index index name
     * @return true if the index exists, false otherwise
     */
    public boolean indexExists(String index) {
        IndicesExistsRequest request = new IndicesExistsRequest(index);
        IndicesExistsResponse response = client.admin().indices().exists(request).actionGet();
        return response.isExists();
    }

    /**
     * show Deletes an index by name. Best-effort: failures (e.g. the index does
     * not exist) are logged and swallowed.
     * created by guoyanfei on 2018/03/28
     *
     * @param index index name
     */
    public void deleteIndex(String index) {
        try {
            client.admin().indices().prepareDelete(index).execute().actionGet();
        } catch (Exception e) {
            // best-effort delete: log instead of silently discarding
            e.printStackTrace();
        }
    }

    /**
     * show Fetches one document by id.
     * create by guoyanfei on 2018/03/20
     *
     * @param index index name
     * @param type  index type
     * @param id    document id
     * @return the document source as a JSON string, or null if it does not exist
     */
    public String getDatabyId(String index, String type, String id) throws Exception {
        GetResponse getResponse = client.prepareGet(index, type, id).get();
        return getResponse.getSourceAsString();
    }

    /**
     * show Finds document ids by exact (non-analyzed phrase) match on all
     * conditions.</br>
     * created by guoyanfei on 2018/03/20
     *
     * @param index      index name
     * @param type       index type
     * @param pageNo     page number (1-based)
     * @param pageSize   page size
     * @param conditions conditions in the format {@code key:value|key:value};
     *                   all must match
     * @return ids of the matching documents on the requested page
     */
    public List<String> searchDocumentId(String index, String type,
                                         Integer pageNo, Integer pageSize,
                                         String conditions) throws Exception {
        List<String> idlist = new ArrayList<String>();
        // note: conditions are '|'-separated here (unlike delteByQuery's ',')
        String[] con = conditions.split("\\|");
        BoolQueryBuilder qb = QueryBuilders.boolQuery();
        for (String str : con) {
            String[] keyvalue = str.split(":");
            QueryBuilder queryBuilder = QueryBuilders.matchPhraseQuery(keyvalue[0], keyvalue[1]);
            qb.must(queryBuilder);
        }
        SearchResponse searchResponse = client.prepareSearch(index)
                .setTypes(type)
                .setQuery(qb)
                .setFrom((pageNo - 1) * pageSize).setSize(pageSize)
                .get();
        for (SearchHit hit : searchResponse.getHits().getHits()) {
            idlist.add(hit.getId());
        }
        return idlist;
    }

    /**
     * show Paged full-text search: the text must match at least one of the
     * given fields (dis_max over match queries).
     * created by guoyanfei on 2018/03/20
     *
     * @param index       index name
     * @param type        index type
     * @param pageNo      page number (1-based)
     * @param pageSize    page size
     * @param searchField fields to search, comma-separated
     * @param searchText  text to search for
     */
    public ESPageResponse<Map<String, Object>> searchDocument(String index, String type, Integer pageNo,
                                                              Integer pageSize, String searchField, String searchText) {
        List<Map<String, Object>> datamaps = new ArrayList<Map<String, Object>>();
        String[] fields = searchField.split(",");
        DisMaxQueryBuilder qb = QueryBuilders.disMaxQuery();
        for (String str : fields) {
            qb.add(QueryBuilders.matchQuery(str, searchText));
        }

        SearchResponse searchResponse = client.prepareSearch(index)
                .setTypes(type)
                .setQuery(qb)
                .setFrom((pageNo - 1) * pageSize).setSize(pageSize)
                .get();

        long count = 0;

        if (searchResponse.getHits().getHits().length != 0) {
            // total hit count across all pages
            count = searchResponse.getHits().getTotalHits();
            for (SearchHit hit : searchResponse.getHits().getHits()) {
                datamaps.add(hit.getSourceAsMap());
            }
        }

        ESPageResponse<Map<String, Object>> esPageResponse = new ESPageResponse<Map<String, Object>>();
        esPageResponse.setRows(datamaps);
        esPageResponse.setTotal(count);
        esPageResponse.setDescription("从" + searchResponse.getSuccessfulShards() + "个分片查询，耗时:" + searchResponse.getTook().toString());
        return esPageResponse;
    }

    /**
     * show Paged full-text search with keyword highlighting: matching terms in
     * the searched fields are wrapped in a styled {@code <em>} tag.
     * created by guoyanfei on 2018/03/29
     *
     * @param index       index name
     * @param type        index type
     * @param pageNo      page number (1-based)
     * @param pageSize    page size
     * @param searchField fields to search, comma-separated
     * @param searchText  text to search for
     * @param hightstyle  inline CSS for the highlight tag
     * @return one page of results plus total count and timing description
     */
    public ESPageResponse<Map<String, Object>> searchDocumentHighlight(String index, String type, Integer pageNo,
                                                                       Integer pageSize, String searchField, String searchText,
                                                                       String hightstyle) {
        List<Map<String, Object>> datamaps = new ArrayList<Map<String, Object>>();
        String[] fields = searchField.split(",");
        DisMaxQueryBuilder qb = QueryBuilders.disMaxQuery();
        for (String str : fields) {
            qb.add(QueryBuilders.matchQuery(str, searchText));
        }

        SearchRequestBuilder responsebuilder = client.prepareSearch(index)
                .setTypes(type).setFrom((pageNo - 1) * pageSize).setSize(pageSize)
                .setExplain(true)
                .setQuery(qb);

        // highlight every searched field with the requested inline style
        HighlightBuilder highlightBuilder = new HighlightBuilder();
        for (String strfield : fields) {
            highlightBuilder.field(strfield);
        }
        highlightBuilder.preTags("<em style='" + hightstyle + "'>");
        responsebuilder.highlighter(highlightBuilder);

        SearchResponse searchResponse = responsebuilder.execute().actionGet();

        long count = 0;

        if (searchResponse.getHits().getHits().length != 0) {
            // total hit count across all pages
            count = searchResponse.getHits().getTotalHits();
            for (SearchHit hit : searchResponse.getHits().getHits()) {
                Map<String, Object> dataMap = hit.getSourceAsMap();
                mergeHighlightFragments(hit, fields, dataMap);
                datamaps.add(dataMap);
            }
        }

        ESPageResponse<Map<String, Object>> esPageResponse = new ESPageResponse<Map<String, Object>>();
        esPageResponse.setRows(datamaps);
        esPageResponse.setTotal(count);
        esPageResponse.setDescription("从" + searchResponse.getSuccessfulShards() + "个分片查询，耗时:" + searchResponse.getTook().toString());
        return esPageResponse;
    }

    /**
     * show Paged full-text search restricted to selected source fields, with
     * optional keyword highlighting and an optional filename post-filter.
     * created by guoyanfei on 2018/04/24
     *
     * @param index        index name
     * @param type         index type
     * @param pageNo       page number (1-based)
     * @param pageSize     page size
     * @param searchField  fields to search, comma-separated
     * @param filename     when non-empty, only hits whose filename phrase-matches
     *                     one of these values are returned (post filter)
     * @param searchText   text to search for
     * @param hightstyle   inline CSS for the highlight tag
     * @param showFields   source fields to include in each hit
     * @param isFieldhight whether to apply keyword highlighting
     * @return one page of results plus total count and timing description
     */
    public ESPageResponse<Map<String, Object>> searchDocumentToField(String index, String type, Integer pageNo,
                                                                     Integer pageSize, String searchField, List<String> filename, String searchText,
                                                                     String hightstyle, String[] showFields, Boolean isFieldhight) {
        List<Map<String, Object>> datamaps = new ArrayList<Map<String, Object>>();
        String[] fields = searchField.split(",");
        DisMaxQueryBuilder qb = QueryBuilders.disMaxQuery();
        for (String str : fields) {
            qb.add(QueryBuilders.matchQuery(str, searchText));
        }
        // query, paging, and source-field projection
        SearchRequestBuilder responsebuilder = client.prepareSearch(index)
                .setTypes(type)
                .setFrom((pageNo - 1) * pageSize).setSize(pageSize)
                .setExplain(true)
                .setFetchSource(showFields, null)
                .setQuery(qb);
        DisMaxQueryBuilder filterqb = QueryBuilders.disMaxQuery();
        // optional post filter: keep only hits whose filename matches one of the given names
        if (!filename.isEmpty()) {
            for (String fn : filename) {
                BoolQueryBuilder fqb = QueryBuilders.boolQuery().should(QueryBuilders.matchPhraseQuery("filename", fn));
                filterqb.add(fqb);
            }
            responsebuilder.setPostFilter(filterqb);
        }

        if (isFieldhight) {
            // highlight every searched field with the requested inline style
            HighlightBuilder highlightBuilder = new HighlightBuilder();
            for (String strfield : fields) {
                highlightBuilder.field(strfield);
            }
            highlightBuilder.preTags("<em style='" + hightstyle + "'>");
            responsebuilder.highlighter(highlightBuilder);
        }

        SearchResponse sResponse = responsebuilder.execute().actionGet();

        long count = 0;

        if (sResponse.getHits().getHits().length != 0) {
            // total hit count across all pages
            count = sResponse.getHits().getTotalHits();
            for (SearchHit hit : sResponse.getHits().getHits()) {
                Map<String, Object> dataMap = hit.getSourceAsMap();
                if (isFieldhight) {
                    mergeHighlightFragments(hit, fields, dataMap);
                }
                datamaps.add(dataMap);
            }
        }

        // strip \t, \n, \r runs from filecontext before returning
        // NOTE(review): when a hit lacks "filecontext", String.valueOf(null)
        // stores the literal string "null" — presumably every hit has the
        // field; verify against the index mapping
        List<Map<String, Object>> searchValue = new ArrayList<Map<String, Object>>();
        for (Map<String, Object> map : datamaps) {
            String filecontext = String.valueOf(map.get("filecontext"));
            filecontext = filecontext.replaceAll("\\n+|\\t+|\\r+", " ");
            map.put("filecontext", filecontext);
            searchValue.add(map);
        }

        ESPageResponse<Map<String, Object>> esPageResponse = new ESPageResponse<Map<String, Object>>();
        esPageResponse.setRows(searchValue);
        esPageResponse.setTotal(count);
        esPageResponse.setDescription("从" + sResponse.getSuccessfulShards() + "个分片查询，耗时:" + sResponse.getTook().toString());
        return esPageResponse;
    }

    /**
     * Merges the highlighted fragments of each searched field back into the
     * hit's source map, replacing the plain value with its highlighted version.
     * Fields without highlight results are left untouched.
     */
    private static void mergeHighlightFragments(SearchHit hit, String[] fields, Map<String, Object> dataMap) {
        for (String fieldname : fields) {
            HighlightField hField = hit.getHighlightFields().get(fieldname);
            if (hField != null && hField.fragments() != null) {
                // concatenate all fragments with a StringBuilder instead of += in a loop
                StringBuilder highlighted = new StringBuilder();
                for (Text text : hField.fragments()) {
                    highlighted.append(text);
                }
                dataMap.put(fieldname, highlighted.toString());
            }
        }
    }
}
