package com.example.testspringes.service;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.example.testspringes.modul.GeneticTestingPackage;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequestBuilder;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
import org.elasticsearch.action.get.GetRequestBuilder;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.rest.RestStatus;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.env.Environment;
import org.springframework.data.elasticsearch.core.ElasticsearchTemplate;
import org.springframework.stereotype.Service;
import org.springframework.web.client.RestTemplate;

import javax.annotation.Resource;
import java.lang.reflect.Field;
import java.util.*;

/**
 * @Description: Elasticsearch access service for GeneticTestingPackage documents:
 *               indexes/fetches package documents and builds multi-field match
 *               queries using ik_max_word / ik_smart / standard analysis.
 * @ProjectName: testspringes
 * @Package: com.example.testspringes.service
 * @Author: FX
 * @CreateDate: 2018/11/3 10:16
 * @Version: 1.0
 * Copyright: Copyright (c) 2018
 */
@Service
public class EsService {

    /** "ik_smart" analyzer: coarsest-grained IK word segmentation. */
    public static final String IK_SMART = "ik_smart";

    /** Mapping type under which package documents are stored. */
    public static final String type = "package";

    /** Index that holds the package documents. */
    public static final String index = "testes";

    // NOTE(review): hard-coded internal endpoint with an embedded query token —
    // this should be externalized to configuration instead of committed here.
    public static final String url = "http://172.30.10.149:8541/v1.0/genetic/testing/packages?page=1&size=15&platform=2&geneMedicine=ca4419625a9a41ae922771abd5f00642";

    /** "standard" analyzer: splits CJK text into single characters. */
    private final static String STANDARD = "standard";

    /** "ik_max_word" analyzer: finest-grained IK word segmentation. */
    private final static String IK_MAX_WORD = "ik_max_word";

    @Resource
    private Client client;
    @Autowired
    private ElasticsearchTemplate elasticsearchTemplate;
    @Resource
    RestTemplate restTemplate;

    /**
     * Indexes the given package, using the entity's own id as the document id.
     *
     * @param geneticTestingPackage entity to index; must have a non-null id
     * @return the REST status reported by the index response
     */
    public RestStatus add(GeneticTestingPackage geneticTestingPackage) {
        // Bug fix: the original serialized the entity to JSON but discarded the
        // result and passed the raw POJO to setSource(Object...), which expects
        // alternating field/value pairs and throws at runtime for a single
        // argument. Send the JSON string with an explicit content type instead.
        String json = JSONObject.toJSONString(geneticTestingPackage);
        IndexResponse indexResponse = client
                .prepareIndex(index, type, geneticTestingPackage.getId())
                .setSource(json, XContentType.JSON)
                .execute()
                .actionGet();
        return indexResponse.status();
    }

    /**
     * Fetches the document with id "1" (kept for backward compatibility).
     *
     * @return the document source as a map, or null when the document is absent
     */
    public Map<String, Object> get() {
        return get("1");
    }

    /**
     * Fetches a document by id from the package index.
     *
     * @param id Elasticsearch document id
     * @return the document source as a map, or null when the document is absent
     */
    public Map<String, Object> get(String id) {
        return client.prepareGet(index, type, id).get().getSourceAsMap();
    }

    /**
     * Adds a match-phrase "should" clause for every declared field of
     * GeneticTestingPackage, turning the bool query into an any-field match.
     *
     * @param boolQueryBuilder query to extend (mutated in place)
     * @param condition        phrase to match against each field
     */
    public void queryBuilderDto(BoolQueryBuilder boolQueryBuilder, String condition) {
        for (Field field : GeneticTestingPackage.class.getDeclaredFields()) {
            boolQueryBuilder.should(QueryBuilders.matchPhraseQuery(field.getName(), condition));
        }
    }

    /**
     * Analyzes the condition with ik_max_word, ik_smart and standard, and
     * returns the union of all distinct terms produced by the three analyzers.
     *
     * @param condition raw search text
     * @return de-duplicated set of analyzer terms
     */
    private Set<String> mergeIkMaxWordAndIkSmartAndStandard(String condition) {
        Set<String> terms = new HashSet<>();
        // Set.add already de-duplicates; no contains() pre-check needed.
        for (String tokenizer : Arrays.asList(IK_MAX_WORD, IK_SMART, STANDARD)) {
            for (AnalyzeResponse.AnalyzeToken token : analyze(condition, tokenizer)) {
                terms.add(token.getTerm());
            }
        }
        return terms;
    }

    /**
     * Runs the analyze API against the package index with the given tokenizer.
     * Consolidates the three near-identical splitConditionByXxx helpers that
     * differed only in the tokenizer name.
     *
     * @param condition text to analyze
     * @param tokenizer analyzer/tokenizer name (e.g. "ik_smart")
     * @return tokens produced by the analyzer
     */
    private List<AnalyzeResponse.AnalyzeToken> analyze(String condition, String tokenizer) {
        return new AnalyzeRequestBuilder(client, AnalyzeAction.INSTANCE, index, condition)
                .setTokenizer(tokenizer)
                .execute()
                .actionGet()
                .getTokens();
    }
}


