package net.app.zoneland.search.controller;

import com.google.common.collect.Lists;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import net.app.zoneland.common.core.vo.Result;
import net.app.zoneland.search.domain.NovelEntity;
import net.app.zoneland.search.service.EsNovalService;
import org.apache.commons.lang.StringUtils;
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.bucket.terms.ParsedLongTerms;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.elasticsearch.core.ElasticsearchRestTemplate;
import org.springframework.data.elasticsearch.core.SearchHit;
import org.springframework.data.elasticsearch.core.SearchHits;
import org.springframework.data.elasticsearch.core.query.NativeSearchQuery;
import org.springframework.data.elasticsearch.core.query.NativeSearchQueryBuilder;
import org.springframework.data.web.PageableDefault;
import org.springframework.web.bind.annotation.*;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.TimeUnit;
import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery;

/**
 * @Description
 * @Author LENOVO
 * @CreateTime 2021/2/14 9:49
 * @Version: 1.0
 */
@Api(value = "小说数据操作接口", tags = {"小说数据操作接口"})
@RequestMapping("/noval")
@RestController
public class NovalController extends BaseController{

    @Autowired
    private ElasticsearchRestTemplate elasticsearchTemplate;

    @Autowired
    private EsNovalService esNovalService;

    @Autowired
    private RestHighLevelClient restHighLevelClient;

    /**
     * Bulk-saves novel documents into the given index.
     *
     * @param indexName name of the target index; must already exist (checked via {@code indexExists} from BaseController)
     * @param novals    list of novel entities to persist; must be non-null and non-empty
     * @return the saved entities on success, otherwise an error result
     */
    @ApiOperation(value = "批量保存小说数据接口", notes = "批量保存小说数据接口")
    @ApiImplicitParams({
            @ApiImplicitParam(name = "indexName", value = "索引库名称", required = true),
            @ApiImplicitParam(name = "novals",  value = "小说数据列表", dataType = "List", required = true)
    })
    @PostMapping("save")
    public Result saveNavol(String indexName, List<NovelEntity> novals){
        // 1. Verify the index exists before writing.
        if (indexExists(indexName)){
            // 2. Push the entity mapping explicitly: the index would be auto-created with a
            //    default mapping otherwise, which would silently disable the custom analyzer.
            elasticsearchTemplate.putMapping(NovelEntity.class);
            if(novals!=null && !novals.isEmpty()){
                // 3. Persist the batch.
                Iterable<NovelEntity> noval = esNovalService.saveAll(novals);
                return Result.success(noval);
            }
            return Result.error("参数不可为空");
        }
       return Result.error("索引不存在");
    }

    /**
     * Looks up a single novel by its document id.
     *
     * @param id document id, must be non-blank
     * @return the matching entity, or an error result for a blank id
     */
    @ApiOperation(value = "根据小说ID查询数据接口", notes = "根据小说ID查询数据接口")
    @ApiImplicitParam(name = "id", value = "小说ID",dataType="string", required = true)
    @GetMapping("/findById/{id}")
    public Result findById(@PathVariable("id") String id){
        // isNotBlank also rejects null/whitespace, unlike the previous !"".equals(id) check.
        if(StringUtils.isNotBlank(id)){
           return Result.success(esNovalService.findById(id));
        }
        return Result.error("id参数不可为空");
    }

    /**
     * Paged fuzzy search: the keyword is matched (analyzed, OR-combined) against the
     * author, describe and title fields, and matched fragments are returned highlighted.
     *
     * @param page     zero-based page index (passed straight to {@link PageRequest#of})
     * @param pageSize records per page
     * @param keyword  search term applied to author/describe/title
     * @return a {@link Page} of entities with highlight fragments substituted into the hit fields
     */
    @ApiOperation(value = "分页查询小说数据接口", notes = "分页查询小说数据接口")
    @ApiImplicitParams({
            @ApiImplicitParam(name = "page", value = "当前页",dataType = "Integer", required = true),
            @ApiImplicitParam(name = "pageSize",  value = "每页显示记录数", dataType = "Integer", required = true),
            @ApiImplicitParam(name = "novel",  value = "查询数据实体类", dataType = "NovelEntity", required = true)
    })
    @GetMapping("/findByPage")
    public Result findByPage(Integer page, Integer pageSize, String keyword){

        // Query: any of the three fields may match (bool "should" == OR).
        BoolQueryBuilder defaultQueryBuilder = QueryBuilders.boolQuery()
                .should(QueryBuilders.matchQuery("author",keyword))
                .should(QueryBuilders.matchQuery("describe",keyword))
                .should(QueryBuilders.matchQuery("title",keyword));

        // Paging.
        PageRequest pageRequest = PageRequest.of(page,pageSize);

        // Highlighting for the same three fields (helper inherited from BaseController).
        HighlightBuilder highlightBuilder = getHighlightBuilder("author", "describe", "title");

        // Sorting intentionally disabled; re-enable by adding a FieldSortBuilder via withSort(...).

        // Assemble the native query.
        NativeSearchQuery searchQuery = new NativeSearchQueryBuilder()
                .withQuery(defaultQueryBuilder)
                .withHighlightBuilder(highlightBuilder)
                .withPageable(pageRequest).build();
        SearchHits<NovelEntity> search = elasticsearchTemplate.search(searchQuery, NovelEntity.class);
        List<SearchHit<NovelEntity>> searchHits = search.getSearchHits();

        // Copy each hit and overwrite the matched fields with their highlighted fragments.
        List<NovelEntity> userVoList = Lists.newArrayList();
        for (SearchHit<NovelEntity> searchHit : searchHits) {
            NovelEntity content = searchHit.getContent();
            NovelEntity noval = new NovelEntity();
            BeanUtils.copyProperties(content,noval);
            Map<String, List<String>> highlightFields = searchHit.getHighlightFields();
            for (String highlightField : highlightFields.keySet()) {
                // Only the first fragment of each field is used.
                if (StringUtils.equals(highlightField,"author")){
                    noval.setAuthor(highlightFields.get(highlightField).get(0));
                }else if(StringUtils.equals(highlightField,"describe")){
                    noval.setDescribe(highlightFields.get(highlightField).get(0));
                }else if(StringUtils.equals(highlightField,"title")){
                    noval.setTitle(highlightFields.get(highlightField).get(0));
                }
            }
            userVoList.add(noval);
        }

        // Wrap the mapped hits back into a Spring Data page with the real total count.
        Page<NovelEntity> dataPage = new PageImpl<>(userVoList, pageRequest, search.getTotalHits());
        return Result.success(dataPage);
    }

    /**
     * Paged search over title/describe with highlight markup injected into the results.
     * The two fields are OR-combined: a hit only needs to match one of them.
     *
     * @param page     zero-based page index
     * @param pageSize records per page
     * @param value    search term applied to title and describe
     * @return the matching entities with highlighted fragments wrapped in red span tags
     */
    @ApiOperation(value = "分页查询小说数据高亮显示接口", notes = "分页查询小说数据高亮显示接口")
    @ApiImplicitParams({
            @ApiImplicitParam(name = "page", value = "当前页",dataType = "Integer", required = true),
            @ApiImplicitParam(name = "pageSize",  value = "每页显示记录数", dataType = "Integer", required = true),
            @ApiImplicitParam(name = "value",  value = "小说查询条件", dataType = "string", required = true)
    })
    @GetMapping("getHightByNoval")
    public Result getHightByNoval(Integer page,Integer pageSize,String value){
        // One value queried against multiple fields; bool "should" means union semantics.
        BoolQueryBuilder boolQueryBuilder= QueryBuilders.boolQuery()
                .should(QueryBuilders.matchQuery("title",value))
                .should(QueryBuilders.matchQuery("describe",value));
        // Build the highlight query: fragments are wrapped in an inline red span.
        NativeSearchQuery searchQuery = new NativeSearchQueryBuilder()
                .withQuery(boolQueryBuilder)
                .withHighlightFields(
                        new HighlightBuilder.Field("title")
                        ,new HighlightBuilder.Field("describe"))
                .withHighlightBuilder(new HighlightBuilder().preTags("<span style='color:red'>").postTags("</span>"))
                .withPageable(PageRequest.of(page,pageSize))
                .build();
        // Execute the search.
        SearchHits<NovelEntity> search = elasticsearchTemplate.search(searchQuery, NovelEntity.class);
        List<SearchHit<NovelEntity>> searchHits = search.getSearchHits();

        // Result list returned to the caller.
        List<NovelEntity> users = new ArrayList<NovelEntity>();
        for(SearchHit<NovelEntity> searchHit:searchHits){
            Map<String, List<String>> highlightFields = searchHit.getHighlightFields();
            // Replace each field with its first highlighted fragment when one exists;
            // fall back to the stored value for fields the query did not match.
            searchHit.getContent().setTitle(highlightFields.get("title")==null ? searchHit.getContent().getTitle():highlightFields.get("title").get(0));
            searchHit.getContent().setDescribe(highlightFields.get("describe")==null ? searchHit.getContent().getDescribe():highlightFields.get("describe").get(0));
            users.add(searchHit.getContent());
        }
        return Result.success(users);
    }

    /**
     * Partially updates a novel document, replacing only its title.
     *
     * @param id    document id to update
     * @param index index holding the document
     * @param title new title value
     * @return success when ES reports UPDATED; error on I/O failure
     */
    @ApiOperation(value = "更新小说数据接口", notes = "更新小说数据接口")
    @ApiImplicitParams({
            @ApiImplicitParam(name = "id", value = "小说ID",dataType = "string", required = true),
            @ApiImplicitParam(name = "index",  value = "索引库名", dataType = "string", required = true),
            @ApiImplicitParam(name = "title",  value = "小说查询条件", dataType = "string", required = true)
    })
    @PostMapping(value = "/update/data")
    public Result updateNoval(String id, String index, String title) {
        UpdateRequest updateRequest = new UpdateRequest(index, id);
        Map<String, Object> map = new HashMap<>();
        map.put("title", title);
        // Partial-document update: only the fields in the map are touched.
        updateRequest.doc(map);
        try {
            UpdateResponse updateResponse = restHighLevelClient.update(updateRequest, RequestOptions.DEFAULT);
            if (updateResponse.getResult() == DocWriteResponse.Result.UPDATED) {
                return Result.success("更新成功");
            } else {
                // NOOP/CREATED etc. — the document was not updated as requested.
                return Result.success("更新失败");
            }
        } catch (IOException e) {
            e.printStackTrace();
            // Bug fix: an exception must be reported as an error, not success
            // (consistent with the delete endpoint's exception handling).
            return Result.error("更新异常");
        }
    }


    /**
     * Deletes a novel document by id from the given index.
     *
     * @param id        document id to delete
     * @param indexName index holding the document; must exist
     * @return success/failure message, or an error when the index is missing or I/O fails
     */
    @ApiOperation(value = "根据ID删除小说数据接口", notes = "根据ID删除小说数据接口")
    @ApiImplicitParams({
            @ApiImplicitParam(name = "id", value = "小说id",dataType = "string", required = true),
            @ApiImplicitParam(name = "indexName",  value = "索引库名", dataType = "string", required = true),
    })
    @PostMapping(value = "/delete/{id}/{indexName}")
    public Result testESDelete(@PathVariable("id") String id, @PathVariable("indexName") String indexName) {
        // Bug fix: the mapping declares URI template variables {id}/{indexName}, so they
        // must be bound with @PathVariable — @RequestParam would reject every request
        // made to the declared route with a 400 (missing query parameters).
        if (indexExists(indexName)){
            DeleteRequest deleteRequest = new DeleteRequest(indexName);
            deleteRequest.id(id);
            try {
                DeleteResponse deleteResponse = restHighLevelClient.delete(deleteRequest, RequestOptions.DEFAULT);
                if (deleteResponse.getResult() == DocWriteResponse.Result.NOT_FOUND) {
                    return Result.success("删除失败");
                } else {
                    return Result.success("删除成功");
                }
            } catch (IOException e) {
                e.printStackTrace();
                return Result.error("删除异常");
            }
        }
        return Result.error("索引库不存在");
    }

    /**
     * Terms aggregation over the "title" field: returns a map of term -> document count.
     * <p>
     * NOTE(review): a terms aggregation on an analyzed text field requires fielddata or a
     * ".keyword" sub-field — confirm the index mapping supports aggregating on "title".
     *
     * @param index index to aggregate over
     * @return map of bucket key to doc count, or an error on I/O failure
     */
    @ApiOperation(value = "小说聚合查询接口", notes = "小说聚合查询接口")
    @RequestMapping(value = "/query/{index}", method = RequestMethod.GET)
    public Result findByTilte(@PathVariable("index") String index) {
        SearchRequest searchRequest = new SearchRequest(index);
        SearchSourceBuilder sourceBuilder = new SearchSourceBuilder();

        TermsAggregationBuilder termsAggregationBuilder = AggregationBuilders.terms("by_title").field("title");
        sourceBuilder.aggregation(termsAggregationBuilder);

        sourceBuilder.timeout(new TimeValue(60, TimeUnit.SECONDS));
        searchRequest.source(sourceBuilder);

        try {
            SearchResponse searchResponse = restHighLevelClient.search(searchRequest, RequestOptions.DEFAULT);
            Aggregations aggregations = searchResponse.getAggregations();
            // Bug fix: buckets for a title (string) aggregation are ParsedStringTerms, so the
            // previous unconditional cast to ParsedLongTerms threw ClassCastException at
            // runtime. Reading through the Terms interface handles either bucket type.
            Terms byTitle = aggregations.get("by_title");
            Map<String, Long> map = new HashMap<>();
            for (Terms.Bucket bucket : byTitle.getBuckets()) {
                // getKeyAsString is safe for both numeric and string term buckets.
                map.put(bucket.getKeyAsString(), bucket.getDocCount());
            }
            return Result.success(map);
        } catch (IOException e) {
            e.printStackTrace();
        }
        return Result.error("查询失败");
    }
}
