package com.lagou.service.impl;

import com.alibaba.druid.pool.DruidPooledConnection;
import com.lagou.service.PositionService;
import com.lagou.utils.DruidUtils;
import org.elasticsearch.action.bulk.BackoffPolicy;
import org.elasticsearch.action.bulk.BulkProcessor;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;

@Service
public class PositionServiceImpl implements PositionService {

    /** Name of the Elasticsearch index holding position documents. */
    private static final String POSITION_INDEX = "position";

    @Autowired
    private RestHighLevelClient restHighLevelClient;

    /**
     * Imports every row of the MySQL {@code position} table into the
     * Elasticsearch {@code position} index.
     *
     * <p>Each row becomes one document whose fields are the column names mapped
     * to their string values. Bulk writing is delegated entirely to
     * {@link BulkProcessor}, which batches by action count, byte size, and
     * flush interval (see {@link #getBulkProcessor}), so rows are added one at
     * a time — no manual staging list is needed.
     *
     * <p>JDBC resources are released via try-with-resources, and the processor
     * is flushed and closed in {@code finally}: without {@code awaitClose} the
     * last buffered batch could be silently lost.
     *
     * @throws Exception if the database read or the bulk shutdown fails
     */
    @Override
    public void importPosition() throws Exception {
        BulkProcessor bulkProcessor = getBulkProcessor(restHighLevelClient);
        try (DruidPooledConnection connection = DruidUtils.getInstance().getConnection();
             PreparedStatement preparedStatement =
                     connection.prepareStatement("select * from position");
             ResultSet resultSet = preparedStatement.executeQuery()) {
            ResultSetMetaData metaData = resultSet.getMetaData();
            int columnCount = metaData.getColumnCount();
            while (resultSet.next()) {
                Map<String, String> row = new HashMap<>(columnCount);
                for (int i = 1; i <= columnCount; i++) {
                    String column = metaData.getColumnName(i);
                    row.put(column, resultSet.getString(column));
                }
                // BulkProcessor buffers internally and flushes asynchronously.
                bulkProcessor.add(new IndexRequest(POSITION_INDEX).source(row));
            }
        } finally {
            // Flush all pending requests and release processor resources;
            // waits up to 60s for in-flight bulk requests to complete.
            bulkProcessor.awaitClose(60, TimeUnit.SECONDS);
        }
    }

    /**
     * Searches the {@code position} index with a {@code query_string} query.
     *
     * <p>Results are sorted by {@code generateTime.keyword} descending (newest
     * first) and paged with a classic offset/limit scheme.
     *
     * @param keyword  query-string expression matched against all fields
     * @param pageNum  1-based page number
     * @param pageSize number of hits per page
     * @return the matching documents as source maps (may be empty)
     * @throws Exception if the search request fails
     */
    @Override
    public List<Map<String, Object>> searchPosition(String keyword, Integer pageNum, Integer pageSize) throws Exception {
        SearchRequest searchRequest = new SearchRequest(POSITION_INDEX);

        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
        searchSourceBuilder.query(QueryBuilders.queryStringQuery(keyword));
        // Newest first; sort on the keyword sub-field since text fields are not sortable.
        searchSourceBuilder.sort("generateTime.keyword", SortOrder.DESC);
        // Offset/limit pagination.
        searchSourceBuilder.from((pageNum - 1) * pageSize);
        searchSourceBuilder.size(pageSize);
        searchRequest.source(searchSourceBuilder);

        SearchResponse searchResponse = restHighLevelClient.search(searchRequest, RequestOptions.DEFAULT);

        // Unwrap each hit into its raw source map for the caller.
        SearchHit[] hits = searchResponse.getHits().getHits();
        List<Map<String, Object>> list = new ArrayList<>(hits.length);
        for (SearchHit searchHit : hits) {
            list.add(searchHit.getSourceAsMap());
        }
        return list;
    }

    /**
     * Builds a {@link BulkProcessor} backed by the given client.
     *
     * <p>Flush triggers: 5 000 actions, 100&nbsp;MB of request data, or a 5-second
     * interval — whichever comes first. Up to 10 bulk requests execute
     * concurrently, and failed batches are retried with exponential backoff
     * (100&nbsp;ms initial delay, 3 retries).
     *
     * @param restHighLevelClient client used to execute the async bulk requests
     * @return a configured, ready-to-use processor (caller must close it)
     * @throws Exception declared for interface symmetry with the callers
     */
    private BulkProcessor getBulkProcessor(RestHighLevelClient restHighLevelClient) throws Exception {
        return BulkProcessor.builder(
                (bulkRequest, bulkResponseActionListener) ->
                        restHighLevelClient.bulkAsync(bulkRequest, RequestOptions.DEFAULT, bulkResponseActionListener),
                new BulkProcessor.Listener() {
                    @Override
                    public void beforeBulk(long executionId,
                                           BulkRequest request) {
                        // No-op: nothing to prepare before a batch is sent.
                    }

                    @Override
                    public void afterBulk(long executionId,
                                          BulkRequest request,
                                          BulkResponse response) {
                        // No-op: per-batch success is not tracked.
                    }

                    @Override
                    public void afterBulk(long executionId,
                                          BulkRequest request,
                                          Throwable failure) {
                        // No-op: failures are absorbed by the retry/backoff policy.
                    }
                })
                .setBulkActions(5000)
                .setBulkSize(new ByteSizeValue(100, ByteSizeUnit.MB))
                .setFlushInterval(TimeValue.timeValueSeconds(5))
                .setConcurrentRequests(10)
                .setBackoffPolicy(BackoffPolicy.exponentialBackoff(TimeValue.timeValueMillis(100), 3))
                .build();
    }
}
