package org.example.position.service.impl;

import lombok.extern.java.Log;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.bulk.BackoffPolicy;
import org.elasticsearch.action.bulk.BulkProcessor;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.example.position.service.PositionService;
import org.example.position.util.DBHelper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.function.BiConsumer;
import java.util.logging.Level;

@Service
@Log
public class PositionServiceImpl implements PositionService {

    /** Rows fetched per JDBC round trip while streaming the {@code position} table. */
    private static final int FETCH_SIZE = 20;

    /** Rows buffered locally before being handed to the BulkProcessor in one go. */
    private static final int BATCH_SIZE = 10000;

    /** Minimum number of hits {@link #searchPos(String)} should return. */
    private static final int MIN_RESULT_SIZE = 5;

    @Autowired
    private RestHighLevelClient restHighLevelClient;

    @Autowired
    private DBHelper dbHelper;

    /**
     * Streams every row of the {@code position} table out of the relational
     * database and indexes it into the {@code position} Elasticsearch index
     * through a {@link BulkProcessor}.
     * <p>
     * Rows are converted to {@code HashMap<String,String>} (column name → string
     * value) because that is one of the source shapes {@link IndexRequest}
     * accepts. Any failure is logged with its full stack trace; the method never
     * throws.
     */
    @Override
    public void importToEs() {
        String sql = "SELECT * from position";
        try (BulkProcessor bulkProcessor = getBulkProcessor();
                Connection conn = dbHelper.getConnection();
                PreparedStatement ps = conn.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY)) {
            // BUGFIX: the fetch-size hint must be set BEFORE executeQuery();
            // the original set it after execution, where the JDBC driver ignores it
            // and may buffer the entire result set in memory.
            ps.setFetchSize(FETCH_SIZE);
            try (ResultSet rs = ps.executeQuery()) {
                ResultSetMetaData colData = rs.getMetaData();
                int columnCount = colData.getColumnCount();
                List<HashMap<String, String>> dataList = new ArrayList<>(BATCH_SIZE);
                int count = 0;
                while (rs.next()) {
                    count++;
                    HashMap<String, String> row = new HashMap<>(128);
                    for (int i = 1; i <= columnCount; i++) {
                        String columnName = colData.getColumnName(i);
                        row.put(columnName, rs.getString(i));
                    }
                    dataList.add(row);
                    // Submit a full batch, then reuse the buffer for the next one.
                    // (IndexRequest.source(Map) copies the data, so clearing is safe.)
                    if (count % BATCH_SIZE == 0) {
                        log.info("Mysql handle data number : " + count);
                        addIndexRequest(bulkProcessor, dataList);
                        dataList.clear();
                    }
                }
                // Submit the trailing partial batch (possibly empty).
                addIndexRequest(bulkProcessor, dataList);
                log.info("-------------------------- Finally insert number total : " + count);
                // Push buffered requests to ES now; actual indexing remains
                // asynchronous and is bounded by the configured flush interval.
                bulkProcessor.flush();
            }
        } catch (Exception e) {
            // BUGFIX: log the full stack trace, not just the (possibly null) message.
            log.log(Level.SEVERE, "importToEs failed", e);
        }
    }

    /** Queues one {@link IndexRequest} per row map on the given processor. */
    private void addIndexRequest(BulkProcessor bulkProcessor, List<HashMap<String, String>> dataList) {
        // BUGFIX: removed System.out.println(hashMap) debug residue that printed
        // every imported row to stdout.
        for (HashMap<String, String> row : dataList) {
            bulkProcessor.add(new IndexRequest("position").source(row));
        }
    }

    /**
     * Builds a {@link BulkProcessor} that flushes at 5000 actions / 100 MB /
     * every 100 s, runs up to 10 concurrent bulk requests, and retries rejected
     * bulks 3 times with a constant 1 s backoff.
     *
     * @return a ready-to-use processor; never {@code null}
     */
    private BulkProcessor getBulkProcessor() {
        BulkProcessor.Listener listener = new BulkProcessor.Listener() {
            @Override
            public void beforeBulk(long executionId, BulkRequest request) {
                log.info("Try to insert data number : " + request.numberOfActions());
            }

            @Override
            public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
                // BUGFIX: a completed bulk call can still carry per-item failures;
                // the original unconditionally logged it as a success.
                if (response.hasFailures()) {
                    log.severe("Bulk completed with item failures, id: " + executionId
                            + " : " + response.buildFailureMessage());
                } else {
                    log.info("************** Success insert data number : "
                            + request.numberOfActions() + " , id: " + executionId);
                }
            }

            @Override
            public void afterBulk(long executionId, BulkRequest request, Throwable failure) {
                // Log with the throwable so the stack trace is preserved.
                log.log(Level.SEVERE, "Bulk is unsuccess, executionId: " + executionId, failure);
            }
        };

        BiConsumer<BulkRequest, ActionListener<BulkResponse>> bulkConsumer =
                (request, bulkListener) -> restHighLevelClient.bulkAsync(request, RequestOptions.DEFAULT, bulkListener);

        // BUGFIX: the original wrapped build() in try/catch and, in the handler,
        // called awaitClose() on a still-null reference (guaranteed NPE) and could
        // return null. builder.build() throws no checked exception, so the
        // try/catch is removed and a non-null processor is always returned.
        // Note: the settings must be applied before build() to take effect.
        return BulkProcessor.builder(bulkConsumer, listener)
                .setBulkActions(5000)
                .setBulkSize(new ByteSizeValue(100L, ByteSizeUnit.MB))
                .setConcurrentRequests(10)
                .setFlushInterval(TimeValue.timeValueSeconds(100L))
                .setBackoffPolicy(BackoffPolicy.constantBackoff(TimeValue.timeValueSeconds(1L), 3))
                .build();
    }

    /**
     * Full-text search on {@code positionName} in the {@code position} index.
     * If fewer than {@value #MIN_RESULT_SIZE} hits match, the result is padded
     * with "perks" documents via {@link #supplementData(List, String)}.
     *
     * @param keyword text matched against the {@code positionName} field
     * @return the matching documents' sources, possibly padded to {@value #MIN_RESULT_SIZE}
     * @throws IOException if the Elasticsearch call fails
     */
    @Override
    public List<Map<String, Object>> searchPos(String keyword) throws IOException {
        SearchRequest searchRequest = new SearchRequest("position");
        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();

        QueryBuilder builder = QueryBuilders.matchQuery("positionName", keyword);
        searchSourceBuilder.query(builder);
        searchSourceBuilder.timeout(new TimeValue(60, TimeUnit.SECONDS));
        searchRequest.source(searchSourceBuilder);

        SearchResponse searchResponse = restHighLevelClient.search(searchRequest, RequestOptions.DEFAULT);
        List<Map<String, Object>> list = new ArrayList<>();
        for (SearchHit hit : searchResponse.getHits().getHits()) {
            list.add(hit.getSourceAsMap());
        }
        // Pad short result lists with attractive-perks postings.
        if (list.size() < MIN_RESULT_SIZE) {
            supplementData(list, keyword);
        }
        return list;
    }

    /**
     * Appends up to ({@value #MIN_RESULT_SIZE} - current size) filler documents
     * whose {@code positionAdvantage} matches the promotional phrase, excluding
     * anything that already matched {@code keyword} on {@code positionName}.
     *
     * @param list    result list to append to (mutated in place)
     * @param keyword the original search keyword, excluded from the filler query
     * @throws IOException if the Elasticsearch call fails
     */
    private void supplementData(List<Map<String, Object>> list, String keyword) throws IOException {
        SearchRequest searchRequest = new SearchRequest("position");
        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();

        BoolQueryBuilder builder = QueryBuilders.boolQuery();
        builder.must().add(QueryBuilders.matchQuery("positionAdvantage", "美女多 员工福利好"));
        builder.mustNot().add(QueryBuilders.matchQuery("positionName", keyword));

        searchSourceBuilder.query(builder);
        searchSourceBuilder.from(0);
        searchSourceBuilder.size(MIN_RESULT_SIZE - list.size());
        searchSourceBuilder.timeout(new TimeValue(60, TimeUnit.SECONDS));
        searchRequest.source(searchSourceBuilder);

        SearchResponse searchResponse = restHighLevelClient.search(searchRequest, RequestOptions.DEFAULT);
        for (SearchHit hit : searchResponse.getHits().getHits()) {
            list.add(hit.getSourceAsMap());
        }
    }
}
