package org.arch_learn.elastic_search.springboot_es.service.impl;

import lombok.extern.java.Log;
import org.arch_learn.elastic_search.springboot_es.service.IPositionService;
import org.arch_learn.elastic_search.springboot_es.util.DBHelper;
import org.elasticsearch.action.bulk.BulkProcessor;
import org.elasticsearch.action.index.IndexRequest;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.sql.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.concurrent.TimeUnit;

@Log
@Service
public class PositionService implements IPositionService {

    @Autowired
    private BulkProcessor bulkProcessor;

    private static final String POSITION_INDEX = "position_index";

    /** Rows buffered per batch before being handed to the BulkProcessor. */
    private static final int BATCH_SIZE = 10000;

    /**
     * Streams every row of the {@code position} table into the Elasticsearch
     * {@code position_index} via the injected {@link BulkProcessor}.
     *
     * <p>Rows are read forward-only with a small fetch size so the driver can
     * stream them, buffered in batches of {@link #BATCH_SIZE}, and submitted as
     * index requests; any remainder is submitted after the loop.
     *
     * @throws SQLException if reading from the database fails; the previous
     *         version swallowed all exceptions and logged only the message,
     *         which hid failures and lost the stack trace.
     */
    @Override
    public void importAll() throws SQLException {
        String sql = "select * from position";
        // try-with-resources closes ResultSet -> Statement -> Connection in the
        // correct reverse order even on error. (The old finally block closed
        // them in the wrong order and NPE'd if getConn() itself failed.)
        try (Connection conn = DBHelper.getConn();
             PreparedStatement ps = conn.prepareStatement(
                     sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY)) {
            // Hint to the driver to stream rows instead of materializing all of them.
            ps.setFetchSize(20);
            try (ResultSet rs = ps.executeQuery()) {
                ResultSetMetaData meta = rs.getMetaData();
                int columnCount = meta.getColumnCount();
                ArrayList<HashMap<String, String>> batch = new ArrayList<>(BATCH_SIZE);
                while (rs.next()) {
                    HashMap<String, String> row = new HashMap<>(16);
                    // JDBC column indexes are 1-based AND inclusive of the last
                    // column: use <=. (The old `i < columnCount` silently
                    // dropped the final column of every row.)
                    for (int i = 1; i <= columnCount; i++) {
                        // getColumnLabel honors SQL aliases; for `select *` it
                        // is identical to the column name.
                        row.put(meta.getColumnLabel(i), rs.getString(i));
                    }
                    batch.add(row);
                    if (batch.size() == BATCH_SIZE) {
                        indexBatch(batch);
                        batch.clear();
                    }
                }
                // Submit the final, possibly partial batch.
                indexBatch(batch);
            }
            bulkProcessor.flush();
        } finally {
            try {
                // NOTE(review): awaitClose() permanently closes the shared,
                // autowired BulkProcessor, so a second importAll() call on this
                // bean would fail — confirm this import is meant to run once.
                bulkProcessor.awaitClose(150L, TimeUnit.SECONDS);
            } catch (InterruptedException e) {
                // Restore the interrupt flag instead of swallowing it.
                Thread.currentThread().interrupt();
                log.warning("Interrupted while waiting for bulk indexing to finish");
            }
        }
    }

    /** Submits one batch of rows to the BulkProcessor as index requests. */
    private void indexBatch(ArrayList<HashMap<String, String>> records) {
        for (HashMap<String, String> record : records) {
            bulkProcessor.add(new IndexRequest(POSITION_INDEX).source(record));
        }
    }
}
