package com.pingan.haofang.searchcloud.common.fetch;

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.pingan.haofang.searchcloud.api.IndexRowData;
import com.pingan.haofang.searchcloud.api.matedata.IndexMeta;
import com.pingan.haofang.searchcloud.common.constants.Constants;
import com.pingan.haofang.searchcloud.common.constants.QueryOptimizeEnum;
import com.pingan.haofang.searchcloud.common.dto.FetchDataRetryDTO;
import com.pingan.haofang.searchcloud.common.dto.FetchDatasDTO;
import com.pingan.haofang.searchcloud.common.dto.PageDTO;
import com.pingan.haofang.searchcloud.common.exception.SystemException;
import com.pingan.haofang.searchcloud.index.constants.IndexBuildConstants;
import org.apache.commons.collections.CollectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.web.client.ResourceAccessException;
import org.springframework.web.client.RestClientException;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.BiFunction;

import static com.pingan.haofang.searchcloud.index.constants.IndexBuildConstants.EXPIRE_IN_DAYS;

/**
 * Multi-threaded streaming fetch iterator over a paged result set.
 *
 * @author SONGBANGYONG396
 * @date 2018/07/03
 * @since 1.0.0
 */
public class MultiThreadStreamFetchIterator<E, M extends IndexMeta, R extends IndexRowData> extends
        AbstractStreamFetchIterator<FetchDatasDTO<R>> {

    private static final Logger logger = LoggerFactory.getLogger(MultiThreadStreamFetchIterator.class);

    // Thread-local Gson used to parse the progress JSON stored in redis.
    // NOTE(review): non-final field with a constant-style name; consider making it final.
    private ThreadLocal<Gson> GSON = ThreadLocal.withInitial(() -> new GsonBuilder().setDateFormat("yyyy-MM-dd " +
            "HH:mm:ss").create());


    /**
     * Placeholder loop count used where no loop counter applies.
     */
    private static final int NULL_LOOP_COUNT = -1;


    /**
     * Multiplier applied to the thread count when sizing the fetch cache.
     */
    private static final Integer FACTOR_THRESHOLD = 1;

    /**
     * Number of the first page.
     */
    private static final Integer FIRST_PAGE = 1;

    /**
     * Streaming fetcher performing the actual RPC page queries.
     */
    private final StreamFetcher<E> fetcher;

    /**
     * Index field passed to getLastMaxId when computing the next lastMaxId.
     */
    private String indexField;

    /**
     * Page size of each fetch.
     */
    private final int pageSize;

    /**
     * Shared page counter incremented by the worker tasks (non-optimized mode).
     */
    private AtomicInteger pageNo = new AtomicInteger(0);

    /**
     * Number of worker threads.
     */
    private Integer threadNum;

    /**
     * Worker thread pool (sized threadNum + 1).
     */
    private ExecutorService executor;

    /**
     * True while data is still being fetched; checked by both producers and consumers.
     */
    private volatile boolean fetching;


    /**
     * Buffer of converted fetched pages — the producer/consumer exchange point.
     */
    private ConcurrentLinkedQueue<FetchDatasDTO<R>> convertedDataStore = new ConcurrentLinkedQueue<>();

    /**
     * Lock guarding the two buffer conditions below.
     */
    private final ReentrantLock lock = new ReentrantLock();

    // Signalled when data is added to the buffer; consumers wait on this in hasNext().
    private final Condition notEmptyCondition = lock.newCondition();

    // Signalled when the buffer drops below capacity; producers wait on this before publishing.
    private final Condition notFullCondition = lock.newCondition();

    /**
     * Function converting fetched raw rows into index rows.
     */
    private BiFunction<List<E>, M, List<R>> dataConvertFunction;

    /**
     * Index metadata handed to the conversion function.
     */
    private M meta;


    // NOTE(review): raw RedisTemplate type; used for progress bookkeeping keys.
    private RedisTemplate redisTemplate;

    /**
     * Key prefix of the solr index build progress stored in redis.
     */
    private final String indexSolrProgressKeyPrefix;

    /**
     * Whether query optimization (lastMaxId-based paging) is enabled.
     */
    private final QueryOptimizeEnum queryOptimize;

    /**
     * Build progress id, used as the MDC request id in logs.
     */
    private final Long progressId;

    /**
     * Capacity of the fetch cache in pages: threadNum * FACTOR_THRESHOLD.
     */
    private final Integer defaultFetchCachePageCapacity;

    /***
     *  First fatal error observed by any thread; surfaced to the consumer in hasNext()/next().
     */
    private volatile Throwable throwable;

    /**
     * Accumulated wall-clock time (ms) spent in RPC fetch calls.
     */
    private final AtomicLong rpcCostTime = new AtomicLong(0);

    /**
     * Accumulated number of failed fetch attempts across all queries.
     */
    private final AtomicInteger allRetryTime = new AtomicInteger(0);

    /**
     * Creates the iterator and immediately starts the background fetch pipeline.
     *
     * @param pageSize                   page size of each RPC fetch
     * @param fetcher                    streaming fetcher performing the page queries
     * @param threadNum                  number of worker threads
     * @param dataConvertFunction        converts fetched raw rows into index rows
     * @param meta                       index metadata handed to the converter
     * @param redisTemplate              redis access for progress bookkeeping
     * @param indexSolrProgressKeyPrefix redis key prefix of the solr build progress
     * @param queryOptimize              whether lastMaxId-based query optimization is enabled
     * @param progressId                 build progress id (used as the MDC request id)
     * @param indexField                 field passed to getLastMaxId when computing the next lastMaxId
     * @param retryDTO                   retry configuration forwarded to the superclass
     */
    public MultiThreadStreamFetchIterator(int pageSize, StreamFetcher<E> fetcher, Integer threadNum,
                                          BiFunction<List<E>, M, List<R>> dataConvertFunction, M meta,
                                          RedisTemplate redisTemplate, String indexSolrProgressKeyPrefix,
                                          QueryOptimizeEnum queryOptimize, Long progressId, String indexField,
                                          FetchDataRetryDTO retryDTO) {
        this.pageSize = pageSize;
        this.fetcher = fetcher;
        this.threadNum = threadNum;
        this.dataConvertFunction = dataConvertFunction;
        this.meta = meta;
        this.redisTemplate = redisTemplate;
        this.indexSolrProgressKeyPrefix = indexSolrProgressKeyPrefix;
        this.queryOptimize = queryOptimize;
        this.progressId = progressId;
        this.indexField = indexField;
        super.setRetryDTO(retryDTO);
        defaultFetchCachePageCapacity = threadNum * FACTOR_THRESHOLD;

        // One extra thread beyond threadNum: query-optimized mode also submits a RemainTask.
        executor = Executors.newFixedThreadPool(threadNum + 1);

        fetching = true;

        // Run the producer pipeline on a dedicated thread — it blocks on a latch waiting for the
        // worker tasks, so it must not occupy a slot in the worker pool itself.
        // NOTE(review): starting a thread from the constructor lets `this` escape before
        // construction completes — confirm no subclass relies on fully-initialized state.
        Runnable fetchDataTask = () -> {
            if (QueryOptimizeEnum.ENABLE.equals(queryOptimize)) {
                logger.info("fetch datas task start,query optimize enable!");
                fetchDataWithQueryOptimize();
            } else {
                logger.info("fetch datas task start!");
                fetchData();
            }
        };

        new Thread(fetchDataTask).start();

    }

    /**
     * Fetches one page of data, retrying transient failures.
     * <p>
     * A fetch is retried when it throws or returns {@code null}. Retrying stops once a single
     * page has failed more than {@code singleQueryMaxRetryTime} times, or the accumulated
     * failure count across all queries exceeds {@code allQueryMaxRetryTime}; in that case the
     * error is recorded in {@link #throwable} and {@code null} is returned.
     *
     * @param invokeCount attempts already made for this page (external callers pass the initial
     *                    value; recursive retries pass the running count)
     * @param pageNo      page number to fetch
     * @param pageSize    page size
     * @param lastMaxId   max id of the previous page; may be {@code null}
     * @return the fetched page, or {@code null} when all retries are exhausted
     */
    private PageDTO<E> getPageData(int invokeCount, int pageNo, int pageSize, Long lastMaxId) {
        PageDTO<E> pageData = null;
        boolean hasException = false;
        Exception fetchException = null;
        try {
            pageData = lastMaxId == null ? fetcher.fetch(pageNo, pageSize) : fetcher.fetch(pageNo, pageSize, lastMaxId);
            if (pageData == null) {
                // A null page is treated the same as a thrown failure and retried.
                hasException = true;
            }
        } catch (Exception e) {
            hasException = true;
            fetchException = e;
        }
        if (hasException) {
            invokeCount++;
            int all = allRetryTime.incrementAndGet();
            if (invokeCount > singleQueryMaxRetryTime || all > allQueryMaxRetryTime) {
                logger.error("The number of attempts exceeded the maximum limit,SINGLE_QUERY_MAX_RETRY_TIME:{}," +
                                "ALL_QUERY_MAX_RETRY_TIME:{}" + "The actual value is invokeCount:{},allRetryTime:{},progressId:{}",
                        singleQueryMaxRetryTime, allQueryMaxRetryTime, invokeCount, allRetryTime, progressId);
                // Abort: record the failure so hasNext()/next() surface it to the consumer.
                throwable = new SystemException("Fetch Data Exception", fetchException);
            } else {
                // BUGFIX: log the exception of this failed attempt; the previous code logged the
                // iterator-level `throwable`, which is still null at this point.
                logger.warn("There is a problem with the data fetching. The program is retrying it now. " +
                        "invokeCount:{},allRetryTime:{},exception:{}", invokeCount, allRetryTime, fetchException);
                try {
                    Thread.sleep(retryTimeInterval);
                } catch (InterruptedException e) {
                    // BUGFIX: restore the interrupt status so callers can observe the interruption.
                    Thread.currentThread().interrupt();
                    logger.error("Thread InterruptedException", e);
                }
                return getPageData(invokeCount, pageNo, pageSize, lastMaxId);
            }
        }
        return pageData;
    }

    /**
     * Fetches all pages with a pool of workers sharing one page counter (non-optimized mode).
     * Any failure is recorded in {@link #throwable} and surfaced to the consumer via
     * hasNext()/next().
     */
    private void fetchData() {
        try {
            MDC.put(Constants.REQUEST_ID, String.valueOf(progressId));

            // Fetch the first page over RPC (mainly to learn the total record count).
            PageDTO<E> firstPage = getPageData(INIT_INVOKE_COUNT_VALUE, FIRST_PAGE, pageSize, null);
            if (firstPage == null) {
                // The error has already been recorded inside getPageData.
                return;
            }

            // Total record count, used for progress reporting.
            final long total = firstPage.getTotal();

            // Latch released once every worker has finished.
            final CountDownLatch latch = new CountDownLatch(threadNum);

            List<Future<TaskResult>> futures = new ArrayList<>(threadNum);

            // Fan the RPC fetching out over the worker pool.
            for (int i = 0; i < threadNum; i++) {
                futures.add(executor.submit(new Task(total, latch)));
            }

            // Wait until every worker has finished.
            latch.await();

            // Collect and record any task failures.
            findExceptionAfterTask(futures);

        } catch (SystemException e) {
            if (e.getCause() != null && e.getCause() instanceof ResourceAccessException) {
                throwable = new SystemException("progressId:" + progressId + " invoke rpc failed, please check the " +
                        "rpc url is available. ",
                        e.getCause());
            } else {
                throwable = e;
            }
        } catch (ExecutionException e) {
            // Consistent with fetchDataWithQueryOptimize: record the task's own failure cause
            // rather than wrapping the ExecutionException itself.
            throwable = e.getCause();
        } catch (Exception e) {
            throwable = new SystemException(e);
        } finally {
            // Shut the pool down and wake any consumer waiting for data.
            afterFetchDatasComplete();
            MDC.clear();
        }

    }


    /**
     * Fetches all pages with query optimization (lastMaxId-based paging): the full pages are
     * pre-partitioned among the workers and the leftover records are imported by a dedicated
     * RemainTask. Any failure is recorded in {@link #throwable}.
     */
    private void fetchDataWithQueryOptimize() {
        MDC.put(Constants.REQUEST_ID, String.valueOf(progressId));
        final String totalKey = indexSolrProgressKeyPrefix + IndexBuildConstants.TOTAL_KEY_SUFFIX;

        try {

            // Look up the cached total record count in redis first.
            String totalVal = (String) redisTemplate.opsForValue().get(totalKey);

            // Total record count.
            final Long total;
            if (totalVal != null) {
                total = Long.valueOf(totalVal);
            } else {
                // Fetch the first page over RPC, only to learn the total used for partitioning.
                PageDTO<E> firstPage = getPageData(INIT_INVOKE_COUNT_VALUE,FIRST_PAGE, pageSize ,null);

                if (firstPage == null) {
                    // The error has already been recorded inside getPageData.
                    return;
                }

                total = firstPage.getTotal();
                // Cache the total count for subsequent runs.
                redisTemplate.opsForValue().set(totalKey, total.toString(), EXPIRE_IN_DAYS, TimeUnit.DAYS);
            }

            // Number of full pages.
            final long pieces = total / pageSize;

            // Pages assigned to each worker.
            final long eachThreadPiece = pieces / threadNum;

            // Pages assigned to the last worker (it also takes the remainder).
            final long lastThreadPiece = pieces / threadNum + pieces % threadNum;

            // Records left over after full-page partitioning.
            final Long remain = total % pageSize;

            // Latch counts threadNum workers plus the RemainTask.
            final int count = threadNum + 1;
            final CountDownLatch latch = new CountDownLatch(count);


            logger.info("total:{} pieces:{} eachThreadPiece:{} lastThreadPiece:{} remain:{} ",
                    total,
                    pieces, eachThreadPiece, lastThreadPiece, remain);


            List<Future<TaskResult>> futures = new ArrayList<>(threadNum + 1);

            // Fan the RPC fetching out over the worker pool.
            for (int i = 0; i < threadNum; i++) {

                // Worker sequence number.
                final int threadSeq = i;

                Future<TaskResult> taskFuture = executor.submit(new TaskForQueryOptimize(threadSeq, total, pieces,
                        eachThreadPiece,
                        lastThreadPiece, latch));

                futures.add(taskFuture);

            }

            // Import the leftover records on a dedicated task.
            Future<TaskResult> remainTaskFuture = executor.submit(new RemainTask(total, latch));
            futures.add(remainTaskFuture);

            // Wait until every task has finished.
            latch.await();

            // Collect and record any task failures.
            findExceptionAfterTask(futures);

        } catch (SystemException e) {
            if (e.getCause() != null && e.getCause() instanceof ResourceAccessException) {
                throwable = new SystemException("progressId:" + progressId + " invoke rpc failed, please check the " +
                        "rpc url is available. ",
                        e.getCause());
            } else {
                throwable = e;
            }

        } catch (ExecutionException e) {
            // Record the failed task's own cause.
            Throwable t = e.getCause();
            throwable = t;
        } catch (Exception e) {
            throwable = new SystemException(e);
        } finally {
            // Shut the pool down and wake any consumer waiting for data.
            afterFetchDatasComplete();
            MDC.clear();
        }

    }


    /**
     * Inspects every task future after completion and records an aggregated failure, if any,
     * in {@link #throwable}.
     *
     * @param futures futures of the submitted fetch tasks
     * @throws InterruptedException if waiting on a future is interrupted
     * @throws ExecutionException   if a task terminated with an uncaught exception
     */
    private void findExceptionAfterTask(List<Future<TaskResult>> futures) throws InterruptedException,
            ExecutionException {
        // Aggregated failure messages, one per failed task.
        StringBuilder failureMessages = new StringBuilder();
        for (Future<TaskResult> future : futures) {
            TaskResult result = future.get();
            if (result == null || result.isSuccess()) {
                continue;
            }
            final Throwable cause = result.getThrowable();
            failureMessages.append(System.lineSeparator()).append(cause.getMessage());
            logger.error(cause.getMessage(), cause);
        }

        if (failureMessages.length() > 0) {
            throwable = new SystemException(failureMessages.toString());
        }
    }

    /**
     * Marks fetching as finished, shuts the worker pool down and wakes any consumer
     * blocked in hasNext() waiting for data.
     */
    private void afterFetchDatasComplete() {
        // Fetching is over; consumers must no longer wait for new data.
        fetching = false;

        final ExecutorService pool = executor;
        if (pool != null && !pool.isShutdown()) {
            pool.shutdown();
        }

        lock.lock();
        try {
            notEmptyCondition.signalAll();
            logger.debug("notEmptyCondition unlock, fetch data complete!");
        } finally {
            lock.unlock();
        }
        logger.info("threadPool closed, fetch data complete!");
    }


    /**
     * Blocks until either a converted page is available or fetching has finished, then reports
     * whether another element can be consumed. Re-throws any error recorded by the producers.
     * (In multi-threaded fetching the producers push into the buffer inside each task; in
     * single-threaded fetching data is pushed while iterating. Added 2018-8-16.)
     *
     * @return true when another FetchDatasDTO can be consumed via next()
     */
    @Override
    public boolean hasNext() {
        lock.lock();
        try {
            // Wait while producers are still running and the buffer is empty.
            while (fetching && CollectionUtils.isEmpty(convertedDataStore)) {
                try {
                    logger.debug("notEmptyCondition locked , convertedDataStore is empty.");
                    notEmptyCondition.await();
                } catch (InterruptedException e) {
                    // BUGFIX: restore the interrupt status before surfacing the failure.
                    Thread.currentThread().interrupt();
                    throw new SystemException(e);
                }
            }
        } finally {
            lock.unlock();
        }

        if (throwable != null) {
            throw new SystemException(throwable);
        }

        // Exhausted only when fetching has finished and the buffer is drained.
        if (!fetching && CollectionUtils.isEmpty(convertedDataStore)) {
            return false;
        }
        return true;
    }

    /**
     * Removes and returns the next converted page from the buffer, waking blocked
     * producers once capacity becomes available. Re-throws any recorded producer error.
     */
    @Override
    public FetchDatasDTO<R> next() {
        final FetchDatasDTO<R> head;
        lock.lock();
        try {
            head = convertedDataStore.poll();
            // Buffer dropped below capacity: let blocked producers resume.
            if (convertedDataStore.size() < defaultFetchCachePageCapacity) {
                logger.debug(" notFullCondition unlock , convertedDataStore being consumed.");
                notFullCondition.signalAll();
            }
        } finally {
            lock.unlock();
        }

        if (throwable != null) {
            throw new SystemException(throwable);
        }

        return head;
    }


    /**
     * No-op: this iterator does not support ignoring errors — recorded failures are always
     * re-thrown from hasNext()/next().
     */
    @Override
    public void setIgnoreError(boolean ignoreError) {
    }

    /**
     * Stops the fetch pipeline: producers observe {@code fetching == false} and exit, and the
     * buffered pages are discarded.
     */
    @Override
    public void stop() {
        fetching = false;
        lock.lock();
        try {
            convertedDataStore.clear();
            // Wake producers blocked on a full buffer so they see fetching == false and exit.
            notFullCondition.signalAll();
            // BUGFIX: also wake consumers parked in hasNext() on notEmptyCondition; otherwise
            // they stay blocked until the producer thread eventually reaches
            // afterFetchDatasComplete().
            notEmptyCondition.signalAll();
        } finally {
            lock.unlock();
        }

    }

    /**
     * Returns the accumulated wall-clock time (ms) spent in RPC fetch calls across all workers.
     */
    @Override
    public Long getRpcCostTime() {
        return rpcCostTime.get();
    }

    /**
     * Tells whether the given worker sequence number denotes the last worker thread.
     *
     * @param threadSeq zero-based worker sequence number
     * @return true when threadSeq is the final worker
     */
    private boolean isLastThread(int threadSeq) {
        final int lastSeq = threadNum - 1;
        return lastSeq == threadSeq;
    }

    /**
     * Worker task used when query optimization is disabled: all workers share the
     * {@code pageNo} counter and pull consecutive pages until an empty page is returned.
     */
    private class Task implements Callable<TaskResult> {

        // Total record count (from the first page), used in progress tokens and DTOs.
        final Long total;

        // Counted down when this task finishes, successfully or not.
        final CountDownLatch latch;

        public Task(Long total, CountDownLatch latch) {
            this.total = total;
            this.latch = latch;
        }

        @Override
        public TaskResult call() {
            MDC.put(Constants.REQUEST_ID, String.valueOf(progressId));

            // Human-readable progress info, kept for log and error messages.
            String currentProgressInfo = "";

            try {

                // Redis key of the solr build progress hash (single logical thread seq in this mode).
                final String indexSolrProgressKey = indexSolrProgressKeyPrefix + IndexBuildConstants.DEFAULT_THREAD_SEQ;

                final Set<String> progresses = redisTemplate.opsForHash().keys(indexSolrProgressKey);

                final Map<String, String> indexSolrProgressMap = redisTemplate.opsForHash().entries
                        (indexSolrProgressKey);

                StreamFetchGatherInfo gatherInfo = new StreamFetchGatherInfo();

                StreamFetchGatherInfo correctStreamFetchGatherInfo = new StreamFetchGatherInfo();


                // Count of already-imported records being skipped (restored from redis progress).
                Integer skipedImportSuccessCount = 0;
                // Count of already-fetched records being skipped (restored from redis progress).
                Integer skipedIndexDatasSize = 0;

                while (true) {

                    if (!fetching) {
                        logger.info("job interrupted or pause, thread stop!");
                        break;
                    }

                    // Claim the next page from the shared counter.
                    final int currentPageNo = pageNo.incrementAndGet();

                    // Progress token used to look up completion recorded in redis.
                    final String currentProgress = getProgress(NULL_LOOP_COUNT, total, currentPageNo,
                            pageSize, queryOptimize);


                    currentProgressInfo = getProgressInfo(total, currentPageNo, pageSize);

                    // If redis already recorded this page, accumulate its counters and skip it.
                    if (progresses != null && progresses.contains(currentProgress)) {
                        String fetchDatasProgressStr = indexSolrProgressMap.get(currentProgress);
                        FetchDatasDTO.FetchDatasProgress fetchDatasProgress = GSON.get().fromJson
                                (fetchDatasProgressStr, FetchDatasDTO.FetchDatasProgress.class);


                        Integer successCount = Optional.of(fetchDatasProgress).map(FetchDatasDTO
                                .FetchDatasProgress::getStreamFetchGatherInfo).map
                                (StreamFetchGatherInfo::getSuccessCount).map(Long::intValue).orElse(0);

                        Integer importSuccessCount = Optional.of(fetchDatasProgress).map(FetchDatasDTO
                                .FetchDatasProgress::getImportSuccessCount).orElse(0);

                        Integer indexDatasSize = Optional.of(fetchDatasProgress).map(FetchDatasDTO
                                .FetchDatasProgress::getIndexDatasSize).orElse(0);

                        correctStreamFetchGatherInfo.addFetchCount(successCount);

                        skipedImportSuccessCount += importSuccessCount;

                        skipedIndexDatasSize += indexDatasSize;

                        logger.info("Skip current page! pageNo:{} pageSize:{} currentProgress:{} ",
                                currentPageNo, pageSize, currentProgress);

                        continue;
                    }

                    // Fetch the current page over RPC and account its latency.
                    Long rpcBegin = System.currentTimeMillis();
                    PageDTO<E> page = getPageData(INIT_INVOKE_COUNT_VALUE,currentPageNo, pageSize,null);
                    rpcCostTime.addAndGet(System.currentTimeMillis() - rpcBegin);

                    // Rows fetched for this page.
                    List<E> datas = (page == null) ? null : page.getDatas();

                    if (datas == null || datas.isEmpty()) {
                        logger.info("datas is empty ,current thread stop. {}",
                                currentProgressInfo);
                        break;
                    }

                    final int fetchCount = datas.size();

                    // Convert raw rows into index rows.
                    List<R> convertedDatas = dataConvertFunction.apply(datas, meta);

                    // Release the raw rows eagerly.
                    datas = null;

                    lock.lock();
                    try {
                        while (convertedDataStore.size() >= defaultFetchCachePageCapacity) {
                            logger.debug("notFullCondition lock, convertedDataStore is full ,size :{} ",
                                    convertedDataStore.size());
                            notFullCondition.await();
                        }

                        // Track the estimated total record count; always keep the larger reported value.
                        if (gatherInfo.getEstimateTotalCount() < page.getTotal()) {
                            gatherInfo.setEstimateTotalCount(page.getTotal());
                        }
                        gatherInfo.addFetchCount(fetchCount);
                        correctStreamFetchGatherInfo.addFetchCount(fetchCount);

                        FetchDatasDTO<R> fetchDatasDTO = FetchDatasDTO.newBuilder().indexDatas(convertedDatas)
                                .fetchDatasProgress(FetchDatasDTO.FetchDatasProgress.newBuilder()
                                        .total(total).pageNo(currentPageNo).pageSize(pageSize)
                                        .threadNum(threadNum).threadSeq(IndexBuildConstants.DEFAULT_THREAD_SEQ)
                                        .queryOptimize(QueryOptimizeEnum.DISABLE.code())
                                        .streamFetchGatherInfo(gatherInfo)
                                        .correctStreamFetchGatherInfo(correctStreamFetchGatherInfo)
                                        .loopCount(NULL_LOOP_COUNT).indexDatasSize(fetchCount)
                                        .build())
                                .skipedImportSuccessCount(skipedImportSuccessCount)
                                .skipedIndexDatasSize(skipedIndexDatasSize)
                                .build();
                        // Producer: publish the converted page to the buffer.
                        convertedDataStore.add(fetchDatasDTO);
                        notEmptyCondition.signalAll();
                        logger.debug("notEmptyCondition unlock, after put datas. ");
                    } finally {
                        lock.unlock();
                    }

                    // Reset the per-page accumulators.
                    gatherInfo = new StreamFetchGatherInfo();
                    correctStreamFetchGatherInfo = new StreamFetchGatherInfo();
                    skipedImportSuccessCount = 0;
                    skipedIndexDatasSize = 0;

                    logger.info("fetch current page complete ! {} fetchCount:{} currentProgress:{} ",
                            currentProgressInfo, fetchCount, currentProgress);
                }

                // Flush trailing skip counters (non-zero only when the final loop iterations were skips).
                if (skipedIndexDatasSize != 0) {

                    lock.lock();
                    try {
                        while (fetching && convertedDataStore.size() >= defaultFetchCachePageCapacity) {
                            logger.debug("notFullCondition lock, convertedDataStore is full ,size :{} ",
                                    convertedDataStore.size());
                            notFullCondition.await();
                        }
                        // NOTE(review): this flush marks queryOptimize as ENABLE while the in-loop
                        // DTO uses DISABLE — looks inconsistent; confirm intent.
                        final FetchDatasDTO<R> fetchDatasDTO = FetchDatasDTO.newBuilder().indexDatas
                                (Collections.emptyList())
                                .fetchDatasProgress(FetchDatasDTO.FetchDatasProgress.newBuilder()
                                        .total(total).pageSize(pageSize)
                                        .threadNum(threadNum).threadSeq(IndexBuildConstants.DEFAULT_THREAD_SEQ)
                                        .queryOptimize(QueryOptimizeEnum.ENABLE.code())
                                        .streamFetchGatherInfo(gatherInfo)
                                        .correctStreamFetchGatherInfo(correctStreamFetchGatherInfo)
                                        .loopCount(NULL_LOOP_COUNT).build())
                                .skipedImportSuccessCount(skipedImportSuccessCount)
                                .skipedIndexDatasSize(skipedIndexDatasSize)
                                .build();
                        // Producer: publish the flush record to the buffer.
                        convertedDataStore.add(fetchDatasDTO);
                        notEmptyCondition.signalAll();
                        logger.debug("notEmptyCondition unlock, after put datas. ");
                    } finally {
                        lock.unlock();
                    }
                }

            } catch (SystemException e) {
                if (e.getCause() != null && e.getCause() instanceof RestClientException) {
                    e = new SystemException("progressId:" + progressId + currentProgressInfo + " invoke rpc failed, " +
                            "please check the rpc url is available. ", e.getCause());
                }
                return new TaskResult(false, e);
            } catch (Exception e) {
                e = new SystemException("progressId:" + progressId + currentProgressInfo + " " + e.getMessage(), e);
                return new TaskResult(false, e);
            } finally {
                latch.countDown();
                MDC.clear();
            }

            return new TaskResult(true);

        }
    }

    /**
     * 查询优化Task
     */
    private class TaskForQueryOptimize implements Callable<TaskResult> {

        final int threadSeq;

        final Long total;

        // 一共分了多少页
        final long pieces;

        //每个线程分配的页数
        final long eachThreadPiece;

        //最后一个线程分配的页数
        final long lastThreadPiece;

        //线程计数器
        final CountDownLatch latch;


        public TaskForQueryOptimize(int threadSeq, Long total, long pieces, long eachThreadPiece, long
                lastThreadPiece, CountDownLatch latch) {
            this.threadSeq = threadSeq;
            this.total = total;
            this.pieces = pieces;
            this.eachThreadPiece = eachThreadPiece;
            this.lastThreadPiece = lastThreadPiece;
            this.latch = latch;
        }

        @Override
        public TaskResult call() {
            MDC.put(Constants.REQUEST_ID, String.valueOf(progressId));

            //当前进度信息，用于记录日志
            String currentProgressInfo = "";

            try {

                // solr构建进度key
                final String indexSolrProgressKey = indexSolrProgressKeyPrefix + threadSeq;

                final Set<String> progresses = redisTemplate.opsForHash().keys(indexSolrProgressKey);

                final Map<String, String> indexSolrProgressMap = redisTemplate.opsForHash().entries
                        (indexSolrProgressKey);

                //上一次查询最大的ID
                Long lastMaxId = null;

                //当前线程抓取次数计数器
                int currentLoopCount = 0;

                //当前线程抓取的第一页
                final Long currentFirstPage = eachThreadPiece * threadSeq + 1;

                //循环计数器
                int loopCount = -1;


                StreamFetchGatherInfo gatherInfo = new StreamFetchGatherInfo();
                StreamFetchGatherInfo correctStreamFetchGatherInfo = new StreamFetchGatherInfo();

                //需要跳过的导入成功的数量
                Integer skipedImportSuccessCount = 0;
                //需要跳过的抓取数据的数量
                Integer skipedIndexDatasSize = 0;

                while (true) {

                    if (!fetching) {
                        logger.info("job interrupted or pause, thread stop ! ");
                        break;
                    }

                    loopCount++;
                    currentLoopCount++;

                    //抓取完毕则跳出循环
                    if ((!isLastThread(threadSeq) && currentLoopCount > eachThreadPiece) || (isLastThread
                            (threadSeq) && currentLoopCount > lastThreadPiece)) {
                        logger.info("thread: {} fetch complete ! currentLoopCount :{} ",
                                Thread.currentThread().getName(), currentLoopCount);
                        break;
                    }

                    PageDTO<E> page;
                    final Integer pageNo;

                    //当前需要使用的lastMaxId
                    final Long currentLastMaxId = lastMaxId;

                    if (lastMaxId != null) {
                        pageNo = FIRST_PAGE;
                    } else {
                        //没有lastMaxId则设置第一页
                        pageNo = currentFirstPage.intValue();
                    }

                    //当前进度信息
                    final String currentProgress = getProgress(loopCount, total, pageNo, pageSize,
                            queryOptimize);


                    currentProgressInfo = getProgressInfo(threadSeq, loopCount, total, pageNo, pageSize,
                            currentLastMaxId);

                    //如果redis中记录了进程信息，则跳过当前循环，并设置下一个循环需要使用的lastMaxId
                    if (progresses != null && progresses.contains(currentProgress)) {
                        String fetchDatasProgressStr = indexSolrProgressMap.get(currentProgress);
                        FetchDatasDTO.FetchDatasProgress fetchDatasProgress = GSON.get().fromJson
                                (fetchDatasProgressStr, FetchDatasDTO.FetchDatasProgress.class);

                        //记录已经抓取的数据的当前循环的lastMaxId
                        Long loggedCurrentLastMaxId = 0L;

                        if (fetchDatasProgress != null) {
                            //设置下一个循环需要使用的lastMaxId
                            lastMaxId = fetchDatasProgress.getNextLastMaxId();

                            loggedCurrentLastMaxId = fetchDatasProgress.getCurrentLastMaxId();
                        }

                        Integer successCount = Optional.of(fetchDatasProgress).map(FetchDatasDTO
                                .FetchDatasProgress::getStreamFetchGatherInfo).map
                                (StreamFetchGatherInfo::getSuccessCount).map(Long::intValue).orElse(0);

                        Integer importSuccessCount = Optional.of(fetchDatasProgress).map(FetchDatasDTO
                                .FetchDatasProgress::getImportSuccessCount).orElse(0);

                        Integer indexDatasSize = Optional.of(fetchDatasProgress).map(FetchDatasDTO
                                .FetchDatasProgress::getIndexDatasSize).orElse(0);

                        correctStreamFetchGatherInfo.addFetchCount(successCount);

                        skipedImportSuccessCount += importSuccessCount;

                        skipedIndexDatasSize += indexDatasSize;

                        logger.info("Skip current page! theadSeq:{} pageNo:{} pageSize:{} lastMaxId:{} " +
                                        "currentProgress:{}  ",
                                threadSeq, pageNo, pageSize, loggedCurrentLastMaxId, currentProgress);

                        continue;
                    }

                    //本次fetcher用于获取具体页的数据
                    Long rpcBegin = System.currentTimeMillis();

                    /*if (currentLastMaxId != null) {
                        page = fetcher.fetch(pageNo, pageSize, currentLastMaxId);
                    } else {
                        //没有lastMaxId
                        page = fetcher.fetch(pageNo, pageSize);
                    }*/
                    page = getPageData(INIT_INVOKE_COUNT_VALUE,pageNo, pageSize, currentLastMaxId);

                    rpcCostTime.addAndGet(System.currentTimeMillis() - rpcBegin);

                    //RPC抓取的数据
                    List<E> datas = (page == null) ? null : page.getDatas();

                    if (datas == null || datas.isEmpty()) {
                        logger.info("datas is empty ,current thread stop ! {} , currentProgress:{} ",
                                currentProgressInfo, currentProgress);
                        break;
                    }

                    final int fetchCount = datas.size();

                    //数据转换
                    List<R> convertedDatas = dataConvertFunction.apply(datas, meta);

                    //手动置空
                    datas = null;


                    //设置最大ID
                    lastMaxId = getLastMaxId(convertedDatas, meta,indexField);

                    lock.lock();
                    try {
                        while (fetching && convertedDataStore.size() >= defaultFetchCachePageCapacity) {
                            logger.debug("notFullCondition lock, convertedDataStore is full ,size :{} ",
                                    convertedDataStore.size());
                            notFullCondition.await();
                        }

                        // 设置预估记录大小，预估总记录大小永远取较大值
                        if (gatherInfo.getEstimateTotalCount() < page.getTotal()) {
                            gatherInfo.setEstimateTotalCount(page.getTotal());
                        }

                        gatherInfo.addFetchCount(fetchCount);
                        correctStreamFetchGatherInfo.addFetchCount(fetchCount);

                        final FetchDatasDTO<R> fetchDatasDTO = FetchDatasDTO.newBuilder().indexDatas(convertedDatas)
                                .fetchDatasProgress(FetchDatasDTO.FetchDatasProgress.newBuilder()
                                        .total(total).pageNo(pageNo).pageSize(pageSize)
                                        .threadNum(threadNum).threadSeq(threadSeq)
                                        .queryOptimize(QueryOptimizeEnum.ENABLE.code())
                                        .currentLastMaxId(currentLastMaxId).nextLastMaxId(lastMaxId)
                                        .streamFetchGatherInfo(gatherInfo)
                                        .correctStreamFetchGatherInfo(correctStreamFetchGatherInfo)
                                        .loopCount(loopCount).indexDatasSize(fetchCount).build())
                                .skipedImportSuccessCount(skipedImportSuccessCount)
                                .skipedIndexDatasSize(skipedIndexDatasSize)
                                .build();

                        //生产者将结果放到仓库中 可能会超出一点无需处理
                        convertedDataStore.add(fetchDatasDTO);
                        notEmptyCondition.signalAll();
                        logger.debug("notEmptyCondition unlock, after put datas. ", progressId);
                    } finally {
                        lock.unlock();
                    }

                    //重置
                    gatherInfo = new StreamFetchGatherInfo();
                    correctStreamFetchGatherInfo = new StreamFetchGatherInfo();
                    skipedImportSuccessCount = 0;
                    skipedIndexDatasSize = 0;

                    logger.info("fetch current page complete! {} fetchCount:{} currentProgress:{} ",
                            progressId, currentProgressInfo, fetchCount, currentProgress);

                }

                if (skipedIndexDatasSize != 0) {

                    lock.lock();
                    try {
                        while (fetching && convertedDataStore.size() >= defaultFetchCachePageCapacity) {
                            logger.debug("notFullCondition lock, convertedDataStore is full ,size :{} ",
                                    convertedDataStore.size());
                            notFullCondition.await();
                        }
                        final FetchDatasDTO<R> fetchDatasDTO = FetchDatasDTO.newBuilder().indexDatas
                                (Collections.emptyList())
                                .fetchDatasProgress(FetchDatasDTO.FetchDatasProgress.newBuilder()
                                        .total(total).pageSize(pageSize)
                                        .threadNum(threadNum).threadSeq(threadSeq)
                                        .queryOptimize(QueryOptimizeEnum.ENABLE.code())
                                        .streamFetchGatherInfo(gatherInfo)
                                        .correctStreamFetchGatherInfo(correctStreamFetchGatherInfo)
                                        .loopCount(loopCount).build())
                                .skipedImportSuccessCount(skipedImportSuccessCount)
                                .skipedIndexDatasSize(skipedIndexDatasSize)
                                .build();

                        //生产者将结果放到仓库中 可能会超出一点无需处理
                        convertedDataStore.add(fetchDatasDTO);

                        notEmptyCondition.signalAll();
                        logger.debug("notEmptyCondition unlock, after put datas. {}",
                                currentProgressInfo);
                    } finally {
                        lock.unlock();
                        MDC.clear();
                    }
                }

            } catch (SystemException e) {
                if (e.getCause() != null && e.getCause() instanceof RestClientException) {
                    e = new SystemException("progressId:" + progressId + currentProgressInfo + " invoke rpc failed, " +
                            "please check the rpc url is available. ", e.getCause());
                }
                return new TaskResult(false, e);
            } catch (Exception e) {
                e = new SystemException("progressId:" + progressId + currentProgressInfo + " " + e.getMessage(), e);
                return new TaskResult(false, e);
            } finally {
                latch.countDown();
            }

            return new TaskResult(true);
        }

    }


    /**
     * 多线程查询优化Task-处理剩下的数据
     *
     * <p>Producer task that starts at the page just past {@code total / pageSize}
     * and keeps fetching forward, so rows inserted while the main paged tasks were
     * running are still picked up. Converted pages are published into
     * {@code convertedDataStore} under {@code lock}, coordinating with consumers via
     * {@code notFullCondition} / {@code notEmptyCondition}. Intentionally a
     * non-static inner class: it reads many fields of the enclosing iterator
     * (fetcher state, lock, conditions, counters).
     */
    private class RemainTask implements Callable<TaskResult> {

        // Estimated total row count when the task was created; fixed for the task's lifetime.
        private final long total;

        // Counted down in the finally block so the coordinator can await all producers.
        private final CountDownLatch latch;

        public RemainTask(long total, CountDownLatch latch) {
            this.total = total;
            this.latch = latch;
        }

        /**
         * Fetches remaining pages until an empty page is returned or {@code fetching}
         * is switched off; never throws — failures are wrapped into a failed
         * {@link TaskResult}.
         */
        @Override
        public TaskResult call() {
            MDC.put(Constants.REQUEST_ID, String.valueOf(progressId));
            //当前进度信息，用于记录日志 (current progress string, used only for log output)
            String currentProgressInfo = "";

            try {

                // NOTE(review): unlike the paged task (which re-creates these after each
                // publish), both gather objects are reused across loop iterations, so the
                // counts they carry are cumulative and the same instances are shared with
                // every published DTO — confirm downstream consumers expect that.
                StreamFetchGatherInfo gatherInfo = new StreamFetchGatherInfo();
                StreamFetchGatherInfo correctStreamFetchGatherInfo = new StreamFetchGatherInfo();

                // 最后一页 — first page index past the data covered by the paged tasks.
                // Primitive long avoids the accidental autoboxing of the old Long local.
                final long lastPageNo = total / pageSize + 1;

                int pageNo = (int) lastPageNo;

                while (true) {

                    //页数自增，防止导入期间有新增的数据 (advance the page number so rows added during the import are fetched)
                    final int currentPageNo = pageNo++;

                    //当前进度信息
                    currentProgressInfo = getProgressInfo(total, currentPageNo, pageSize);

                    if (!fetching) {
                        logger.info("job interrupted or pause, thread stop ! {}",
                                currentProgressInfo);
                        break;
                    }

                    // Time only the RPC call itself; accumulated into the shared counter.
                    final long rpcBegin = System.currentTimeMillis();
                    PageDTO<E> page = getPageData(INIT_INVOKE_COUNT_VALUE, currentPageNo, pageSize, null);
                    rpcCostTime.addAndGet(System.currentTimeMillis() - rpcBegin);

                    //RPC抓取的数据 (rows fetched via RPC; null page treated the same as an empty one)
                    List<E> datas = (page == null) ? null : page.getDatas();

                    if (datas == null || datas.isEmpty()) {
                        logger.info("remain task datas is empty ,current thread stop ! {}",
                                currentProgressInfo);
                        break;
                    }

                    final int fetchCount = datas.size();

                    //数据转换 (convert raw rows to index row data)
                    List<R> convertedDatas = dataConvertFunction.apply(datas, meta);

                    //手动置空 (drop the raw page reference early so it can be collected)
                    datas = null;

                    lock.lock();
                    try {
                        // Back-pressure: wait while the shared store is at capacity.
                        // If fetching is flipped off mid-wait we fall through and still
                        // publish this page; the store may briefly exceed capacity.
                        while (fetching && convertedDataStore.size() >= defaultFetchCachePageCapacity) {
                            logger.debug("notFullCondition lock, convertedDataStore is full ,size :{} {}",
                                    convertedDataStore.size(), currentProgressInfo);
                            notFullCondition.await();
                        }

                        // 预估总记录大小永远取较大值 (the estimated total only ever grows)
                        if (gatherInfo.getEstimateTotalCount() < page.getTotal()) {
                            gatherInfo.setEstimateTotalCount(page.getTotal());
                        }

                        gatherInfo.addFetchCount(fetchCount);
                        correctStreamFetchGatherInfo.addFetchCount(fetchCount);

                        FetchDatasDTO<R> fetchDatasDTO = FetchDatasDTO.newBuilder().indexDatas(convertedDatas)
                                .fetchDatasProgress(FetchDatasDTO.FetchDatasProgress.newBuilder()
                                        .total(total).pageNo(currentPageNo).pageSize(pageSize)
                                        .threadNum(threadNum).threadSeq(IndexBuildConstants.DEFAULT_THREAD_SEQ)
                                        .queryOptimize(QueryOptimizeEnum.ENABLE.code())
                                        .loopCount(NULL_LOOP_COUNT).streamFetchGatherInfo(gatherInfo)
                                        .correctStreamFetchGatherInfo(correctStreamFetchGatherInfo)
                                        .indexDatasSize(fetchCount).build()).build();

                        //生产者将结果放到仓库中 (producer publishes the page into the store)
                        convertedDataStore.add(fetchDatasDTO);

                        notEmptyCondition.signalAll();
                        logger.debug("notEmptyCondition unlock, after put datas. {}",
                                currentProgressInfo);

                    } finally {
                        lock.unlock();
                    }

                    logger.info(" progressId:{} fetch current page complete ! {} fetchCount:{} ",
                            progressId, currentProgressInfo, fetchCount);
                }

            } catch (InterruptedException e) {
                // Restore the interrupt flag so the executor/owner can observe the
                // interruption; the old code silently lost it in the generic catch.
                Thread.currentThread().interrupt();
                return new TaskResult(false, new SystemException(
                        "progressId:" + progressId + currentProgressInfo + " " + e.getMessage(), e));
            } catch (SystemException e) {
                // instanceof is null-safe, so no separate null check on getCause() is needed.
                if (e.getCause() instanceof RestClientException) {
                    e = new SystemException("progressId:" + progressId + currentProgressInfo + " invoke rpc failed, " +
                            "please check the rpc url is available. ",
                            e.getCause());
                }
                return new TaskResult(false, e);
            } catch (Exception e) {
                // Wrap with progress context but keep the original as the cause.
                return new TaskResult(false, new SystemException(
                        "progressId:" + progressId + currentProgressInfo + " " + e.getMessage(), e));
            } finally {
                latch.countDown();
                MDC.clear();
            }

            return new TaskResult(true);

        }
    }


    /**
     * 多线程返回的结果 (result of a single fetch task).
     *
     * <p>Immutable value holder: either a success marker, or a failure carrying the
     * {@link Throwable} that aborted the task so the coordinator can log/rethrow it.
     */
    private static class TaskResult {

        // Whether the task ran to completion without error.
        private final boolean success;

        // Cause of failure; null when success is true.
        private final Throwable throwable;

        public TaskResult(boolean success) {
            // Delegate so both constructors share one initialization path.
            this(success, null);
        }

        public TaskResult(boolean success, Throwable throwable) {
            this.success = success;
            this.throwable = throwable;
        }

        public boolean isSuccess() {
            return success;
        }

        /** @return the failure cause, or null for a successful result */
        public Throwable getThrowable() {
            return throwable;
        }
    }

}
