package com.pingan.haofang.searchcloud.common.fetch;

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.pingan.haofang.searchcloud.api.IndexRowData;
import com.pingan.haofang.searchcloud.api.matedata.IndexMeta;
import com.pingan.haofang.searchcloud.common.constants.QueryOptimizeEnum;
import com.pingan.haofang.searchcloud.common.dto.FetchDataRetryDTO;
import com.pingan.haofang.searchcloud.common.dto.FetchDatasDTO;
import com.pingan.haofang.searchcloud.common.dto.PageDTO;
import com.pingan.haofang.searchcloud.common.exception.SystemException;
import com.pingan.haofang.searchcloud.index.constants.IndexBuildConstants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.web.client.RestClientException;

import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.BiFunction;

import static com.pingan.haofang.searchcloud.index.constants.IndexBuildConstants.EXPIRE_IN_DAYS;

/**
 * Streaming result-set fetch iterator.
 *
 * @author LUYI374
 * @date 2017-03-23
 * @since 1.0.0
 */
public class StreamFetchIterator<E, M extends IndexMeta, R extends IndexRowData> extends
        AbstractStreamFetchIterator<FetchDatasDTO<R>> {

    private static final Logger LOG = LoggerFactory.getLogger(StreamFetchIterator.class);

    private static final int DEFAULT_MIN_FETCH_SIZE = 10;

    /**
     * Thread-confined Gson instances with a fixed date format. Declared {@code static final}
     * so a single ThreadLocal is shared by all iterator instances instead of one ThreadLocal
     * being created (and never removed) per instance.
     */
    private static final ThreadLocal<Gson> GSON = ThreadLocal.withInitial(
            () -> new GsonBuilder().setDateFormat("yyyy-MM-dd HH:mm:ss").create());

    /**
     * Current page number; incremented before each fetch in {@link #hasNext()}.
     */
    private volatile int pageNo;

    /**
     * Number of records fetched per page.
     */
    private int pageSize;

    /**
     * Temporary cache: the batch produced by {@link #hasNext()} and consumed by {@link #next()}.
     */
    private volatile FetchDatasDTO<R> cache;

    /**
     * Fetch implementation (RPC-backed page source).
     */
    private StreamFetcher<E> fetcher;

    /**
     * Gather (summary) information for the current batch.
     */
    private StreamFetchGatherInfo gatherInfo;

    /**
     * Whether errors should be ignored.
     */
    private boolean ignoreError;

    /**
     * Minimum number of records to fetch.
     */
    private int minFetchSize = DEFAULT_MIN_FETCH_SIZE;

    /**
     * Total record count, read from redis or from the first fetched page.
     */
    private final long total;

    /**
     * Whether query optimization (last-max-id based paging) is enabled.
     */
    private final QueryOptimizeEnum queryOptimize;

    /**
     * Function converting fetched entities into index row data.
     */
    private BiFunction<List<E>, M, List<R>> dataConvertFunction;

    /**
     * Index metadata handed to the conversion function.
     */
    private M meta;

    private RedisTemplate redisTemplate;

    private String indexSolrProgressKeyPrefix;

    /**
     * Max ID seen in the previous batch; only used when query optimization is enabled.
     */
    private Long lastMaxId;


    private final String indexSolrProgressKey;

    private final Set<String> progresses;

    private final Map<String, String> indexSolrProgressMap;

    private static final int FIRST_PAGE = 1;

    private static final int NULL_LOOP_COUNT = -1;


    private static final int THREAD_NUM_ONE = 1;

    /**
     * Import-success count accumulated from pages that were skipped as already processed.
     */
    private int skipedImportSuccessCount = 0;

    /**
     * Fetched-data count accumulated from pages that were skipped as already processed.
     */
    private int skipedIndexDatasSize = 0;

    /**
     * Corrected gather information, recomputed from the skipped pages' progress records.
     */
    private StreamFetchGatherInfo correctStreamFetchGatherInfo;

    /**
     * Build-progress identifier, used in log and error messages.
     */
    private final Long progressId;

    private String indexField;

    /**
     * Accumulated wall-clock time spent in RPC calls, in milliseconds.
     */
    private AtomicLong rpcCostTime = new AtomicLong(0);

    /**
     * Total number of fetch exceptions accumulated across all queries.
     */
    private final AtomicInteger allRetryTime = new AtomicInteger(0);

    private StreamFetchIterator(StreamFetcher<E> fetcher, int pageSize, BiFunction<List<E>, M, List<R>>
            dataConvertFunction, M meta, RedisTemplate redisTemplate, String indexSolrProgressKeyPrefix,
                                QueryOptimizeEnum queryOptimize, Long progressId, String indexField,
                                FetchDataRetryDTO retryDTO) {
        this.fetcher = fetcher;
        this.pageSize = pageSize;

        this.dataConvertFunction = dataConvertFunction;
        this.meta = meta;
        this.redisTemplate = redisTemplate;
        this.indexSolrProgressKeyPrefix = indexSolrProgressKeyPrefix;
        this.queryOptimize = queryOptimize;
        this.progressId = progressId;
        this.indexField = indexField;
        super.setRetryDTO(retryDTO);

        // Solr build-progress key for the (single) default fetch thread.
        indexSolrProgressKey = indexSolrProgressKeyPrefix + IndexBuildConstants.DEFAULT_THREAD_SEQ;

        progresses = redisTemplate.opsForHash().keys(indexSolrProgressKey);

        indexSolrProgressMap = redisTemplate.opsForHash().entries(indexSolrProgressKey);

        this.gatherInfo = new StreamFetchGatherInfo();
        this.correctStreamFetchGatherInfo = new StreamFetchGatherInfo();

        final String totalKey = indexSolrProgressKeyPrefix + IndexBuildConstants.TOTAL_KEY_SUFFIX;
        // Look up the total record count cached in redis.
        String totalVal = (String) redisTemplate.opsForValue().get(totalKey);

        if (totalVal != null) {
            total = Long.parseLong(totalVal);
        } else {
            try {
                // Not cached yet: fetch the first page via RPC to learn the total.
                PageDTO<E> firstPage = getPageData(INIT_INVOKE_COUNT_VALUE, FIRST_PAGE, pageSize, null);

                if (firstPage == null) {
                    throw new SystemException("progressId:" + progressId + "firstPage is null , please check rpc " +
                            "status.");
                }
                total = firstPage.getTotal();
                // Cache the total so subsequent runs can skip the extra RPC call.
                redisTemplate.opsForValue().set(totalKey, String.valueOf(total), EXPIRE_IN_DAYS, TimeUnit.DAYS);
            } catch (SystemException e) {
                if (e.getCause() instanceof RestClientException) {
                    throw new SystemException("progressId:" + progressId + " invoke rpc failed, please check the rpc " +
                            "url is available. ", e.getCause());
                }
                throw e;
            }
        }
    }

    /**
     * Fetches one page, retrying when the fetch returns null or throws.
     * <p>
     * Gives up once a single page has failed more than {@code singleQueryMaxRetryTime} times,
     * or once the exceptions accumulated across all queries exceed {@code allQueryMaxRetryTime}.
     *
     * @param invokeCount retry count for this page: external callers pass 0; recursive retries
     *                    pass the current value
     * @param pageNo      page number to fetch
     * @param pageSize    page size
     * @param lastMaxId   max ID of the previous batch; may be null, in which case plain
     *                    page-number paging is used
     * @return the fetched page, never null
     * @throws SystemException when the retry limits are exceeded
     */
    private PageDTO<E> getPageData(int invokeCount, int pageNo, int pageSize, Long lastMaxId) {
        PageDTO<E> pageData = null;
        boolean hasException = false;
        // Remember the exception (if any) to distinguish "threw" from "returned null".
        Exception fetchException = null;
        try {
            pageData = lastMaxId == null ? fetcher.fetch(pageNo, pageSize) : fetcher.fetch(pageNo, pageSize, lastMaxId);
            if (pageData == null) {
                hasException = true;
                LOG.warn("progressId: {} , fetched page {} is null , please check rpc status.", progressId, pageNo);
            }
        } catch (Exception e) {
            LOG.error("Fetch Data Exception", e);
            hasException = true;
            fetchException = e;
        }
        if (hasException) {
            invokeCount++;
            int all = allRetryTime.incrementAndGet();
            if (invokeCount > singleQueryMaxRetryTime || all > allQueryMaxRetryTime) {
                LOG.error("The number of attempts exceeded the maximum limit,SINGLE_QUERY_MAX_RETRY_TIME:{}," +
                                "ALL_QUERY_MAX_RETRY_TIME:{}" + "The actual value is invokeCount:{},allRetryTime:{},progressId:{}",
                        singleQueryMaxRetryTime, allQueryMaxRetryTime, invokeCount, allRetryTime, progressId);
                // Retry budget exhausted: abort.
                throw new SystemException("Fetch Data Exception", fetchException);
            } else {
                LOG.warn("There is a problem with the data fetching. The program is retrying it now. " +
                        "invokeCount:{},allRetryTime:{}", invokeCount, allRetryTime);
                try {
                    Thread.sleep(retryTimeInterval);
                } catch (InterruptedException e) {
                    // Restore the interrupt flag so callers can observe the interruption.
                    Thread.currentThread().interrupt();
                    LOG.error("Thread InterruptedException", e);
                }
                return getPageData(invokeCount, pageNo, pageSize, lastMaxId);
            }
        }
        return pageData;
    }

    /**
     * Fetches the next batch and stores it in {@link #cache} for {@link #next()} to hand out.
     * <p>
     * With multi-threaded fetching the producer write happens inside each task; with a single
     * thread it happens while iterating. *Added on 2018-8-16*
     *
     * @return true when a non-empty batch was fetched, false when the source is exhausted
     */
    @Override
    public boolean hasNext() {

        // Current progress description; used only for logging and error messages.
        String currentProgressInfo = "";

        Long curLastMaxId = null;

        try {
            // Skip every page that redis already records as processed; each skipped record
            // also carries the lastMaxId the next real fetch must resume from.
            while (true) {
                pageNo++;
                // Progress key for the current page.
                final String currentProgress = getProgress(NULL_LOOP_COUNT, total, pageNo, pageSize, queryOptimize);

                currentProgressInfo = getProgressInfo(total, pageNo, pageSize);

                if (progresses != null && progresses.contains(currentProgress)) {
                    String fetchDatasProgressStr = indexSolrProgressMap.get(currentProgress);

                    // Deserialized progress record; fromJson may return null for an empty value.
                    FetchDatasDTO.FetchDatasProgress fetchDatasProgress = GSON.get().fromJson(
                            fetchDatasProgressStr, FetchDatasDTO.FetchDatasProgress.class);
                    if (fetchDatasProgress != null) {
                        // Resume point for the next real fetch.
                        lastMaxId = fetchDatasProgress.getNextLastMaxId();
                    }

                    // ofNullable (not of): fetchDatasProgress can legitimately be null here,
                    // in which case all skipped counters default to 0.
                    Integer successCount = Optional.ofNullable(fetchDatasProgress).map(FetchDatasDTO
                            .FetchDatasProgress::getStreamFetchGatherInfo).map
                            (StreamFetchGatherInfo::getSuccessCount).map(Long::intValue).orElse(0);

                    // Solr import-success count recorded for the skipped page.
                    Integer lastImportSuccessCount = Optional.ofNullable(fetchDatasProgress).map(FetchDatasDTO
                            .FetchDatasProgress::getImportSuccessCount).orElse(0);

                    Integer lastIndexDatasSize = Optional.ofNullable(fetchDatasProgress).map(FetchDatasDTO
                            .FetchDatasProgress::getIndexDatasSize).orElse(0);

                    skipedImportSuccessCount += lastImportSuccessCount;

                    skipedIndexDatasSize += lastIndexDatasSize;

                    correctStreamFetchGatherInfo.addFetchCount(successCount);

                    LOG.info("progressId:{} Skip current page! pageNo: {} pageSize:{} successCount:{} " +
                                    "currentProgress:{} ",
                            progressId, pageNo, pageSize, successCount, currentProgress);
                } else {
                    break;
                }
            }


            PageDTO<E> page;

            final Long currentLastMaxId = lastMaxId;

            curLastMaxId = currentLastMaxId;

            long rpcBegin = System.currentTimeMillis();
            if (currentLastMaxId == null) {
                page = getPageData(INIT_INVOKE_COUNT_VALUE, pageNo, pageSize, null);
            } else {
                // Optimized paging always requests "page 1 after currentLastMaxId".
                page = getPageData(INIT_INVOKE_COUNT_VALUE, FIRST_PAGE, pageSize, currentLastMaxId);
            }
            rpcCostTime.addAndGet(System.currentTimeMillis() - rpcBegin);

            // Data fetched over RPC.
            List<E> datas = (page == null) ? null : page.getDatas();

            if (datas == null || datas.isEmpty()) {
                LOG.info("progressId:{} datas is empty ,fetch datas stop ! {}", progressId, currentProgressInfo);
                return false;
            }

            // The estimated total always keeps the larger reported value.
            if (gatherInfo.getEstimateTotalCount() < page.getTotal()) {
                gatherInfo.setEstimateTotalCount(page.getTotal());
            }

            final int fetchCount = datas.size();

            // Convert the raw entities into index row data.
            List<R> convertedDatas = dataConvertFunction.apply(datas, meta);

            // Drop the raw list eagerly; it can be large.
            datas = null;

            if (QueryOptimizeEnum.ENABLE.equals(queryOptimize)) {
                lastMaxId = getLastMaxId(convertedDatas, meta, indexField);
            }

            gatherInfo.addFetchCount(fetchCount);

            correctStreamFetchGatherInfo.addFetchCount(convertedDatas.size());

            final FetchDatasDTO<R> fetchDatasDTO = FetchDatasDTO.newBuilder().indexDatas(convertedDatas)
                    .fetchDatasProgress(FetchDatasDTO.FetchDatasProgress.newBuilder()
                            .total(total).pageNo(pageNo).pageSize(pageSize)
                            .threadNum(THREAD_NUM_ONE).threadSeq(IndexBuildConstants.DEFAULT_THREAD_SEQ)
                            .currentLastMaxId(currentLastMaxId)
                            .nextLastMaxId(lastMaxId).queryOptimize(queryOptimize.code())
                            .streamFetchGatherInfo(gatherInfo)
                            .correctStreamFetchGatherInfo(correctStreamFetchGatherInfo)
                            .loopCount(NULL_LOOP_COUNT).indexDatasSize(convertedDatas.size()).build())
                    .skipedImportSuccessCount(skipedImportSuccessCount)
                    .skipedIndexDatasSize(skipedIndexDatasSize)
                    .build();

            cache = fetchDatasDTO;

            // Reset the per-batch accumulators.
            gatherInfo = new StreamFetchGatherInfo();
            correctStreamFetchGatherInfo = new StreamFetchGatherInfo();
            skipedImportSuccessCount = 0;
            skipedIndexDatasSize = 0;

            LOG.info("progressId:{} fetch current page complete ! {} lastMaxId:{} fetchCount:{}  ",
                    progressId, currentProgressInfo, currentLastMaxId, fetchCount);

            return true;

        } catch (SystemException e) {
            LOG.error(e.getMessage(), e);
            if (e.getCause() instanceof RestClientException) {
                throw new SystemException("progressId:" + progressId + " invoke rpc failed, please check the rpc " +
                        "url is available. ", e.getCause());
            }
            throw e;
        } catch (Exception e) {
            LOG.error(e.getMessage(), e);
            throw new SystemException("progressId:" + progressId + currentProgressInfo + "lastMaxId:+" + curLastMaxId
                    + " " + e.getMessage(), e);
        }

    }

    /**
     * Returns the batch prepared by the last {@link #hasNext()} call and clears the cache.
     */
    @Override
    public FetchDatasDTO<R> next() {
        final FetchDatasDTO<R> temp = cache;
        cache = null;
        return temp;
    }


    @Override
    public void setIgnoreError(boolean ignoreError) {
        this.ignoreError = ignoreError;
    }

    /**
     * Stops iteration by discarding any cached batch.
     */
    @Override
    public void stop() {
        cache = null;
    }

    /**
     * @return accumulated RPC call time in milliseconds
     */
    @Override
    public Long getRpcCostTime() {
        return rpcCostTime.get();
    }

    /**
     * Creates an iterator instance.
     *
     * @param pageSize                   records per page
     * @param fetcher                    RPC page fetcher
     * @param dataConvertFunction        converts fetched entities to index rows
     * @param meta                       index metadata
     * @param redisTemplate              redis access for progress/total bookkeeping
     * @param indexSolrProgressKeyPrefix prefix of the Solr build-progress redis key
     * @param queryOptimize              whether last-max-id paging is enabled
     * @param progressId                 build-progress identifier for logging
     * @param indexField                 field used to compute the last max ID
     * @param retryDTO                   retry configuration
     * @param <E>                        fetched entity type
     * @param <M>                        index metadata type
     * @param <R>                        index row data type
     * @return a new iterator
     */
    public static <E, M extends IndexMeta, R extends IndexRowData> StreamFetchIterator<E, M, R> iterator
    (int pageSize, StreamFetcher<E> fetcher, BiFunction<List<E>, M, List<R>> dataConvertFunction, M meta,
     RedisTemplate redisTemplate, String indexSolrProgressKeyPrefix, QueryOptimizeEnum queryOptimize, Long progressId,
     String indexField, FetchDataRetryDTO retryDTO) {
        return new StreamFetchIterator<E, M, R>(fetcher, pageSize, dataConvertFunction,
                meta, redisTemplate, indexSolrProgressKeyPrefix, queryOptimize, progressId, indexField, retryDTO);
    }


    public int getPageSize() {
        return pageSize;
    }


}