package com.qianbao.finance.sync.modules.elasticsearch.service.impl;

import com.google.common.collect.Maps;

import com.qianbao.finance.sync.modules.elasticsearch.dao.BaseDao;
import com.qianbao.finance.sync.modules.elasticsearch.model.DatabaseTableModel;
import com.qianbao.finance.sync.modules.elasticsearch.model.IndexTypeModel;
import com.qianbao.finance.sync.modules.elasticsearch.model.request.SyncByTableRequest;
import com.qianbao.finance.sync.modules.elasticsearch.service.BaseDaoService;
import com.qianbao.finance.sync.modules.elasticsearch.service.ElasticsearchService;
import com.qianbao.finance.sync.modules.elasticsearch.service.MappingService;
import com.qianbao.finance.sync.modules.elasticsearch.service.SyncService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import javax.annotation.Resource;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.sql.Timestamp;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;


@Service
public class SyncServiceImpl implements SyncService, InitializingBean, DisposableBean {
    private static final Logger logger = LoggerFactory.getLogger(SyncServiceImpl.class);
    /**
     * Thread pool that bounds the number of concurrent full-table sync jobs.
     * Created in {@link #afterPropertiesSet()}, shut down in {@link #destroy()}.
     */
    private ExecutorService cachedThreadPool;
    @Resource
    private BaseDaoService baseDaoService;

    @Resource
    private ElasticsearchService elasticsearchService;

    @Resource
    private MappingService mappingService;

    /**
     * Asynchronously syncs an entire table into Elasticsearch by walking the primary-key
     * range [minPK, maxPK) in windows of {@code request.getStepSize()}.
     *
     * @param request database / table / step-size descriptor
     * @return always {@code true}; the actual sync runs on {@link #cachedThreadPool}
     * @throws IllegalArgumentException when no index/type mapping is configured for the table
     */
    @Override
    public boolean syncByTable(SyncByTableRequest request) {
        IndexTypeModel indexTypeModel = mappingService.getIndexType(new DatabaseTableModel(request.getDatabase(), request.getTable()));
        // Fail fast before doing any further lookups or submitting work.
        if (indexTypeModel == null) {
            throw new IllegalArgumentException(String.format("配置文件中缺失database=%s和table=%s所对应的index和type的映射配置", request.getDatabase(), request.getTable()));
        }
        String primaryKey = Optional.ofNullable(mappingService.getTablePrimaryKeyMap().get(request.getDatabase() + "." + request.getTable())).orElse("id");

        cachedThreadPool.submit(() -> {
            try {
                long maxPK = baseDaoService.selectMaxPK(primaryKey, request.getDatabase(), request.getTable());
                long minPK = baseDaoService.selectMinPK(primaryKey, request.getDatabase(), request.getTable());
                for (long i = minPK; i < maxPK; ) {
                    i = batchInsertElasticsearch(request, primaryKey, i, request.getStepSize(), indexTypeModel);
                    // BigDecimal.valueOf(i).multiply(...) avoids the silent long overflow of
                    // the old `new BigDecimal(i * 100)` for large primary keys; parameterized
                    // logging avoids eager String.format on every batch.
                    logger.info("当前同步pk={}，总共total={}，进度={}%", i, maxPK,
                            BigDecimal.valueOf(i).multiply(BigDecimal.valueOf(100))
                                    .divide(BigDecimal.valueOf(maxPK), 3, RoundingMode.HALF_UP));
                    mappingService.updateSyncMappingConfig(request.getDatabase(), request.getTable(), request.getStepSize());
                }
            } catch (Exception e) {
                logger.error("批量转换并插入Elasticsearch异常", e);
            }
        });
        return true;
    }

    /**
     * Loads one primary-key window starting at {@code from}, converts JDBC types, and
     * bulk-inserts the rows into Elasticsearch.
     *
     * @param request        database/table descriptor
     * @param primaryKey     name of the table's primary-key column
     * @param from           first primary-key value of this window (inclusive)
     * @param stepSize       window width
     * @param indexTypeModel target Elasticsearch index/type
     * @return the next cursor value: max PK seen in this batch + 1; for an empty window
     *         at least {@code from + stepSize}, so the caller's loop always advances
     */
    @Transactional(readOnly = true, rollbackFor = Exception.class)
    @Override
    public long batchInsertElasticsearch(SyncByTableRequest request, String primaryKey, long from, long stepSize, IndexTypeModel indexTypeModel) {
        List<Map<String, Object>> dataList = baseDaoService.selectByPKIntervalLockInShareModeStep(primaryKey, from, stepSize, request.getDatabase(), request.getTable());
        dataList = convertDateType(dataList, primaryKey);
        // convertDateType appends a sentinel entry carrying the batch's max PK; pop it off.
        long maxIdValue = Long.parseLong(dataList.remove(dataList.size() - 1).get("maxId").toString()) + 1;
        if (dataList.isEmpty()) {
            // Empty window: the sentinel holds its initial value (-10000), which would move
            // the cursor backwards and hang the caller's loop forever. Skip the window instead.
            return Math.max(maxIdValue, from + stepSize);
        }
        // Sequential stream: PKs are unique so there are no merge conflicts, and the
        // per-row work is far too cheap to benefit from parallelism.
        Map<String, Map<String, Object>> dataMap = dataList.stream()
                .collect(Collectors.toMap(row -> String.valueOf(row.get(primaryKey)), row -> row));
        elasticsearchService.batchInsertById(indexTypeModel.getIndex(), indexTypeModel.getType(), dataMap);
        return maxIdValue;
    }

    /**
     * Upserts a single row (looked up by primary key) into Elasticsearch.
     *
     * @param request database / table / key descriptor
     * @return always {@code true}
     * @throws IllegalArgumentException when no index/type mapping is configured for the table
     */
    @Override
    public boolean syncUnique(SyncByTableRequest request) {
        IndexTypeModel indexTypeModel = mappingService.getIndexType(new DatabaseTableModel(request.getDatabase(), request.getTable()));
        if (indexTypeModel == null) {
            throw new IllegalArgumentException(String.format("配置文件中缺失database=%s和table=%s所对应的index和type的映射配置", request.getDatabase(), request.getTable()));
        }
        String primaryKey = Optional.ofNullable(mappingService.getTablePrimaryKeyMap().get(request.getDatabase() + "." + request.getTable())).orElse("id");
        Map<String, Object> valueMap = baseDaoService.selectByPK(primaryKey, request.getKey(), request.getDatabase(), request.getTable());
        elasticsearchService.upsertById(indexTypeModel.getIndex(), indexTypeModel.getType(), request.getKey(), valueMap);
        mappingService.updateSyncMappingConfig(request.getDatabase(), request.getTable(), 1);
        return true;
    }

    /**
     * Converts JDBC {@link Timestamp} values to {@link LocalDateTime} in place and appends
     * a sentinel map {@code {"maxId" -> max primary-key value seen}} to the end of the list
     * (initial value -10000 when the list is empty).
     *
     * @param source     rows fetched from the database; mutated in place
     * @param primaryKey name of the primary-key column to track the maximum of
     * @return the same list with the sentinel entry appended
     */
    private List<Map<String, Object>> convertDateType(List<Map<String, Object>> source, String primaryKey) {
        // AtomicLong replaces the old plain-HashMap accumulator that was mutated from a
        // parallelStream — an unsynchronized read-modify-write data race that could lose
        // the true max PK and re-process or skip rows.
        AtomicLong maxId = new AtomicLong(-10000L);
        source.forEach(map -> map.forEach((key, value) -> {
            if (value instanceof Timestamp) {
                // put() of an existing key is not a structural modification, so this is
                // safe during forEach.
                map.put(key, LocalDateTime.ofInstant(((Timestamp) value).toInstant(), ZoneId.systemDefault()));
            }
            if (key.equals(primaryKey) && value != null) {
                maxId.accumulateAndGet(Long.parseLong(value.toString()), Math::max);
            }
        }));
        Map<String, Object> maxIdMap = Maps.newHashMap();
        maxIdMap.put("maxId", maxId.get());
        source.add(maxIdMap);
        return source;
    }

    /**
     * Builds the sync thread pool with named worker threads (the old
     * {@code (ThreadFactory) Thread::new} produced anonymous "Thread-N" threads that are
     * hard to attribute in thread dumps).
     *
     * NOTE(review): with an unbounded LinkedBlockingQueue the max pool size (100) never
     * takes effect — the pool stays at its 10 core threads. Kept as-is to preserve the
     * existing queuing behavior; switch to a bounded queue if bursting to 100 is intended.
     */
    @Override
    public void afterPropertiesSet() throws Exception {
        AtomicLong threadCounter = new AtomicLong();
        ThreadFactory threadFactory = runnable ->
                new Thread(runnable, "es-sync-" + threadCounter.incrementAndGet());
        cachedThreadPool = new ThreadPoolExecutor(10, 100, 0L, TimeUnit.MILLISECONDS,
                new LinkedBlockingQueue<>(), threadFactory);
    }

    /**
     * Shuts the pool down gracefully, then forces termination if in-flight batches do not
     * finish within the grace period (plain shutdown() alone could leave the JVM waiting
     * on a long-running sync forever).
     */
    @Override
    public void destroy() throws Exception {
        if (cachedThreadPool != null) {
            cachedThreadPool.shutdown();
            if (!cachedThreadPool.awaitTermination(30, TimeUnit.SECONDS)) {
                cachedThreadPool.shutdownNow();
            }
        }
    }
}
