package com.gxy.learn.backup.task;

import com.alibaba.fastjson.JSONObject;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.gxy.learn.backup.entity.primary.ConsumptionPrimary;
import com.gxy.learn.backup.entity.second.ConsumptionSecond;
import com.gxy.learn.backup.mapper.primary.ConsumptionPrimaryMapper;
import com.gxy.learn.backup.mapper.second.ConsumptionSecondMapper;
import com.gxy.learn.backup.vo.BatchDelConsumptionVO;
import com.gxy.learn.backup.vo.BatchInsertConsumptionPrimaryVO;
import com.gxy.learn.backup.vo.ConfigVO;
import org.apache.ibatis.session.ExecutorType;
import org.apache.ibatis.session.SqlSession;
import org.mybatis.spring.SqlSessionTemplate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;

import javax.annotation.Resource;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.stream.Collectors;

/**
 * Backs up consumption data from the secondary data source into the primary
 * data source, month by month, then purges the copied rows from the source.
 * The destination tables should preferably have no foreign keys.
 *
 * <p>Only one tenant year-month is processed at a time, enforced by a
 * capacity-1 blocking queue acting as a gate.
 */
@Component
public class BackupDataTask {
    private static final Logger log = LoggerFactory.getLogger(BackupDataTask.class);

    @Resource
    private ConsumptionSecondMapper secondMapper;
    @Resource
    private ConsumptionPrimaryMapper primaryMapper;

    @Autowired
    @Qualifier("sqlSessionTemplate")
    private SqlSessionTemplate sqlSessionTemplate;
    @Autowired
    @Qualifier("sqlSessionTemplateSecond")
    private SqlSessionTemplate sqlSessionTemplateSecond;

    /** Rows fetched per page when reading from the source table. */
    private static final int PAGE_SIZE = 2000;
    /** Ids deleted per batch statement during the purge phase. */
    private static final int DEL_BATCH_SIZE = 5000;

    // Unbounded, thread-safe queue of ids that were copied; drained by the purge phase.
    private final ConcurrentLinkedQueue<Long> idsQueue = new ConcurrentLinkedQueue<>();
    // Capacity-1 gate: only one year-month task may be in flight at a time.
    private final BlockingQueue<String> yearMonthGate = new ArrayBlockingQueue<>(1);

    // Progress counters shared across worker threads.
    private final AtomicInteger countAll = new AtomicInteger();
    private final AtomicInteger countRead = new AtomicInteger();
    private final AtomicInteger countInsert = new AtomicInteger();
    private final AtomicInteger countDel = new AtomicInteger();

    // Serialize batch work: only one BATCH SqlSession per data source at a time.
    private final Lock insertMethodLock = new ReentrantLock();
    private final Lock delMethodLock = new ReentrantLock();

    /**
     * Adds {@code num} to the overall processed-row counter.
     *
     * @param num number of rows to add
     */
    void countAll(int num) {
        countAll.addAndGet(num);
    }

    /**
     * Adds {@code num} to the read-row counter.
     *
     * @param num number of rows to add
     */
    void countRead(int num) {
        countRead.addAndGet(num);
    }

    /**
     * Adds {@code num} to the inserted-row counter.
     *
     * @param num number of rows to add
     */
    void countInsert(int num) {
        countInsert.addAndGet(num);
    }

    /**
     * Adds {@code num} to the deleted-row counter.
     *
     * @param num number of rows to add
     */
    void countDel(int num) {
        countDel.addAndGet(num);
    }

    /**
     * Task entry point: iterates over every configured tenant/year/month
     * (years in descending order), copies that month's data page by page into
     * the primary store, then purges the copied rows from the source.
     *
     * @param configVO configuration holding tenant -> year -> months to process
     */
    public void doConsumptionTaskByYearAndMonth(ConfigVO configVO) {
        long allStart = System.currentTimeMillis();
        // These lists are appended to from pool worker threads, so they must be synchronized.
        List<String> errReadList = Collections.synchronizedList(new ArrayList<>());
        List<String> errWriteList = Collections.synchronizedList(new ArrayList<>());
        for (Map.Entry<String, Map<String, List<String>>> entry : configVO.getTenantYearMonths().entrySet()) {
            String tenantNo = entry.getKey();
            List<String> years = new ArrayList<>(entry.getValue().keySet());
            // Most recent years first; numeric comparison avoids lexicographic surprises.
            years.sort(Comparator.<String>comparingInt(Integer::parseInt).reversed());
            for (String year : years) {
                for (String month : entry.getValue().get(year)) {
                    String backupDate = String.format("%s-%s", year, month);
                    try {
                        // Blocks until the previous month's purge released the gate.
                        yearMonthGate.put(backupDate);
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt();
                        log.info("err", e);
                    }
                    countRead.set(0);
                    countInsert.set(0);
                    long monthStart = System.currentTimeMillis();
                    log.info("开始执行租户[{}]的[{}]任务", tenantNo, backupDate);
                    ThreadFactory consumptionThreadFactory = new ThreadFactoryBuilder().setNameFormat(backupDate + "-ci-%d").build();
                    // maximumPoolSize must not exceed the connection pool's minimum idle size
                    // (spring.datasource.druid.min-idle), otherwise "get connection timeout retry : 1" is thrown.
                    ExecutorService consumptionExecutor = new ThreadPoolExecutor(10, 20,
                            0L, TimeUnit.MILLISECONDS,
                            new LinkedBlockingQueue<>(10), consumptionThreadFactory,
                            (r, executor) -> {
                                // Back-pressure: block the submitting thread until the queue has room.
                                if (!executor.isShutdown()) {
                                    try {
                                        executor.getQueue().put(r);
                                        log.warn("当前队列阻塞中。。。");
                                    } catch (InterruptedException e) {
                                        Thread.currentThread().interrupt();
                                    }
                                }
                            });
                    // Submit pages until a worker finds an empty page and shuts the pool down.
                    for (int pageNum = 1; pageNum < Integer.MAX_VALUE; pageNum++) {
                        if (consumptionExecutor.isShutdown()) {
                            break;
                        }
                        int finalPageNum = pageNum;
                        consumptionExecutor.execute(() -> {
                            long start = System.currentTimeMillis();
                            int offset = (finalPageNum - 1) * PAGE_SIZE;
                            List<ConsumptionSecond> consumptions = new ArrayList<>();
                            int dataSize = 0;
                            try {
                                consumptions = secondMapper.findByTableNameAndReceivingUnitDateYearAndMonth(tenantNo, backupDate, PAGE_SIZE, offset);
                                dataSize = consumptions.size();
                                log.info("读取租户[{}]的[{}][{}]条数据,当前pageNum = {} offset = {},耗时：{} ", tenantNo, backupDate, dataSize, finalPageNum, offset, System.currentTimeMillis() - start);
                            } catch (Exception e) {
                                log.error("读取租户[{}]的[{}]数据失败！pageNum = {},offset = {}", tenantNo, backupDate, finalPageNum, offset, e);
                                errReadList.add(String.format("[%s],[%s],[%d],[%d]", tenantNo, backupDate, finalPageNum, offset));
                            }
                            if (CollectionUtils.isEmpty(consumptions)) {
                                // An empty page means this month is exhausted: stop accepting new pages.
                                consumptionExecutor.shutdown();
                                return;
                            }
                            start = System.currentTimeMillis();
                            // Remember the source ids for the purge phase, and convert to the primary entity.
                            List<ConsumptionPrimary> insertList = consumptions.stream().map(m -> {
                                idsQueue.add(m.getId());
                                ConsumptionPrimary consumptionPrimary = new ConsumptionPrimary();
                                BeanUtils.copyProperties(m, consumptionPrimary);
                                return consumptionPrimary;
                            }).collect(Collectors.toList());
                            log.info("转换租户[{}]的[{}][{}]条数据成功耗时：{}", tenantNo, backupDate, dataSize, System.currentTimeMillis() - start);
                            countRead(dataSize);
                            countAll(dataSize);
                            batchInsert(tenantNo, backupDate, insertList);
                        });
                    }
                    try {
                        // Wait for every copy task of this month to finish.
                        while (!consumptionExecutor.awaitTermination(2, TimeUnit.SECONDS)) {
                            // keep waiting
                        }
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt();
                        log.info("err", e);
                    }
                    log.info("执行租户[{}]的[{}]的任务成功,共读取到[{}]条数据，插入[{}]条数据,耗时：{}", tenantNo, backupDate, countRead.get(), countInsert.get(), System.currentTimeMillis() - monthStart);
                    // Purge the rows that were just copied.
                    doConsumptionDelTask(tenantNo, backupDate);
                    log.info("执行租户[{}]的[{}]任务成功,共读取[{}]条数据，共清理[{}]条数据，耗时：{}", tenantNo, backupDate, countRead.get(), countDel.get(), System.currentTimeMillis() - monthStart);
                }
            }
        }
        log.info("所有任务已全部执行,共处理[{}]条数据！,耗时：{}", countAll.get(), System.currentTimeMillis() - allStart);
        if (!CollectionUtils.isEmpty(errReadList)) {
            log.info("在此期间读取失败的任务有{}", JSONObject.toJSON(errReadList));
        }
        if (!CollectionUtils.isEmpty(errWriteList)) {
            log.info("在此期间插入失败的任务有{}", JSONObject.toJSON(errWriteList));
        }
    }

    /**
     * Purge phase: deletes the rows whose ids were queued by the copy phase for
     * the given tenant and year-month, in batches of {@code DEL_BATCH_SIZE}.
     * Always releases the year-month gate before returning.
     *
     * @param tenantNo  tenant identifier
     * @param yearMonth month being processed, formatted "yyyy-MM"
     */
    public void doConsumptionDelTask(String tenantNo, String yearMonth) {
        countDel.set(0);
        if (idsQueue.isEmpty()) {
            log.info("开始执行租户[{}]的[{}]的清理任务成功，共清理[{}]条数据，耗时{}", tenantNo, yearMonth, countDel.get(), 0);
            releaseGate();
            return;
        }
        log.info("idsQueue.size = {}", idsQueue.size());
        log.info("开始执行租户[{}]的[{}]的清理任务", tenantNo, yearMonth);
        ThreadFactory consumptionDelThreadFactory = new ThreadFactoryBuilder().setNameFormat(yearMonth + "-cd-%d").build();
        // maximumPoolSize must not exceed the connection pool's minimum idle size
        // (spring.datasource.druid.min-idle), otherwise "get connection timeout retry : 1" is thrown.
        ExecutorService consumptionDelExecutor = new ThreadPoolExecutor(10, 20,
                0L, TimeUnit.MILLISECONDS,
                new LinkedBlockingQueue<>(10), consumptionDelThreadFactory,
                (r, executor) -> {
                    // Back-pressure: block the submitting thread until the queue has room.
                    if (!executor.isShutdown()) {
                        try {
                            executor.getQueue().put(r);
                            log.warn("当前队列阻塞中。。。");
                        } catch (InterruptedException e) {
                            Thread.currentThread().interrupt();
                            log.error("err", e);
                        }
                    }
                });
        long allStart = System.currentTimeMillis();
        List<Long> priIds = new ArrayList<>(DEL_BATCH_SIZE);
        while (true) {
            Long id = idsQueue.poll();
            if (id != null) {
                // Guard against a null poll result so no null id reaches the delete statement.
                priIds.add(id);
            }
            if (priIds.size() == DEL_BATCH_SIZE || idsQueue.isEmpty()) {
                List<Long> finalPriIds = new ArrayList<>(priIds);
                priIds.clear();
                consumptionDelExecutor.submit(() -> batchDel(tenantNo, yearMonth, finalPriIds));
                if (idsQueue.isEmpty()) {
                    consumptionDelExecutor.shutdown();
                    break;
                }
            }
        }
        try {
            while (!consumptionDelExecutor.awaitTermination(2, TimeUnit.SECONDS)) {
                log.warn("等待租户[{}]的[{}]的清理任务都执行完毕...", tenantNo, yearMonth);
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            log.error("err", e);
        }
        if (countDel.get() == countInsert.get()) {
            log.info("执行租户[{}]的[{}]的清理任务成功，共清理[{}]条数据，耗时{}", tenantNo, yearMonth, countDel.get(), System.currentTimeMillis() - allStart);
        } else {
            // Deleted count differs from inserted count — warn so the mismatch is noticed.
            log.warn("执行租户[{}]的[{}]的清理任务成功，共清理[{}]条数据，耗时{}", tenantNo, yearMonth, countDel.get(), System.currentTimeMillis() - allStart);
        }
        releaseGate();
    }

    /** Releases the capacity-1 year-month gate so the next month can start. */
    private void releaseGate() {
        try {
            yearMonthGate.take();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            log.info("err", e);
        }
    }

    /**
     * Inserts one batch of rows into the primary data source using a BATCH
     * executor session. Serialized by {@code insertMethodLock} so only one
     * batch session is open at a time.
     *
     * @param tenantNo   tenant identifier
     * @param yearMonth  month being processed, formatted "yyyy-MM"
     * @param insertList rows to insert; no-op when empty
     */
    private void batchInsert(String tenantNo, String yearMonth, List<ConsumptionPrimary> insertList) {
        if (CollectionUtils.isEmpty(insertList)) {
            return;
        }
        insertMethodLock.lock();
        SqlSession sqlSession = null;
        try {
            sqlSession = sqlSessionTemplate.getSqlSessionFactory().openSession(ExecutorType.BATCH);
            long start = System.currentTimeMillis();
            sqlSession.insert("com.gxy.learn.mapper.primary.ConsumptionPrimaryMapper.batchInsert", new BatchInsertConsumptionPrimaryVO(tenantNo, insertList));
            sqlSession.commit();
            log.info("插入租户[{}]的[{}][{}]条数据，耗时：{}", tenantNo, yearMonth, insertList.size(), System.currentTimeMillis() - start);
            countInsert(insertList.size());
        } catch (Exception e) {
            log.error("数据插入失败！", e);
            // openSession itself may have failed, so guard before rolling back.
            if (sqlSession != null) {
                sqlSession.rollback();
            }
        } finally {
            insertMethodLock.unlock();
            if (sqlSession != null) {
                sqlSession.close();
            }
        }
    }

    /**
     * Deletes one batch of rows from the secondary data source using a BATCH
     * executor session. Serialized by {@code delMethodLock} so only one batch
     * session is open at a time.
     *
     * @param tenantNo  tenant identifier
     * @param yearMonth month being processed, formatted "yyyy-MM"
     * @param deltList  ids of rows to delete; no-op when empty
     */
    private void batchDel(String tenantNo, String yearMonth, List<Long> deltList) {
        if (CollectionUtils.isEmpty(deltList)) {
            return;
        }
        delMethodLock.lock();
        SqlSession sqlSession = null;
        try {
            sqlSession = sqlSessionTemplateSecond.getSqlSessionFactory().openSession(ExecutorType.BATCH);
            long start = System.currentTimeMillis();
            sqlSession.delete("com.gxy.learn.mapper.second.ConsumptionSecondMapper.batchDel", new BatchDelConsumptionVO(tenantNo, deltList));
            sqlSession.commit();
            log.info("清理租户[{}]的[{}][{}]条数据，耗时：{}", tenantNo, yearMonth, deltList.size(), System.currentTimeMillis() - start);
            countDel(deltList.size());
        } catch (Exception e) {
            log.error("数据清理失败！", e);
            // openSession itself may have failed, so guard before rolling back.
            if (sqlSession != null) {
                sqlSession.rollback();
            }
        } finally {
            delMethodLock.unlock();
            if (sqlSession != null) {
                sqlSession.close();
            }
        }
    }
}