package com.zipking.cloud.springbootmybatis.service.impl;


import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.date.StopWatch;
import com.alibaba.excel.EasyExcel;
import com.alibaba.excel.ExcelWriter;
import com.alibaba.excel.write.metadata.WriteSheet;
import com.alibaba.fastjson.JSON;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.core.toolkit.CollectionUtils;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.zipking.cloud.springbootmybatis.persistance.entity.MsgLog;
import com.zipking.cloud.springbootmybatis.persistance.mapper.MsgLogMapper;
import com.zipking.cloud.springbootmybatis.service.IMsgLogService;
import com.zipking.cloud.springbootmybatis.util.AsyncBatchProcessorService;
import com.zipking.cloud.springbootmybatis.util.CompletableFutureUtil;
import com.zipking.cloud.springbootmybatis.util.ListUtils;
import com.zipking.cloud.springbootmybatis.util.MultiThread;
import com.zipking.cloud.springbootmybatis.util.excel.BaseExportResultHandler;
import com.zipking.cloud.springbootmybatis.util.excel.ExportListener;
import com.zipking.cloud.springbootmybatis.util.excel.MyExcelUtil;
import lombok.extern.slf4j.Slf4j;
import org.apache.poi.ss.formula.functions.T;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.domain.PageRequest;
import org.springframework.scheduling.annotation.Async;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import org.springframework.stereotype.Service;

import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletResponse;
import javax.smartcardio.Card;
import java.io.IOException;
import java.net.URLEncoder;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.FutureTask;

/**
 * <p>
 * 消息投递日志 服务实现类
 * </p>
 *
 * @author kingbird
 * @since 2024-04-04
 */
@Slf4j
@Service
public class MsgLogServiceImpl extends ServiceImpl<MsgLogMapper, MsgLog> implements IMsgLogService
{
    // Shared executor for batched/parallel query work; pool sizing is configured
    // elsewhere (a fixed-size pool caps concurrency; excess tasks queue up).
    @Autowired
    private ThreadPoolTaskExecutor queryBatchPool;
    //private ExecutorService executor = Executors.newFixedThreadPool(10);  // alternative: plain JDK fixed thread pool
    @Autowired
    private   MsgLogMapper msgLogMapper;
    // Raw datasource settings, used by jdbcBatchInser to open a plain JDBC
    // connection (bypasses MyBatis for bulk inserts).
    @Value("${spring.datasource.url}")
    private String sqlurl;
    @Value("${spring.datasource.username}")
    private String username;
    @Value("${spring.datasource.password}")
    private String password;
    // Helper used by getDataListByBatch to process a large list in fixed-size
    // batches — presumably asynchronously; see AsyncBatchProcessorService.
    @Autowired
    private AsyncBatchProcessorService asyncBatchProcessorService;
    /**
     * Bulk-inserts rows into msg_log using raw JDBC batching (flushing every
     * 10k rows) and a single commit at the end.
     *
     * <p>Fixes vs the original:
     * <ul>
     *   <li>try-with-resources closes the Connection/PreparedStatement on every
     *       path (the original leaked both on any mid-batch SQLException, and
     *       threw NPE when getConnection itself failed);</li>
     *   <li>the flush condition was {@code i % 10000 == 0}, which executed an
     *       empty batch at i == 0; it now flushes after each full 10k rows;</li>
     *   <li>timestamp columns are bound with java.sql.Timestamp —
     *       java.sql.Date silently drops the time-of-day portion.</li>
     * </ul>
     *
     * @param electronCodeInfoList rows to insert
     */
    @Override
    public void jdbcBatchInser(List<MsgLog> electronCodeInfoList) {
        String sql = " INSERT INTO msg_log ( msg_id, msg, exchange, routing_key, status, try_count, next_try_time, create_time, update_time ) VALUES ( ?, ?, ?, ?, ?, ?, ?, ?, ? )";
        StopWatch s = new StopWatch();
        s.start();
        try (Connection connection = DriverManager.getConnection(sqlurl, username, password);
             PreparedStatement pst = connection.prepareStatement(sql)) {
            // Commit manually so the whole insert is one transaction.
            connection.setAutoCommit(false);
            log.info("总条数:{}", electronCodeInfoList.size());
            java.sql.Timestamp now = new java.sql.Timestamp(System.currentTimeMillis());
            for (int i = 0; i < electronCodeInfoList.size(); i++) {
                MsgLog row = electronCodeInfoList.get(i);
                pst.setString(1, row.getMsgId());
                pst.setString(2, row.getMsg());
                pst.setString(3, row.getExchange());
                pst.setString(4, row.getRoutingKey());
                pst.setInt(5, row.getStatus());
                pst.setInt(6, row.getTryCount());
                pst.setTimestamp(7, now);
                pst.setTimestamp(8, now);
                pst.setTimestamp(9, now);
                pst.addBatch();
                // Flush after each full 10k-row batch to bound driver-side memory.
                if ((i + 1) % 10000 == 0) {
                    log.info("每一万条批量保存执行一次");
                    pst.executeBatch();
                    pst.clearBatch();
                }
            }
            // Flush the remainder, then commit everything at once.
            pst.executeBatch();
            connection.commit();
        } catch (SQLException e) {
            // Best-effort semantics preserved (the original swallowed the error),
            // but keep the stack trace in the log instead of stderr.
            log.error("jdbc batch insert failed", e);
        }
        s.stop();
        log.info(electronCodeInfoList.size() + "条数据插入花费时间 : " + s.getTotalTimeSeconds() + " s");
    }

    /**
     * Loads all rows with routing_key = "XX" by splitting the total count into
     * fixed-size page queries (ThredQuery tasks) and running them on
     * {@code queryBatchPool} via invokeAll, 10 tasks per group.
     *
     * <p>Fixes vs the original: the exception was logged as a format argument
     * ({@code log.warn("…{}", e)}), losing the stack trace; InterruptedException
     * now restores the interrupt flag; the redundant null-check on invokeAll's
     * result (it never returns null) is gone.
     *
     * @param params query parameters forwarded to each ThredQuery task
     * @return the merged rows from every page query
     */
    @Override
    public List<MsgLog> logicHandler(Map<String, Object> params) {
        StopWatch s = new StopWatch();
        s.start();
        // Final merged result across all page queries.
        List<MsgLog> result = new ArrayList<>();

        // Total number of matching rows drives how many tasks are created.
        LambdaQueryWrapper<MsgLog> queryWrapper = new LambdaQueryWrapper<MsgLog>();
        queryWrapper.eq(MsgLog::getRoutingKey, "XX");
        int count = this.baseMapper.selectCount(queryWrapper);
        log.info("总条数:{}", count);

        // Rows fetched per task.
        int num = 100;

        // Number of page queries needed (rounded up).
        int times = count / num;
        if (count % num != 0) {
            times = times + 1;
        }

        // Row offset for each successive task.
        int offset = 0;

        List<Callable<List<MsgLog>>> tasks = new ArrayList<>();
        for (int i = 0; i < times; i++) {
            tasks.add(new ThredQuery(this.baseMapper, params, offset, num));
            offset = offset + num;
        }

        // Run tasks in groups of 10 so results are drained incrementally instead
        // of all futures accumulating at once.
        List<List<Callable<List<MsgLog>>>> smallList = ListUtils.partition(tasks, 10);
        for (List<Callable<List<MsgLog>>> callableList : smallList) {
            if (CollectionUtils.isNotEmpty(callableList)) {
                try {
                    List<Future<List<MsgLog>>> futures = queryBatchPool.getThreadPoolExecutor().invokeAll(callableList);
                    for (Future<List<MsgLog>> future : futures) {
                        result.addAll(future.get());
                    }
                    log.info("总条数:{}", result.size());
                } catch (InterruptedException e) {
                    // Restore the interrupt flag so callers can observe it.
                    Thread.currentThread().interrupt();
                    log.warn("任务拆分执行异常，errMsg = {}", e.getMessage(), e);
                } catch (Exception e) {
                    // Attach the throwable so the full stack trace is logged.
                    log.warn("任务拆分执行异常，errMsg = {}", e.getMessage(), e);
                }
            }
        }
        s.stop();
        log.info(result.size() + "条数据花费时间 : " + s.getTotalTimeSeconds() + " s");
        return result;
    }


    /**
     * Asynchronously loads one page of rows on the queryBatchPool executor.
     *
     * <p>Fix vs the original: the method returned from inside a {@code finally}
     * block, which silently discards any exception (or Error) raised in the try
     * body — a classic pitfall and a compiler warning. The return now follows
     * the normal flow and failures are logged.
     *
     * @param j        1-based page number
     * @param pageSize rows per page
     * @return a completed future holding the page's rows (empty on error)
     */
    @Async("queryBatchPool")
    @Override
    public CompletableFuture<List<MsgLog>> getExportDataList(int j, int pageSize) {
        List<MsgLog> list = new ArrayList<>();
        // A fresh per-call map keeps the paging parameters thread-confined.
        Map<@Nullable String, @Nullable Object> map = Maps.newHashMap();
        map.put("limit", (j - 1) * pageSize);
        map.put("num", pageSize);
        try {
            List<MsgLog> pageRows = this.baseMapper.selectListByWhere(map);
            if (pageRows != null && pageRows.size() > 0) {
                list.addAll(pageRows);
            }
        } catch (Exception e) {
            log.error("getExportDataList failed, page={}, pageSize={}", j, pageSize, e);
        }
        return CompletableFuture.completedFuture(list);
    }

    /**
     * Ad-hoc test entry point; intentionally left empty.
     */
    public static void main(String[] args) {
        // nothing to run at the moment
    }
    /**
     * Loads all rows with routing_key = "XX" via 10 parallel page queries, waits
     * for every query to finish, then re-inserts the merged rows in batches of
     * 200 through {@link #jdbcBatchInser}.
     *
     * <p>Fix vs the original: only the LAST CompletableFuture was kept and
     * passed to {@code CompletableFuture.allOf}, so 9 of the 10 queries were
     * never awaited — and the reference was null (NPE at allOf) whenever
     * totalNum &lt;= 10. All futures are now collected and awaited together.
     *
     * @return every row collected by the parallel page queries
     */
    @Override
    public List<MsgLog> getDataListByBatch() {
        // CopyOnWriteArrayList: whenComplete callbacks run on pool threads concurrently.
        List<MsgLog> dataList = new CopyOnWriteArrayList<>();
        LambdaQueryWrapper<MsgLog> queryWrapper = new LambdaQueryWrapper<MsgLog>();
        queryWrapper.eq(MsgLog::getRoutingKey, "XX");
        int totalNum = this.baseMapper.selectCount(queryWrapper);

        StopWatch stopWatch = new StopWatch();
        stopWatch.start();
        List<CompletableFuture<List<MsgLog>>> futures = new ArrayList<>();
        if (totalNum > 10) {
            int remainder = totalNum % 10;
            // Round the page size up so 10 pages always cover totalNum rows.
            int size = remainder > 0 ? totalNum / 10 + 1 : totalNum / 10;
            for (int i = 0; i < 10; i++) {
                int page = i * size;
                log.info("page:{}", page);
                CompletableFuture<List<MsgLog>> future = CompletableFuture.supplyAsync(() -> {
                    Map<@Nullable String, @Nullable Object> map = Maps.newHashMap();
                    map.put("limit", page);
                    map.put("num", size);
                    List<MsgLog> rows = this.baseMapper.selectListByWhere(map);
                    return Objects.nonNull(rows) ? rows : new ArrayList<MsgLog>();
                }, queryBatchPool).whenComplete((bodyList, throwable) -> {
                    if (!CollectionUtils.isEmpty(bodyList)) {
                        dataList.addAll(bodyList);
                        log.info("长度:" + dataList.size());
                    }
                });
                futures.add(future);
            }
        }
        log.info("futures:{}", futures.size());
        // Wait for every page query, not just the last one.
        if (!futures.isEmpty()) {
            CompletableFuture.allOf(futures.toArray(new CompletableFuture[0])).join();
        }
        stopWatch.stop();
        log.info("dataList:{}", dataList.size());
        // Re-insert the collected rows in batches of 200 via raw JDBC.
        List<MsgLog> msgLogList_ = asyncBatchProcessorService.processInBatch(dataList, 200, x -> {
            this.jdbcBatchInser(x);
            return x;
        });
        log.info("msgLogList = {}", msgLogList_);
        log.info("++++++++ getDataListByBatch() -> stopWatch: {}ms ++++++++", stopWatch.getTotalTimeMillis());
        return dataList;
    }

    // Maximum rows fetched per batch-query page; used by getTotalPage below.
    int MAX_BATCH_QUERY_SIZE = 50000;

    /**
     * Computes how many pages of {@code MAX_BATCH_QUERY_SIZE} rows are needed
     * to cover the given total, rounding up for a partial last page.
     *
     * @param totalSize total row count
     * @return number of pages required
     */
    private int getTotalPage(int totalSize) {
        int fullPages = totalSize / MAX_BATCH_QUERY_SIZE;
        boolean hasPartialPage = totalSize % MAX_BATCH_QUERY_SIZE != 0;
        return hasPartialPage ? fullPages + 1 : fullPages;
    }

    /**
     * Builds the list of 1-based page numbers [1, totalPage].
     *
     * @param totalPage total number of pages
     * @return page numbers in ascending order (empty when totalPage &lt; 1)
     */
    private List<Integer> getPageNumbers(int totalPage) {
        List<Integer> pageNumbers = Lists.newArrayList();
        for (int page = 1; page <= totalPage; page++) {
            pageNumbers.add(page);
        }
        return pageNumbers;
    }


    /**
     * Loads all rows with routing_key = "XX" by fanning page queries out to
     * {@code queryBatchPool} as FutureTasks and merging the results.
     *
     * <p>Fixes vs the original: the result-draining loop re-scanned the task
     * list in a busy-wait and only removed a task after a successful get(), so
     * a task that kept throwing (ExecutionException) was retried forever; each
     * future is now awaited exactly once. InterruptedException restores the
     * interrupt flag, and {@code i * limit} is widened before multiplication to
     * avoid int overflow on large tables.
     *
     * @param limit rows fetched per task
     * @return all matching rows
     */
    public List<MsgLog> queryAllUseThreadPool(int limit) {
        // Total row count drives how many page tasks are created.
        LambdaQueryWrapper<MsgLog> queryWrapper = new LambdaQueryWrapper<MsgLog>();
        queryWrapper.eq(MsgLog::getRoutingKey, "XX");
        int count = this.baseMapper.selectCount(queryWrapper);

        List<FutureTask<List<MsgLog>>> resultList = new ArrayList<>();

        // Number of full pages of `limit` rows.
        long cycles = count / limit;
        for (int i = 0; i < cycles; i++) {
            // Start offset of this page; widen before multiplying to avoid overflow.
            long idx = (long) i * limit;
            log.info("idx: {}", idx);
            Map<@Nullable String, @Nullable Object> map = Maps.newHashMap();
            map.put("limit", idx);
            map.put("num", limit);
            FutureTask<List<MsgLog>> futureTask = new FutureTask<List<MsgLog>>(() -> this.baseMapper.selectListByWhere(map));
            // Submit now; results are collected later while queries keep running.
            queryBatchPool.execute(futureTask);
            resultList.add(futureTask);
        }

        List<MsgLog> result = new ArrayList<>();
        for (FutureTask<List<MsgLog>> task : resultList) {
            try {
                result.addAll(task.get());
            } catch (InterruptedException e) {
                // Preserve the interrupt flag for callers.
                Thread.currentThread().interrupt();
                log.error("多线程查询出现异常：{}", e.getMessage(), e);
            } catch (ExecutionException e) {
                log.error("多线程查询出现异常：{}", e.getMessage(), e);
            }
        }

        // count may not divide evenly by limit; fetch the remaining tail rows.
        if (result.size() != count) {
            log.info("最后一次查询");
            Map<@Nullable String, @Nullable Object> map = Maps.newHashMap();
            map.put("limit", result.size());
            map.put("num", Math.toIntExact(count));
            result.addAll(this.baseMapper.selectListByWhere(map));
        }
        return result;
    }

    //@Async
    /**
     * Pages through all rows with routing_key = "XX" using MyBatis-Plus
     * pagination and accumulates them for a (currently disabled) batch save.
     *
     * <p>Fix vs the original: the page loop ran {@code i < pageNum}, which
     * skipped the last page whenever count was not an exact multiple of
     * pageSize; it now runs {@code i <= pageNum}.
     */
    @Override
    public void asyncBatchInit() {
        // Total matching rows → number of pages (round up for a partial page).
        LambdaQueryWrapper<MsgLog> queryWrapper = new LambdaQueryWrapper<MsgLog>();
        queryWrapper.eq(MsgLog::getRoutingKey, "XX");
        int count = this.baseMapper.selectCount(queryWrapper);
        int pageSize = 1000;
        int pageNum = count / pageSize;
        int surplus = count % pageSize; // non-zero when the division is not exact
        if (surplus > 0) {
            pageNum = pageNum + 1;
        }
        List<MsgLog> batchList = new ArrayList<MsgLog>();
        // Pages are 1-based; include the final (possibly partial) page.
        for (int i = 1; i <= pageNum; i++) {
            LambdaQueryWrapper<MsgLog> logLambdaQueryWrapper = new LambdaQueryWrapper<MsgLog>();
            logLambdaQueryWrapper.eq(MsgLog::getRoutingKey, "XX");
            Page<MsgLog> page = new com.baomidou.mybatisplus.extension.plugins.pagination.Page<MsgLog>(i, pageSize);
            IPage<MsgLog> list = this.baseMapper.selectPage(page, logLambdaQueryWrapper);
            if (CollectionUtils.isNotEmpty(list.getRecords())) {
                batchList.addAll(list.getRecords());
            }
            // Batch insert (disabled for now).
            if (CollectionUtils.isNotEmpty(batchList)) {
                // ucasProjectService.saveBatch(batchList);
            }
        }
        log.info("总共:" + batchList.size() + "条");
    }
    /**
     * Processes {@code splitList} concurrently via the MultiThread helper and
     * appends the processed records to {@code newList}.
     *
     * <p>Fixes vs the original: failures are logged with the stack trace instead
     * of printStackTrace; the manual element-copy loop is replaced by addAll;
     * dead commented-out business code removed.
     *
     * @param splitList batch of records to process
     * @param newList   accumulator that receives the processed records
     * @return the same {@code newList}, with this batch's results appended
     */
    @Override
    public List<MsgLog> testTime(List<MsgLog> splitList, List<MsgLog> newList) {
        try {
            MultiThread<MsgLog, MsgLog> multiThread = new MultiThread<MsgLog, MsgLog>(splitList) {
                @Override
                public MsgLog outExecute(int currentThread, MsgLog data) {
                    // Per-record business processing goes here; currently a pass-through.
                    return data;
                }
            };
            // Merge this batch's results into the shared accumulator.
            newList.addAll(multiThread.getResult());
        } catch (Exception e) {
            log.error("testTime failed", e);
        }
        return newList;
    }

    /**
     * Exports msg_log rows to an Excel download via ExportListener.
     *
     * <p>Fix vs the original: the wrapper was declared LambdaQueryWrapper&lt;T&gt;
     * where T accidentally resolved to org.apache.poi.ss.formula.functions.T
     * (a stray POI import), not a type parameter; it is now typed to the entity
     * actually being exported. Failures are logged instead of printStackTrace.
     */
    @Override
    public void exportExcel(HttpServletResponse response) {
        LambdaQueryWrapper<MsgLog> queryWrapper = new LambdaQueryWrapper<>();
        try {
            new ExportListener(msgLogMapper).exportExcel(response, "订单信息", MsgLog.class, queryWrapper);
        } catch (IOException e) {
            log.error("exportExcel failed", e);
        }
    }

    /**
     * Exports msg_log rows using ExportListener#export8, which pages the data
     * on the shared query thread pool.
     *
     * <p>Fix vs the original: failures are logged with the stack trace instead
     * of printStackTrace.
     */
    @Override
    public void exportExcel8(HttpServletResponse response) {
        try {
            new ExportListener(msgLogMapper, queryBatchPool).export8(response, "订单信息", MsgLog.class);
        } catch (Exception e) {
            log.error("exportExcel8 failed", e);
        }
    }
    // Utility that fans chunks of work out over CompletableFutures; used by
    // logicHandlerExt below (see parallelFutureJoin).
    @Autowired
    private CompletableFutureUtil completableFutureUtil;
    /**
     * Streams all rows with routing_key = "XX" in pages of 200k, splits each
     * page into chunks of 5000 and pushes every chunk through
     * {@code completableFutureUtil.parallelFutureJoin}; results are logged
     * inline and nothing is accumulated.
     *
     * <p>Fixes vs the original: the unused {@code result} list is removed, and
     * the error callback logs with parameterized arguments and the throwable
     * attached instead of string concatenation.
     *
     * @param objectObjectHashMap unused request parameters (kept for interface compatibility)
     * @return always {@code null} — by design of this implementation
     */
    @Override
    public List<MsgLog> logicHandlerExt(HashMap<Object, Object> objectObjectHashMap) {
        StopWatch s = new StopWatch();
        s.start();

        // Total number of matching rows.
        LambdaQueryWrapper<MsgLog> queryWrapper = new LambdaQueryWrapper<MsgLog>();
        queryWrapper.eq(MsgLog::getRoutingKey, "XX");
        int count = this.baseMapper.selectCount(queryWrapper);
        log.info("总条数:{}", count);

        // Rows fetched per round trip.
        int num = 200000;

        // Number of page queries needed (rounded up).
        int times = count / num;
        if (count % num != 0) {
            times = times + 1;
        }

        // Row offset of the current page.
        int offset = 0;
        Map<String, Object> map = new HashMap<>();
        for (int i = 0; i < times; i++) {
            map.put("pageNo", offset);
            map.put("pageSize", num);
            List<MsgLog> msgLogList = this.baseMapper.selectListByWhereExt(map);
            // Process the page in chunks of 5000 to bound per-task memory.
            List<List<MsgLog>> listsMsglog = CollUtil.split(msgLogList, 5000);
            listsMsglog.forEach(splitList -> {
                List numList = completableFutureUtil.parallelFutureJoin(splitList, custPolicyStatExts -> {
                    log.info("推送成功");
                    return custPolicyStatExts;
                }, (e, num_) -> {
                    // Parameterized logging keeps the stack trace.
                    log.error("我异常了：{}，异常原因：", num_, e);
                    return -1;
                });
                log.info(Thread.currentThread().getName() + "处理数量:{}", numList.size());
            });
            log.info("条数:{}", msgLogList.size());
            // Release the page promptly before fetching the next one.
            msgLogList.clear();
            log.info("清空条数:{}", msgLogList.size());
            offset = offset + num;
            log.info("offset:{}", offset);
            log.info("row:{}", num);
        }
        s.stop();
        log.info("数据花费时间 : " + s.getTotalTimeSeconds() + " s");
        return null;
    }
   /* @Override
    public void testResultHandler() {
        // 每次导出 new 一个 BaseExportResultHandler 对象，参数为 header、 templateName、 isExportZip
        BaseExportResultHandler<MsgLog> handler = new BaseExportResultHandler<MsgLog>(
                MyExcelUtil.getHeader(new MsgLog()), "MsgLog", false
        ) {
            *//**
             * 子类重写 fetchDataByStream ，自定义获取数据的方式
             *//*
            @Override
            public void fetchDataByStream() {
                // 这里的this 指的就是 BaseExportResultHandler<TestVo> handler 这个对象，在这里写 mapper 调用获取数据的调用
                testDao.selectMillionData(this);
            }
        };
        // startExportExcel 方法中调用 fetchDataByStream 方法，
        // 而 fetchDataByStream 方法中 selectMillionData 方法会调用 handler 中的 handleResult 方法
        // 最终 handleResult 方法调用 invoke 处理数据
        handler.startExportExcel();

    }



*/

    /**
     * Streams msg_log rows page by page (5000 per page) into a single-sheet
     * Excel download, stopping when a page comes back short or empty.
     *
     * <p>Fixes vs the original:
     * <ul>
     *   <li>the method declared {@code Class<T>} where T accidentally resolved
     *       to the stray POI import — it is now a proper generic method, which
     *       remains call-compatible;</li>
     *   <li>one shared {@code condition} map was mutated across iterations
     *       while captured by the async lambda; each page now gets its own map;</li>
     *   <li>{@code excelWriter.finish()} moved to {@code finally} so the stream
     *       is closed even when a page query fails;</li>
     *   <li>the pointless "write empty list then break" branch and the unused
     *       futureList are gone.</li>
     * </ul>
     *
     * @param response  target HTTP response
     * @param sheetName sheet and file base name
     * @param pojoClass row model class providing the Excel header mapping
     * @param <T>       row model type
     */
    public <T> void export8(HttpServletResponse response, String sheetName, Class<T> pojoClass) {
        ExcelWriter excelWriter = null;
        try {
            ServletOutputStream out = getServletOutputStream(response, sheetName);
            // The model class supplies the header metadata.
            excelWriter = EasyExcel.write(out, pojoClass).build();
            WriteSheet writeSheet = EasyExcel.writerSheet(sheetName).build();

            int pageNumber = 1;
            int pageSize = 5000;
            int dataLength = pageSize;
            long start = System.currentTimeMillis() / 1000; // seconds
            // A short page means we've reached the end of the data.
            while (dataLength == pageSize) {
                int startIndex = (pageNumber - 1) * pageSize;
                // Fresh map per page: the async lambda captures it, so it must
                // not be mutated after submission.
                Map<String, Object> condition = new HashMap<>();
                condition.put("pageNo", startIndex);
                condition.put("pageSize", pageSize);
                condition.put("routing_key", "XX");
                // The query runs on queryBatchPool; get() keeps the paging sequential.
                List<MsgLog> resultList = CompletableFuture
                        .supplyAsync(() -> this.baseMapper.selectListByWhereExt(condition), queryBatchPool)
                        .get();
                if (org.springframework.util.CollectionUtils.isEmpty(resultList)) {
                    break;
                }
                excelWriter.write(resultList, writeSheet);
                dataLength = resultList.size();
                pageNumber++;
            }
            long end = System.currentTimeMillis() / 1000;
            log.info("导出耗时：" + (end - start) + " 秒");
        } catch (Exception e) {
            log.error("导出异常", e);
        } finally {
            // Flush and close the writer's output stream on every path.
            if (excelWriter != null) {
                excelWriter.finish();
            }
        }
    }

    // Timestamp pattern appended to generated export file names.
    private static final String DATA_FORMAT = "yyyy-MM-dd-HH-mm-ss";
    // Character set used for the response body and header encoding.
    private static final String CHARACTER_UTF_8 = "UTF-8";

    // MIME type for .xlsx workbooks.
    private static final String CONTENT_TYPE = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet";

    // Header that triggers the browser's save-as dialog.
    private static final String CONTENT_DISPOSITION = "Content-Disposition";
    /**
     * Prepares the HTTP response for an .xlsx download (content type, UTF-8
     * charset, Content-Disposition with a timestamped file name) and returns
     * its output stream.
     *
     * <p>Fix vs the original: URLEncoder encodes spaces as '+', which browsers
     * display literally in downloaded file names; '+' is converted to the
     * percent-encoded form expected inside HTTP headers.
     *
     * @param response  target HTTP response
     * @param sheetName base name for the generated file
     * @return the response output stream to write the workbook to
     * @throws IOException if the output stream cannot be obtained
     */
    public static ServletOutputStream getServletOutputStream(HttpServletResponse response, String sheetName) throws IOException {
        DateTimeFormatter formatter = DateTimeFormatter.ofPattern(DATA_FORMAT);
        String nowTime = formatter.format(LocalDateTime.now());
        String fileName = sheetName.concat("_").concat(nowTime).concat(".xlsx");
        response.setContentType(CONTENT_TYPE);
        response.setCharacterEncoding(CHARACTER_UTF_8);
        // '+' (URLEncoder's space encoding) → '%20' for header-safe file names.
        String encodedName = URLEncoder.encode(fileName, CHARACTER_UTF_8).replace("+", "%20");
        // Plain filename= works in tools like Postman; the RFC 5987 filename*=
        // parameter is what browsers prefer for non-ASCII names.
        response.setHeader(CONTENT_DISPOSITION,
                "attachment;filename=" + encodedName
                        + ";filename*=utf-8''" + encodedName);
        // NOTE(review): an old comment claimed a no-cache header was sent here,
        // but none ever was — add Cache-Control if download caching is an issue.
        return response.getOutputStream();
    }

}
