//package com.guanzi.big_data_mq_process.listener;
//
//import com.alibaba.excel.context.AnalysisContext;
//import com.alibaba.excel.read.listener.ReadListener;
//import com.alibaba.fastjson.JSON;
//import com.alibaba.fastjson.TypeReference;
//import com.guanzi.big_data_mq_process.model.bo.UserOrderBO;
//import com.guanzi.big_data_mq_process.model.entity.UserOrder;
//import com.guanzi.big_data_mq_process.service.IUserOrderService;
//import lombok.extern.slf4j.Slf4j;
//import org.springframework.beans.BeanUtils;
//import org.springframework.beans.factory.annotation.Autowired;
//import org.springframework.beans.factory.annotation.Qualifier;
//import org.springframework.stereotype.Component;
//import org.springframework.transaction.annotation.Transactional;
//
//import java.util.ArrayList;
//import java.util.List;
//import java.util.concurrent.CompletableFuture;
//import java.util.concurrent.ThreadPoolExecutor;
//import java.util.concurrent.atomic.AtomicInteger;
//
///**
// * Import listener for {@link UserOrderBO} rows read by EasyExcel.
// * {@code invoke(...)} is called once per row; rows are buffered per reading
// * thread and flushed in batches, either synchronously or via the import
// * thread pool.
// *
// * NOTE(review): this entire class is commented out (disabled). The changes
// * below keep it disabled but fix the latent bugs so it is safe to re-enable:
// * the final partial batch was dropped, a log call was missing an argument,
// * and the ThreadLocal buffer was never cleaned up.
// *
// * @author 管子
// * @date 2025/6/14 10:07
// * @description: UserOrderListener 类
// */
//@Slf4j
//@Component
//public class UserOrderListener implements ReadListener<UserOrderBO> {
//
//    @Autowired
//    @Qualifier("importThreadPool")
//    private ThreadPoolExecutor importThreadPool;
//
//    @Autowired
//    private IUserOrderService userOrderService;
//
//    // Futures of async batch saves; callers join these to know the import is done.
//    // NOTE(review): ArrayList is not thread-safe — this is only safe while a
//    // single reading thread adds to it; confirm before reading multiple sheets
//    // concurrently through the same listener instance.
//    public static List<CompletableFuture<?>> saveTaskFutureList = new ArrayList<>();
//
//    // Per-thread row buffer: each Excel-reading thread accumulates its own batch.
//    private final ThreadLocal<List<UserOrder>> userOrderList = ThreadLocal.withInitial(ArrayList::new);
//
//    // Global batch counter, used only for log output.
//    private static final AtomicInteger count = new AtomicInteger(1);
//
//    private static final int BATCH_SIZE = 10000;
//
//    /**
//     * Called for every row read from the sheet. Buffers the row and flushes
//     * asynchronously once the buffer reaches {@code BATCH_SIZE}.
//     * NOTE(review): @Transactional is ineffective for the async path — the
//     * actual saveBatch runs on a pool thread outside this transaction; verify
//     * whether transactional semantics are actually required here.
//     */
//    @Override
//    @Transactional(rollbackFor = Exception.class)
//    public void invoke(UserOrderBO data, AnalysisContext context) {
//        UserOrder entity = new UserOrder();
//        BeanUtils.copyProperties(data, entity);
//
//        // Option 1: single-threaded row-by-row insert.
//        // BUG FIX: the log format has two placeholders; the thread name
//        // argument was missing in the original.
////        userOrderService.saveOrUpdate(entity);
////        log.info("线程: {} 第 {} 次插入1条数据", Thread.currentThread().getName(), count.getAndIncrement());
//
//        // Option 2: accumulate rows and bulk-insert on the thread pool.
//        userOrderList.get().add(entity);
//        if (userOrderList.get().size() >= BATCH_SIZE) {
////            saveBatchData();
//            asyncSaveBatchData();
//        }
//    }
//
//    /**
//     * Called once a sheet has been fully read: flush whatever remains in the
//     * buffer. BUG FIX: the original only flushed when size >= batchSize, which
//     * silently dropped the final partial batch (up to batchSize - 1 rows).
//     * Also clears the ThreadLocal so pooled reading threads do not leak it.
//     */
//    @Override
//    @Transactional(rollbackFor = Exception.class)
//    public void doAfterAllAnalysed(AnalysisContext context) {
//        log.info("处理完了一个Sheet数据");
//        saveBatchData();           // no-op on an empty buffer
//        userOrderList.remove();    // avoid ThreadLocal leak on pooled threads
//    }
//
//    // Synchronous flush of the current thread's buffer.
//    private void saveBatchData() {
//        List<UserOrder> batch = userOrderList.get();
//        if (!batch.isEmpty()) {
//            userOrderService.saveBatch(batch, batch.size());
//            log.info("线程: {} 第 {} 次插入 {} 条数据", Thread.currentThread().getName(), count.getAndIncrement(), batch.size());
//            batch.clear();
//        }
//    }
//
//    // Asynchronous flush: hand the current buffer to the pool, start a new one.
//    private void asyncSaveBatchData() {
//        List<UserOrder> batch = userOrderList.get();
//        if (!batch.isEmpty()) {
//            // Detach the full buffer and install a fresh list for this thread,
//            // instead of the original JSON serialize/parse deep copy — cheaper,
//            // type-safe, and the pool thread gets exclusive ownership.
//            userOrderList.set(new ArrayList<>());
//            CompletableFuture<Void> future = CompletableFuture.runAsync(() ->
//                    userOrderService.saveBatch(batch, batch.size()), importThreadPool);
//            log.info("线程: {} 第 {} 次插入 {} 条数据", Thread.currentThread().getName(), count.getAndIncrement(), batch.size());
//
//            saveTaskFutureList.add(future);
//        }
//    }
//
//}
