package com.tenthoundsqps.service.sync;

import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.tenthoundsqps.dao.OrderDao;
import com.tenthoundsqps.entity.Order;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;

import java.time.LocalDateTime;
import java.util.List;

/**
 * Batch order synchronization service.
 * Periodically pushes order rows from the database into Elasticsearch.
 *
 * Sync strategy:
 *  1. Scheduled execution: a nightly full sync plus an hourly incremental sync.
 *  2. Page-by-page processing to avoid loading the whole table into memory.
 *  3. Incremental sync only covers rows created or updated in the last hour.
 *  4. Failures are logged (not rethrown) so the affected window can be
 *     re-processed later and the scheduler keeps running.
 */
@Service
public class BatchOrderSyncService {
    
    private static final Logger logger = LoggerFactory.getLogger(BatchOrderSyncService.class);
    
    /**
     * Order data-access object, used for paged queries over the order table.
     */
    @Autowired
    private OrderDao orderDao;
    
    /**
     * Sync service that performs the actual bulk writes into Elasticsearch.
     */
    @Autowired
    private OrderSyncService orderSyncService;
    
    // Number of orders fetched and synced per page.
    private static final int PAGE_SIZE = 1000;
    
    /**
     * Full sync: every day at 03:00, copy all orders from the database into
     * Elasticsearch one page at a time.
     */
    @Scheduled(cron = "0 0 3 * * ?")
    public void syncAllOrders() {
        logger.info("Starting full order sync job");
        
        try {
            // Empty wrapper == no filter == whole table.
            long totalSynced = syncByQuery(new QueryWrapper<>());
            logger.info("Full order sync job completed, total synced: {}", totalSynced);
        } catch (Exception e) {
            // Deliberately swallow at the scheduler boundary: one failed run
            // must not cancel future scheduled executions.
            logger.error("Error occurred during full order sync", e);
        }
    }
    
    /**
     * Incremental sync: every hour on the hour, sync orders created or
     * updated within the last hour.
     */
    @Scheduled(cron = "0 0 * * * ?")
    public void syncIncrementalOrders() {
        logger.info("Starting incremental order sync job");
        
        try {
            // Sync window: the last hour.
            // NOTE(review): if a run is delayed or skipped, rows changed in the
            // gap are missed; consider overlapping windows or a persisted
            // high-water mark — confirm with the job's delivery guarantees.
            LocalDateTime endTime = LocalDateTime.now();
            LocalDateTime startTime = endTime.minusHours(1);
            
            // Rows created OR updated inside the window:
            // (create_time BETWEEN start AND end) OR (update_time BETWEEN start AND end)
            QueryWrapper<Order> queryWrapper = new QueryWrapper<>();
            queryWrapper.and(wrapper -> wrapper
                    .ge("create_time", startTime)
                    .le("create_time", endTime))
                .or()
                .and(wrapper -> wrapper
                    .ge("update_time", startTime)
                    .le("update_time", endTime));
            
            long totalSynced = syncByQuery(queryWrapper);
            logger.info("Incremental order sync job completed, total synced: {}", totalSynced);
        } catch (Exception e) {
            logger.error("Error occurred during incremental order sync", e);
        }
    }
    
    /**
     * Pages through the orders matched by {@code queryWrapper} and bulk-syncs
     * each non-empty page into Elasticsearch.
     *
     * @param queryWrapper filter describing which orders to sync; an empty
     *                     wrapper selects the whole table
     * @return total number of orders handed to the bulk sync
     */
    private long syncByQuery(QueryWrapper<Order> queryWrapper) {
        // A stable ORDER BY keeps page boundaries deterministic; without it the
        // database may return rows in a different order on each page query,
        // causing rows to be skipped or synced twice.
        // NOTE(review): assumes the primary-key column is named "id" — confirm
        // against the Order table mapping.
        queryWrapper.orderByAsc("id");
        
        long totalSynced = 0;
        long currentPage = 1;
        
        Page<Order> page;
        do {
            page = orderDao.selectPage(new Page<>(currentPage, PAGE_SIZE), queryWrapper);
            
            List<Order> orders = page.getRecords();
            if (!orders.isEmpty()) {
                orderSyncService.bulkSyncOrdersToElasticsearch(orders);
                totalSynced += orders.size();
                logger.info("Synced {} orders in page {}", orders.size(), currentPage);
            }
            
            currentPage++;
        } while (page.hasNext());
        
        return totalSynced;
    }
}