package com.zzh.partnersys.job;

import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.zzh.partnersys.entity.UserDO;
import com.zzh.partnersys.es.service.ElasticsearchSyncService;
import com.zzh.partnersys.mapper.UserMapper;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.CommandLineRunner;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;

import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.stream.Collectors;

/**
 * Initializes all user records into Elasticsearch at application startup.
 *
 * <p>Runs asynchronously on a dedicated thread so it never blocks startup:
 * it first clears all user documents from ES, then pages through the user
 * table and bulk-indexes each page. Failures are logged and swallowed —
 * initialization is best-effort and must not prevent the app from starting.
 *
 * @author: zzh
 * @date: 2025/11/29 17:27:04
 * @version: 1.0
 */
@Slf4j
@Component
@Order(1) // run early: user data must be initialized before dependent runners
public class UserInitEsPostConstruct implements CommandLineRunner {

    private static final int BATCH_SIZE = 500; // rows indexed into ES per batch
    private static final int DELETE_BATCH_SIZE = 1000; // ids deleted from ES per batch
    private static final long BATCH_PAUSE_MILLIS = 100L; // throttle between sync batches

    @Resource
    private UserMapper userMapper;

    @Resource
    private ElasticsearchSyncService elasticsearchSyncService;

    /**
     * Kicks off the user-to-ES sync asynchronously so startup is not blocked.
     *
     * <p>Uses a dedicated single-thread daemon executor rather than the
     * default {@code ForkJoinPool.commonPool()}: the task performs blocking
     * DB/ES I/O and sleeps between batches, which would otherwise tie up a
     * shared common-pool worker. The executor is shut down when the task
     * completes (normally or exceptionally).
     *
     * @param args application arguments (unused)
     */
    @Override
    public void run(String... args) {
        ExecutorService executor = Executors.newSingleThreadExecutor(r -> {
            Thread t = new Thread(r, "user-es-init");
            t.setDaemon(true); // never keep the JVM alive for this background sync
            return t;
        });
        CompletableFuture.runAsync(this::initUserInfoToEs, executor)
                .whenComplete((unused, throwable) -> executor.shutdown());
    }

    /**
     * Performs the actual sync: clears ES, then pages the user table into it.
     * All exceptions are caught and logged — this is deliberately best-effort.
     */
    private void initUserInfoToEs() {
        log.info("开始初始化用户信息到ES...");
        try {
            // 1. Remove every existing user document from ES first.
            clearAllUsersFromEs();

            // 2. Page through all users and bulk-index each page into ES.
            Long count = userMapper.selectCount(new QueryWrapper<>());
            // Defensive: avoid NPE on auto-unboxing if the mapper yields null.
            long totalCount = count == null ? 0L : count;
            log.info("用户总数: {}", totalCount);

            if (totalCount == 0) {
                log.info("没有用户数据需要同步");
                return;
            }

            int totalBatches = (int) Math.ceil((double) totalCount / BATCH_SIZE);
            log.info("将分 {} 批同步用户数据，每批 {} 条", totalBatches, BATCH_SIZE);

            for (int i = 0; i < totalBatches; i++) {
                long offset = (long) i * BATCH_SIZE;
                List<UserDO> userList = userMapper.customPaginationQuery(offset, BATCH_SIZE);

                if (userList == null || userList.isEmpty()) {
                    // The table shrank since the count was taken; nothing left to page.
                    break;
                }

                elasticsearchSyncService.batchSyncUsersToEs(userList);
                log.info("已同步用户数据: {}/{} 批，当前批数量: {}", i + 1, totalBatches, userList.size());

                // Throttle between batches to limit DB/ES load; no pause after the last one.
                if (i + 1 < totalBatches) {
                    try {
                        Thread.sleep(BATCH_PAUSE_MILLIS);
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt(); // preserve interrupt status
                        log.warn("同步用户数据被中断");
                        break;
                    }
                }
            }

            log.info("用户信息初始化到ES完成，共 {} 条数据", totalCount);
        } catch (Exception e) {
            log.error("初始化用户信息到ES失败", e);
        }
    }

    /**
     * Deletes every user document from ES, in batches of
     * {@link #DELETE_BATCH_SIZE} ids. Only user ids are fetched from the DB
     * (projection on {@code id}) to keep memory usage low. Failures are
     * logged and swallowed so a cleanup error does not abort the sync.
     */
    private void clearAllUsersFromEs() {
        log.info("开始清理ES中的所有用户数据...");
        try {
            // Fetch ids only — we never need the full rows for deletion.
            QueryWrapper<UserDO> queryWrapper = new QueryWrapper<>();
            queryWrapper.select("id");
            List<UserDO> allUsers = userMapper.selectList(queryWrapper);

            if (allUsers != null && !allUsers.isEmpty()) {
                List<Long> userIdList = allUsers.stream()
                        .map(UserDO::getId)
                        .filter(id -> id != null && id > 0)
                        .collect(Collectors.toList());

                if (!userIdList.isEmpty()) {
                    // Delete in batches to avoid one oversized ES bulk request.
                    for (int i = 0; i < userIdList.size(); i += DELETE_BATCH_SIZE) {
                        int end = Math.min(i + DELETE_BATCH_SIZE, userIdList.size());
                        List<Long> batch = userIdList.subList(i, end);
                        elasticsearchSyncService.batchDeleteUsersFromEs(batch);
                    }
                    log.info("已清理ES中的用户数据，共 {} 条", userIdList.size());
                }
            } else {
                log.info("ES中没有用户数据需要清理");
            }
        } catch (Exception e) {
            log.error("清理ES用户数据失败", e);
        }
    }
}
