package com.example.yimai.service.impl;

import com.example.yimai.entity.News;
import com.example.yimai.entity.Order;
import com.example.yimai.entity.Product;
import com.example.yimai.entity.User;
import com.example.yimai.mapper.NewsMapper;
import com.example.yimai.mapper.OrderMapper;
import com.example.yimai.mapper.ProductMapper;
import com.example.yimai.mapper.UserMapper;
import com.example.yimai.repository.NewsRepository;
import com.example.yimai.repository.OrderRepository;
import com.example.yimai.repository.ProductRepository;
import com.example.yimai.repository.UserRepository;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.elasticsearch.repository.ElasticsearchRepository;
import org.springframework.stereotype.Service;

import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.function.Supplier;


@Service
@Slf4j
public class ElasticsearchService {

    @Autowired
    private NewsRepository newsRepository;

    @Autowired
    private NewsMapper newsMapper;

    @Autowired
    private UserRepository userRepository;
    @Autowired
    private UserMapper userMapper;

    @Autowired
    private ProductRepository productRepository;

    @Autowired
    private ProductMapper productMapper;

    @Autowired
    private OrderRepository orderRepository;

    @Autowired
    private OrderMapper orderMapper;

    public String fullImport() {
        log.info("开始全量导入");

        // 并行处理不同类型的数据
        CompletableFuture<Void> newsFuture = CompletableFuture.runAsync(this::importNews);
        CompletableFuture<Void> userFuture = CompletableFuture.runAsync(this::importUsers);
        CompletableFuture<Void> productFuture = CompletableFuture.runAsync(this::importProducts);
        CompletableFuture<Void> orderFuture = CompletableFuture.runAsync(this::importOrders);


        // 等待所有任务完成
        CompletableFuture.allOf(newsFuture, userFuture, productFuture, orderFuture).join();

        log.info("全量导入完成");
        return "success";
    }

    private void importNews() {
        try {
            newsRepository.deleteAll();
            List<News> newsList = newsMapper.selectList(null);
            // 分批导入，避免大数据量时内存溢出
            batchSave(newsList, newsRepository, "资讯");
            log.info("导入资讯数据完成，共 {} 条", newsList.size());
        } catch (Exception e) {
            log.error("导入资讯数据失败", e);
            throw new RuntimeException("导入资讯数据失败: " + e.getMessage());
        }
    }

    private void importUsers() {
        try {
            userRepository.deleteAll();
            List<User> userList = userMapper.selectList(null);
            batchSave(userList, userRepository, "用户");
            log.info("导入用户数据完成，共 {} 条", userList.size());
        } catch (Exception e) {
            log.error("导入用户数据失败", e);
            throw new RuntimeException("导入用户数据失败: " + e.getMessage());
        }
    }

    private void importProducts() {
        try {
            productRepository.deleteAll();
            List<Product> productList = productMapper.selectList(null);
            batchSave(productList, productRepository, "商品");
            log.info("导入商品数据完成，共 {} 条", productList.size());
        } catch (Exception e) {
            log.error("导入商品数据失败", e);
            throw new RuntimeException("导入商品数据失败: " + e.getMessage());
        }
    }

    private void importOrders() {
        try {
            orderRepository.deleteAll();
            List<Order> orderList = orderMapper.selectList(null);
            batchSave(orderList, orderRepository, "订单");
            log.info("导入订单数据完成，共 {} 条", orderList.size());
        } catch (Exception e) {
            log.error("导入订单数据失败", e);
            throw new RuntimeException("导入订单数据失败: " + e.getMessage());
        }
    }

    // 通用的批量保存方法
    private <T> void batchSave(List<T> dataList, ElasticsearchRepository<T, ?> repository, String dataType) {
        int batchSize = 1000; // 批量大小
        for (int i = 0; i < dataList.size(); i += batchSize) {
            int endIndex = Math.min(i + batchSize, dataList.size());
            List<T> batch = dataList.subList(i, endIndex);
            repository.saveAll(batch);
            log.debug("已导入{}数据 {}-{} 条", dataType, i + 1, endIndex);
        }
    }

}
