package org.chen.task;

import co.elastic.clients.elasticsearch.ElasticsearchClient;
import co.elastic.clients.elasticsearch._types.Time;
import co.elastic.clients.elasticsearch.core.BulkResponse;
import co.elastic.clients.elasticsearch.core.ScrollResponse;
import co.elastic.clients.elasticsearch.core.SearchRequest;
import co.elastic.clients.elasticsearch.core.SearchResponse;
import co.elastic.clients.elasticsearch.core.bulk.BulkOperation;
import co.elastic.clients.elasticsearch.core.search.Hit;
import com.google.common.collect.Lists;
import lombok.extern.slf4j.Slf4j;
import org.chen.model.elastic.ArticleIndex;
import org.chen.model.mongodb.ArticleDocument;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

import java.io.IOException;
import java.util.*;
import java.util.stream.Collectors;

/**
 * Scheduled reconciliation task that keeps the Elasticsearch "articles" index
 * in sync with the MongoDB article collection (MongoDB is treated as the
 * source of truth).
 *
 * <p>The task computes the full ID sets on both sides, then:
 * <ul>
 *   <li>documents present only in MongoDB are bulk-indexed into ES;</li>
 *   <li>documents present only in ES are bulk-deleted from ES.</li>
 * </ul>
 * All work is done in batches of {@link #BATCH_SIZE}.
 */
@Slf4j
@Component
public class ArticleDataSyncTask {

    @Autowired
    private MongoTemplate mongoTemplate;

    @Autowired
    private ElasticsearchClient esClient;

    /** ES index holding article documents; used for search, index and delete. */
    private static final String INDEX_NAME = "articles";

    /** Batch size for Mongo paging, ES scroll pages and bulk requests. */
    private static final int BATCH_SIZE = 1000;

    /** Keep-alive window for the ES scroll context between scroll calls. */
    private static final String SCROLL_TIME = "10m";

    /**
     * Entry point, fired every day at 02:00. Never throws: any failure is
     * logged so the scheduler keeps firing on subsequent days.
     */
    @Scheduled(cron = "0 0 2 * * ?")
    public void syncData() {
        log.info("Starting article data synchronization task...");
        try {
            // 1. Collect the complete ID sets from both stores.
            Set<String> mongoIds = getMongoDocumentIds();
            Set<String> esIds = getEsDocumentIdsWithScroll();

            // 2. Set difference in both directions identifies the drift.
            Set<String> onlyInMongo = new HashSet<>(mongoIds);
            onlyInMongo.removeAll(esIds);

            Set<String> onlyInEs = new HashSet<>(esIds);
            onlyInEs.removeAll(mongoIds);

            // 3. Repair the drift in batches (index missing docs, delete orphans).
            handleInconsistentDataInBatches(onlyInMongo, onlyInEs);

            log.info("Article data synchronization completed. MongoDB only: {}, ES only: {}",
                    onlyInMongo.size(), onlyInEs.size());
        } catch (Exception e) {
            log.error("Error during article data synchronization", e);
        }
    }

    /**
     * Pages through the MongoDB article collection and returns every document ID.
     *
     * <p>NOTE(review): skip/limit paging forces the server to re-scan skipped
     * documents on every page (O(n^2) overall) and can miss or duplicate IDs if
     * the collection is mutated concurrently. A cursor/stream or a
     * keyset ({@code _id > lastSeen}) pagination would be more robust — left
     * as-is to avoid changing observable behavior here.
     *
     * @return the set of all article document IDs in MongoDB
     */
    private Set<String> getMongoDocumentIds() {
        Set<String> allIds = new HashSet<>();
        long skip = 0;

        while (true) {
            // Project only _id to keep the transferred payload minimal.
            Query query = new Query().skip(skip).limit(BATCH_SIZE);
            query.fields().include("_id");

            List<ArticleDocument> batch = mongoTemplate.find(query, ArticleDocument.class);
            if (batch.isEmpty()) {
                break;
            }

            allIds.addAll(batch.stream()
                    .map(ArticleDocument::getId)
                    .collect(Collectors.toSet()));

            skip += BATCH_SIZE;
            log.info("Processed {} MongoDB documents", allIds.size());
        }

        return allIds;
    }

    /**
     * Collects every document ID from the ES index using the scroll API.
     *
     * <p>The scroll context is always cleared in a {@code finally} block so
     * server-side resources are released even when a scroll call fails.
     *
     * @return the set of all document IDs currently in the ES index
     * @throws IOException if any ES request fails
     */
    private Set<String> getEsDocumentIdsWithScroll() throws IOException {
        Set<String> allIds = new HashSet<>();

        // Open the scroll; _source fetching is disabled since only IDs are needed.
        SearchRequest searchRequest = SearchRequest.of(s -> s
                .index(INDEX_NAME)
                .scroll(Time.of(t -> t.time(SCROLL_TIME)))
                .size(BATCH_SIZE)
                .source(src -> src.fetch(false))
        );

        SearchResponse<ArticleIndex> response = esClient.search(searchRequest, ArticleIndex.class);
        String currentScrollId = response.scrollId();

        // First page arrives with the initial search response.
        processSearchHits(response.hits().hits(), allIds);

        try {
            // Keep scrolling until a page comes back empty.
            while (true) {
                final String scrollId = currentScrollId;  // effectively-final copy for the lambda
                ScrollResponse<ArticleIndex> scrollResponse = esClient.scroll(
                        s -> s.scrollId(scrollId).scroll(Time.of(t -> t.time(SCROLL_TIME))),
                        ArticleIndex.class
                );

                if (scrollResponse.hits().hits().isEmpty()) {
                    break;
                }

                processSearchHits(scrollResponse.hits().hits(), allIds);
                currentScrollId = scrollResponse.scrollId();
                log.info("Processed {} ES documents", allIds.size());
            }
        } finally {
            // Release the scroll context. Guard against a null scroll ID
            // (possible if the initial search returned none) so cleanup never
            // throws and masks the original exception.
            final String lastScrollId = currentScrollId;
            if (lastScrollId != null) {
                esClient.clearScroll(c -> c.scrollId(lastScrollId));
            }
        }

        return allIds;
    }

    /**
     * Adds the ID of every hit to the accumulator set.
     *
     * @param hits search hits from a scroll page
     * @param ids  accumulator, mutated in place
     */
    private void processSearchHits(List<Hit<ArticleIndex>> hits, Set<String> ids) {
        hits.forEach(hit -> ids.add(hit.id()));
    }

    /**
     * Repairs the drift between the two stores in batches: documents present
     * only in MongoDB are bulk-indexed into ES; documents present only in ES
     * are bulk-deleted. Each batch is independent — a failing batch is logged
     * and the remaining batches still run.
     *
     * @param onlyInMongo IDs present in MongoDB but missing from ES
     * @param onlyInEs    IDs present in ES but missing from MongoDB
     */
    private void handleInconsistentDataInBatches(Set<String> onlyInMongo, Set<String> onlyInEs) {
        // Index documents that exist in MongoDB but are missing from ES.
        Lists.partition(new ArrayList<>(onlyInMongo), BATCH_SIZE).forEach(batch -> {
            try {
                // Fetch the whole batch with one "_id $in" query instead of
                // one findById round-trip per document (N+1 fix).
                Query query = Query.query(Criteria.where("_id").in(batch));
                List<ArticleDocument> mongoDocs = mongoTemplate.find(query, ArticleDocument.class);

                List<BulkOperation> operations = new ArrayList<>(mongoDocs.size());
                for (ArticleDocument mongoDoc : mongoDocs) {
                    ArticleIndex esDoc = convertToEsDocument(mongoDoc);
                    String docId = mongoDoc.getId();
                    operations.add(BulkOperation.of(op -> op
                            .index(idx -> idx
                                    .index(INDEX_NAME)
                                    .id(docId)
                                    .document(esDoc)
                            )
                    ));
                }

                if (!operations.isEmpty()) {
                    BulkResponse bulkResponse = esClient.bulk(b -> b.operations(operations));
                    handleBulkResponse(bulkResponse);
                }

                log.info("Synchronized batch of {} documents from MongoDB to ES", batch.size());
            } catch (Exception e) {
                log.error("Error synchronizing batch to ES", e);
            }
        });

        // Delete ES documents whose MongoDB counterpart no longer exists.
        Lists.partition(new ArrayList<>(onlyInEs), BATCH_SIZE).forEach(batch -> {
            try {
                List<BulkOperation> operations = batch.stream()
                        .map(id -> BulkOperation.of(op -> op
                                .delete(d -> d
                                        .index(INDEX_NAME)
                                        .id(id)
                                )
                        ))
                        .collect(Collectors.toList());

                if (!operations.isEmpty()) {
                    BulkResponse bulkResponse = esClient.bulk(b -> b.operations(operations));
                    handleBulkResponse(bulkResponse);
                }

                log.info("Deleted batch of {} documents from ES", batch.size());
            } catch (Exception e) {
                log.error("Error deleting batch from ES", e);
            }
        });
    }

    /**
     * Logs every per-item failure of a bulk response. Errors are logged rather
     * than thrown so one bad document does not abort the whole batch.
     *
     * @param response the bulk response to inspect
     */
    private void handleBulkResponse(BulkResponse response) {
        if (response.errors()) {
            response.items().forEach(item -> {
                if (item.error() != null) {
                    log.error("Bulk operation error for id {}: {}",
                            item.id(), item.error().reason());
                }
            });
        }
    }

    /**
     * Maps a MongoDB article document to its ES index representation,
     * copying fields one-to-one.
     *
     * @param mongoDoc the source document; must not be null
     * @return the ES document ready for indexing
     */
    private ArticleIndex convertToEsDocument(ArticleDocument mongoDoc) {
        return ArticleIndex.builder()
                .id(mongoDoc.getId())
                .title(mongoDoc.getTitle())
                .summary(mongoDoc.getSummary())
                .author(mongoDoc.getAuthor())
                .category(mongoDoc.getCategory())
                .thumbnailUrl(mongoDoc.getThumbnailUrl())
                .tags(mongoDoc.getTags())
                .source(mongoDoc.getSource())
                .status(mongoDoc.getStatus())
                .isTop(mongoDoc.getIsTop())
                .viewCount(mongoDoc.getViewCount())
                .createTime(mongoDoc.getCreateTime())
                .topOrder(mongoDoc.getTopOrder())
                .isHot(mongoDoc.getIsHot())
                .hotOrder(mongoDoc.getHotOrder())
                .isRecommend(mongoDoc.getIsRecommend())
                .recommendOrder(mongoDoc.getRecommendOrder())
                .build();
    }

    /**
     * Manually triggers the same reconciliation run that the scheduler
     * performs nightly.
     */
    public void manualCleanup() {
        syncData();
    }
}