package org.chen.task;

import co.elastic.clients.elasticsearch.ElasticsearchClient;
import co.elastic.clients.elasticsearch.core.UpdateRequest;
import com.influxdb.client.InfluxDBClient;
import com.influxdb.client.WriteApi;
import com.influxdb.client.domain.WritePrecision;
import com.influxdb.client.write.Point;
import lombok.extern.slf4j.Slf4j;
import org.chen.config.TaskExecutorConfig;
import org.chen.mapper.ArticleElasticsearchRepository;
import org.chen.mapper.ArticleRepository;
import org.chen.model.elastic.ArticleIndex;
import org.chen.model.mongodb.ArticleDocument;
import org.chen.utils.DateUtil;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.data.mongodb.core.FindAndModifyOptions;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.scheduling.annotation.Async;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;

import javax.annotation.PostConstruct;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.util.*;

@Component
@Slf4j
public class UpdateViewCountSchedule {

    /** Prefix of the Redis keys that buffer per-article view-count deltas. */
    private static final String COUNT_KEY_PREFIX = "article:count:";
    /** Cached formatters — DateTimeFormatter is thread-safe and cheap to reuse. */
    private static final DateTimeFormatter DAY_FORMAT = DateTimeFormatter.ofPattern("yyyyMMdd");
    private static final DateTimeFormatter HOUR_FORMAT = DateTimeFormatter.ofPattern("yyyyMMddHH");

    private final StringRedisTemplate redisTemplate;
    private final MongoTemplate mongoTemplate;
    private final ArticleElasticsearchRepository articleIndexRepository;
    private final ThreadPoolTaskExecutor taskExecutor;
    private final WriteApi writeApi;

    public UpdateViewCountSchedule(
            ArticleElasticsearchRepository articleIndexRepository,
            MongoTemplate mongoTemplate,
            InfluxDBClient influxDBClient,
            @Qualifier("taskExecutor") ThreadPoolTaskExecutor taskExecutor,
            StringRedisTemplate redisTemplate) {

        this.redisTemplate = redisTemplate;
        this.articleIndexRepository = articleIndexRepository;
        this.mongoTemplate = mongoTemplate;
        this.writeApi = influxDBClient.makeWriteApi();
        this.taskExecutor = taskExecutor;
    }

    /**
     * Scheduled flush (every 10 minutes) of buffered view-count deltas
     * ("article:count:{id}") from Redis into MongoDB and Elasticsearch.
     */
    @Scheduled(cron = "0 0/10 * * * ?") // every 10 minutes
    public void syncViewCount() {
        log.info("---syncViewCount task------produce Data with fixed rate task: Execution Time - {}", DateUtil.nowDateTime());
        // keys() may return null depending on the connection factory; the
        // original code would then NPE in the for-loop.
        // NOTE(review): KEYS is an O(n) server-side scan; prefer SCAN for
        // large keyspaces.
        Set<String> keys = redisTemplate.keys(COUNT_KEY_PREFIX + "*");
        if (keys == null || keys.isEmpty()) {
            return;
        }
        log.info("Keys found: {}", keys);
        for (String key : keys) {
            String id = key.substring(COUNT_KEY_PREFIX.length());
            String count = redisTemplate.opsForValue().get(key);
            if (count != null) {
                taskExecutor.execute(() -> {
                    // NOTE(review): increments arriving between the get() above
                    // and this delete() are lost; opsForValue().getAndDelete(key)
                    // (Spring Data Redis 2.6+) would close that race.
                    updateViewCountToDB(id, Long.parseLong(count));
                    redisTemplate.delete(key);
                });
            }
        }
    }

    /**
     * Adds {@code count} to the article's {@code viewCount} in MongoDB and
     * mirrors the resulting total into the Elasticsearch index.
     *
     * @param id    article id
     * @param count delta to add to the persisted view count
     * @throws RuntimeException if the ES index entry is missing or either
     *                          store update fails (original cause preserved)
     */
    // NOTE(review): @Async has no effect for the internal calls made via
    // taskExecutor.execute(...) above — self-invocation bypasses the Spring
    // proxy. It only applies when another bean invokes this method directly.
    @Async
    public void updateViewCountToDB(String id, Long count) {
        try {
            // 1. Atomic increment in MongoDB, returning the post-update document.
            Query query = new Query(Criteria.where("id").is(id));
            Update update = new Update().inc("viewCount", count);
            ArticleDocument updated = mongoTemplate.findAndModify(
                    query, update,
                    FindAndModifyOptions.options().returnNew(true),
                    ArticleDocument.class
            );

            // Article no longer exists in MongoDB — nothing to mirror to ES.
            if (updated == null) {
                return;
            }

            // 2. Push the authoritative total from MongoDB into the ES index.
            ArticleIndex index = articleIndexRepository.findById(id)
                    .orElseThrow(() -> new RuntimeException("文章索引不存在"));
            index.setViewCount(updated.getViewCount());
            articleIndexRepository.save(index);

        } catch (Exception e) {
            log.error("Failed to update view count for article: {}", id, e);
            throw new RuntimeException("Failed to update view count", e);
        }
    }

    /**
     * Nightly (midnight) roll-up of the previous day's PV/UV counters from
     * Redis into InfluxDB, followed by deletion of the processed keys.
     */
    @Scheduled(cron = "0 0 0 * * ?")
    public void syncHistoricalData() {
        log.info("---syncHistoricalData task------produce Data with fixed rate task: Execution Time - {}", DateUtil.nowDateTime());
        try {
            // BUGFIX: minusDays(0) produced *today's* date, so the job
            // processed — and then deleted — the live counters.
            String yesterday = LocalDate.now().minusDays(1).format(DAY_FORMAT);

            // 1. Collect every PV/UV key. The two wildcard scans below are
            // supersets of the date-suffixed scans the original code ran as
            // well, so the redundant round-trips were dropped.
            Set<String> allKeys = new HashSet<>();

            // 1.1 PV keys (hourly buckets and daily totals)
            Set<String> pvScan = redisTemplate.keys("article:pv:*");
            if (pvScan != null) {
                allKeys.addAll(pvScan);
            }

            // 1.2 UV keys (HyperLogLog)
            Set<String> uvScan = redisTemplate.keys("article:uv:*");
            if (uvScan != null) {
                allKeys.addAll(uvScan);
            }

            log.info("Found total {} keys for date: {}", allKeys.size(), yesterday);

            if (allKeys.isEmpty()) {
                return;
            }

            // 2. Group keys by article id: "article:{pv|uv}:{articleId}[:...]".
            Map<String, List<String>> articleKeysMap = new HashMap<>();
            for (String key : allKeys) {
                String[] parts = key.split(":");
                if (parts.length >= 3) {
                    articleKeysMap.computeIfAbsent(parts[2], k -> new ArrayList<>()).add(key);
                }
            }

            List<Point> points = new ArrayList<>();
            for (Map.Entry<String, List<String>> entry : articleKeysMap.entrySet()) {
                String articleId = entry.getKey();
                List<String> articleKeys = entry.getValue();

                String dailyPvKey = "article:pv:" + articleId + ":" + yesterday;
                // BUGFIX: the original uvKey reused the "article:pv:" prefix
                // (copy-paste), so "total UV" always re-read the PV counter.
                String dailyUvKey = "article:uv:" + articleId + ":" + yesterday;

                // 2.1 Hourly PV. BUGFIX: String.matches() required the *whole*
                // key to equal "article:pv:{id}", which never matches an
                // hour-suffixed key, so hourlyPv was always empty and no points
                // were ever produced. Hourly keys are assumed to be
                // "article:pv:{id}:{yyyyMMdd}:{HH}" — TODO confirm against the
                // code that writes these counters.
                Map<String, Long> hourlyPv = new HashMap<>();
                for (String key : articleKeys) {
                    if (key.startsWith(dailyPvKey + ":")) {
                        String hour = key.substring(key.length() - 2);
                        String count = redisTemplate.opsForValue().get(key);
                        if (count != null) {
                            hourlyPv.put(hour, Long.parseLong(count));
                        }
                    }
                }

                // 2.2 Daily total PV.
                String totalPv = redisTemplate.opsForValue().get(dailyPvKey);
                long pv = totalPv != null ? Long.parseLong(totalPv) : 0;

                // 2.3 Daily total UV. The UV keys are HyperLogLogs (see 1.2),
                // so the cardinality must be read with PFCOUNT — a plain GET
                // would return the raw HLL encoding and break Long.parseLong.
                // size() of a missing key is 0.
                long uv = redisTemplate.opsForHyperLogLog().size(dailyUvKey);

                // 2.4 One InfluxDB point per hour bucket.
                for (Map.Entry<String, Long> hourEntry : hourlyPv.entrySet()) {
                    String hour = hourEntry.getKey();
                    // BUGFIX: the timestamp was hard-coded to hour "00" for
                    // every point, collapsing all hourly points onto one instant.
                    String fullHour = yesterday + hour;
                    // Per-hour UV from the hour-suffixed HyperLogLog — the
                    // original read "article:uv:{id}" with no suffix, a key the
                    // scans show in suffixed form only. TODO confirm the hourly
                    // UV key layout matches the PV one.
                    long hourUv = redisTemplate.opsForHyperLogLog()
                            .size("article:uv:" + articleId + ":" + yesterday + ":" + hour);
                    // NOTE(review): hour buckets are interpreted as UTC here —
                    // confirm the writer side uses UTC as well.
                    long epochMillis = LocalDateTime.parse(fullHour, HOUR_FORMAT)
                            .toInstant(ZoneOffset.UTC).toEpochMilli();
                    points.add(Point.measurement("article_stats")
                            .addTag("article_id", articleId)
                            .addField("pv", hourEntry.getValue())
                            .addField("total_pv", pv)
                            .addField("total_uv", uv)
                            .addField("uv", hourUv)
                            .time(epochMillis, WritePrecision.MS));
                    // Single parameterized line replaces the original block of
                    // eight log.info calls (one of which passed WritePrecision.MS
                    // as a dangling format argument).
                    log.info("article={} hour={} pv={} totalPv={} totalUv={} hourUv={} ts={}",
                            articleId, hour, hourEntry.getValue(), pv, uv, hourUv, epochMillis);
                }
            }

            // 3. Batch write to InfluxDB.
            if (!points.isEmpty()) {
                writeApi.writePoints(points);
                log.info("Successfully wrote {} points to InfluxDB", points.size());
            }

            // 4. Cleanup. BUGFIX: deleting allKeys would also wipe the current
            // day's live counters picked up by the wildcard scans; only keys
            // carrying the processed date are removed.
            Set<String> expiredKeys = new HashSet<>();
            for (String key : allKeys) {
                if (key.contains(yesterday)) {
                    expiredKeys.add(key);
                }
            }
            if (!expiredKeys.isEmpty()) {
                redisTemplate.delete(expiredKeys);
                log.info("Cleaned up {} Redis keys for date: {}", expiredKeys.size(), yesterday);
            }

        } catch (Exception e) {
            // Best-effort nightly job: log and let the next run retry.
            log.error("Sync historical data failed", e);
        }
    }

    /**
     * Manual trigger: runs both sync jobs immediately, on the caller's thread.
     */
    public void manualCleanup() {
        syncViewCount();
        syncHistoricalData();
    }
}
