package org.abc.fund.service.funds;

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.abc.fund.config.PythonConfig;
import org.abc.fund.dto.tags.SimilarFundRecommendationDTO;
import org.abc.fund.dto.tags.TagClusterDTO;
import org.abc.fund.entity.funds.FundBasicInfo;
import org.abc.fund.entity.funds.FundTagRelation;
import org.abc.fund.repository.funds.FundBasicInfoRepository;
import org.abc.fund.repository.funds.FundTagRelationRepository;
import org.springframework.core.io.ClassPathResource;
import org.springframework.http.HttpStatus;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;
import org.springframework.web.server.ResponseStatusException;

import java.io.BufferedWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.time.Duration;
import java.time.Instant;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import java.util.regex.Pattern;

/**
 * Tag clustering + similar-fund recommendation service.
 *
 * <p>Builds a text document per fund from its tags and basic info, delegates the actual
 * TF-IDF/K-Means style clustering to an external Python script (shipped on the classpath),
 * and caches the parsed result in memory for {@link #CACHE_TTL}.
 *
 * <p>Thread-safety: the cached {@link ClusterResult} is published via an
 * {@link AtomicReference} with a double-checked refresh guarded by {@code synchronized (this)},
 * and the extracted script path is published via a {@code volatile} field with the same pattern.
 */
@Slf4j
@Service
@RequiredArgsConstructor
public class TagClusterService {

    /** How long a computed clustering stays valid before it is recomputed on demand. */
    private static final Duration CACHE_TTL = Duration.ofHours(6);
    /** Upper bound on the number of clusters requested from the Python script. */
    private static final int DEFAULT_MAX_CLUSTERS = 6;
    /** Number of keywords the script should report per cluster. */
    private static final int DEFAULT_TOP_KEYWORDS = 5;
    /** Number of similar-fund recommendations the script should report per fund. */
    private static final int DEFAULT_TOP_RECOMMENDATIONS = 5;
    /** Vocabulary cap passed to the script's vectorizer. */
    private static final int DEFAULT_MAX_FEATURES = 4000;
    /** Tokens with no discriminative value; dropped during tokenization (compared case-insensitively). */
    private static final Set<String> STOP_WORDS = Set.of(
            "在市",
            "开放式基金",
            "开放式",
            "基金",
            "of"
    );
    private static final Set<String> STOP_WORDS_NORMALIZED = STOP_WORDS.stream()
            .map(word -> word.toLowerCase(Locale.ROOT))
            .collect(Collectors.toUnmodifiableSet());
    /** Splits raw tag values on common CJK/ASCII separators and runs of whitespace. */
    private static final Pattern TOKEN_SPLIT_PATTERN = Pattern.compile("[,，;；/|]+|\\s+");
    /** Category order used when picking shared tags to explain a recommendation (most meaningful first). */
    private static final List<String> REASON_CATEGORY_PRIORITY = List.of(
            "主题",
            "行业",
            "风格",
            "策略",
            "基金类型",
            "产品类型",
            "上市状态"
    );

    private final FundTagRelationRepository relationRepository;
    private final FundBasicInfoRepository fundBasicInfoRepository;
    private final ObjectMapper objectMapper;
    private final PythonConfig pythonConfig;

    /** Last computed clustering; {@code null} until the first successful computation. */
    private final AtomicReference<ClusterResult> cache = new AtomicReference<>();
    /** Lazily extracted temp-file copy of the classpath Python script; volatile for safe publication. */
    private volatile Path scriptPath;

    /**
     * Returns all clusters together with their member funds, sorted by cluster id.
     *
     * @return the cached (or freshly computed) cluster list
     * @throws ResponseStatusException if no usable tag data exists or the script fails
     */
    public List<TagClusterDTO> getAllClusters() {
        ClusterResult result = ensureClusterResult();
        return result.clusters();
    }

    /**
     * Returns similar-fund recommendations for one fund.
     *
     * @param fundCode the fund to look up; must be non-blank
     * @return the fund's cluster id plus its recommended funds (possibly empty)
     * @throws ResponseStatusException 400 if {@code fundCode} is blank,
     *                                 404 if the fund is not present in the clustering
     */
    public SimilarFundRecommendationDTO getSimilarFunds(String fundCode) {
        if (!StringUtils.hasText(fundCode)) {
            throw new ResponseStatusException(HttpStatus.BAD_REQUEST, "fundCode 不能为空");
        }
        ClusterResult result = ensureClusterResult();
        Integer clusterId = result.fundClusterLookup().get(fundCode);
        if (clusterId == null) {
            throw new ResponseStatusException(HttpStatus.NOT_FOUND, "未找到该基金的聚类结果");
        }

        SimilarFundRecommendationDTO response = new SimilarFundRecommendationDTO();
        response.setFundCode(fundCode);
        response.setClusterId(clusterId);
        List<SimilarFundRecommendationDTO.RecommendedFundDTO> recommendations =
                result.recommendationsByFund().getOrDefault(fundCode, Collections.emptyList());
        response.setRecommendedFunds(recommendations);
        return response;
    }

    /**
     * Returns a fresh-enough cluster result, recomputing it when the cache is empty or stale.
     * Uses double-checked locking: the unsynchronized fast path avoids lock contention for
     * readers, and the synchronized slow path ensures only one thread recomputes at a time.
     */
    private ClusterResult ensureClusterResult() {
        ClusterResult current = cache.get();
        Instant now = Instant.now();
        if (current != null && Duration.between(current.generatedAt(), now).compareTo(CACHE_TTL) < 0) {
            return current;
        }

        synchronized (this) {
            // Re-check under the lock: another thread may have refreshed while we waited.
            current = cache.get();
            if (current != null && Duration.between(current.generatedAt(), Instant.now()).compareTo(CACHE_TTL) < 0) {
                return current;
            }
            ClusterResult refreshed = computeClusterResult();
            cache.set(refreshed);
            return refreshed;
        }
    }

    /**
     * Runs the full pipeline: build per-fund documents, invoke the Python script, and
     * parse its output into clusters, a fund→cluster lookup, and per-fund recommendations.
     *
     * @throws ResponseStatusException if fewer than two funds have usable tags, or the script fails
     */
    private ClusterResult computeClusterResult() {
        DocumentPreparationResult preparationResult = buildDocuments();
        List<Map<String, Object>> documents = preparationResult.documents();
        // Clustering is meaningless with fewer than two documents.
        if (documents.size() < 2) {
            throw new ResponseStatusException(HttpStatus.BAD_REQUEST, "有效基金标签不足，无法进行聚类");
        }

        JsonNode resultNode = invokeClusteringScript(documents);
        Map<Integer, TagClusterDTO> clusterMap = initClusters(resultNode.path("clusters"));
        Map<String, Integer> fundClusterLookup = new HashMap<>();

        for (JsonNode assignment : resultNode.withArray("assignments")) {
            int clusterId = assignment.path("clusterId").asInt();
            String fundCode = assignment.path("fundCode").asText();
            double score = assignment.path("score").asDouble();

            // Tolerate assignments referencing a cluster id missing from the "clusters" section.
            TagClusterDTO clusterDTO = clusterMap.computeIfAbsent(clusterId, id -> {
                TagClusterDTO dto = new TagClusterDTO();
                dto.setClusterId(id);
                return dto;
            });
            TagClusterDTO.ClusterFundDTO fundDTO = new TagClusterDTO.ClusterFundDTO();
            fundDTO.setFundCode(fundCode);
            fundDTO.setScore(score);
            clusterDTO.getFunds().add(fundDTO);
            fundClusterLookup.put(fundCode, clusterId);
        }

        // Highest-scoring funds first within each cluster.
        clusterMap.values().forEach(cluster ->
                cluster.getFunds().sort(Comparator.comparingDouble(TagClusterDTO.ClusterFundDTO::getScore).reversed()));

        Map<String, List<SimilarFundRecommendationDTO.RecommendedFundDTO>> recommendations =
                buildRecommendations(resultNode.withArray("recommendations"), clusterMap, preparationResult.descriptors());

        List<TagClusterDTO> clusters = clusterMap.values().stream()
                .sorted(Comparator.comparing(TagClusterDTO::getClusterId))
                .toList();

        return new ClusterResult(clusters, fundClusterLookup, recommendations, Instant.now());
    }

    /**
     * Parses the script's "clusters" array into DTOs keyed by cluster id.
     * Returns an empty map (never {@code null}) when the node is absent or not an array.
     */
    private Map<Integer, TagClusterDTO> initClusters(JsonNode clusterNodes) {
        Map<Integer, TagClusterDTO> clusterMap = new LinkedHashMap<>();
        if (clusterNodes == null || !clusterNodes.isArray()) {
            return clusterMap;
        }
        for (JsonNode clusterNode : clusterNodes) {
            TagClusterDTO dto = new TagClusterDTO();
            int clusterId = clusterNode.path("clusterId").asInt();
            dto.setClusterId(clusterId);
            List<String> keywords = objectMapper.convertValue(
                    clusterNode.path("keywords"),
                    new TypeReference<List<String>>() {
                    });
            if (keywords != null) {
                dto.setKeywords(keywords);
            }
            clusterMap.put(clusterId, dto);
        }
        return clusterMap;
    }

    /**
     * Parses the script's "recommendations" array, enriching each entry with a
     * human-readable reason, grouped by source fund and sorted by descending score.
     */
    private Map<String, List<SimilarFundRecommendationDTO.RecommendedFundDTO>> buildRecommendations(
            JsonNode recommendationNodes,
            Map<Integer, TagClusterDTO> clusterMap,
            Map<String, FundDescriptor> descriptors) {
        Map<String, List<SimilarFundRecommendationDTO.RecommendedFundDTO>> recommendations = new HashMap<>();
        if (recommendationNodes == null || !recommendationNodes.isArray()) {
            return recommendations;
        }

        for (JsonNode node : recommendationNodes) {
            String sourceFund = node.path("sourceFundCode").asText();
            String targetFund = node.path("targetFundCode").asText();
            double score = node.path("score").asDouble();
            int clusterId = node.path("clusterId").asInt();
            String reason = generateRecommendationReason(sourceFund, targetFund, clusterId, clusterMap, descriptors, node.path("reason").asText());

            SimilarFundRecommendationDTO.RecommendedFundDTO dto = new SimilarFundRecommendationDTO.RecommendedFundDTO();
            dto.setFundCode(targetFund);
            dto.setScore(score);
            dto.setReason(reason);

            recommendations
                    .computeIfAbsent(sourceFund, key -> new ArrayList<>())
                    .add(dto);
        }

        recommendations.values().forEach(list ->
                list.sort(Comparator.comparingDouble(SimilarFundRecommendationDTO.RecommendedFundDTO::getScore).reversed()));

        return recommendations;
    }

    /**
     * Builds one text document per fund from its tag relations plus basic-info fields,
     * and a parallel descriptor map used later for recommendation reasons.
     *
     * @throws ResponseStatusException 400 when no tag relations exist or no fund yields any token
     */
    private DocumentPreparationResult buildDocuments() {
        List<FundTagRelation> relations = relationRepository.findAll();
        if (relations.isEmpty()) {
            throw new ResponseStatusException(HttpStatus.BAD_REQUEST, "尚未配置任何基金标签");
        }

        Map<String, FundDescriptorBuilder> descriptorBuilders = new LinkedHashMap<>();
        for (FundTagRelation relation : relations) {
            if (relation.getTag() == null) {
                continue;
            }
            FundDescriptorBuilder builder = descriptorBuilders.computeIfAbsent(relation.getFundCode(), key -> new FundDescriptorBuilder());
            addDescriptorValue(builder, relation.getTag().getName(), relation.getTag().getCategory());
        }

        if (descriptorBuilders.isEmpty()) {
            throw new ResponseStatusException(HttpStatus.BAD_REQUEST, "基金标签数据为空");
        }

        List<FundBasicInfo> fundInfos = fundBasicInfoRepository.findAllById(descriptorBuilders.keySet());
        // Merge function keeps the first row: a duplicate code in the data must not abort clustering.
        Map<String, FundBasicInfo> infoMap = fundInfos.stream()
                .collect(Collectors.toMap(FundBasicInfo::getCode, info -> info, (first, second) -> first));

        // Fold selected basic-info fields into the document under fixed categories.
        descriptorBuilders.forEach((fundCode, builder) -> {
            FundBasicInfo info = infoMap.get(fundCode);
            if (info != null) {
                builder.setFundName(info.getName());
                addDescriptorValue(builder, info.getName(), "主题");
                addDescriptorValue(builder, info.getType(), "产品类型");
                addDescriptorValue(builder, info.getFundType(), "基金类型");
                addDescriptorValue(builder, info.getInvestStyle(), "风格");
                addDescriptorValue(builder, info.getStatus(), "上市状态");
            }
        });

        List<Map<String, Object>> documents = new ArrayList<>();
        Map<String, FundDescriptor> descriptors = new LinkedHashMap<>();

        descriptorBuilders.forEach((fundCode, builder) -> {
            List<String> tokens = builder.tokens();
            // Funds whose every value was filtered out produce no document.
            if (tokens.isEmpty()) {
                return;
            }
            Map<String, Object> doc = new LinkedHashMap<>();
            doc.put("fundCode", fundCode);
            doc.put("text", String.join(" ", tokens));
            documents.add(doc);
            descriptors.put(fundCode, builder.build());
        });

        if (documents.isEmpty()) {
            throw new ResponseStatusException(HttpStatus.BAD_REQUEST, "基金标签数据为空");
        }

        return new DocumentPreparationResult(documents, descriptors);
    }

    /**
     * Tokenizes {@code value} and records each token on the builder, also under
     * {@code category} when one is given. No-op for a {@code null} builder.
     */
    private void addDescriptorValue(FundDescriptorBuilder builder, String value, String category) {
        if (builder == null) {
            return;
        }
        for (String token : tokenizeValue(value)) {
            builder.addToken(token);
            if (StringUtils.hasText(category)) {
                builder.addCategoryValue(category, token);
            }
        }
    }

    /**
     * Splits a raw value on separators/whitespace and sanitizes each part;
     * returns an empty collection for blank input.
     */
    private Collection<String> tokenizeValue(String value) {
        if (!StringUtils.hasText(value)) {
            return Collections.emptyList();
        }
        String trimmed = value.trim();
        if (trimmed.isEmpty()) {
            return Collections.emptyList();
        }
        List<String> results = new ArrayList<>();
        for (String part : TOKEN_SPLIT_PATTERN.split(trimmed)) {
            String sanitized = sanitizeToken(part);
            if (sanitized != null) {
                results.add(sanitized);
            }
        }
        return results;
    }

    /**
     * Normalizes a single token: strips brackets/bullets and internal whitespace, then
     * rejects blank tokens, 1-character tokens without any CJK char, and stop words.
     *
     * @return the cleaned token, or {@code null} when it should be discarded
     */
    private String sanitizeToken(String value) {
        if (!StringUtils.hasText(value)) {
            return null;
        }
        String normalized = value
                .replaceAll("[()（）]", "")
                .replaceAll("[·•]", "")
                .trim();
        if (normalized.isEmpty()) {
            return null;
        }
        normalized = normalized.replaceAll("\\s+", "");
        // A single Chinese character can still be meaningful; a single Latin char is noise.
        if (normalized.length() < 2 && normalized.chars().noneMatch(this::isChinese)) {
            return null;
        }
        if (isStopWord(normalized)) {
            return null;
        }
        return normalized;
    }

    /** True when the code point falls in the CJK Unified Ideographs block (U+4E00–U+9FFF). */
    private boolean isChinese(int codePoint) {
        return codePoint >= 0x4E00 && codePoint <= 0x9FFF;
    }

    /** Case-insensitive stop-word check against {@link #STOP_WORDS_NORMALIZED}. */
    private boolean isStopWord(String token) {
        String lower = token.toLowerCase(Locale.ROOT);
        return STOP_WORDS_NORMALIZED.contains(lower);
    }

    /**
     * Composes a human-readable reason for one recommendation. Preference order:
     * shared category tags (by {@link #REASON_CATEGORY_PRIORITY}, at most two) plus shared
     * raw tokens; else the script's own non-generic reason; else cluster keywords;
     * finally a generic fallback. At most four fragments are joined with "；".
     */
    private String generateRecommendationReason(String sourceFund,
                                                String targetFund,
                                                int clusterId,
                                                Map<Integer, TagClusterDTO> clusterMap,
                                                Map<String, FundDescriptor> descriptors,
                                                String originalReason) {
        FundDescriptor sourceDescriptor = descriptors.get(sourceFund);
        FundDescriptor targetDescriptor = descriptors.get(targetFund);
        List<String> reasonFragments = new ArrayList<>();
        if (sourceDescriptor != null && targetDescriptor != null) {
            for (String category : REASON_CATEGORY_PRIORITY) {
                String sharedTag = findSharedTag(
                        sourceDescriptor.tagsByCategory().get(category),
                        targetDescriptor.tagsByCategory().get(category));
                if (sharedTag != null) {
                    reasonFragments.add(formatReasonFragment(category, sharedTag));
                }
                if (reasonFragments.size() >= 2) {
                    break;
                }
            }
            String sharedTokens = buildSharedTokensFragment(sourceDescriptor, targetDescriptor);
            if (StringUtils.hasText(sharedTokens)) {
                reasonFragments.add(sharedTokens);
            }
        }

        // Only fall back to the script's reason when it is more specific than the generic default.
        if (reasonFragments.isEmpty() && StringUtils.hasText(originalReason) && !"同簇相似基金".equals(originalReason)) {
            reasonFragments.add(originalReason);
        }

        if (reasonFragments.isEmpty()) {
            TagClusterDTO clusterDTO = clusterMap.get(clusterId);
            if (clusterDTO != null && !CollectionUtils.isEmpty(clusterDTO.getKeywords())) {
                reasonFragments.add("同簇标签：" + String.join(" / ", clusterDTO.getKeywords()));
            }
        } else {
            TagClusterDTO clusterDTO = clusterMap.get(clusterId);
            String clusterInsight = buildClusterKeywordInsight(clusterDTO);
            if (StringUtils.hasText(clusterInsight)) {
                reasonFragments.add(clusterInsight);
            }
        }
        if (reasonFragments.isEmpty()) {
            reasonFragments.add("同簇相似基金");
        }
        return reasonFragments.stream()
                .filter(StringUtils::hasText)
                .limit(4)
                .collect(Collectors.joining("；"));
    }

    /**
     * Returns the first source tag also present in the target collection,
     * or {@code null} when either side is empty or no overlap exists.
     */
    private String findSharedTag(Collection<String> sourceTags, Collection<String> targetTags) {
        if (CollectionUtils.isEmpty(sourceTags) || CollectionUtils.isEmpty(targetTags)) {
            return null;
        }
        for (String tag : sourceTags) {
            if (targetTags.contains(tag)) {
                return tag;
            }
        }
        return null;
    }

    /** Formats a shared tag as a reason phrase appropriate for its category. */
    private String formatReasonFragment(String category, String tag) {
        return switch (category) {
            case "风格" -> "同类" + tag + "风格";
            case "主题" -> "同属" + tag + "主题";
            case "行业" -> "同属" + tag + "行业";
            case "策略" -> "策略相近：" + tag;
            case "基金类型" -> "基金类型一致：" + tag;
            case "产品类型" -> "产品类型一致：" + tag;
            case "上市状态" -> "上市状态相同：" + tag;
            default -> category + "相同：" + tag;
        };
    }

    /**
     * Builds a "shared tokens" fragment from the token intersection of two descriptors,
     * dropping 1-char non-CJK tokens and keeping at most four; {@code null} when nothing remains.
     */
    private String buildSharedTokensFragment(FundDescriptor sourceDescriptor, FundDescriptor targetDescriptor) {
        if (CollectionUtils.isEmpty(sourceDescriptor.tokens()) || CollectionUtils.isEmpty(targetDescriptor.tokens())) {
            return null;
        }
        LinkedHashSet<String> shared = new LinkedHashSet<>(sourceDescriptor.tokens());
        shared.retainAll(targetDescriptor.tokens());
        if (shared.isEmpty()) {
            return null;
        }
        shared.removeIf(token -> token.length() == 1 && token.chars().noneMatch(this::isChinese));
        if (shared.isEmpty()) {
            return null;
        }
        return "共有标签：" + shared.stream().limit(4).collect(Collectors.joining(" / "));
    }

    /** Formats up to five cluster keywords as an insight fragment; {@code null} when unavailable. */
    private String buildClusterKeywordInsight(TagClusterDTO clusterDTO) {
        if (clusterDTO == null || CollectionUtils.isEmpty(clusterDTO.getKeywords())) {
            return null;
        }
        return "簇关键词：" + clusterDTO.getKeywords().stream().limit(5).collect(Collectors.joining(" / "));
    }

    /**
     * Serializes the documents to JSON, pipes them to the Python script's stdin, and
     * parses its stdout (stderr is merged in) as the result tree.
     *
     * <p>The child's output is read to EOF before {@code waitFor()} so the child can never
     * block on a full pipe; UTF-8 is forced on both sides via env vars and explicit charsets.
     *
     * @throws ResponseStatusException 500 on a non-zero exit code, a non-"success" status,
     *                                 an I/O failure, or interruption (interrupt flag restored)
     */
    private JsonNode invokeClusteringScript(List<Map<String, Object>> documents) {
        Map<String, Object> payload = new LinkedHashMap<>();
        payload.put("documents", documents);
        payload.put("params", buildParamMap(documents.size()));

        try {
            String jsonPayload = objectMapper.writeValueAsString(payload);
            String pythonExecutable = resolvePythonExecutable();
            Path script = resolveScriptPath();

            ProcessBuilder processBuilder = new ProcessBuilder(pythonExecutable, script.toAbsolutePath().toString());
            processBuilder.environment().put("PYTHONIOENCODING", StandardCharsets.UTF_8.name());
            processBuilder.environment().put("PYTHONUTF8", "1");
            // Merge stderr into stdout so a single read captures diagnostics on failure.
            processBuilder.redirectErrorStream(true);
            Process process = processBuilder.start();

            // try-with-resources closes stdin, signalling EOF to the script.
            try (BufferedWriter writer = new BufferedWriter(
                    new OutputStreamWriter(process.getOutputStream(), StandardCharsets.UTF_8))) {
                writer.write(jsonPayload);
                writer.flush();
            }

            String output = new String(process.getInputStream().readAllBytes(), StandardCharsets.UTF_8);
            int exitCode = process.waitFor();

            if (exitCode != 0) {
                log.error("标签聚类脚本执行失败，退出码 {}，输出 {}", exitCode, output);
                throw new ResponseStatusException(HttpStatus.INTERNAL_SERVER_ERROR, "标签聚类脚本执行失败");
            }

            JsonNode root = objectMapper.readTree(output);
            String status = root.path("status").asText();
            if (!"success".equalsIgnoreCase(status)) {
                String message = root.path("message").asText("标签聚类失败");
                throw new ResponseStatusException(HttpStatus.INTERNAL_SERVER_ERROR, message);
            }
            return root;

        } catch (InterruptedException ex) {
            // Restore the interrupt flag per the InterruptedException contract.
            Thread.currentThread().interrupt();
            log.error("执行标签聚类脚本被中断", ex);
            throw new ResponseStatusException(HttpStatus.INTERNAL_SERVER_ERROR, "执行标签聚类脚本被中断", ex);
        } catch (IOException ex) {
            log.error("执行标签聚类脚本异常", ex);
            throw new ResponseStatusException(HttpStatus.INTERNAL_SERVER_ERROR, "执行标签聚类脚本异常", ex);
        }
    }

    /**
     * Builds the script's parameter map. Cluster count is capped at
     * {@link #DEFAULT_MAX_CLUSTERS}; the {@code Math.max(2, ...)} floor is defensive —
     * the caller already guarantees {@code docSize >= 2}.
     */
    private Map<String, Object> buildParamMap(int docSize) {
        Map<String, Object> params = new HashMap<>();
        int clusters = Math.min(Math.max(2, docSize), DEFAULT_MAX_CLUSTERS);
        params.put("n_clusters", clusters);
        params.put("top_keywords", DEFAULT_TOP_KEYWORDS);
        params.put("top_recommendations", DEFAULT_TOP_RECOMMENDATIONS);
        params.put("max_features", DEFAULT_MAX_FEATURES);
        return params;
    }

    /** Returns the configured Python interpreter path, falling back to "python" on PATH. */
    private String resolvePythonExecutable() {
        String configured = pythonConfig.getInterpreterPath();
        if (StringUtils.hasText(configured)) {
            return configured;
        }
        return "python";
    }

    /**
     * Copies the bundled script from the classpath to a temp file (once) and returns its path.
     * Re-extracts if the temp file has been deleted. Double-checked against the volatile field
     * so concurrent callers extract at most once.
     *
     * @throws ResponseStatusException 500 when the script cannot be extracted
     */
    private Path resolveScriptPath() {
        Path existing = this.scriptPath;
        if (existing != null && Files.exists(existing)) {
            return existing;
        }
        synchronized (this) {
            existing = this.scriptPath;
            if (existing != null && Files.exists(existing)) {
                return existing;
            }
            try {
                ClassPathResource resource = new ClassPathResource("scripts/tagClusterPy.py");
                Path tempFile = Files.createTempFile("tag-cluster", ".py");
                try (InputStream inputStream = resource.getInputStream()) {
                    Files.copy(inputStream, tempFile, StandardCopyOption.REPLACE_EXISTING);
                }
                tempFile.toFile().deleteOnExit();
                this.scriptPath = tempFile;
                return tempFile;
            } catch (IOException e) {
                log.error("加载标签聚类脚本失败", e);
                throw new ResponseStatusException(HttpStatus.INTERNAL_SERVER_ERROR, "无法加载标签聚类脚本", e);
            }
        }
    }

    /**
     * Immutable snapshot of one clustering run.
     *
     * @param clusters              clusters sorted by id, each with score-sorted member funds
     * @param fundClusterLookup     fund code → cluster id
     * @param recommendationsByFund fund code → score-sorted recommendations
     * @param generatedAt           computation timestamp used for TTL expiry
     */
    private record ClusterResult(
            List<TagClusterDTO> clusters,
            Map<String, Integer> fundClusterLookup,
            Map<String, List<SimilarFundRecommendationDTO.RecommendedFundDTO>> recommendationsByFund,
            Instant generatedAt) {
    }

    /**
     * Output of {@link #buildDocuments()}: the script payload documents plus
     * the per-fund descriptors used for reason generation.
     */
    private record DocumentPreparationResult(
            List<Map<String, Object>> documents,
            Map<String, FundDescriptor> descriptors) {
    }

    /**
     * Mutable accumulator for one fund's tokens and per-category tag sets.
     * LinkedHash* collections preserve insertion order and deduplicate.
     */
    private static class FundDescriptorBuilder {
        private final LinkedHashSet<String> tokens = new LinkedHashSet<>();
        private final Map<String, LinkedHashSet<String>> tagsByCategory = new LinkedHashMap<>();
        private String fundName;

        void addToken(String token) {
            tokens.add(token);
        }

        void addCategoryValue(String category, String value) {
            if (!StringUtils.hasText(category) || !StringUtils.hasText(value)) {
                return;
            }
            tagsByCategory
                    .computeIfAbsent(category, key -> new LinkedHashSet<>())
                    .add(value);
        }

        void setFundName(String name) {
            this.fundName = name;
        }

        /** Returns a fresh mutable copy of the accumulated tokens in insertion order. */
        List<String> tokens() {
            return new ArrayList<>(tokens);
        }

        /** Snapshots the accumulated state into an immutable {@link FundDescriptor}. */
        FundDescriptor build() {
            Map<String, List<String>> categories = new LinkedHashMap<>();
            tagsByCategory.forEach((category, values) -> categories.put(category, new ArrayList<>(values)));
            return new FundDescriptor(tokens(), categories, fundName);
        }
    }

    /**
     * Immutable view of one fund's text features.
     *
     * @param tokens         deduplicated tokens in insertion order
     * @param tagsByCategory category → tag values
     * @param fundName       display name from basic info; may be {@code null}
     */
    private record FundDescriptor(
            List<String> tokens,
            Map<String, List<String>> tagsByCategory,
            String fundName) {
    }
}


