package com.thinvent.recommend.manager.manager.impl;

import co.elastic.clients.elasticsearch.ElasticsearchClient;
import co.elastic.clients.elasticsearch._types.ElasticsearchException;
import co.elastic.clients.elasticsearch.core.SearchRequest;
import co.elastic.clients.elasticsearch.core.SearchResponse;
import co.elastic.clients.elasticsearch.core.search.Hit;
import com.thinvent.recommend.common.enums.PositionKeywordEnums;
import com.thinvent.recommend.entity.KbFileHistory;
import com.thinvent.recommend.entity.KbFileInfo;
import com.thinvent.recommend.entity.TEhrUserInfo;
import com.thinvent.recommend.entity.TQywxUserInfo;
import com.thinvent.recommend.manager.dto.KbFileContentDTO;
import com.thinvent.recommend.manager.dto.Pager;
import com.thinvent.recommend.manager.dto.RecommendedDocDTO;
import com.thinvent.recommend.manager.dto.UserBehaviorFilesDTO;
import com.thinvent.recommend.manager.manager.FileInfoManager;
import com.thinvent.recommend.manager.manager.RecommendationManager;
import com.thinvent.recommend.manager.manager.Text2VecHelperManager;
import com.thinvent.recommend.manager.manager.UserBehaviorDataManager;
import com.thinvent.recommend.mapper.KbFileHistoryMapper;
import com.thinvent.recommend.mapper.TEhrUserInfoMapper;
import com.thinvent.recommend.mapper.TQywxUserInfoMapper;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import java.io.IOException;
import java.util.*;
import java.util.stream.Collectors;

@Component
public class RecommendationManagerImpl implements RecommendationManager {
    private static final Logger log = LoggerFactory.getLogger(RecommendationManagerImpl.class);
    /** Name of the dense_vector field in the ES index. */
    private static final String VEC_FIELD = "vec";
    /** Action types kept for department-history recommendations: 1=edit, 2=preview, 3=download. */
    private static final Set<String> RELEVANT_ACTION_TYPES =
            new HashSet<>(Arrays.asList("1", "2", "3"));
    /** Global term -> dense-vector index mapping, taken straight from the enum. */
    private final Map<String, Integer> termToIndex = PositionKeywordEnums.getTermToIndexMap();

    private final UserBehaviorDataManager userBehaviorDataManager;
    private final FileInfoManager fileInfoManager;
    private final Text2VecHelperManager helperManager;
    private final ElasticsearchClient esClient;
    private final TEhrUserInfoMapper ehrUserInfoMapper;
    private final KbFileHistoryMapper kbFileHistoryMapper;
    private final TQywxUserInfoMapper tQywxUserInfoMapper;

    @Value("${elasticsearch.rest.indexName}")
    private String indexName;
    @Value("${elasticsearch.vector.dims}")
    private int vectorDims;
    @Value("${elasticsearch.knn.size}")
    private int resultSize;
    @Value("${elasticsearch.knn.numCandidates}")
    private int numCandidates;

    @Autowired
    public RecommendationManagerImpl(UserBehaviorDataManager userBehaviorDataManager,
                                     FileInfoManager fileInfoManager,
                                     Text2VecHelperManager helperManager,
                                     ElasticsearchClient esClient,
                                     TEhrUserInfoMapper ehrUserInfoMapper,
                                     KbFileHistoryMapper kbFileHistoryMapper,
                                     TQywxUserInfoMapper tQywxUserInfoMapper) {
        this.userBehaviorDataManager = userBehaviorDataManager;
        this.fileInfoManager = fileInfoManager;
        this.helperManager = helperManager;
        this.esClient = esClient;
        this.ehrUserInfoMapper = ehrUserInfoMapper;
        this.kbFileHistoryMapper = kbFileHistoryMapper;
        this.tQywxUserInfoMapper = tQywxUserInfoMapper;
    }

    /**
     * Recommends documents for a user based on their recorded behavior.
     *
     * <p>Pipeline: load behavior records, weight each touched file by action type,
     * compute TF–IDF vectors for the touched documents, blend them into one
     * L2-normalized user profile vector, then run an ES KNN search with that vector.
     * Falls back to popular files whenever any stage yields no usable data.
     *
     * @param userId the user identifier
     * @return recommended documents (at most {@code resultSize}), never {@code null}
     */
    @Override
    public List<RecommendedDocDTO> getUserRecommendations(String userId) {
        // 1. Fetch the user's behavior records.
        List<UserBehaviorFilesDTO> behaviors = userBehaviorDataManager.getUserBehaviorData(userId);
        if (behaviors.isEmpty()) {
            log.info("用户 {} 无行为记录，使用数据库热门文件作为推荐", userId);
            return fallbackRecommendations();
        }

        // 1.1 One weight per file. When the same file was touched by several action
        // types, keep the strongest weight instead of whichever record came last.
        // LinkedHashMap preserves the original behavior order for step 2.
        Map<String, Double> behaviorWeights = new LinkedHashMap<>();
        for (UserBehaviorFilesDTO b : behaviors) {
            behaviorWeights.merge(b.getFildId(), getBehaviorWeight(b.getType()), Math::max);
        }

        // 2. Load file info once per distinct file id (behaviors may repeat a file;
        // duplicate documents would otherwise skew the TF–IDF statistics).
        List<KbFileInfo> infos = new ArrayList<>(behaviorWeights.size());
        for (String fileId : behaviorWeights.keySet()) {
            KbFileInfo info = fileInfoManager.getFileInfoById(fileId);
            if (info != null) {
                infos.add(info);
            }
        }
        if (infos.isEmpty()) {
            log.warn("用户 {} 的行为文档在 FileInfoManager 中未找到，使用热门文件", userId);
            return fallbackRecommendations();
        }

        // 3. Fetch and parse the document contents.
        List<KbFileContentDTO> docs = helperManager.fetchAndParse(infos);
        if (docs.isEmpty()) {
            log.warn("用户 {} 行为文档解析后为空，使用热门文件", userId);
            return fallbackRecommendations();
        }

        // 4. Per-document TF–IDF vectors: docId -> (term -> weight).
        Map<String, Map<String, Double>> docVectors;
        try {
            docVectors = helperManager.computeTfIdf(docs);
        } catch (Exception e) {
            log.error("用户 {} TF–IDF 计算失败，使用热门文件", userId, e);
            return fallbackRecommendations();
        }
        if (docVectors.isEmpty()) {
            log.warn("用户 {} 文档 TF–IDF 结果为空，使用热门文件", userId);
            return fallbackRecommendations();
        }

        // 5. Behavior-weighted average of the document vectors = sparse user profile.
        Map<String, Double> weightedSum = new HashMap<>();
        double totalWeight = 0.0;
        for (Map.Entry<String, Map<String, Double>> entry : docVectors.entrySet()) {
            double w = behaviorWeights.getOrDefault(entry.getKey(), 1.0);
            totalWeight += w;
            for (Map.Entry<String, Double> term : entry.getValue().entrySet()) {
                weightedSum.merge(term.getKey(), term.getValue() * w, Double::sum);
            }
        }
        if (totalWeight == 0.0) {
            totalWeight = 1.0; // defensive: avoid division by zero
        }
        for (Map.Entry<String, Double> e : weightedSum.entrySet()) {
            e.setValue(e.getValue() / totalWeight);
        }

        // BUG FIX: the sparse profile is keyed by *terms*, so it must be projected
        // onto dense indices via termToIndex — exactly as recommendByUserPosition
        // does. The old toDenseVector looked terms up by String.valueOf(i)
        // ("0","1",...) and therefore produced an all-zero query vector.
        List<Double> userVec = buildNormalizedDenseVector(weightedSum);

        // 6. ES native KNN search.
        List<Hit<Map>> hits;
        try {
            hits = knnSearch(userVec);
        } catch (ElasticsearchException | IOException e) {
            // Pass the throwable as the trailing argument so the stack trace is logged.
            log.error("用户 {} ES KNN 检索失败: {}，使用热门文件", userId, e.getMessage(), e);
            return fallbackRecommendations();
        }
        if (hits.isEmpty()) {
            log.info("用户 {} KNN 检索无结果，使用热门文件", userId);
            return fallbackRecommendations();
        }

        // 7. Deduplicate, cap, and convert.
        return toRecommendations(hits);
    }

    /**
     * Recommends documents based on the keywords configured for the user's position.
     *
     * <p>Resolves the user's position via EHR, looks up the fine-grained keywords in
     * {@link PositionKeywordEnums}, turns them into a normalized TF–IDF query vector
     * and runs the same ES KNN search as {@link #getUserRecommendations(String)}.
     * Falls back to popular files on any missing data or failure.
     *
     * @param userId the user identifier (WX id)
     * @return recommended documents (at most {@code resultSize}), never {@code null}
     */
    @Override
    public List<RecommendedDocDTO> recommendByUserPosition(String userId) {
        // 1. Resolve the user and their position.
        TEhrUserInfo user = ehrUserInfoMapper.selectByWxId(userId);
        if (user == null || StringUtils.isBlank(user.getPosition())) {
            log.warn("用户 {} 岗位信息缺失，返回热门文件", userId);
            return fallbackRecommendations();
        }
        String position = user.getPosition();

        // 2. Fine-grained keywords for that position from the enum.
        PositionKeywordEnums posEnum = PositionKeywordEnums.fromPosition(position);
        if (posEnum == null || posEnum.getKeywords().isEmpty()) {
            log.warn("岗位 {} 未配置关键词或关键词列表为空，返回热门文件", position);
            return fallbackRecommendations();
        }
        List<String> keywords = posEnum.getKeywords();

        // 3. Sparse TF–IDF vector over the keywords.
        Map<String, Double> sparseVec;
        try {
            sparseVec = helperManager.computeTfIdfForTerms(keywords);
        } catch (Exception ex) {
            log.error("岗位 {} 关键词 TF–IDF 计算失败，返回热门文件", position, ex);
            return fallbackRecommendations();
        }
        if (sparseVec.isEmpty()) {
            log.warn("岗位 {} TF–IDF 向量为空，返回热门文件", position);
            return fallbackRecommendations();
        }

        // 4. Sparse -> fixed-length dense vector, L2-normalized.
        List<Double> queryVec = buildNormalizedDenseVector(sparseVec);

        // 5. ES native KNN search (shared with getUserRecommendations).
        List<Hit<Map>> hits;
        try {
            hits = knnSearch(queryVec);
        } catch (ElasticsearchException | IOException e) {
            // Pass the throwable as the trailing argument so the stack trace is logged.
            log.error("岗位 {} ES KNN 检索失败: {}，返回热门文件", position, e.getMessage(), e);
            return fallbackRecommendations();
        }

        // 6. Fall back when nothing was found.
        if (hits.isEmpty()) {
            log.info("岗位 {} KNN 检索无结果，返回热门文件", position);
            return fallbackRecommendations();
        }

        // 7. Deduplicate, cap, and convert.
        return toRecommendations(hits);
    }

    /**
     * Recommends documents that colleagues in the same department interacted with,
     * ranked by interaction frequency.
     *
     * @param userId the user identifier (WX user id)
     * @return documents ordered by descending peer-interaction count (ties broken by
     *         file id for determinism); empty when the user, peers or history are missing
     */
    @Override
    public List<RecommendedDocDTO> recommendByDeptHistory(String userId) {
        // 1. Resolve the current user and their department.
        TQywxUserInfo me = tQywxUserInfoMapper.selectByWxUserId(userId);
        if (me == null || me.getDeptId() == null) {
            return Collections.emptyList();
        }

        // 2. Other users in the same department (excluding the user themself).
        List<TQywxUserInfo> peers = tQywxUserInfoMapper
                .selectByDeptIdAndExcludeWxUserId(me.getDeptId(), userId);
        if (peers.isEmpty()) {
            return Collections.emptyList();
        }
        List<String> peerIds = peers.stream()
                .map(TQywxUserInfo::getWxUserId)
                .collect(Collectors.toList());

        // 3. Batch-load the peers' file history.
        List<KbFileHistory> records = kbFileHistoryMapper.selectByUserIds(peerIds);
        if (records.isEmpty()) {
            return Collections.emptyList();
        }

        // 4. Keep only edit/preview/download actions.
        List<KbFileHistory> filtered = records.stream()
                .filter(h -> RELEVANT_ACTION_TYPES.contains(h.getActionType()))
                .collect(Collectors.toList());
        if (filtered.isEmpty()) {
            return Collections.emptyList();
        }

        // 5. Count per fileId and return in descending frequency order; the key
        // tie-break makes the output deterministic across runs.
        Map<String, Long> freq = filtered.stream()
                .collect(Collectors.groupingBy(
                        KbFileHistory::getFileId,
                        Collectors.counting()
                ));

        return freq.entrySet().stream()
                .sorted(Map.Entry.<String, Long>comparingByValue(Comparator.reverseOrder())
                        .thenComparing(Map.Entry.comparingByKey()))
                .map(e -> new RecommendedDocDTO(e.getKey(), e.getValue().floatValue()))
                .collect(Collectors.toList());
    }

    /**
     * Runs a native ES KNN search over the {@value #VEC_FIELD} dense_vector field.
     *
     * @param queryVec the (already normalized) dense query vector
     * @return the raw hits (possibly empty)
     * @throws IOException            on transport failure
     * @throws ElasticsearchException on server-side failure
     */
    private List<Hit<Map>> knnSearch(List<Double> queryVec) throws IOException {
        SearchResponse<Map> resp = esClient.search(
                SearchRequest.of(s -> s
                        .index(indexName)
                        .size(resultSize)
                        .knn(k -> k
                                .field(VEC_FIELD)
                                .queryVector(queryVec)
                                .k(resultSize)
                                .numCandidates(numCandidates)
                        )
                ),
                Map.class
        );
        return resp.hits().hits();
    }

    /**
     * Converts hits to DTOs, de-duplicating by document id (insertion order kept)
     * and capping the result at {@code resultSize}.
     */
    private List<RecommendedDocDTO> toRecommendations(List<Hit<Map>> hits) {
        Set<String> seen = new LinkedHashSet<>();
        List<RecommendedDocDTO> results = new ArrayList<>(Math.min(hits.size(), resultSize));
        for (Hit<Map> hit : hits) {
            if (seen.add(hit.id())) {
                float score = hit.score() != null ? hit.score().floatValue() : 0f;
                results.add(new RecommendedDocDTO(hit.id(), score));
                if (results.size() >= resultSize) {
                    break;
                }
            }
        }
        return results;
    }

    /**
     * Converts a sparse TF–IDF vector into a fixed-length array matching the ES
     * dense_vector dimensionality, then L2-normalizes it.
     *
     * @param sparseVec sparse vector as {@code Map<term, weight>}
     * @return normalized dense vector of length {@code vectorDims}
     */
    private List<Double> buildNormalizedDenseVector(Map<String, Double> sparseVec) {
        // 1. Fixed-length, zero-initialized vector.
        List<Double> dense = new ArrayList<>(Collections.nCopies(vectorDims, 0.0));
        // 2. Place each non-zero term at its global index; out-of-range or
        // unknown terms are silently dropped.
        for (Map.Entry<String, Double> e : sparseVec.entrySet()) {
            Integer idx = termToIndex.get(e.getKey());
            if (idx != null && idx >= 0 && idx < vectorDims) {
                dense.set(idx, e.getValue());
            }
        }
        // 3. L2 norm.
        double sumSq = 0.0;
        for (double v : dense) {
            sumSq += v * v;
        }
        double norm = Math.sqrt(sumSq);
        // 4. Normalize (skip for the all-zero vector).
        if (norm > 0) {
            for (int i = 0; i < dense.size(); i++) {
                dense.set(i, dense.get(i) / norm);
            }
        }
        return dense;
    }

    /** Fallback recommendation: first page of active ("popular") files with score 0. */
    private List<RecommendedDocDTO> fallbackRecommendations() {
        Pager<KbFileInfo> pager = fileInfoManager.listActiveFiles(1, resultSize);
        List<RecommendedDocDTO> list = new ArrayList<>();
        for (KbFileInfo info : pager.getData()) {
            list.add(new RecommendedDocDTO(info.getId(), 0f));
        }
        return list;
    }

    /** Maps a behavior type label to its weight; unknown or null types count as 1.0. */
    private double getBehaviorWeight(String type) {
        if (type == null) {
            return 1.0; // previously an NPE in switch(type)
        }
        switch (type) {
            case "编辑": return 5.0;
            case "收藏": return 4.0;
            case "下载": return 3.0;
            case "点赞": return 2.0;
            default:      return 1.0;
        }
    }
}
