package com.kgar.service.impl;


import cn.hutool.core.bean.BeanUtil;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.kgar.dao.ResumeDao;
import com.kgar.dto.Result;
import com.kgar.dto.SearchDTO;
import com.kgar.dto.UserDTO;
import com.kgar.entity.graph.node.JobPositionNode;
import com.kgar.entity.graph.node.ResumeNode;
import com.kgar.entity.graph.node.UserNode;
import com.kgar.entity.pojo.JobInfo;
import com.kgar.entity.pojo.JobInfoDoc;
import com.kgar.entity.pojo.Resume;
import com.kgar.repository.JobInfoDocRepository;
import com.kgar.repository.ResumeRepository;
import com.kgar.repository.UserRepository;
import com.kgar.service.AsyncService;
import com.kgar.service.JobInfoDocService;
import com.kgar.util.UserHolder;
import lombok.extern.slf4j.Slf4j;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
import org.elasticsearch.index.query.functionscore.ScriptScoreFunctionBuilder;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.suggest.Suggest;
import org.elasticsearch.search.suggest.SuggestBuilder;
import org.elasticsearch.search.suggest.SuggestBuilders;
import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder;
import org.jetbrains.annotations.NotNull;
import org.springframework.context.annotation.Lazy;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.elasticsearch.core.ElasticsearchRestTemplate;
import org.springframework.data.elasticsearch.core.SearchHit;
import org.springframework.data.elasticsearch.core.SearchHits;
import org.springframework.data.elasticsearch.core.mapping.IndexCoordinates;
import org.springframework.data.elasticsearch.core.query.NativeSearchQuery;
import org.springframework.data.elasticsearch.core.query.NativeSearchQueryBuilder;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
import java.util.*;
import java.util.stream.Collectors;

import static com.kgar.util.RedisConstants.HOT_JOB_TAGS;

@Slf4j
@Service
@Lazy
public class JobInfoDocServiceImpl implements JobInfoDocService {

    /** Page size used by every search this service issues against the "job_info" index. */
    private static final int PAGE_SIZE = 30;

    @Resource
    private ResumeDao resumeDao;

    @Resource
    private ElasticsearchRestTemplate esRestTemplate;
    @Resource
    private JobInfoDocRepository jobInfoDocRepository;
    @Resource
    private StringRedisTemplate stringRedisTemplate;
    @Resource
    private AsyncService asyncService;

    @Resource
    private ResumeRepository resumeRepository;

    @Resource
    private UserRepository userRepository;

    /**
     * Returns up to 10 auto-complete suggestions for the given prefix, backed by the
     * "suggestion" completion field of the "job_info" index.
     *
     * @param searchDTO carries the prefix in {@code keys}
     * @return Result wrapping a (possibly empty) list of suggestion strings
     */
    @Override
    public Result completion(SearchDTO searchDTO) {
        String keyword = searchDTO.getKeys();
        List<String> suggestions = new ArrayList<>();
        CompletionSuggestionBuilder completionSuggestionBuilder = SuggestBuilders.completionSuggestion("suggestion")
                .size(10)
                .skipDuplicates(true)
                .prefix(keyword);
        SuggestBuilder suggestBuilder = new SuggestBuilder();
        suggestBuilder.addSuggestion("suggest_text", completionSuggestionBuilder);
        Suggest suggest = esRestTemplate.suggest(suggestBuilder, IndexCoordinates.of("job_info")).getSuggest();
        Suggest.Suggestion<? extends Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>> suggestText =
                suggest.getSuggestion("suggest_text");
        for (Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option> entry : suggestText.getEntries()) {
            for (Suggest.Suggestion.Entry.Option option : entry.getOptions()) {
                suggestions.add(option.getText().string());
            }
        }

        return Result.ok(suggestions);
    }

    /**
     * Full-text search on the "recommend" field of the "job_info" index.
     *
     * @param searchDTO carries the keywords in {@code keys} and an optional 1-based page number
     * @return Result wrapping up to {@link #PAGE_SIZE} matching jobs
     */
    @Override
    public Result searchWithKeys(SearchDTO searchDTO) {
        // PageRequest is 0-based while the client sends a 1-based page number.
        // Guard against null AND non-positive values: PageRequest.of throws
        // IllegalArgumentException on a negative page index.
        Integer page = searchDTO.getPage();
        int from = (page == null || page < 1) ? 0 : page - 1;
        Pageable pageable = PageRequest.of(from, PAGE_SIZE);

        NativeSearchQuery query = new NativeSearchQueryBuilder()
                .withQuery(QueryBuilders.matchQuery("recommend", searchDTO.getKeys()))
                .withPageable(pageable)
                .build();
        SearchHits<JobInfoDoc> hits = esRestTemplate.search(query, JobInfoDoc.class, IndexCoordinates.of("job_info"));

        List<JobInfo> jobInfos = new ArrayList<>(hits.getSearchHits().size());
        for (SearchHit<JobInfoDoc> searchHit : hits.getSearchHits()) {
            jobInfos.add(BeanUtil.copyProperties(searchHit.getContent(), JobInfo.class));
        }
        return Result.ok(jobInfos);
    }

    /**
     * Same as {@link #searchWithKeys(SearchDTO)} but also bumps the tag's popularity
     * counter in the Redis "hot job tags" sorted set.
     */
    @Override
    public Result searchWithTag(SearchDTO searchDTO) {
        // Increment the tag's hotness score; fall back to inserting it with score 1
        // (incrementScore only returns null when run inside a pipeline/transaction).
        Double isIncr = stringRedisTemplate.opsForZSet().incrementScore(HOT_JOB_TAGS, searchDTO.getKeys(), 1);
        if (isIncr == null)
            stringRedisTemplate.opsForZSet().add(HOT_JOB_TAGS, searchDTO.getKeys(), 1);

        return searchWithKeys(searchDTO);
    }

    /**
     * Recommends the best-matching jobs for the current user.
     * <p>
     * Without a resume on file: returns a random selection (scores produced by a random
     * function-score query). With resumes: runs one match query per resume against the
     * jobs, merges the results, and asynchronously records resume→job relations in the
     * knowledge graph. All scores are rewritten as 0-100% match percentages.
     *
     * @return Result wrapping up to {@link #PAGE_SIZE} de-duplicated jobs, best first
     */
    @Override
    public Result getBestJob() {
        try {
            UserDTO user = UserHolder.getUser();
            Integer userId = user.getUserId();
            List<Resume> resumes = resumeDao.selectList(
                    Wrappers.lambdaQuery(Resume.class).eq(Resume::getUserId, userId));

            Pageable pageable = PageRequest.of(0, PAGE_SIZE);

            // TreeSet de-duplicates and orders by descending score.
            // NOTE(review): two DISTINCT jobs whose normalized scores happen to be equal
            // compare as "equal" under this comparator and one of them is silently
            // dropped; add a unique tiebreaker (e.g. job id) if that is not intended —
            // TODO confirm against JobInfo's identity.
            Set<JobInfo> jobInfos = new TreeSet<>(Comparator.comparing(JobInfo::getScore).reversed());

            // No resume on file: random selection, normalized like the recommended path.
            if (resumes.isEmpty()) {
                SearchHits<JobInfoDoc> searchHits = getSearchHits(pageable, QueryBuilders.matchAllQuery());
                jobInfos.addAll(normalizeHits(searchHits));
                return Result.ok(jobInfos.stream().toList());
            }

            // One query per resume: match the resume's raw text against the jobs.
            resumes.forEach(resume -> {
                StringBuilder sb = new StringBuilder();
                if (resume.getRawText() != null)
                    sb.append(resume.getRawText());
                String resumeText = sb.toString();

                QueryBuilder baseQuery = QueryBuilders.matchQuery("recommend", resumeText);
                SearchHits<JobInfoDoc> searchHits = getSearchHits(pageable, baseQuery);

                // Jobs related to this particular resume (also merged into the total set).
                List<JobInfo> jobInfoList = normalizeHits(searchHits);
                jobInfos.addAll(jobInfoList);

                // Record resume -> job relations in the knowledge graph asynchronously.
                asyncService.createOrUpdateRelationWithResume2Job(user, resume, jobInfoList);
            });

            // Keep the 30 best-scoring, de-duplicated jobs.
            List<JobInfo> list = jobInfos.stream()
                    .limit(PAGE_SIZE)
                    .toList();
            return Result.ok(list);
        } catch (Exception e) {
            // Log the full stack trace — e.getMessage() alone loses the cause and may be null.
            log.error("getBestJob failed", e);
            throw new RuntimeException(e);
        }

    }

    /**
     * Copies each hit into a {@link JobInfo} and rewrites its score as a 0-100% match
     * percentage, preserving relative order.
     * <p>
     * The raw maximum is inflated and the raw minimum deflated before normalization so
     * that the best hit does not read exactly 100% nor the worst exactly 0%.
     */
    private List<JobInfo> normalizeHits(SearchHits<JobInfoDoc> searchHits) {
        // Inflate the top score: +100 for small scores, +10% for large ones.
        float rawMax = searchHits.getMaxScore();
        double maxScore = rawMax + (rawMax < 100 ? 100 : rawMax * 0.1);

        double minScore = 0.1;
        if (searchHits.getTotalHits() > 0) {
            // Score of the last hit on this page (the page holds at most PAGE_SIZE hits).
            int lastIndex = searchHits.getTotalHits() < PAGE_SIZE
                    ? (int) searchHits.getTotalHits() - 1
                    : PAGE_SIZE - 1;
            float rawMin = searchHits.getSearchHit(lastIndex).getScore();
            // Deflate the bottom score by 10% when it is large enough.
            minScore = rawMin - (rawMin > 10 ? rawMin * 0.1 : 0);
        }

        List<JobInfo> result = new ArrayList<>(searchHits.getSearchHits().size());
        for (SearchHit<JobInfoDoc> searchHit : searchHits.getSearchHits()) {
            JobInfo jobInfo = BeanUtil.copyProperties(searchHit.getContent(), JobInfo.class);
            jobInfo.setScore(normalizeScore(searchHit.getScore(), minScore, maxScore));
            result.add(jobInfo);
        }
        return result;
    }


    /**
     * Runs the given base query wrapped in a function_score query whose score is a
     * random number in [0, 1), i.e. the hits come back in random order.
     */
    @NotNull
    private SearchHits<JobInfoDoc> getSearchHits(Pageable pageable, QueryBuilder baseQuery) {
        // Painless script producing a random score between 0 and 1.
        Script script = new Script(ScriptType.INLINE, "painless", "Math.random()", Collections.emptyMap());

        // ScriptScoreFunctionBuilder applies the script as the document score.
        ScriptScoreFunctionBuilder scriptScoreFunction = new ScriptScoreFunctionBuilder(script);

        // Wrap the base query in a function_score query carrying the random-score function.
        FunctionScoreQueryBuilder functionScoreQueryBuilder = new FunctionScoreQueryBuilder(baseQuery, scriptScoreFunction);

        NativeSearchQuery query = new NativeSearchQueryBuilder()
                .withQuery(functionScoreQueryBuilder)
                .withPageable(pageable)
                .build();

        return esRestTemplate.search(query, JobInfoDoc.class, IndexCoordinates.of("job_info"));
    }

    /**
     * Returns the first page (30 items) of all job documents.
     *
     * @param orderBy currently IGNORED — TODO(review): either apply it as a sort field
     *                (validating it against the index mapping) or remove it from the
     *                interface; callers may believe results are ordered when they are not
     */
    @Override
    public Result findAll(String orderBy) {
        Pageable pageable = PageRequest.of(0, PAGE_SIZE);
        Iterable<JobInfoDoc> all = jobInfoDocRepository.findAll(pageable);
        List<JobInfo> jobInfos = new ArrayList<>(PAGE_SIZE);
        all.forEach(jobInfoDoc ->
                jobInfos.add(BeanUtil.copyProperties(jobInfoDoc, JobInfo.class))
        );
        return Result.ok(jobInfos);
    }

    /**
     * Builds a tag → score assessment for the current user from the knowledge graph:
     * the user's own resume tags get a flat 50-point weight, and tags of matched job
     * positions are weighted by the relation's match score.
     *
     * @return Result wrapping a map of tag to accumulated score, highest first;
     *         a failure Result when the user or their resumes are missing
     */
    @Override
    public Result getAssessment() {
        UserDTO user = UserHolder.getUser();

        Map<String, Double> tagScoreMap = new HashMap<>();

        Optional<UserNode> optionalUserNode = userRepository.findById(user.getUserId());
        if (optionalUserNode.isEmpty()) {
            return Result.fail("用户不存在！");
        }

        UserNode userNode = optionalUserNode.get();
        // A user node with no resume relations may expose a null collection here —
        // treat null the same as "no resume on file".
        List<ResumeNode> resumeNodes = userNode.getResumeNodes() == null
                ? List.of()
                : userNode.getResumeNodes().stream().limit(30).toList();
        if (resumeNodes.isEmpty()) {
            return Result.fail("请先填写简历！");
        }

        // Accumulate resume-tag and job-tag scores in a single pass per resume.
        resumeNodes.forEach(resumeNode -> {
            addTagScores(tagScoreMap, resumeNode.getTags(), 50.0);
            if (resumeNode.getMatchWithRelations() != null) {
                resumeNode.getMatchWithRelations().forEach(matchWithRelation -> {
                    JobPositionNode jobPositionNode = matchWithRelation.getJobPositionNode();
                    addTagScores(tagScoreMap, jobPositionNode.getTag(), matchWithRelation.getMatchScore());
                });
            }
        });

        // Sort entries by descending score into an insertion-ordered map.
        Map<String, Double> sortedMap = tagScoreMap.entrySet().stream()
                .sorted(Map.Entry.<String, Double>comparingByValue().reversed())
                .collect(Collectors.toMap(
                        Map.Entry::getKey,
                        Map.Entry::getValue,
                        (oldValue, newValue) -> oldValue, // keys are unique; kept for toMap completeness
                        LinkedHashMap::new
                ));

        return Result.ok(sortedMap);
    }

    /** Adds {@code scoreToAdd} to each tag's running total; tolerates a null tag list. */
    private void addTagScores(Map<String, Double> tagScoreMap, List<String> tags, double scoreToAdd) {
        if (tags == null) {
            return;
        }
        tags.forEach(tag -> tagScoreMap.merge(tag, scoreToAdd, Double::sum));
    }

    /**
     * Converts a raw Elasticsearch score into a 0-100% match percentage via min-max
     * normalization, preserving relative order.
     *
     * @param rawScore raw score returned by Elasticsearch for one hit
     * @param minScore smallest (deflated) score in the current result set
     * @param maxScore largest (inflated) score in the current result set
     * @return match percentage in [0, 100]
     */
    private static double normalizeScore(double rawScore, double minScore, double maxScore) {
        if (maxScore == minScore) { // all documents scored identically — avoid division by zero
            return 100.0;
        } else {
            return ((rawScore - minScore) / (maxScore - minScore)) * 100.0;
        }
    }
}
