import re
from typing import Optional, cast

import jieba.analyse

from util.rag.stopwords import STOPWORDS

class JiebaKeywordTableHandler:
    """Extract keywords from (Chinese/mixed) text using jieba's TF-IDF analyser,
    filtered through the project stop-word list."""

    def __init__(self):
        # Route jieba's default TF-IDF extractor through the shared stop-word set
        # so extract_tags() never returns stop words.
        jieba.analyse.default_tfidf.stop_words = STOPWORDS  # type: ignore

    def extract_keywords(self, text: str, max_keywords_per_chunk: Optional[int] = 10) -> set[str]:
        """Return up to ``max_keywords_per_chunk`` keywords extracted from ``text``,
        expanded with the alphanumeric sub-tokens of each keyword.

        Args:
            text: Input text to analyse.
            max_keywords_per_chunk: Maximum number of top-ranked keywords to keep
                (passed to jieba as ``topK``; ``None`` means no limit).

        Returns:
            Set of keywords plus their non-stop-word sub-tokens.
        """
        keywords = jieba.analyse.extract_tags(
            sentence=text,
            topK=max_keywords_per_chunk,
        )
        # jieba.analyse.extract_tags returns list[Any] when withFlag is False by default.
        keywords = cast(list[str], keywords)

        # _expand_tokens_with_subtokens already returns a set — the original
        # wrapped it in a redundant set() copy.
        return self._expand_tokens_with_subtokens(set(keywords))

    def _expand_tokens_with_subtokens(self, tokens: set[str]) -> set[str]:
        """Return ``tokens`` plus, for each token containing multiple
        alphanumeric runs, those runs as extra entries (stop words excluded)."""
        results: set[str] = set()
        for token in tokens:
            results.add(token)
            sub_tokens = re.findall(r"\w+", token)
            if len(sub_tokens) > 1:
                # Test membership against STOPWORDS directly: the original built
                # list(STOPWORDS) on every sub-token check, turning an O(1) set
                # lookup into an O(n) list scan per word.
                results.update(w for w in sub_tokens if w not in STOPWORDS)

        return results
