import asyncio
import json
import os
import pickle
import random
import re
import shutil
import sys
import traceback
from collections import Counter
from logging import Logger
from pathlib import Path
from typing import (TYPE_CHECKING, Collection, Dict, Iterable, List, Optional,
                    Set, Tuple)

from asyncache import cached as asyncached
from cachetools import LRUCache
from cachetools.keys import hashkey
from pydantic import ValidationError
from tqdm import tqdm

from chernc.agents.translator import TranslatorBase
from chernc.analyzer.matcher import (CherncMatcher, MatchFail, remove_comments,
                                     text_of_match)
from chernc.compiler.rust.error_messages import RustcErrorMessages
from chernc.constants import (DEFAULT_LOGGER_DIR, DEFAULT_LOGGER_FILE_NAME,
                              DEFAULT_PROJECT_CHECKPOINTS_DIR,
                              DEFAULT_PROJECT_LOGGER_DIR)
from chernc.core.cargo import CargoConfig, Package
from chernc.core.cargo_manager import CargoManager
from chernc.core.context_manager import ContextManager, UseManager
from chernc.core.repair import (adjust_messages, annotate_lines_for_code,
                                extern_use_rest, extract_rust_code_from_md,
                                extract_toml_code_from_md,
                                format_error_messages, into_whole,
                                select_messages)
from chernc.core.result import (ChunkResult, ExternDeclaration, FileResult,
                                Match, ModDeclaration, ProjectResult,
                                SnippetResult)
from chernc.llm.uni_tokenizer import UniTokenizer
from chernc.slicer.code_chunk import CodeChunk
from chernc.slicer.code_slice import CodeSlice, all_depended_slices
from chernc.slicer.slicer import Slicer
from chernc.textdiff.word_matcher import matched_text

if TYPE_CHECKING:
    from chernc.core.transpiler import ProjectTranspiler

from chernc.logging import (build_logger, build_simple_file_logger,
                            release_logger)

# Header prepended to every compilation context (see build_context); needed
# because snippets are compiled with rustc's unstable -Z flags.
RUSTC_PRIVATE = "#![feature(rustc_private)]\n"
# Module-level logger; per-chunk file loggers are created separately during translation.
logger = build_logger(__name__, DEFAULT_LOGGER_FILE_NAME, logger_dir=DEFAULT_LOGGER_DIR)


def build_simple_context(slices: Collection[CodeSlice], context_manager: ContextManager) -> str:
    """Build the Rust source context needed to compile `slices` in isolation.

    The context starts with a dummy #[test] function (rustc runs in --test
    mode and needs at least one test), followed by the translated text of
    every dependency slice, ordered by the lowest slice level of each result.
    """
    # rustc will run in test mode, make sure there is a #[test] function
    header = "#[test] fn dummy_test() {\n}\n"
    dep_results = set(
        context_manager.query_slice_res(dep) for dep in all_depended_slices(slices)
    ).difference(slices)
    ordered = sorted(dep_results, key=lambda res: min(s.level for s in res.slices))
    return header + "\n".join(res.text for res in ordered)


def text_of_slices(slices: Iterable[CodeSlice]):
    """Concatenate slice texts, one per line, in ascending `level` order."""
    ordered = sorted(slices, key=lambda s: s.level)
    return "\n".join(s.text for s in ordered)


class NeedToWithdraw(Exception):
    """Raised when translating `cur_chunk` requires rolling back a previously
    translated chunk (`withdraw_chunk`)."""

    def __init__(self, cur_chunk: CodeChunk, withdraw_chunk: CodeChunk):
        message = f"Need to withdraw chunk {withdraw_chunk.id} when translating chunk {cur_chunk.id}"
        super().__init__(message)


# A thin wrapper around the interactive translate-and-fix loop.
class InteractEngine:
    """
    Interactive translation engine: drives LLM translation of code chunks and
    repairs the compilation errors discovered along the way.
    """

    def __init__(self, project: "ProjectTranspiler", translator: TranslatorBase, package_installer: CargoManager):
        # Owning transpiler (source paths, project name/hash, config).
        self.project = project
        # LLM backend used for both translation and error-fix prompts.
        self.translator = translator
        # Cargo manager used to add extern crate dependencies on demand.
        self.package_installer = package_installer

    @asyncached(cache=LRUCache(maxsize=256), key=lambda self, rust_code: hashkey(rust_code))
    async def compile_rust_snippet(self, rust_code: str) -> List[RustcErrorMessages]:
        """Compile `rust_code` with rustc (test mode, JSON diagnostics, no
        codegen) and return the parsed diagnostic messages.

        The result is LRU-cached on the source text alone (`self` is excluded
        from the cache key), so identical snippets are compiled only once.
        """
        p = await asyncio.create_subprocess_shell(
            "rustc - --test --error-format=json -Z unstable-options -Z no-codegen",
            stdin=asyncio.subprocess.PIPE,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
        )

        # rustc reads the snippet from stdin ("-") and emits one JSON object
        # per line on stderr; stdout is not used.
        _, stderr = await p.communicate(rust_code.encode())
        msgs = []
        for line in stderr.decode().splitlines():
            try:
                d = json.loads(line)
                msgs.append(RustcErrorMessages.model_validate(d))
            except (json.JSONDecodeError, ValidationError):
                # Non-JSON stderr noise (e.g. ICE output) or schema drift must
                # not abort the whole pass; log it and keep the parsable messages.
                logger.error("Can not parse error message: %s", line)
                traceback.print_exc()

        return msgs

    @asyncached(cache=LRUCache(maxsize=256))
    async def has_error(self, code: str, context_end: int) -> bool:
        """Return True if `code` still has errors after diagnostics located in
        the context prefix (lines before `context_end`) are filtered out."""
        raw_msgs = await self.compile_rust_snippet(rust_code=code)
        return bool(adjust_messages(raw_msgs, context_end=context_end))

    async def translate_chunk(self, chunk: CodeChunk, chunk_logger: Logger, max_translation: int = 5) -> Optional[str]:
        """Translate one chunk to Rust with the LLM, retrying up to
        `max_translation` times until every slice of the chunk has been matched
        to a translated snippet.

        Returns the assembled result (extern decls + uses + matched snippet
        texts); if some slices never matched, this is a partial assembly.
        """
        # marked_chunk = MarkedCodeChunk.from_codechunk(chunk)
        partial_result: Optional[str] = None
        matcher = CherncMatcher.createforChunk(chunk)
        # logging: collect the translated results this chunk depends on
        _deps = set(dep for s in chunk.slices for dep in s.depends).difference(chunk.slices)
        _deps = set(map(self.cm.query_slice_res, _deps))
        deps = sorted(_deps, key=lambda r: min(s.level for s in r.slices))
        chunk_logger.debug("=======DEP=======")
        list(map(chunk_logger.debug, (d.text for d in deps)))
        extern_decls = set()
        extern_uses = UseManager()
        # slices still waiting for a matched translation
        to_trans: Set[CodeSlice] = set(chunk.slices)
        all_match: Match = {}
        for translate_count in range(max_translation):
            chunk_logger.debug(f"======SOURCE({translate_count})=====")
            chunk_logger.debug(text_of_slices(to_trans))
            raw_result = await self.translator.translate(
                source=text_of_slices(to_trans),
                deps="\n".join(d.text for d in deps),
                temperature=random.randint(0, 10) / 10,  # randomize the temperature so retries don't repeat the same output
            )
            chunk_logger.debug(f"======TRANS({translate_count})======")
            chunk_logger.debug(raw_result)

            partial_result = extract_rust_code_from_md(raw_result)
            # retry if the reply contained no code block
            if not partial_result:
                logger.debug(f"Retry for chunk {chunk.id} because of no result.")
                continue
            _extern_decls, _extern_uses, the_rest = extern_use_rest(partial_result)
            extern_decls |= _extern_decls
            extern_uses |= _extern_uses
            the_rest = self.fix_code_with_rules(chunk, chunk_logger, the_rest)

            # update according to the match
            match = matcher.try_to_match(the_rest)
            all_match |= match
            deps.extend(match.values())

            # if there is something required to be translated, but it is not in the match
            if to_trans - match.keys():
                # remove the translated part
                to_trans -= match.keys()
                logger.debug(f"Retry for chunk {chunk.id} because something is not translated.")
                continue

            break

        trans_result = into_whole(extern_decls, extern_uses, text_of_match(all_match))
        chunk_logger.debug("=====TRANS_RES=====")
        chunk_logger.debug(trans_result)
        chunk_logger.debug("=======DONE=======")
        return trans_result

    def fix_code_with_rules(
        self,
        chunk: CodeChunk,
        chunk_logger: Logger,
        chunk_code: str,
    ) -> str:
        """Apply a fixed pipeline of regex rewrites that patch common
        LLM-generated Rust mistakes; returns the rewritten code.

        `chunk` and `chunk_logger` are accepted for signature symmetry with
        the other fix passes and are not used here.
        """

        def add_pub_to_fields(m):
            # Prefix every `name:` field (but not `::` paths) with `pub `.
            return m[1] + "{" + re.sub(r"(?<!pub )\b(\w+)\s*:(?!:)", r"pub \1:", m[2]) + "}"

        rules = (
            # strip "// line N" annotations
            (r" // line \d+.*", "", re.M),
            # top-level `let` is illegal; promote it to `static`
            (r"^let\s+", "static ", re.M),
            # `static fn` is illegal
            (r"\bstatic\s+fn\s+", "fn ", 0),
            # `type X = struct ...` -> `struct X ...`
            (r"\btype\s+(\w+)\s+=\s+struct\b", lambda m: f"struct {m[1]}", 0),
            # drop self-referential typedefs (`type X = X;`)
            (r"\btype\s+(\w+)\s+=\s+(\w+);", lambda m: "" if m[1] == m[2] else m[0], 0),
            # drop struct redefinitions
            (r"\bstruct\s+\w+\s+\w+;", "", 0),
            # `struct X { ... };` -> drop the trailing semicolon
            (r"(\bstruct\s+\w+\s*\{[^{}]*\});", r"\g<1>", 0),
            # make all struct/enum fields pub
            (r"(\b(?:struct|enum)\s+\w+\s*)\{([^{}]*)\}", add_pub_to_fields, 0),
            # remove `extern "C"` / `extern`
            (r'\bextern\s+("C"\s+)?', "", 0),
            # add #[test] to fn test_* if it is not already there
            (r"(?<!#\[test\]\n)(pub\s+)(unsafe\s+)?\bfn\s+test_(\w+)", r"#[test]\n\g<0>", 0),
            # make every fn pub
            (r"(?<!pub )(unsafe\s+)?\bfn\s+\w+", r"pub \g<0>", 0),
        )
        # Order matters: e.g. `static fn` cleanup must follow the `let` rewrite.
        for pattern, repl, flags in rules:
            chunk_code = re.sub(pattern, repl, chunk_code, flags=flags)
        return chunk_code

    def build_context(self, slices: Collection[CodeSlice]) -> str:
        """Build the full rustc context for `slices`: the rustc_private feature
        gate, then the global extern decls/uses plus all dependency texts."""
        body = build_simple_context(slices, self.cm)
        whole = into_whole(self.cm.extern_decls, self.cm.extern_uses, body)
        return RUSTC_PRIVATE + whole + "\n"

    async def fix_chunk_with_llm(
        self,
        chunk: CodeChunk,
        chunk_logger: Logger,
        chunk_code: str,
        max_fix_count=5,
    ) -> str:
        """Compile `chunk_code` and iteratively ask the LLM to repair the
        reported rustc errors, for at most `max_fix_count` rounds.

        Returns the last assembled fix code; it may still fail to compile if
        the round budget is exhausted.
        """
        _deps = set(dep for s in chunk.slices for dep in s.depends).difference(chunk.slices)
        _deps = set(map(self.cm.query_slice_res, _deps))
        deps = sorted(_deps, key=lambda r: min(s.level for s in r.slices))
        matcher = CherncMatcher.createforChunk(chunk)
        # fix_result is never empty
        extern_decls, extern_uses, the_rest = extern_use_rest(chunk_code)
        extern_decls -= self.cm.extern_decls
        extern_uses -= self.cm.extern_uses
        fix_result = extern_decls, extern_uses, the_rest
        fix_code = into_whole(*fix_result)
        match_the_rest: Dict[CodeSlice, SnippetResult] = dict(matcher.try_to_match(the_rest))
        # compile the code once up front
        for extern in fix_result[0]:
            await self.package_installer.add_dependency(extern)
        context = self.build_context(chunk.slices)
        msgs = await self.compile_rust_snippet(rust_code=context + fix_code)
        msgs = adjust_messages(msgs, context_end=context.count("\n"))

        if not msgs:
            chunk_logger.debug("=====FIX_RES======")
            chunk_logger.debug(fix_code)
            chunk_logger.debug("=======DONE=======")
            return fix_code

        msgs_selected = select_messages(msgs)
        chunk_logger.debug("=======SELECTED ERR=======")
        chunk_logger.debug(format_error_messages(msgs_selected, True))

        annotated_lines = set()
        for msg in msgs_selected:
            annotated_lines.update(span.line_start for span in msg.spans)
        # invariant: to_fix == fix_code + annotations
        to_fix = annotate_lines_for_code(fix_code, annotated_lines)

        # start the fix loop
        for fix_count in range(max_fix_count):
            resp = await self.translator.fix(
                to_fix=to_fix,
                deps="\n".join(dep.text for dep in deps),
                err_msgs=format_error_messages(msgs_selected),
                temperature=random.randint(0, 10) / 10,  # randomize the temperature so retries don't repeat the same output
            )
            chunk_logger.debug(f"=======FIX({fix_count})======")
            chunk_logger.debug(resp)

            resp_code = extract_rust_code_from_md(resp)
            if toml := extract_toml_code_from_md(resp):
                await self.package_installer.add_from_config(toml)

            # no result: retry
            if not resp_code:
                logger.debug(f"Retry for chunk {chunk.id} because of no result.")
                continue

            # get extern declarations, extern uses and the rest of the fix code
            extern_decls, extern_uses, the_rest = extern_use_rest(resp_code)

            fix_match = matcher.try_to_match(the_rest)

            # fix_code is not final yet, because the LLM reply is incomplete

            # If the reply consists of top-level snippets, replace each matched one;
            # we can only assume the reply contains nothing but top-level snippets.

            if fix_match:
                # Whole top-level items present: let the matcher pair them up,
                # then re-synthesize the rest of the code.
                for slice, snippet in fix_match.items():
                    if slice in match_the_rest:
                        be_replaced = match_the_rest[slice].text
                    else:
                        be_replaced = ""
                    replacement = snippet.text
                    if be_replaced == replacement:
                        continue
                    chunk_logger.debug(f"=======REPLACED(TOP-LEVEL)======")
                    chunk_logger.debug(be_replaced)
                    chunk_logger.debug(f"=======REPLACEMENT(TOP-LEVEL)======")
                    chunk_logger.debug(replacement)
                match_the_rest |= fix_match
                the_rest = text_of_match(match_the_rest)

            else:
                # Not a top-level snippet: treat the reply as code near one of
                # the annotated bug lines and splice it in by text match.
                be_replaced = matched_text(to_fix, the_rest, {l - 1 for l in annotated_lines})

                if be_replaced is not None:
                    chunk_logger.debug(f"=======REPLACED(TEXT-MATCH)======")
                    chunk_logger.debug(be_replaced)
                    chunk_logger.debug(f"=======REPLACEMENT(TEXT-MATCH)======")
                    chunk_logger.debug(the_rest)
                    the_rest = fix_result[2].replace(be_replaced, the_rest)
                else:
                    chunk_logger.debug(f"=======REPLACED(KEEP-CODE)======")
                    chunk_logger.debug(be_replaced)
                    chunk_logger.debug(f"=======REPLACEMENT(KEEP-CODE)======")
                    chunk_logger.debug(the_rest)
                    the_rest = fix_result[2]

            # fix with rules
            the_rest = self.fix_code_with_rules(chunk, chunk_logger, the_rest)

            # update extern declarations and extern uses, replace the code with fixed code
            context = self.build_context(chunk.slices)
            extern_decls = (extern_decls | fix_result[0]) - self.cm.extern_decls
            extern_uses = (extern_uses | fix_result[1]) - self.cm.extern_uses
            fix_result = extern_decls, extern_uses, the_rest
            fix_code = into_whole(*fix_result)
            chunk_logger.debug("=====FIX CODE=====")
            chunk_logger.debug(fix_code)
            # compile again
            # prepare for the environment
            for extern in fix_result[0]:
                await self.package_installer.add_dependency(extern)
            msgs = await self.compile_rust_snippet(rust_code=context + fix_code)
            msgs = adjust_messages(msgs, context_end=context.count("\n"))
            if len(msgs) != 0:
                # select a subset of the compile errors to feed back
                msgs_selected = select_messages(msgs)
                logger.warning(f"Retry for chunk {chunk.id} because of compilation failure.")
                chunk_logger.debug(f"=======SELECTED ERR({fix_count})=======")
                chunk_logger.debug(format_error_messages(msgs_selected, True))
                annotated_lines = set()
                for msg in msgs_selected:
                    annotated_lines.update(span.line_start for span in msg.spans)
                to_fix = annotate_lines_for_code(fix_code, annotated_lines)
                continue
            else:
                break

        chunk_logger.debug("=====FIX_RES======")
        chunk_logger.debug(fix_code)

        chunk_logger.debug("=======DONE=======")
        return fix_code

    async def _async_translate_and_match_chunk(
        self, chunk: CodeChunk, chunk_logger: Logger, try_num: int = 5
    ) -> Tuple[str, Match]:
        """Run the full translate → LLM-fix → compile → match pipeline for one
        chunk, retrying up to `try_num` times.

        Returns the final Rust code (possibly "" on total failure) and the
        slice-to-snippet match (possibly partial/empty).
        """
        cc_matcher = CherncMatcher.createforChunk(chunk)
        if not cc_matcher:
            logger.warning(f"Chunk-{chunk.id}: Cannot create matcher for this chunk")

        chunk_code: Optional[str] = None
        match: Match = {}
        for try_i in range(try_num):
            # pass 1: translate the chunk
            chunk_logger.debug(f"=======TRANSLATION PASS ({try_i})=======")
            step_chunk_code = await self.translate_chunk(chunk, chunk_logger)
            if step_chunk_code is not None and len(step_chunk_code.strip()) != 0:
                chunk_code = step_chunk_code
            if chunk_code is None:
                continue

            # pass 2: fix the chunk with the LLM
            chunk_logger.debug(f"=======LLM FIX PASS ({try_i})=======")
            chunk_code = await self.fix_chunk_with_llm(chunk, chunk_logger, chunk_code, max_fix_count=max(chunk.size // 150, 3))

            # pass 3: check whether the result compiles
            context = self.build_context(chunk.slices)
            if await self.has_error(context + chunk_code, context_end=context.count("\n")):
                chunk_logger.debug(f"=====COMPILER REJECT ({try_i})=====")
                continue
            chunk_logger.debug(f"=====COMPILER PASS ({try_i})=====")

            # pass 4: can it be matched back to the source slices?
            if cc_matcher:
                try:
                    match = cc_matcher.match(chunk_code)
                except MatchFail as e:
                    chunk_logger.debug(f"=====MATCHER REJECT({try_i})=====")
                    chunk_logger.debug(e)
                    continue

            # a non-empty match is the success signal
            chunk_logger.debug("=====ACCEPT=====")
            break

        if chunk_code is None:
            logger.error(f"Chunk-{chunk.id}: Translation result is empty after rule-based and LLM-based fix...")
            chunk_code = ""
        if not match:
            logger.error(f"Chunk-{chunk.id}: Translation failed after rule-based and LLM-based fix...")

        # salvage whatever partial match is still possible
        if not match and cc_matcher:
            match = cc_matcher.try_to_match(chunk_code)

        return chunk_code, match

    async def async_trans_chunk(self, project: "ProjectTranspiler", chunk: CodeChunk, out_path: str):
        """Translate one chunk, using an on-disk pickle checkpoint so that a
        re-run skips already-translated chunks, then register the result with
        the context manager.

        :return: None if the chunk is successfully processed
        :rtype: None

        :raises NeedToWithdraw: This chunk cannot be processed due to the need to withdraw previous chunks.
        """
        checkpoint_path = os.path.join(out_path, DEFAULT_PROJECT_CHECKPOINTS_DIR)
        os.makedirs(checkpoint_path, exist_ok=True)
        chunk_checkpoint_path = os.path.join(checkpoint_path, f"chunk_checkpoint_{chunk.id}.pkl")
        if os.path.exists(chunk_checkpoint_path):
            # Read checkpoint (our own previously written file; pickle is acceptable here)
            with open(chunk_checkpoint_path, "rb") as file:
                chunk_code, match = pickle.load(file)
        else:
            # Create a dedicated file logger for this chunk
            chunk_logger = build_simple_file_logger(
                f"chernc.{project.project_hash}.result_{chunk.id}",
                os.path.join(out_path, DEFAULT_PROJECT_LOGGER_DIR, "translation", f"result_{chunk.id}.log"),
            )
            chunk_code, match = await self._async_translate_and_match_chunk(chunk=chunk, chunk_logger=chunk_logger)
            with open(chunk_checkpoint_path, "wb") as file:
                pickle.dump((chunk_code, match), file)

            # Close the logger
            release_logger(chunk_logger)

        extern_decls, extern_uses, _ = extern_use_rest(chunk_code)

        # Publish the result so dependent chunks can build their contexts.
        self.cm.finish_one_chunk(chunk, extern_decls, extern_uses, ChunkResult(chunk, match))

    def _clean_logs(self, out_path: str):
        """Delete the project's log directory under `out_path`, refusing to act
        if it contains anything other than .log files (to avoid data loss)."""
        log_dir = os.path.join(out_path, DEFAULT_PROJECT_LOGGER_DIR)
        if not os.path.exists(log_dir):
            return
        logger.info(f"cleaning logs in {out_path}")
        # Find the first non-.log file anywhere in the tree, if any.
        offender = next(
            (
                name
                for _, _, files in os.walk(log_dir)
                for name in files
                if not name.endswith(".log")
            ),
            None,
        )
        if offender is not None:
            raise ValueError(
                f"Log directory {log_dir} contains non-.log files, "
                f"such as {offender}. Cleaning aborted to avoid data loss."
            )
        # Safe: everything inside is a .log file.
        shutil.rmtree(log_dir)

    def _slice_chunks(self, project: "ProjectTranspiler", out_path: str):
        """Slice the project into chunks (via CodeQL-backed Slicer), with an
        on-disk pickle checkpoint so re-runs reuse previous slicing.

        Returns the list of chunks; each chunk's text is also dumped to its
        own log file on a fresh run.
        """
        checkpoint_path = os.path.join(out_path, DEFAULT_PROJECT_CHECKPOINTS_DIR)
        os.makedirs(checkpoint_path, exist_ok=True)
        chunks_checkpoint_path = os.path.join(checkpoint_path, "chunks_checkpoint.pkl")
        if os.path.exists(chunks_checkpoint_path):
            # Read checkpoint (our own previously written file)
            with open(chunks_checkpoint_path, "rb") as file:
                chunks = pickle.load(file)
        else:
            slicer = Slicer(project.codeql_database)
            logger.info("generating slices, waiting...")
            # Chunk size is measured in tokens, using the configured tokenizer.
            slicer_tokenizer = UniTokenizer(project.config.token_num_method)
            chunks = slicer.slice(max_size=project.config.max_token_num, size_calc=slicer_tokenizer.token_num)

            # Log each chunk's text to its own file
            for chunk in chunks:
                chunk_logger = build_simple_file_logger(
                    f"chernc.{project.project_hash}.chunk_{chunk.id}",
                    os.path.join(out_path, DEFAULT_PROJECT_LOGGER_DIR, "all_chunks", f"chunk_{chunk.id}.log"),
                )
                chunk_logger.debug(chunk.text)
                # Close the logger
                release_logger(chunk_logger)

            # Save checkpoint
            with open(chunks_checkpoint_path, "wb") as file:
                pickle.dump(chunks, file)

        return chunks

    async def async_trans_project(self, project: "ProjectTranspiler", out_path: str) -> ProjectResult:
        """Translate the whole project: slice into chunks, translate them in
        dependency order (concurrently via a TaskGroup), then lay the results
        out as Rust files and synthesize the Cargo configuration.

        Returns a ProjectResult holding the Cargo config and all file results.
        """
        self.cm = ContextManager()
        self.cm.extern_decls.add("libc")
        # Clean logs
        self._clean_logs(out_path=out_path)
        # Slice into chunks
        chunks = self._slice_chunks(project=project, out_path=out_path)

        # Feed chunks into context manager
        self.cm.feed_chunks(chunks)

        # Translate chunks, with a progress bar driven by the context manager
        bar = tqdm(total=len(chunks), desc="Processing Chunks", unit="chunk", file=sys.stdout)

        def binder(count: int):
            bar.n = count
            bar.refresh()

        self.cm.bind_finished_count(binder)

        # Kahn-style scheduling: a chunk becomes runnable when all its
        # dependencies have finished.
        in_degree: Dict[CodeChunk, int] = {chunk: len(chunk.depends) for chunk in chunks}

        async def trans_and_push_tasks(tg: asyncio.TaskGroup, chunk: CodeChunk):
            nonlocal in_degree
            try:
                # Translate the chunk
                await self.async_trans_chunk(project=project, chunk=chunk, out_path=out_path)
                # Update the in-degree of the dependent chunks
                # If the in-degree of a dependent chunk becomes 0, push it to the task group
                for dependent in chunk.depended:
                    in_degree[dependent] -= 1
                    if in_degree[dependent] == 0:
                        tg.create_task(trans_and_push_tasks(tg, dependent))
            except Exception as e:
                traceback.print_exc()
                raise e

        async with asyncio.TaskGroup() as tg:
            for chunk, indegree in in_degree.items():
                if indegree == 0:
                    tg.create_task(trans_and_push_tasks(tg, chunk))
        bar.close()

        # await self.count_compiled_pass(chunks, self.cm)

        # get all slices results and sort them by the lowest level of the slices
        snippet_results = sorted(set(self.cm.slice_results.values()), key=lambda r: min(s.level for s in r.slices))

        # All paths mentioned below are relative to the project root, so they
        # carry a src/ or test/ prefix.
        file_of_snippet: Dict[SnippetResult, FileResult] = {}

        # Maps each result's *original* path to a FileResult; the FileResult's
        # real path may have been moved. Used only during this preparation stage.
        _file_results: Dict[str, FileResult] = {}
        hasEntry = False
        for result in snippet_results:
            # majority vote: the file contributing the most slices wins
            [(source_file, _)] = Counter(slice.file for slice in result.slices).most_common(1)  # absolute path of the C/CPP file
            # Hyphens in Rust file and module names are troublesome; replace them with underscores.
            # See: https://stackoverflow.com/questions/57535061/how-to-use-another-file-in-rust-module-with-hyphens-in-it

            result_path = os.path.relpath(source_file, self.project.project_path)
            result_path = result_path.replace("-", "_")
            result_path = str(Path(result_path).with_suffix(".rs"))
            if result_path.startswith("test/"):
                result_path = "tests/" + result_path[5:]

            # has_main = any(slice.isMain() for slice in result.slices)

            if result_path not in _file_results:
                _file_results[result_path] = FileResult(result_path)
            file_result = _file_results[result_path]
            if result_path.startswith("tests/test_"):
                file_result.isEntry = True
            # if has_main:
            #     hasEntry = True
            #     file_result.isEntry = True
            #     file_result.move(os.path.basename(result_path))
            file_result.add_snippet(result)
            file_of_snippet[result] = file_result
        # add a lib.rs
        if not hasEntry:
            file_result = FileResult("src/lib.rs")
            _file_results["src/lib.rs"] = file_result
            file_result.isEntry = True
        file_results: Dict[str, FileResult] = {f.path: f for f in _file_results.values()}
        del _file_results

        mod_files = [f for f in file_results.values() if f.isEntry]

        # Add a mod.rs to every directory except src/ and tests/
        for file_result in list(file_results.values()):
            dir = os.path.dirname(file_result.path)
            while dir and dir not in ("src", "tests"):
                mod_file = os.path.join(dir, "mod.rs")
                if mod_file in file_results:
                    break
                file_result = FileResult(mod_file)
                file_results[mod_file] = file_result
                mod_files.append(file_result)
                dir = os.path.dirname(dir)

        # Add mod declarations
        for mod_file in mod_files:
            dir = os.path.dirname(mod_file.path)

            mod_declarations_set: Set[str] = set()

            for path, result in file_results.items():
                if result.isEntry:
                    continue
                if not path.startswith(dir):
                    continue
                if path == mod_file.path:
                    continue
                remove_head = os.path.relpath(path, dir)
                top = remove_head.split(os.sep)[0]
                top = top.replace(".rs", "")
                mod_declarations_set.add(top)
            mod_file.mod_declarations = [ModDeclaration(mod, modifier="pub") for mod in mod_declarations_set]

        # If a file's code contains a name, treat that as a reference to it
        for file_result in file_results.values():
            text = "\n".join(snippet.text for snippet in file_result.snippets)
            text = remove_comments(text)
            is_test = file_result.path.startswith("tests/")
            for snippet_result, ref_file in file_of_snippet.items():
                if ref_file == file_result:
                    continue
                if ref_file.isEntry:
                    continue
                ref_is_test = ref_file.path.startswith("tests/")
                if not is_test and not ref_is_test:
                    ref_path = "crate::" + ref_file.path[4:].replace(".rs", "").replace(os.sep, "::")
                elif is_test and not ref_is_test:
                    ref_path = f"{self.project.project_name}::" + ref_file.path[4:].replace(".rs", "").replace(os.sep, "::")
                elif is_test and ref_is_test:
                    # ref_file.path.startswith("tests/")
                    ref_path = ref_file.path[6:].replace(".rs", "").replace(os.sep, "::")
                else:
                    continue
                for name in snippet_result.names:
                    if name in text:
                        file_result.uses.use(f"{ref_path}::{name}")
                        snippet_result.add_pub(name)

        # It may be worth distinguishing internal vs external uses; perhaps
        # internal ones need to be introduced by mod statements.
        # for each file, we add use declarations of extern modules
        for file_result in file_results.values():
            if file_result.isEntry:
                file_result.extern_decls = [ExternDeclaration(ext_decl) for ext_decl in self.cm.extern_decls]
            file_result.uses |= self.cm.extern_uses

        # Generate Cargo.toml
        config = CargoConfig(
            package=Package(
                name=self.project.project_name, version="0.1.0", edition="2021", authors=["Your Name <youremail@example.com>"]
            )
        )
        config.bin = []
        # Collect all entries
        # for file_path, file_result in file_results.items():
        #     # all entries live under the root
        #     if file_result.isEntry:
        #         config.bin.append(
        #             Target(name=file_path.removesuffix(".rs"), path=os.path.join(self.project.config.source_path, file_path))
        #         )
        if self.package_installer.config.dependencies:
            # Keep only the dependencies that are actually declared as externs.
            config.dependencies = dict(
                filter(lambda x: x[0] in self.cm.extern_decls, self.package_installer.config.dependencies.items())
            )

        # Decide whether the project's target type is bin or lib, and if lib, synthesize lib.rs
        # target_type: Literal["bin", "lib"] = "bin"
        # for file_result in file_results.values():
        #     if file_result.isEntry:
        #         target_type = "lib"
        #         break
        # if config.bin is None or len(config.bin) == 0:
        #     # config.add_default_bin()
        #     if config.bin is None:
        #         config.bin = []
        #     if len(config.bin) == 0:
        #         config.bin.append(Target(name="main", path="src/main.rs"))

        #     if "main.rs" not in file_results:
        #         main_file = FileRepairResult("main.rs")
        #         main_file.snippets.append(
        #             SnippetResult(slices=[], text='fn main() {\n    println!("Hello, world!");\n}\n', names=["main"])
        #         )
        #         main_file.isEntry = True

        #         file_results["main.rs"] = main_file

        return ProjectResult(cargo=config, files=file_results)
