from llama_index.core.agent import ReActAgent
from llama_index.core.tools import FunctionTool
from llama_index.llms.openllm import OpenLLM
import os
from llama_index.llms.openai import OpenAI


def execute_python_code(code: str) -> str:
    """Execute the given Python code string and return its result as a string.

    The executed code communicates its output by assigning to a variable
    named ``result``; if it never assigns ``result``, the string ``"None"``
    is returned. Any exception raised by the code is caught and returned
    as an ``"Error executing code: ..."`` message instead of propagating.

    SECURITY NOTE: despite the curated ``__builtins__`` dict below, this is
    NOT a sandbox — exposing ``__import__`` and ``os`` lets executed code do
    anything the process can. Only run code from a trusted source (here, the
    LLM agent the user is driving interactively).

    Args:
        code: Python source to execute.

    Returns:
        ``str(result)`` from the executed namespace, or an error message.
    """
    try:
        # Single namespace used as both globals and locals. Using separate
        # dicts would break code that defines functions referring to its own
        # top-level variables (the function's globals would not contain them).
        env = {
            '__builtins__': {
                '__import__': __import__,  # allow importing modules
                'print': print,            # allow printing
                'os': os,                  # allow using the os module
                'list': list,              # basic data types
                'str': str,
                'int': int,
                'float': float,
                # NOTE: 'lambda' is a keyword, not a builtin name — a dict
                # entry for it is never consulted, so none is provided.
            }
        }

        # Execute the code in the shared namespace.
        exec(code, env)

        # By convention, the executed code stores its output in 'result'.
        result = env.get('result', None)
        return str(result)
    except Exception as e:
        # Boundary handler: surface the failure to the agent as text rather
        # than crashing the tool loop.
        return f"Error executing code: {str(e)}"


# Wrap the code-execution function as a tool the agent can invoke.
execute_code_tool = FunctionTool.from_defaults(fn=execute_python_code)

# Initialize the LLM (reads OPENAI_API_KEY from the environment).
llm = OpenAI(model="gpt-3.5-turbo")

# Create the ReAct agent with the tools.
agent = ReActAgent.from_tools([execute_code_tool], llm=llm, verbose=True)

# Example usage. Guarded so that importing this module does not trigger a
# paid API call as a side effect — the agent is still importable above.
if __name__ == "__main__":
    # Prompt (Chinese): "My computer runs macOS; please find the largest
    # file in /Users/jingxiang/Downloads."
    response = agent.chat("我的电脑是macos，请帮我查询/Users/jingxiang/Downloads目录下最大的文件是哪个")
    print(str(response))
