Spaces: Runtime error
import os

from fastapi import FastAPI
from pydantic import BaseModel
from huggingface_hub import InferenceClient
from smolagents import BasicAgent
from crewai_tools import BaseTool

# Use your token from HF secrets
HF_TOKEN = os.environ.get("HF_TOKEN")  # Must be set in HF Space secrets

# Define a wrapper that conforms to SmolAgent's LLM expectations
class HuggingFaceInferenceWrapper:
    def __init__(self, client: InferenceClient):
        self.client = client

    def __call__(self, prompt: str, **kwargs):
        # Use the text_generation endpoint
        response = self.client.text_generation(prompt=prompt, max_new_tokens=512)
        return response

# Initialize the InferenceClient
client = InferenceClient(
    model="Qwen/Qwen1.5-1.8B-Chat",  # Or another model you have access to
    token=HF_TOKEN,
)

# Wrap the client
llm = HuggingFaceInferenceWrapper(client)

# Define a dummy tool
class DuckDuckGoTool(BaseTool):
    name: str = "DuckDuckGo Search"
    description: str = "Use this tool to search for real-time facts and current events."

    def _run(self, query: str) -> str:
        return f"Search results for '{query}' (this is a placeholder)."

# Create the agent
agent = BasicAgent(
    role="Helpful AI",
    goal="Answer tough questions using search and reasoning",
    backstory="An expert at finding information and answering questions.",
    tools=[DuckDuckGoTool()],
    llm=llm,
)

# Define the FastAPI app
app = FastAPI()

class QuestionInput(BaseModel):
    question: str

@app.post("/ask")  # Expose the handler; "/ask" is an assumed path
def ask_question(payload: QuestionInput):
    result = agent.run(payload.question)
    return {"answer": result}