import logging
import os
from langchain_core.tools import StructuredTool
from pydantic import BaseModel, Field
from typing import Optional, List
from serpapi import GoogleSearch
logger = logging.getLogger(__name__)

class SearchInput(BaseModel):
    query: str = Field(description="Search query")

async def search_func(query: str) -> List[str]:
    """
    Perform a web search using SerpAPI and return relevant snippets.

    Args:
        query (str): The search query to execute.

    Returns:
        List[str]: A list of search result snippets.
    """
    try:
        logger.info(f"Executing SerpAPI search for query: {query}")
        params = {
            "q": query,
            "api_key": os.getenv("SERPAPI_API_KEY"),
            "num": 10
        }
        search = GoogleSearch(params)
        results = search.get_dict().get("organic_results", [])
        return [result.get("snippet", "") for result in results if "snippet" in result]
    except Exception as e:
        logger.error(f"SerpAPI search failed for query '{query}': {e}")
        return []

# search_func is async, so it is registered only as the tool's coroutine;
# call the tool through its async interface (ainvoke) rather than the sync path.
search_tool = StructuredTool.from_function(
    coroutine=search_func,
    name="search_tool",
    args_schema=SearchInput
)
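
# Hypothetical usage sketch (kept as a comment so nothing runs at import time):
# assumes SERPAPI_API_KEY is set in the environment and the tool is awaited from
# an async caller or a small script.
#
#   import asyncio
#   snippets = asyncio.run(search_tool.ainvoke({"query": "LangChain StructuredTool"}))
#   print(snippets)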

class MultiHopSearchInput(BaseModel):
    query: str = Field(description="Multi-hop search query")
    steps: int = Field(description="Number of search steps", ge=1, le=3)
    llm_client: Optional[object] = Field(description="LLM client", default=None)
    llm_type: Optional[str] = Field(description="LLM type", default="together")
    llm_model: Optional[str] = Field(description="LLM model", default="meta-llama/Llama-3.3-70B-Instruct-Turbo-Free")

async def multi_hop_search_func(
    query: str,
    steps: int,
    llm_client: Optional[object] = None,
    llm_type: Optional[str] = "together",
    llm_model: Optional[str] = "meta-llama/Llama-3.3-70B-Instruct-Turbo-Free"
) -> List[str]:
    """
    Perform a multi-hop web search using SerpAPI with iterative query refinement.

    Args:
        query (str): The initial multi-hop search query.
        steps (int): Number of search steps to perform (1 to 3).
        llm_client (Optional[object]): LLM client for query refinement.
        llm_type (Optional[str]): Type of LLM (e.g., 'together').
        llm_model (Optional[str]): LLM model name.

    Returns:
        List[str]: A list of search result snippets from all steps.
    """
    try:
        logger.info(f"Executing multi-hop search for query: {query}, steps: {steps}")
        results = []
        current_query = query
        for step in range(steps):
            logger.info(f"Multi-hop step {step + 1}: {current_query}")
            step_results = await search_func(current_query)
            results.extend(step_results)
            if step < steps - 1 and llm_client:
                prompt = (
                    f"Given the query '{current_query}' and results: {step_results[:3]}, "
                    "generate a follow-up search query to refine or expand the search."
                )
                messages = [
                    {"role": "system", "content": "Generate a single search query as a string."},
                    {"role": "user", "content": prompt}
                ]
                if llm_type == "together":
                    response = llm_client.chat.completions.create(
                        model=llm_model,
                        messages=messages,
                        max_tokens=50,
                        temperature=0.7
                    )
                    current_query = response.choices[0].message.content.strip()
                else:
                    logger.warning("LLM not configured for multi-hop refinement")
                    break
        return results[:5] if results else ["No results found"]
    except Exception as e:
        logger.error(f"Multi-hop search failed for query '{query}': {e}")
        return [f"Error: {str(e)}"]

# Async-only, mirroring search_tool: only the coroutine is registered.
multi_hop_search_tool = StructuredTool.from_function(
    coroutine=multi_hop_search_func,
    name="multi_hop_search_tool",
    args_schema=MultiHopSearchInput
)
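
# Minimal manual-test sketch (assumption: run as a script with SERPAPI_API_KEY set).
# Without an llm_client the refinement branch is skipped, so each hop reuses the
# original query; passing an OpenAI-compatible client (e.g. Together's SDK) enables
# LLM-generated follow-up queries between hops. The query below is only an example.
if __name__ == "__main__":
    import asyncio

    demo_results = asyncio.run(
        multi_hop_search_tool.ainvoke({
            "query": "Which company maintains SerpAPI and when was it founded?",
            "steps": 2
        })
    )
    for snippet in demo_results:
        print("-", snippet)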