from langchain_community.chat_models import ChatOllama
from langgraph.graph import MessagesState, StateGraph, START, END
from langchain_core.messages import SystemMessage, HumanMessage, AIMessage
from langchain_community.tools import DuckDuckGoSearchRun
from langchain_core.tools import tool
from langgraph.prebuilt import ToolNode
from langchain_community.document_loaders import WikipediaLoader
from langgraph.prebuilt import tools_condition
from langchain_huggingface import HuggingFaceEndpoint
from langchain_huggingface import ChatHuggingFace
import os
from huggingface_hub import login
from dotenv import load_dotenv
load_dotenv()
# Expect HF_TOKEN in the environment or a local .env file; skip the assignment
# when it is absent so the import does not crash (os.environ values must be strings).
hf_token = os.getenv("HF_TOKEN")
if hf_token:
    os.environ["HUGGINGFACEHUB_API_TOKEN"] = hf_token

@tool
def use_search_tool(query: str) -> str:
    """Use the search tool to find information.

    Args:
        query (str): The search query.
    Returns:
        str: The search result.
    """
    # Return plain text so ToolNode can wrap it in a ToolMessage.
    return DuckDuckGoSearchRun(verbose=False).run(query)

@tool
def use_wikipedia_tool(query: str) -> str:
    """Fetch a summary from Wikipedia.

    Args:
        query (str): The topic to search on Wikipedia.
    Returns:
        str: A summary of the topic from Wikipedia.
    """
    docs = WikipediaLoader(query=query, load_max_docs=2).load()
    if docs:
        # Join the loaded page contents into plain text, as the signature declares.
        return "\n\n".join(doc.page_content for doc in docs)
    return f"Sorry, I couldn't find any information on '{query}' in Wikipedia."

def build_agent():
    # Alternative backends kept for reference:
    # llm = ChatOllama(model="llama3.1")
    # llm = HuggingFaceEndpoint(
    #     endpoint_url="https://api-inference.huggingface.co/models/deepseek-ai/DeepSeek-Prover-V2-671B",
    #     huggingfacehub_api_token=os.getenv("HUGGINGFACEHUB_API_TOKEN")
    # )

    # HuggingFaceEndpoint replaces the deprecated HuggingFaceHub wrapper.
    # NOTE: gpt2-medium is a plain text-generation model and cannot emit tool
    # calls, so tools_condition will always route to END with it; to exercise
    # the tools, swap in a tool-calling chat model (e.g. the ChatOllama line
    # above) and bind the tools to it where supported (llm.bind_tools(tools)).
    llm = HuggingFaceEndpoint(
        repo_id="openai-community/gpt2-medium",
        task="text-generation",
        temperature=0.7,        # controls randomness (0.0 = deterministic)
        max_new_tokens=100,     # max output length
        huggingfacehub_api_token=os.getenv("HUGGINGFACEHUB_API_TOKEN"),
    )
    
    tools = [use_wikipedia_tool, use_search_tool]

    # system_template = (
    #     "You are a helpful, friendly, and respectful AI assistant. "
    #     "Always address the user politely and answer their questions in a positive manner.\n"
    #     "When reasoning, always use the following format:\n"
    #     "Thought: [your reasoning here]\n"
    #     "Action: [the action to take, should be one of [{tool_names}]]\n"
    #     "Action Input: [the input to the action]\n"
    #     "If you know the answer without using a tool, respond with:\n"
    #     "Thought: [your reasoning here]\n"
    #     "Final Answer: [your answer here]\n"
    #     "Always ensure your responses are polite, accurate, and helpful."
    # )
    # system_prompt = SystemMessage(content=system_template.format(
    #     tool_names=", ".join([tool.name for tool in tools])
    # ))

    system_template = (
        "You are a helpful assistant tasked with answering questions using a set of tools. "
        "Now, I will ask you a question. Report your thoughts, and finish your answer with the following template:\n"
        "FINAL ANSWER: [YOUR FINAL ANSWER].\n"
        "YOUR FINAL ANSWER should be a number OR as few words as possible OR a comma-separated list of numbers and/or strings. "
        "If you are asked for a number, don't use commas in the number or units such as $ or percent signs unless specified otherwise. "
        "If you are asked for a string, don't use articles or abbreviations (e.g. for cities), and write digits in plain text unless specified otherwise. "
        "If you are asked for a comma-separated list, apply the above rules to each element depending on whether it is a number or a string.\n"
        'Your answer should start with "FINAL ANSWER: ", followed by the answer.'
    )

    def call_model(state: MessagesState):
        """Call the LLM with the system prompt prepended to the conversation."""
        messages = [SystemMessage(content=system_template)] + state["messages"]
        response = llm.invoke(messages)
        # Plain LLMs return a string; wrap it so it is stored as an AI turn.
        if isinstance(response, str):
            response = AIMessage(content=response)
        return {"messages": [response]}
    
    workflow = StateGraph(MessagesState)
    workflow.add_node("assistant", call_model)
    workflow.add_node("tools", ToolNode(tools))
    workflow.add_edge(START, "assistant")
    # tools_condition routes to "tools" when the model requests a tool call
    # and to END otherwise, so no separate assistant -> END edge is needed.
    workflow.add_conditional_edges("assistant", tools_condition)
    workflow.add_edge("tools", "assistant")
    return workflow.compile()
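
# To inspect the compiled graph's wiring, LangGraph can render it as Mermaid
# text (a sketch; assumes a recent langgraph / langchain-core version):
#
#     print(build_agent().get_graph().draw_mermaid())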

if __name__ == "__main__":
    graph = build_agent()
    # MessagesState graphs expect a dict with a "messages" list, not a bare message.
    result = graph.invoke({"messages": [HumanMessage(content="Hello, how are you?")]})
    print(result["messages"][-1].content)
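
    # A question that should exercise the Wikipedia tool once a tool-calling
    # chat model is plugged in (a sketch; gpt2-medium will not trigger it):
    #
    #     result = graph.invoke(
    #         {"messages": [HumanMessage(content="Who founded Wikipedia?")]}
    #     )
    #     print(result["messages"][-1].content)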