|
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder, HumanMessagePromptTemplate |
|
from langchain_core.messages import HumanMessage, AIMessage |
|
from langchain_groq import ChatGroq |
|
from typing import List |
|
import os |
|
from src.services.prompts import ASSISTANT_PROMPT |
|
from langchain.memory import ConversationSummaryMemory |
|
from dotenv import load_dotenv |
|
load_dotenv()

# Fail fast with a clear error when the key is missing: assigning the None
# returned by os.getenv into os.environ would raise an opaque
# "TypeError: str expected, not NoneType" instead.
_groq_api_key = os.getenv("GROQ_API_KEY")
if _groq_api_key is None:
    raise RuntimeError("GROQ_API_KEY is not set; add it to your environment or .env file")
os.environ["GROQ_API_KEY"] = _groq_api_key
|
|
|
class ConversationHandler:
    """Manage a Groq-backed chat session with summary-based memory.

    Wraps a ChatGroq model with a ConversationSummaryMemory so that prior
    turns are condensed into a running summary and re-injected into each
    prompt via the "chat_history" placeholder.
    """

    def __init__(self, model_name="llama-3.3-70b-versatile", temperature=0.7):
        """Create the chat model, prompt template, and summary memory.

        Args:
            model_name: Groq model identifier to use.
            temperature: Sampling temperature for generation.
        """
        self.chat_model = ChatGroq(
            model_name=model_name,
            temperature=temperature
        )
        # Include the memory placeholder and the user's message in the
        # template. The original prompt held ONLY the system message, so the
        # "chat_history" and "user_query" values passed to ainvoke() were
        # never rendered into the prompt. MessagesPlaceholder is already
        # imported at the top of this file for exactly this purpose.
        self.prompt = ChatPromptTemplate.from_messages([
            ("system", ASSISTANT_PROMPT),
            MessagesPlaceholder(variable_name="chat_history"),
            ("human", "{user_query}"),
        ])
        self.memory = ConversationSummaryMemory(
            llm=self.chat_model,
            max_token_limit=2000,
            return_messages=True,  # yield message objects, matching MessagesPlaceholder
            memory_key="chat_history"
        )

    async def give_response(self, user_input):
        """Answer ``user_input`` using the model plus conversation memory.

        Args:
            user_input: The user's message for this turn.

        Returns:
            The assistant's reply text (``str``).
        """
        chain = self.prompt | self.chat_model
        memory_variables = self.memory.load_memory_variables({})
        response = await chain.ainvoke(
            {
                "user_query": user_input,
                "chat_history": memory_variables["chat_history"]
            }
        )
        # Persist this turn so future calls see the updated summary.
        self.memory.save_context(
            {"input": user_input},
            {"output": response.content}
        )
        return response.content

    async def summarize_conversation(self) -> str:
        """Produce a fresh summary of the entire conversation so far."""
        memory_variables = self.memory.load_memory_variables({})
        # predict_new_summary is synchronous; no await needed here.
        return self.memory.predict_new_summary(
            messages=memory_variables["chat_history"],
            existing_summary=""
        )

    async def clear_memory(self):
        """Reset the conversation memory.

        ``ConversationSummaryMemory.clear()`` is synchronous and returns
        ``None``; the original ``await self.memory.clear()`` therefore raised
        ``TypeError: object NoneType can't be used in 'await' expression``.
        """
        self.memory.clear()
|
|
|
|
|
|
|
if __name__ == "__main__":
    import asyncio

    async def main():
        """Smoke-test the handler with a short multi-turn conversation."""
        handler = ConversationHandler()

        # give_response already returns the reply text (a str); the original
        # code called ``.content`` on it, which raised
        # AttributeError: 'str' object has no attribute 'content'.
        response1 = await handler.give_response("What is machine learning?by the way my name is ravi and my home town is ilam.")
        print("Response 1:", response1)

        response2 = await handler.give_response("Can you give me an example of machine learning mathematics?")
        print("Response 2:", response2)

        # The original reused the ``response2`` name (and its label) for the
        # third turn; give it its own name so no output is shadowed.
        response3 = await handler.give_response("Can you tell me more example? and also tell me from which country i am?")
        print("Response 3:", response3)

        summary = await handler.summarize_conversation()
        print("\nConversation Summary:", summary)

    asyncio.run(main())
|
|