# app1/model.py
import os

from dotenv import load_dotenv

# Load environment variables from the .env file
load_dotenv()

# Read the OpenAI API key; guard against a missing key so the assignment
# below does not fail with a TypeError when the variable is absent
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
if OPENAI_API_KEY:
    os.environ["OPENAI_API_KEY"] = OPENAI_API_KEY
from langchain_openai import ChatOpenAI

llm_OpenAi = ChatOpenAI(model="gpt-3.5-turbo", temperature=0)
from langchain.chat_models import ChatAnyscale

ANYSCALE_ENDPOINT_TOKEN = os.environ.get("ANYSCALE_ENDPOINT_TOKEN")
anyscale_api_key = ANYSCALE_ENDPOINT_TOKEN
llm = ChatAnyscale(anyscale_api_key=anyscale_api_key, temperature=0,
                   model_name="mistralai/Mistral-7B-Instruct-v0.1", streaming=False)
## Create embeddings and splitter
from langchain.embeddings import HuggingFaceBgeEmbeddings
from langchain.text_splitter import RecursiveCharacterTextSplitter

# Create embeddings (BGE large English model; embeddings normalized)
model_name = "BAAI/bge-large-en"
embedding = HuggingFaceBgeEmbeddings(
    model_name=model_name,
    # model_kwargs={"device": "cuda"},
    encode_kwargs={"normalize_embeddings": True},
)
# Create splitter (1,000-character chunks with 100-character overlap)
splitter = RecursiveCharacterTextSplitter(
    chunk_size=1000,
    chunk_overlap=100,
)
from langchain_community.vectorstores import FAISS

persist_directory = "./faiss_Test02_500_C_BGE_large"
# persist_directory = "./faiss_V03_C500_BGE_large-final"
vectorstore = FAISS.load_local(persist_directory, embedding)
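# Note (assumption, not part of the original file): the embedding model and splitter
# defined above would typically be used when the persisted index is first built, e.g.:
#     chunks = splitter.split_documents(docs)  # `docs` = documents loaded elsewhere (hypothetical)
#     FAISS.from_documents(chunks, embedding).save_local(persist_directory)
# Here the prebuilt index is simply reloaded from disk.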
# Define a custom prompt for the user manual
from langchain.prompts import PromptTemplate
qa_template = ("""
You are the AI assistant of IronOne Technologies, which provides services for company members and novice users learning the ATrad Application.
You have been provided with context information below related to the learning material.
Context: {context}
Given this information, please answer the question with the latest information.
If you don't know the answer, say you don't know; don't try to make up answers.
If the context is not enough to answer the question, ask for more information.
If the context is not related to the question, say "I don't know".
Give the answer with a very clear structure and clear language.
Each answer must start with the code word ATrad Ai(QA):
Question: {question}
Answer: Let me think about it...""")
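# Alternative prompt template (defined here but not wired into a PromptTemplate or chain below)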
qa_template2 = ("""
Welcome to IronOne Technologies' AI Assistant, designed to assist you in learning the ATrad Application.
Context: {context}
As your AI assistant, I'm here to help you navigate the learning materials and provide guidance.
Please provide me with any questions or concerns you have regarding the ATrad Application.
If you're unsure about something or need more information, feel free to ask.
Each answer must start with the code word ATrad Ai(QA):
Question: {question}
ATrad Ai(QA): Let me think about it...""")
QA_PROMPT = PromptTemplate(input_variables=["context", "question"], template=qa_template)
# Chain for Web
from langchain.chains import RetrievalQA

Web_qa = RetrievalQA.from_chain_type(
    llm=llm_OpenAi,
    chain_type="stuff",
    retriever=vectorstore.as_retriever(search_kwargs={"k": 4}),
    return_source_documents=True,
    input_key="question",
    chain_type_kwargs={"prompt": QA_PROMPT},
)
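# --- Usage sketch (assumption: not part of the original file; the sample question is hypothetical) ---
# With input_key="question", the chain accepts a dict keyed by "question" and, because
# return_source_documents=True, returns both "result" and "source_documents".
if __name__ == "__main__":
    response = Web_qa.invoke({"question": "How do I place an order in the ATrad Application?"})
    print(response["result"])
    for doc in response["source_documents"]:
        print(doc.metadata)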