cgpt 5
app.py CHANGED

@@ -1,8 +1,8 @@
 import streamlit as st
 import os
-from
-from
-from
+from langchain_community.vectorstores import Chroma
+from langchain_community.embeddings import HuggingFaceBgeEmbeddings
+from langchain_together import Together
 from langchain.prompts import ChatPromptTemplate, PromptTemplate
 from langchain.schema import format_document
 from typing import List
@@ -12,11 +12,11 @@ import time
 
 # Load the embedding function
 model_name = "BAAI/bge-base-en"
-encode_kwargs = {'normalize_embeddings': True}
+encode_kwargs = {'normalize_embeddings': True}
 embedding_function = HuggingFaceBgeEmbeddings(model_name=model_name, encode_kwargs=encode_kwargs)
 
 # Load the LLM
-llm = Together(model="mistralai/Mixtral-8x22B-Instruct-v0.1", temperature=0.2, max_tokens=19096, top_k=10, together_api_key=os.environ['pilotikval']
+llm = Together(model="mistralai/Mixtral-8x22B-Instruct-v0.1", temperature=0.2, max_tokens=19096, top_k=10, together_api_key=os.environ['pilotikval'])
 
 msgs = StreamlitChatMessageHistory(key="langchain_messages")
 memory = ConversationBufferMemory(chat_memory=msgs)