Update app.py
app.py CHANGED
@@ -1,3 +1,4 @@
+import os
 import streamlit as st
 from dotenv import load_dotenv
 from PyPDF2 import PdfReader
@@ -11,7 +12,7 @@ from langchain.chat_models.gigachat import GigaChat
 from htmlTemplates import css, bot_template, user_template
 from langchain.llms import HuggingFaceHub, LlamaCpp
 from huggingface_hub import snapshot_download, hf_hub_download
-
+
 
 credentials = os.getenv("GIGACHAT_CREDENTIALS")
 
@@ -56,20 +57,20 @@ def get_vectorstore(text_chunks):
 
 def get_conversation_chain(vectorstore, model_name):
 
-    #llm = LlamaCpp(model_path=model_name,
-    #
-    #
-    #
-    #
-    #
-    #
-    #
+    # llm = LlamaCpp(model_path=model_name,
+    #                temperature=0.1,
+    #                top_k=30,
+    #                top_p=0.9,
+    #                streaming=True,
+    #                n_ctx=2048,
+    #                n_parts=1,
+    #                echo=True
     # )
 
-    #llm = ChatOpenAI()
+    # llm = ChatOpenAI()
 
     llm = GigaChat(credentials=credentials,
-                   verify_ssl_certs=False
+                   verify_ssl_certs=False)
 
     memory = ConversationBufferMemory(memory_key='chat_history',
                                       input_key='question',
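The added import os is what the module-level credentials = os.getenv("GIGACHAT_CREDENTIALS") line relies on; in the hunks shown, os.getenv is called without a visible import of os. A minimal sketch of the expected credential flow, assuming the value comes from a local .env file read by the load_dotenv import that app.py already has (the variable name GIGACHAT_CREDENTIALS is taken from the diff; the .env layout and the early-failure check are assumptions, not part of the commit):

    # .env (kept out of version control) -- assumed single entry:
    # GIGACHAT_CREDENTIALS=<GigaChat authorization key>

    import os
    from dotenv import load_dotenv

    load_dotenv()  # copies entries from .env into the process environment

    credentials = os.getenv("GIGACHAT_CREDENTIALS")
    if credentials is None:
        # Assumed guard: fail early instead of passing None to GigaChat later on.
        raise RuntimeError("GIGACHAT_CREDENTIALS is not set")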
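The functional fix in get_conversation_chain is the closing parenthesis on verify_ssl_certs=False): in the old version the GigaChat(...) call was left unclosed, so the following memory = ... assignment landed inside the argument list and the module stopped parsing. The diff cuts off at the ConversationBufferMemory arguments, so the rest of the function is not shown; the sketch below is one plausible continuation, assuming the usual LangChain ConversationalRetrievalChain wiring for a PDF-chat app (the from_llm call, as_retriever(), and return_messages=True are assumptions, not taken from the diff):

    import os

    from langchain.chains import ConversationalRetrievalChain
    from langchain.chat_models.gigachat import GigaChat
    from langchain.memory import ConversationBufferMemory

    credentials = os.getenv("GIGACHAT_CREDENTIALS")  # module-level, as in app.py


    def get_conversation_chain(vectorstore, model_name):
        # From the diff: GigaChat LLM built from the module-level credentials,
        # with TLS certificate verification disabled.
        llm = GigaChat(credentials=credentials,
                       verify_ssl_certs=False)

        memory = ConversationBufferMemory(memory_key='chat_history',
                                          input_key='question',
                                          return_messages=True)  # return_messages is assumed

        # Assumed continuation: combine the LLM, the vector store's retriever,
        # and the buffer memory into a conversational retrieval chain.
        conversation_chain = ConversationalRetrievalChain.from_llm(
            llm=llm,
            retriever=vectorstore.as_retriever(),
            memory=memory,
        )
        return conversation_chain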