Update app.py
Browse files
app.py
CHANGED
@@ -289,13 +289,21 @@ def get_text_chunks(documents):
|
|
289 |
|
290 |
# Build a FAISS vector store from the text chunks.
def get_vectorstore(text_chunks):
    """Create a FAISS vector store from the given document chunks.

    Parameters
    ----------
    text_chunks : list
        LangChain ``Document`` chunks (as produced by ``get_text_chunks``).

    Returns
    -------
    FAISS | None
        The populated vector store, or ``None`` when ``text_chunks`` is
        empty — ``FAISS.from_documents`` cannot build an index from zero
        documents, so we bail out early instead of raising.
    """
    # Guard: indexing an empty chunk list would fail downstream.
    if not text_chunks:
        return None

    # Load the sentence-transformers embedding model on CPU.
    embeddings = HuggingFaceEmbeddings(
        model_name='sentence-transformers/all-MiniLM-L12-v2',
        model_kwargs={'device': 'cpu'},
    )

    # Embed every chunk and build the FAISS index.
    vectorstore = FAISS.from_documents(text_chunks, embeddings)
    return vectorstore
|
297 |
|
298 |
|
|
|
299 |
def get_conversation_chain(vectorstore):
|
300 |
model_name_or_path = 'TheBloke/Llama-2-7B-chat-GGUF'
|
301 |
model_basename = 'llama-2-7b-chat.Q2_K.gguf'
|
|
|
289 |
|
290 |
# Build a FAISS vector store from the text chunks.
def get_vectorstore(text_chunks):
    """Create a FAISS vector store from the given document chunks.

    Parameters
    ----------
    text_chunks : list
        LangChain ``Document`` chunks (as produced by ``get_text_chunks``).

    Returns
    -------
    FAISS | None
        The populated vector store, or ``None`` when ``text_chunks`` is
        empty — ``FAISS.from_documents`` cannot build an index from zero
        documents, so we bail out early instead of raising.
    """
    # Guard: indexing an empty chunk list would fail downstream.
    if not text_chunks:
        return None

    # Load the sentence-transformers embedding model on CPU.
    # NOTE: the original code also checked `if not embeddings: return None`,
    # but the constructor either raises or returns an instance — the check
    # was unreachable dead code and has been removed.
    embeddings = HuggingFaceEmbeddings(
        model_name='sentence-transformers/all-MiniLM-L12-v2',
        model_kwargs={'device': 'cpu'},
    )

    # Embed every chunk and build the FAISS index.
    vectorstore = FAISS.from_documents(text_chunks, embeddings)
    return vectorstore
|
304 |
|
305 |
|
306 |
+
|
307 |
def get_conversation_chain(vectorstore):
|
308 |
model_name_or_path = 'TheBloke/Llama-2-7B-chat-GGUF'
|
309 |
model_basename = 'llama-2-7b-chat.Q2_K.gguf'
|