eliujl committed on
Commit
3607afa
1 Parent(s): c933e41

Correct hist_fn

Browse files

Correct hist_fn initialization

Files changed (2) hide show
  1. app.py +1 -0
  2. improved_python_code.py +96 -0
app.py CHANGED
@@ -311,6 +311,7 @@ def main(pinecone_index_name, chroma_collection_name, persist_directory, docsear
311
  usage = 'RAG'
312
  local_llm_path = './models/'
313
  user_llm_path = ''
 
314
  # Get user input of whether to use Pinecone or not
315
  col1, col2, col3 = st.columns([1, 1, 1])
316
  # create the radio buttons and text input fields
 
311
  usage = 'RAG'
312
  local_llm_path = './models/'
313
  user_llm_path = ''
314
+ hist_fn = ''
315
  # Get user input of whether to use Pinecone or not
316
  col1, col2, col3 = st.columns([1, 1, 1])
317
  # create the radio buttons and text input fields
improved_python_code.py ADDED
@@ -0,0 +1,96 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ # Import required libraries
3
+ import json
4
+ import os
5
+ import shutil
6
+ import langchain
7
+ import pinecone
8
+ import streamlit as st
9
+ from langchain.chat_models import ChatOpenAI
10
+ from langchain.chains import ConversationalRetrievalChain, LLMChain, load_qa_chain
11
+ from langchain.document_loaders import PyMuPDFLoader, UnstructuredFileLoader, UnstructuredWordDocumentLoader
12
+ from langchain.embeddings import HuggingFaceEmbeddings, OpenAIEmbeddings
13
+ from langchain.memory import ConversationBufferMemory
14
+ from langchain.prompts import PromptTemplate
15
+ from langchain.text_splitter import RecursiveCharacterTextSplitter
16
+ from langchain.vectorstores import Chroma, Pinecone
17
+
18
+ # API keys and model names
19
+ OPENAI_API_KEY = ''
20
+ PINECONE_API_KEY = ''
21
+ PINECONE_API_ENV = ''
22
+ gpt3p5 = 'gpt-3.5-turbo-1106'
23
+ gpt4 = 'gpt-4-1106-preview'
24
+ local_model_tuples = [
25
+ # Local model tuples as in the original code
26
+ ]
27
+ local_model_names = [t[1] for t in local_model_tuples]
28
+ langchain.verbose = False
29
+
30
+ # Initialization function
31
+ @st.cache_data()
32
+ def init():
33
+ pinecone_index_name = ''
34
+ chroma_collection_name = ''
35
+ persist_directory = ''
36
+ docsearch_ready = False
37
+ directory_name = 'tmp_docs'
38
+ return pinecone_index_name, chroma_collection_name, persist_directory, docsearch_ready, directory_name
39
+
40
+ # File saving function
41
+ @st.cache_data()
42
+ def save_file(files, directory_name):
43
+ # Original save_file function logic
44
+
45
+ # File loading function
46
+ def load_files(directory_name):
47
+ # Original load_files function logic
48
+
49
+ # Ingestion function
50
+ @st.cache_resource()
51
+ def ingest(_all_texts, use_pinecone, _embeddings, pinecone_index_name, chroma_collection_name, persist_directory):
52
+ # Original ingest function logic
53
+
54
+ # Retriever setup function
55
+ def setup_retriever(docsearch, k):
56
+ # Original setup_retriever function logic
57
+
58
+ # Docsearch setup function
59
+ def setup_docsearch(use_pinecone, pinecone_index_name, embeddings, chroma_collection_name, persist_directory):
60
+ # Original setup_docsearch function logic
61
+
62
+ # Response generation function
63
+ def get_response(query, chat_history, CRqa):
64
+ # Original get_response function logic
65
+
66
+ # Local LLM usage function
67
+ @st.cache_resource()
68
+ def use_local_llm(r_llm, local_llm_path, temperature):
69
+ # Original use_local_llm function logic
70
+
71
+ # Prompt setup function
72
+ def setup_prompt(r_llm, usage):
73
+ # Original setup_prompt function logic
74
+
75
+ # Embeddings and LLM setup function
76
+ def setup_em_llm(OPENAI_API_KEY, temperature, r_llm, local_llm_path, usage):
77
+ # Original setup_em_llm function logic
78
+
79
+ # Chat history loading function
80
+ def load_chat_history(CHAT_HISTORY_FILENAME):
81
+ # Original load_chat_history function logic
82
+
83
+ # Chat history saving function
84
+ def save_chat_history(chat_history, CHAT_HISTORY_FILENAME):
85
+ # Original save_chat_history function logic
86
+
87
+ # Initialization
88
+ pinecone_index_name, chroma_collection_name, persist_directory, docsearch_ready, directory_name = init()
89
+
90
+ # Main function
91
+ def main(pinecone_index_name, chroma_collection_name, persist_directory, docsearch_ready, directory_name):
92
+ # Original main function logic
93
+
94
+ # Entry point
95
+ if __name__ == '__main__':
96
+ main(pinecone_index_name, chroma_collection_name, persist_directory, docsearch_ready, directory_name)