isayahc committed
Commit de4004c
1 Parent(s): ab0ba22

set up MVP

Files changed (1)
  1. app.py +22 -12
app.py CHANGED
@@ -34,14 +34,22 @@ dotenv.load_dotenv()
 text_splitter = CharacterTextSplitter(chunk_size=350, chunk_overlap=0)
 
 # flan_ul2 = HuggingFaceHub(repo_id="HuggingFaceH4/zephyr-7b-beta", model_kwargs={"temperature":0.1, "max_new_tokens":300})
-flan_ul2 = OpenAI()
+# flan_ul2 = OpenAI()
+from langchain.chat_models import ChatOpenAI
+
+flan_ul2 = chat = ChatOpenAI(
+    model_name='gpt-3.5-turbo-16k',
+    # temperature = self.config.llm.temperature,
+    # openai_api_key = self.config.llm.openai_api_key,
+    # max_tokens=self.config.llm.max_tokens
+)
 
 global qa
 
 # embeddings = HuggingFaceHubEmbeddings()
 COHERE_API_KEY = os.getenv("COHERE_API_KEY")
 embeddings = CohereEmbeddings(
-    model="embed-english-light-v3.0",
+    model="embed-english-v3.0",
     cohere_api_key=COHERE_API_KEY
 )
 
@@ -126,24 +134,26 @@ def add_text(history, text):
     history = history + [(text, None)]
     return history, ""
 
-# def bot(history):
-# response = infer(history[-1][0])
-# history[-1][1] = response['result']
-# return history
-
 def bot(history):
-    response = infer(history[-1][0], history)
-    sources = [doc.metadata.get("source") for doc in response['source_documents']]
-    src_list = '\n'.join(sources)
-    print_this = response['answer'] + "\n\n\n Sources: \n\n\n" + src_list
+    response = infer(history[-1][0],"")
+    history[-1][1] = response['answer']
+    return history
+
+# def bot(history):
+# response = infer(history[-1][0], history)
+# sources = [doc.metadata.get("source") for doc in response['source_documents']]
+# src_list = '\n'.join(sources)
+# print_this = response['answer'] + "\n\n\n Sources: \n\n\n" + src_list
+# return print_this
 
-def infer(question, history):
+def infer(question, history) -> dict:
 
     query = question
     # result = qa({"query": query, "context":""})
     # result = qa({"query": query, })
     result = qa({"query": query, "history": history, "question": question})
 
+    # result = result['answer']
     return result
 
 css="""
 