bubuuunel committed
Commit f947724
Parent: 727c780

Upload app.py

Files changed (1): app.py (+10, -5)
app.py CHANGED
```diff
@@ -105,19 +105,24 @@ def predict(user_input,company):
         )
       }
     ]
-    print(prompt)
+
     # Create messages
+    try:
     response = client.chat.completions.create(
         model=model_name,
         messages=prompt,
         temperature=0
     )
 
+        prediction = response.choices[0].message.content.strip()
+    except Exception as e:
+        prediction = f'Sorry, I encountered the following error: \n {e}'
+
+
 
     # Get response from the LLM
     prediction = response.choices[0].message.content.strip()
-    print (prediction)
-
+
     # While the prediction is made, log both the inputs and outputs to a local log file
     # While writing to the log file, ensure that the commit scheduler is locked to avoid parallel
     # access
```
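This hunk drops the debug print() calls and wraps the chat-completion request in try/except, so an API failure comes back to the caller as an error string instead of raising inside the Gradio worker. Note that the committed diff does not re-indent the create(...) call under try:. A minimal runnable sketch of the intended pattern is shown below; the client, model name, and prompt set up at the top are placeholders standing in for the values app.py builds earlier in predict():

```python
from openai import OpenAI

client = OpenAI()             # placeholder: assumes OPENAI_API_KEY in the environment; app.py builds its own client
model_name = "gpt-3.5-turbo"  # placeholder: app.py sets its own model_name
prompt = [{"role": "user", "content": "Example question about a 10-K report"}]

try:
    # Ask the LLM for an answer; temperature=0 keeps responses deterministic
    response = client.chat.completions.create(
        model=model_name,
        messages=prompt,
        temperature=0,
    )
    prediction = response.choices[0].message.content.strip()
except Exception as e:
    # Surface the failure to the UI instead of crashing the request
    prediction = f"Sorry, I encountered the following error:\n{e}"
```

Catching a bare Exception keeps the UI responsive for any failure mode (rate limits, network errors, malformed requests) at the cost of hiding the specific error type.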
```diff
@@ -142,7 +147,7 @@ company_input = gr.Radio(
     label = 'company'
 )
 
-model_output = gr.Textbox (label = 'Response')
+model_output = gr.Label (label = 'Response')
 
 # Add text box and radio button to the interface
 # The radio button is used to select the company 10k report in which the context needs to be retrieved.
```
```diff
@@ -155,7 +160,7 @@ model_output = gr.Textbox (label = 'Response')
 demo = gr.Interface(
     fn=predict,
     inputs=[user_input,company_input],
-    outputs=model_output,
+    outputs=prediction,
     title="RAG on 10k-reports",
     description="This API allows you to query on annaul reports",
     concurrency_limit=16
```
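The output component changes from gr.Textbox to gr.Label, but the interface is then pointed at prediction, a name that (in the hunks shown here) exists only inside predict(). gr.Interface expects output components (or their string shortcuts) for outputs=, while predict() just returns the string to display. A sketch of the conventional wiring with the new gr.Label component follows; the Radio choices and the predict stub are illustrative, not copied from app.py:

```python
import gradio as gr

def predict(user_input: str, company: str) -> str:
    # Stand-in for the predict() defined earlier in app.py; it returns the LLM answer as a string.
    return f"(answer about {company}: {user_input})"

user_input = gr.Textbox(label="Question")
company_input = gr.Radio(["aws", "IBM", "Meta", "google", "msft"], label="company")  # choices are illustrative
model_output = gr.Label(label="Response")

demo = gr.Interface(
    fn=predict,
    inputs=[user_input, company_input],
    outputs=model_output,   # pass the component, not the string returned by predict()
    title="RAG on 10k-reports",
    description="This API allows you to query on annual reports",
    concurrency_limit=16,
)

if __name__ == "__main__":
    demo.launch()
```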
 
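The comments in the first hunk mention logging each request and response to a local file while holding the commit scheduler's lock so a background push never reads a half-written record. That logging code sits outside this diff; a minimal sketch of the pattern with huggingface_hub.CommitScheduler is given below, where the repo id, file paths, and field names are illustrative rather than taken from app.py:

```python
import json
from pathlib import Path
from uuid import uuid4

from huggingface_hub import CommitScheduler

# Illustrative names; app.py defines its own log folder, dataset repo, and scheduler.
log_folder = Path("logs")
log_folder.mkdir(exist_ok=True)
log_file = log_folder / f"data_{uuid4()}.json"

scheduler = CommitScheduler(
    repo_id="user/10k-rag-logs",   # hypothetical dataset repo
    repo_type="dataset",
    folder_path=log_folder,
    every=10,                      # push the folder every 10 minutes
)

def log_interaction(user_input: str, company: str, prediction: str) -> None:
    # Hold the scheduler's lock so an in-progress commit never sees a partial line
    with scheduler.lock:
        with log_file.open("a") as f:
            f.write(json.dumps({"user_input": user_input,
                                "company": company,
                                "output": prediction}))
            f.write("\n")
```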