thepolymerguy committed on
Commit
2230a35
1 Parent(s): a53224c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +31 -1
app.py CHANGED
@@ -192,7 +192,7 @@ def classifier(userin, SearchType):
192
  return md_class
193
 
194
  def generateresponse(history, temp, top_p, tokens):
195
-
196
  global model
197
  global tokenizer
198
 
@@ -218,6 +218,12 @@ def generateresponse(history, temp, top_p, tokens):
218
  outputs = str(outputs).split('### Response')[1]
219
 
220
  response = f"Response{outputs}"
 
 
 
 
 
 
221
  return response
222
 
223
  def run_model(userin, dropd, temp, top_p, tokens):
@@ -247,6 +253,12 @@ def run_model(userin, dropd, temp, top_p, tokens):
247
  outputs = outputs.split('\n \n \n \n*')[0]
248
 
249
  response = f"Response{outputs}"
 
 
 
 
 
 
250
  return response
251
 
252
  def prosecute(application, priorart, temp, top_p, tokens):
@@ -280,6 +292,12 @@ def prosecute(application, priorart, temp, top_p, tokens):
280
  outputs = outputs.split('\n \n \n \n*')[0]
281
 
282
  response = f"Response{outputs}"
 
 
 
 
 
 
283
  return response
284
 
285
  def ideator(userin, temp, top_p, tokens):
@@ -310,6 +328,12 @@ def ideator(userin, temp, top_p, tokens):
310
 
311
 
312
  response = f"Response{outputs}"
 
 
 
 
 
 
313
  return response
314
 
315
  def Chat(userin, temp, top_p, tokens):
@@ -339,6 +363,12 @@ def Chat(userin, temp, top_p, tokens):
339
  outputs = outputs.split('\n \n \n \n*')[0]
340
 
341
  response = f"Response{outputs}"
 
 
 
 
 
 
342
  return response
343
 
344
  def claim_selector(userin, dropd):
 
192
  return md_class
193
 
194
  def generateresponse(history, temp, top_p, tokens):
195
+
196
  global model
197
  global tokenizer
198
 
 
218
  outputs = str(outputs).split('### Response')[1]
219
 
220
  response = f"Response{outputs}"
221
+
222
+ f = open("Model_bert/log.log", "a")
223
+ count = 1
224
+ f.write(f'{count}\n')
225
+ f.close()
226
+
227
  return response
228
 
229
  def run_model(userin, dropd, temp, top_p, tokens):
 
253
  outputs = outputs.split('\n \n \n \n*')[0]
254
 
255
  response = f"Response{outputs}"
256
+
257
+ f = open("Model_bert/log.log", "a")
258
+ count = 1
259
+ f.write(f'{count}\n')
260
+ f.close()
261
+
262
  return response
263
 
264
  def prosecute(application, priorart, temp, top_p, tokens):
 
292
  outputs = outputs.split('\n \n \n \n*')[0]
293
 
294
  response = f"Response{outputs}"
295
+
296
+ f = open("Model_bert/log.log", "a")
297
+ count = 1
298
+ f.write(f'{count}\n')
299
+ f.close()
300
+
301
  return response
302
 
303
  def ideator(userin, temp, top_p, tokens):
 
328
 
329
 
330
  response = f"Response{outputs}"
331
+
332
+ f = open("Model_bert/log.log", "a")
333
+ count = 1
334
+ f.write(f'{count}\n')
335
+ f.close()
336
+
337
  return response
338
 
339
  def Chat(userin, temp, top_p, tokens):
 
363
  outputs = outputs.split('\n \n \n \n*')[0]
364
 
365
  response = f"Response{outputs}"
366
+
367
+ f = open("Model_bert/log.log", "a")
368
+ count = 1
369
+ f.write(f'{count}\n')
370
+ f.close()
371
+
372
  return response
373
 
374
  def claim_selector(userin, dropd):