alexkueck committed on
Commit
066c598
1 Parent(s): 2e09900

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +26 -21
app.py CHANGED
@@ -390,17 +390,11 @@ def invoke (prompt, file, history, rag_option, model_option, openai_api_key, k=3
390
  ###########################
391
  if (model_option == "OpenAI"):
392
  #Anfrage an OpenAI ----------------------------
393
- if (prompt.find('Bild zeichnen') != -1):
394
  #print("OpenAI zeichnen.......................")
395
  #llm = ChatOpenAI(model_name = MODEL_NAME_OAI_ZEICHNEN, openai_api_key = openai_api_key, temperature=temperature)#, top_p = top_p)
396
  data = {"inputs": prompt}
397
  response = requests.post(API_URL, headers=HEADERS, json=data)
398
- result = response.content
399
- image = Image.open(io.BytesIO(result))
400
- history[-1][1] = file #image
401
- print("history......................")
402
- print(history)
403
- return history, "Stop: Success"
404
  else:
405
  print("OpenAI normal.......................")
406
  llm = ChatOpenAI(model_name = MODEL_NAME, openai_api_key = openai_api_key, temperature=temperature)#, top_p = top_p)
@@ -429,26 +423,37 @@ def invoke (prompt, file, history, rag_option, model_option, openai_api_key, k=3
429
  result = rag_chain(llm, history_text_und_prompt, db)
430
  else:
431
  print("LLM aufrufen ohne RAG: ...........")
432
- result = llm_chain(llm, history_text_und_prompt)
 
 
 
433
 
434
 
435
  except Exception as e:
436
  raise gr.Error(e)
437
 
438
 
439
- #Antwort als Stream ausgeben... wenn Textantwort gefordert
440
- history[-1][1] = ""
441
- for character in result:
442
- history[-1][1] += character
443
- time.sleep(0.03)
444
- yield history, "Generating"
445
- if shared_state.interrupted:
446
- shared_state.recover()
447
- try:
448
- yield history, "Stop: Success"
449
- return
450
- except:
451
- pass
 
 
 
 
 
 
 
 
452
 
453
  ################################################
454
  #GUI
 
390
  ###########################
391
  if (model_option == "OpenAI"):
392
  #Anfrage an OpenAI ----------------------------
393
+ if (prompt.find('zeichnen') != -1):
394
  #print("OpenAI zeichnen.......................")
395
  #llm = ChatOpenAI(model_name = MODEL_NAME_OAI_ZEICHNEN, openai_api_key = openai_api_key, temperature=temperature)#, top_p = top_p)
396
  data = {"inputs": prompt}
397
  response = requests.post(API_URL, headers=HEADERS, json=data)
 
 
 
 
 
 
398
  else:
399
  print("OpenAI normal.......................")
400
  llm = ChatOpenAI(model_name = MODEL_NAME, openai_api_key = openai_api_key, temperature=temperature)#, top_p = top_p)
 
423
  result = rag_chain(llm, history_text_und_prompt, db)
424
  else:
425
  print("LLM aufrufen ohne RAG: ...........")
426
+ if (prompt.find('zeichnen') != -1):
427
+ result = response.content
428
+ else:
429
+ result = llm_chain(llm, history_text_und_prompt)
430
 
431
 
432
  except Exception as e:
433
  raise gr.Error(e)
434
 
435
 
436
+ if (prompt.find('zeichnen') != -1):
437
+ #Bild ausgeben
438
+ image = Image.open(io.BytesIO(result))
439
+ history[-1][1] = file #image
440
+ print("history......................")
441
+ print(history)
442
+ return history, "Stop: Success"
443
+ else:
444
+ #Antwort als Stream ausgeben... wenn Textantwort gefordert
445
+ history[-1][1] = ""
446
+ for character in result:
447
+ history[-1][1] += character
448
+ time.sleep(0.03)
449
+ yield history, "Generating"
450
+ if shared_state.interrupted:
451
+ shared_state.recover()
452
+ try:
453
+ yield history, "Stop: Success"
454
+ return
455
+ except:
456
+ pass
457
 
458
  ################################################
459
  #GUI