alexkueck committed on
Commit 5648df4
1 Parent(s): d5281cb

Update app.py

Files changed (1)
  1. app.py +22 -3
app.py CHANGED
@@ -361,8 +361,27 @@ def umwandeln_fuer_anzeige(image):
     buffer = io.BytesIO()
     image.save(buffer, format='PNG')
     return buffer.getvalue()
-
-def invoke (prompt, file, history, rag_option, model_option, openai_api_key, k=3, top_p=0.6, temperature=0.5, max_new_tokens=4048, max_context_length_tokens=2048, repetition_penalty=1.3,):
+
+def generate_auswahl(prompt, file, history, rag_option, model_option, openai_api_key, k=3, top_p=0.6, temperature=0.5, max_new_tokens=4048, max_context_length_tokens=2048, repetition_penalty=1.3,):
+    if (prompt.find('zeichnen') != -1):
+        return generate_bild(prompt, history)
+    else:
+        return generate_text(prompt, file, history, rag_option, model_option, openai_api_key, k=k, top_p=top_p, temperature=temperature, max_new_tokens=max_new_tokens, max_context_length_tokens=max_context_length_tokens, repetition_penalty=repetition_penalty)
+
+def generate_bild(prompt, history,):
+    data = {"inputs": prompt}
+    response = requests.post(API_URL, headers=HEADERS, json=data)
+    print("fertig Bild")
+    result = response.content
+    #Bild ausgeben
+    image = Image.open(io.BytesIO(result))
+
+    history[-1][1] = "<img src='data:image/png;base64,{0}'/>".format(b64encode(umwandeln_fuer_anzeige(image)).decode('utf-8'))
+    print("history zeichnen......................")
+    print(history)
+    return history, "Fertig: Success"
+
+def generate_text(prompt, file, history, rag_option, model_option, openai_api_key, k=3, top_p=0.6, temperature=0.5, max_new_tokens=4048, max_context_length_tokens=2048, repetition_penalty=1.3,):
     global splittet
     print(splittet)
 
@@ -558,7 +577,7 @@ with gr.Blocks(css=customCSS, theme=small_and_beautiful_theme) as demo:
 
     #Argumente für generate Funktion als Input
     predict_args = dict(
-        fn=invoke,
+        fn=generate_auswahl,
         inputs=[
             user_question,
             upload,
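
For orientation, here is a minimal standalone sketch of the pattern this commit introduces: route the prompt to image generation when it asks to draw ("zeichnen"), otherwise fall through to the text pipeline, and hand the generated PNG back to the Gradio chatbot as an inline base64 <img> tag. This is an illustration, not the app's exact code: the endpoint URL, token, and generate_text_sketch stub are placeholders.

# Sketch of the routing + image-embedding pattern added in this commit.
# Assumptions: API_URL/HEADERS point to a Hugging Face Inference API
# text-to-image endpoint (placeholder values), and history uses the Gradio
# chatbot format [[user_message, bot_message], ...].
import io
from base64 import b64encode

import requests
from PIL import Image

API_URL = "https://api-inference.huggingface.co/models/<text-to-image-model>"  # placeholder
HEADERS = {"Authorization": "Bearer <hf_token>"}  # placeholder token


def bild_als_html(image):
    # Re-encode the PIL image as PNG and wrap it in an inline base64 <img> tag,
    # which the Gradio chatbot renders directly.
    buffer = io.BytesIO()
    image.save(buffer, format="PNG")
    encoded = b64encode(buffer.getvalue()).decode("utf-8")
    return "<img src='data:image/png;base64,{0}'/>".format(encoded)


def generate_bild_sketch(prompt, history):
    # Query the inference endpoint and attach the result to the last chat turn.
    response = requests.post(API_URL, headers=HEADERS, json={"inputs": prompt})
    image = Image.open(io.BytesIO(response.content))
    history[-1][1] = bild_als_html(image)
    return history, "Fertig: Success"


def generate_text_sketch(prompt, history, **kwargs):
    # Hypothetical stand-in: the real app calls its LLM/RAG chain here.
    history[-1][1] = "(text answer for: {0})".format(prompt)
    return history, "Fertig: Success"


def generate_auswahl_sketch(prompt, history, **kwargs):
    # Keyword-based routing: draw if the prompt contains "zeichnen", else answer in text.
    if "zeichnen" in prompt:
        return generate_bild_sketch(prompt, history)
    return generate_text_sketch(prompt, history, **kwargs)

In the app itself the dispatcher is simply registered as fn=generate_auswahl in predict_args, so a single click handler serves both the drawing path and the text path.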