Anne31415 committed on
Commit
5c780cd
·
1 Parent(s): 0c94ada

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +18 -11
app.py CHANGED
@@ -10,9 +10,22 @@ from PIL import Image
10
  import io
11
  import matplotlib as plt
12
 
 
 
13
  api_key = os.getenv("OPENAI_API_KEY")
14
  assistant_id = os.getenv("assistant_id")
15
 
 
 
 
 
 
 
 
 
 
 
 
16
  client = openai.Client()
17
 
18
  MODEL = "gpt-4-1106-preview"
@@ -59,7 +72,6 @@ elif hasattr(st.session_state.run, 'status') and st.session_state.run.status ==
59
  thread_id=st.session_state.thread.id
60
  )
61
 
62
-
63
  # Display messages
64
  for message in reversed(st.session_state.messages.data):
65
  if message.role in ["user", "assistant"]:
@@ -68,16 +80,11 @@ elif hasattr(st.session_state.run, 'status') and st.session_state.run.status ==
68
  if content_part.type == 'text': # For text responses
69
  message_text = content_part.text.value
70
  st.markdown(message_text)
71
- elif content_part.type == 'image_url': # For image URLs
72
- image_url = content_part.image_url # Adjust based on actual response structure
73
- st.image(image_url)
74
- elif content_part.type == 'base64_image': # For base64-encoded images
75
- base64_data = content_part.base64_image # Adjust based on actual response structure
76
- image_data = base64.b64decode(base64_data)
77
- image = Image.open(io.BytesIO(image_data))
78
- st.image(image)
79
-
80
-
81
 
82
 
83
  if prompt := st.chat_input("Wie kann ich dir helfen?"):
 
10
  import io
11
  import matplotlib as plt
12
 
13
+ from PIL import Image
14
+
15
  api_key = os.getenv("OPENAI_API_KEY")
16
  assistant_id = os.getenv("assistant_id")
17
 
18
def handle_and_display_image(file_id):
    """Download an OpenAI-hosted file and return it as a PIL Image.

    Parameters
    ----------
    file_id : str
        Identifier of the file to fetch via the OpenAI Files API
        (uses the module-level ``client``).

    Returns
    -------
    PIL.Image.Image or None
        The decoded image on success; ``None`` when the HTTP status
        is not 200.
    """
    api_response = client.files.with_raw_response.retrieve_content(file_id)
    if api_response.status_code == 200:
        # Decode entirely in memory instead of round-tripping through a
        # hard-coded 'image.png' on disk: the original left the temp file
        # behind and raced between concurrent sessions writing the same path.
        return Image.open(io.BytesIO(api_response.content))
    print('Failed to download file.')
    return None
28
+
29
  client = openai.Client()
30
 
31
  MODEL = "gpt-4-1106-preview"
 
72
  thread_id=st.session_state.thread.id
73
  )
74
 
 
75
  # Display messages
76
  for message in reversed(st.session_state.messages.data):
77
  if message.role in ["user", "assistant"]:
 
80
  if content_part.type == 'text': # For text responses
81
  message_text = content_part.text.value
82
  st.markdown(message_text)
83
+ # New code to handle image files
84
+ elif hasattr(content_part, 'image_file') and content_part.image_file:
85
+ image = handle_and_display_image(content_part.image_file.file_id)
86
+ if image:
87
+ st.image(image, caption="Generated Image")
 
 
 
 
 
88
 
89
 
90
  if prompt := st.chat_input("Wie kann ich dir helfen?"):