baohuynhbk14 committed
Commit e967d75 · 1 Parent(s): b6367f6

Add debug prints for model, tokenizer, and question in http_bot function
Files changed (2)
  1. app.py +3 -1
  2. logs/gradio_web_server.log +22 -0
app.py CHANGED
@@ -167,7 +167,6 @@ def add_text(state, message, system_prompt, request: gr.Request):
     ) * 5
 
     model_name = "5CD-AI/Vintern-1B-v3_5"
-    model = None
     model = AutoModel.from_pretrained(
         model_name,
         torch_dtype=torch.bfloat16,
@@ -238,6 +237,9 @@ def http_bot(
         question = '<image>\n'+message
     else:
         question = message
+    print("Model: ", model)
+    print("Tokenizer: ", tokenizer)
+    print("Question: ", question)
     response, conv_history = model.chat(tokenizer, pixel_values, question, generation_config, history=None, return_history=True)
     print(f"AI response: {response}")
 
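For context on what the added debug prints sit around, below is a minimal, self-contained sketch of the load-and-chat pattern this commit touches. It follows the Vintern/InternVL-style remote-code interface; the trust_remote_code and low_cpu_mem_usage arguments, the generation_config values, and the placeholder pixel_values tensor are illustrative assumptions, not taken from app.py (the hunk cuts off after torch_dtype).

import torch
from transformers import AutoModel, AutoTokenizer

model_name = "5CD-AI/Vintern-1B-v3_5"

# Load model and tokenizer. trust_remote_code pulls in the custom InternVL-style
# chat() method; these kwargs follow the model card's published usage, not
# necessarily the full (truncated) from_pretrained call in app.py.
model = AutoModel.from_pretrained(
    model_name,
    torch_dtype=torch.bfloat16,
    low_cpu_mem_usage=True,
    trust_remote_code=True,
).eval()
tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True, use_fast=False)

# Illustrative generation settings; app.py builds its own generation_config.
generation_config = dict(max_new_tokens=512, do_sample=False, num_beams=3, repetition_penalty=2.5)

# Placeholder image tensor. In app.py, pixel_values is built from the uploaded
# image earlier in http_bot; a real run would preprocess the image into
# [num_tiles, 3, 448, 448] bfloat16 patches.
pixel_values = torch.zeros(1, 3, 448, 448, dtype=torch.bfloat16)

message = "Please help me analyze this picture."
question = "<image>\n" + message  # '<image>' marks where the image is injected

# The debug prints added by this commit, followed by the chat call from the diff.
print("Model: ", model)
print("Tokenizer: ", tokenizer)
print("Question: ", question)
response, conv_history = model.chat(
    tokenizer, pixel_values, question, generation_config,
    history=None, return_history=True,
)
print(f"AI response: {response}")

The removal of the redundant model = None in the first hunk is cosmetic: the variable is reassigned on the very next line, so behaviour is unchanged.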
 
logs/gradio_web_server.log CHANGED
@@ -2221,3 +2221,25 @@ This is a test response▌
 2025-01-15 08:03:45 | INFO | stdout | Messsage: [{'role': 'user', 'content': 'Please help me analyze this picture.', 'image': [<PIL.Image.Image image mode=RGB size=583x734 at 0x127712B50>]}, {'role': 'assistant', 'content': 'This is a test response▌', 'image': []}]
 2025-01-15 08:03:45 | INFO | stdout | Updated message: [{'role': 'user', 'content': 'Please help me analyze this picture.', 'image': [<PIL.Image.Image image mode=RGB size=583x734 at 0x127712B50>]}, {'role': 'assistant', 'content': 'This is a test response', 'image': []}]
 2025-01-15 08:03:45 | INFO | gradio_web_server | This is a test response
+2025-01-15 08:21:20 | INFO | stdout | Keyboard interruption in main thread... closing server.
+2025-01-15 08:21:21 | ERROR | stderr | Traceback (most recent call last):
+2025-01-15 08:21:21 | ERROR | stderr |   File "/Users/huynhbao/.virtualenvs/huggingface/lib/python3.9/site-packages/gradio/blocks.py", line 2664, in block_thread
+2025-01-15 08:21:21 | ERROR | stderr |     time.sleep(0.1)
+2025-01-15 08:21:21 | ERROR | stderr | KeyboardInterrupt
+2025-01-15 08:21:21 | ERROR | stderr |
+2025-01-15 08:21:21 | ERROR | stderr | During handling of the above exception, another exception occurred:
+2025-01-15 08:21:21 | ERROR | stderr |
+2025-01-15 08:21:21 | ERROR | stderr | Traceback (most recent call last):
+2025-01-15 08:21:21 | ERROR | stderr |   File "/Users/huynhbao/Workspace/5CD_data/huggingface/Vintern-1B/app.py", line 610, in <module>
+2025-01-15 08:21:21 | ERROR | stderr |     logger.info(args)
+2025-01-15 08:21:21 | ERROR | stderr |   File "/Users/huynhbao/.virtualenvs/huggingface/lib/python3.9/site-packages/gradio/blocks.py", line 2569, in launch
+2025-01-15 08:21:21 | ERROR | stderr |     self.block_thread()
+2025-01-15 08:21:21 | ERROR | stderr |   File "/Users/huynhbao/.virtualenvs/huggingface/lib/python3.9/site-packages/gradio/blocks.py", line 2668, in block_thread
+2025-01-15 08:21:21 | ERROR | stderr |     self.server.close()
+2025-01-15 08:21:21 | ERROR | stderr |   File "/Users/huynhbao/.virtualenvs/huggingface/lib/python3.9/site-packages/gradio/http_server.py", line 68, in close
+2025-01-15 08:21:21 | ERROR | stderr |     self.thread.join(timeout=5)
+2025-01-15 08:21:21 | ERROR | stderr |   File "/usr/local/Cellar/python@3.9/3.9.16/Frameworks/Python.framework/Versions/3.9/lib/python3.9/threading.py", line 1064, in join
+2025-01-15 08:21:21 | ERROR | stderr |     self._wait_for_tstate_lock(timeout=max(timeout, 0))
+2025-01-15 08:21:21 | ERROR | stderr |   File "/usr/local/Cellar/python@3.9/3.9.16/Frameworks/Python.framework/Versions/3.9/lib/python3.9/threading.py", line 1080, in _wait_for_tstate_lock
+2025-01-15 08:21:21 | ERROR | stderr |     if lock.acquire(block, timeout):
+2025-01-15 08:21:21 | ERROR | stderr | KeyboardInterrupt