Spaces:
Runtime error
on a10g
runtime error
predicted flow dtype = torch.float32 predicted flow shape = torch.Size([2, 520, 960]) Traceback (most recent call last): File "app.py", line 179, in <module> gr.Interface(fn=infer, inputs=[gr.Image(source="upload", type="filepath", label="frame 1"), gr.Image(source="upload", type="filepath", label="frame 2")], outputs=[gr.Image(label="flow image"), gr.Files(label="flow file")], title="RAFT Optical Flow", description=description, examples=examples).launch(debug=True) File "/home/user/.pyenv/versions/3.8.9/lib/python3.8/site-packages/gradio/interface.py", line 484, in __init__ self.render_examples() File "/home/user/.pyenv/versions/3.8.9/lib/python3.8/site-packages/gradio/interface.py", line 785, in render_examples self.examples_handler = Examples( File "/home/user/.pyenv/versions/3.8.9/lib/python3.8/site-packages/gradio/helpers.py", line 69, in create_examples utils.synchronize_async(examples_obj.create) File "/home/user/.pyenv/versions/3.8.9/lib/python3.8/site-packages/gradio/utils.py", line 412, in synchronize_async return fsspec.asyn.sync(fsspec.asyn.get_loop(), func, *args, **kwargs) File "/home/user/.pyenv/versions/3.8.9/lib/python3.8/site-packages/fsspec/asyn.py", line 100, in sync raise return_result File "/home/user/.pyenv/versions/3.8.9/lib/python3.8/site-packages/fsspec/asyn.py", line 55, in _runner result[0] = await coro File "/home/user/.pyenv/versions/3.8.9/lib/python3.8/site-packages/gradio/helpers.py", line 273, in create await self.cache() File "/home/user/.pyenv/versions/3.8.9/lib/python3.8/site-packages/gradio/helpers.py", line 314, in cache cache_logger.flag(output) File "/home/user/.pyenv/versions/3.8.9/lib/python3.8/site-packages/gradio/flagging.py", line 221, in flag component.deserialize( File "/home/user/.pyenv/versions/3.8.9/lib/python3.8/site-packages/gradio/serializing.py", line 181, in deserialize raise ValueError( ValueError: A FileSerializable component cannot only deserialize a string or a dict, not a: <class 'list'>
Container logs:
Caching examples at: '/home/user/app/gradio_cached_examples/15'
FRAME 1: tensor([[[ 78, 78, 78, ..., 210, 210, 210],
[ 78, 79, 79, ..., 210, 210, 209],
[ 78, 79, 80, ..., 210, 210, 210],
...,
[ 81, 81, 82, ..., 50, 50, 50],
[ 85, 83, 80, ..., 50, 50, 49],
[ 86, 84, 79, ..., 50, 50, 50]],
[[ 85, 85, 85, ..., 210, 210, 210],
[ 85, 86, 86, ..., 210, 210, 209],
[ 85, 86, 87, ..., 210, 210, 210],
...,
[ 54, 54, 55, ..., 39, 39, 39],
[ 58, 56, 53, ..., 39, 39, 38],
[ 59, 57, 52, ..., 39, 39, 39]],
[[ 93, 93, 93, ..., 208, 208, 208],
[ 93, 94, 94, ..., 208, 208, 207],
[ 93, 94, 95, ..., 208, 208, 208],
...,
[ 45, 45, 46, ..., 35, 35, 35],
[ 49, 47, 42, ..., 35, 35, 34],
[ 50, 48, 43, ..., 35, 35, 35]]], dtype=torch.uint8)
FRAME 1: tensor([[[ 78, 78, 78, ..., 210, 210, 210],   <- [sic] labeled "FRAME 1" again, but the values below differ from the first dump — this is actually frame 2; the debug print label in app.py is duplicated
[ 78, 79, 79, ..., 210, 210, 209],
[ 78, 79, 80, ..., 210, 210, 210],
...,
[ 80, 83, 86, ..., 49, 50, 50],
[ 81, 85, 83, ..., 50, 50, 50],
[ 81, 85, 83, ..., 50, 50, 50]],
[[ 85, 85, 85, ..., 210, 210, 210],
[ 85, 86, 86, ..., 210, 210, 209],
[ 85, 86, 87, ..., 210, 210, 210],
...,
[ 53, 56, 59, ..., 38, 39, 39],
[ 54, 58, 56, ..., 39, 39, 39],
[ 54, 58, 56, ..., 39, 39, 39]],
[[ 93, 93, 93, ..., 208, 208, 208],
[ 93, 94, 94, ..., 208, 208, 207],
[ 93, 94, 95, ..., 208, 208, 208],
...,
[ 44, 47, 52, ..., 34, 35, 35],
[ 45, 49, 47, ..., 35, 35, 35],
[ 45, 49, 47, ..., 35, 35, 35]]], dtype=torch.uint8)
FRAME AFTER stack: tensor([[[[ 78, 78, 78, ..., 210, 210, 210],
[ 78, 79, 79, ..., 210, 210, 209],
[ 78, 79, 80, ..., 210, 210, 210],
...,
[ 81, 81, 82, ..., 50, 50, 50],
[ 85, 83, 80, ..., 50, 50, 49],
[ 86, 84, 79, ..., 50, 50, 50]],
[[ 85, 85, 85, ..., 210, 210, 210],
[ 85, 86, 86, ..., 210, 210, 209],
[ 85, 86, 87, ..., 210, 210, 210],
...,
[ 54, 54, 55, ..., 39, 39, 39],
[ 58, 56, 53, ..., 39, 39, 38],
[ 59, 57, 52, ..., 39, 39, 39]],
[[ 93, 93, 93, ..., 208, 208, 208],
[ 93, 94, 94, ..., 208, 208, 207],
[ 93, 94, 95, ..., 208, 208, 208],
...,
[ 45, 45, 46, ..., 35, 35, 35],
[ 49, 47, 42, ..., 35, 35, 34],
[ 50, 48, 43, ..., 35, 35, 35]]]], dtype=torch.uint8)
shape = torch.Size([1, 3, 520, 960]), dtype = torch.float32
Downloading: "https://download.pytorch.org/models/raft_large_C_T_SKHT_V2-ff5fadd5.pth" to /home/user/.cache/torch/hub/checkpoints/raft_large_C_T_SKHT_V2-ff5fadd5.pth
list_of_flows type = <class 'list'>
list_of_flows length = 12 = number of iterations of the model
predicted_flows dtype = torch.float32
predicted_flows shape = torch.Size([1, 2, 520, 960]) = (N, 2, H, W)
predicted_flows min = -6.072243690490723, predicted_flows max = 3.367666006088257
predicted flow dtype = torch.float32
predicted flow shape = torch.Size([2, 520, 960])
Traceback (most recent call last):
File "app.py", line 179, in <module>
gr.Interface(fn=infer, inputs=[gr.Image(source="upload", type="filepath", label="frame 1"), gr.Image(source="upload", type="filepath", label="frame 2")], outputs=[gr.Image(label="flow image"), gr.Files(label="flow file")], title="RAFT Optical Flow", description=description, examples=examples).launch(debug=True)
File "/home/user/.pyenv/versions/3.8.9/lib/python3.8/site-packages/gradio/interface.py", line 484, in __init__
self.render_examples()
File "/home/user/.pyenv/versions/3.8.9/lib/python3.8/site-packages/gradio/interface.py", line 785, in render_examples
self.examples_handler = Examples(
File "/home/user/.pyenv/versions/3.8.9/lib/python3.8/site-packages/gradio/helpers.py", line 69, in create_examples
utils.synchronize_async(examples_obj.create)
File "/home/user/.pyenv/versions/3.8.9/lib/python3.8/site-packages/gradio/utils.py", line 412, in synchronize_async
return fsspec.asyn.sync(fsspec.asyn.get_loop(), func, *args, **kwargs)
File "/home/user/.pyenv/versions/3.8.9/lib/python3.8/site-packages/fsspec/asyn.py", line 100, in sync
raise return_result
File "/home/user/.pyenv/versions/3.8.9/lib/python3.8/site-packages/fsspec/asyn.py", line 55, in _runner
result[0] = await coro
File "/home/user/.pyenv/versions/3.8.9/lib/python3.8/site-packages/gradio/helpers.py", line 273, in create
await self.cache()
File "/home/user/.pyenv/versions/3.8.9/lib/python3.8/site-packages/gradio/helpers.py", line 314, in cache
cache_logger.flag(output)
File "/home/user/.pyenv/versions/3.8.9/lib/python3.8/site-packages/gradio/flagging.py", line 221, in flag
component.deserialize(
File "/home/user/.pyenv/versions/3.8.9/lib/python3.8/site-packages/gradio/serializing.py", line 181, in deserialize
raise ValueError(
ValueError: A FileSerializable component cannot only deserialize a string or a dict, not a: <class 'list'>