hank1996 committed on
Commit
a75174f
β€’
1 Parent(s): 7168756

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -2
app.py CHANGED
@@ -195,7 +195,7 @@ def detect(img,model):
195
 
196
  vid_path, vid_writer = None, None
197
  img = torch.zeros((1, 3, imgsz, imgsz), device=device) # init img
198
- _ = model(img.half() if half else img) if device.type != 'cpu' else None # run once
199
  model.eval()
200
 
201
 
@@ -273,4 +273,4 @@ def detect(img,model):
273
  return Image.fromarray(im0[:,:,::-1])
274
 
275
 
276
- gr.Interface(detect,[gr.Image(type="pil"),gr.Dropdown(choices=["yolopv2","yolop"])], gr.Image(type="pil"),title="Yolopv2",examples=[["example.jpeg", "yolopv2"]],description="demo for <a href='https://github.com/CAIC-AD/YOLOPv2' style='text-decoration: underline' target='_blank'>yolopv2</a> πŸš€: Better, Faster, Stronger for Panoptic driving Perception (but maybe not fast on cpu:joy:)").launch()
 
195
 
196
  vid_path, vid_writer = None, None
197
  img = torch.zeros((1, 3, imgsz, imgsz), device=device) # init img
198
+ _ = model(img.half() if half else img) # run once
199
  model.eval()
200
 
201
 
 
273
  return Image.fromarray(im0[:,:,::-1])
274
 
275
 
276
+ gr.Interface(detect,[gr.Image(type="pil"),gr.Dropdown(choices=["yolopv2","yolop"])], gr.Image(type="pil"),title="Yolopv2",examples=[["example.jpeg", "yolopv2"]],description="demo for <a href='https://github.com/CAIC-AD/YOLOPv2' style='text-decoration: underline' target='_blank'>yolopv2</a> πŸš€: Better, Faster, Stronger for Panoptic driving Perception ").launch()