valintea committed on
Commit
c243545
1 Parent(s): 678233a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -4
app.py CHANGED
@@ -1,9 +1,9 @@
1
  import gradio as gr
2
  from icevision.all import *
3
  import PIL
4
- class_map = ClassMap(['apple','banana','orange'])
5
- model = models.torchvision.faster_rcnn.model(backbone=models.torchvision.faster_rcnn.backbones.resnet18_fpn(pretrained=True), num_classes=len(class_map))
6
- state_dict = torch.load('fasterRCNNFruits.pth')
7
  model.load_state_dict(state_dict)
8
  size = 384
9
  infer_tfms = tfms.A.Adapter([*tfms.A.resize_and_pad(size),tfms.A.Normalize()])
@@ -16,4 +16,4 @@ def predict(img):
16
  return pred_dict['img']
17
 
18
  # Creamos la interfaz y la lanzamos.
19
- gr.Interface(fn=predict, inputs=["image"], outputs=["image"], examples=['fruits/train/images/mixed_14.jpg','fruits/train/images/mixed_15.jpg']).launch(share=True,debug=True)
 
1
  import gradio as gr
2
  from icevision.all import *
3
  import PIL
4
+ class_map = ClassMap(['ace', 'jack', 'king', 'nine', 'queen', 'ten'])
5
+ model = models.torchvision.faster_rcnn.model(backbone=models.torchvision.faster_rcnn.backbones.resnet50_fpn(pretrained=True), num_classes=len(class_map))
6
+ state_dict = torch.load('modelCards.pth')
7
  model.load_state_dict(state_dict)
8
  size = 384
9
  infer_tfms = tfms.A.Adapter([*tfms.A.resize_and_pad(size),tfms.A.Normalize()])
 
16
  return pred_dict['img']
17
 
18
  # Creamos la interfaz y la lanzamos.
19
+ gr.Interface(fn=predict, inputs=["image"], outputs=["image"], examples=['IMG_2383.JPG','IMG_2677.JPG']).launch(share=True,debug=True)