anilbhatt1 committed on
Commit 38c5a71
1 Parent(s): d73ecb2

Initial commit with app.py

Files changed (2)
  1. app.py +75 -0
  2. requirements.txt +22 -0
app.py ADDED
@@ -0,0 +1,75 @@
+ import os
+ import cv2
+ import numpy as np
+ from PIL import Image
+ import gradio as gr
+ import json
+ import matplotlib.pyplot as plt
+ import subprocess
+
+ repo_url = "https://github.com/CASIA-IVA-Lab/FastSAM.git"
+ target_directory = "./FastSAM"
+ subprocess.run(['git', 'clone', repo_url, target_directory])
+ os.chdir('./FastSAM')
+
+ from fastsam import FastSAM, FastSAMPrompt
+ import ast
+ import torch
+ from PIL import Image
+ from utils.tools import convert_box_xywh_to_xyxy
+
+ def gradio_fn(pil_input_img):
+     # load model
+     model = FastSAM('./weights/FastSAM.pt')
+     args_point_prompt = ast.literal_eval("[[0,0]]")
+     args_box_prompt = convert_box_xywh_to_xyxy(ast.literal_eval("[[0,0,0,0]]"))
+     args_point_label = ast.literal_eval("[0]")
+     args_text_prompt = None
+     input = pil_input_img
+     input = input.convert("RGB")
+     everything_results = model(
+         input,
+         device="cpu",
+         retina_masks=True,
+         imgsz=1024,
+         conf=0.4,
+         iou=0.9
+     )
+     bboxes = None
+     points = None
+     point_label = None
+     prompt_process = FastSAMPrompt(input, everything_results, device="cpu")
+     if args_box_prompt[0][2] != 0 and args_box_prompt[0][3] != 0:
+         ann = prompt_process.box_prompt(bboxes=args_box_prompt)
+         bboxes = args_box_prompt
+     elif args_text_prompt is not None:
+         ann = prompt_process.text_prompt(text=args_text_prompt)
+     elif args_point_prompt[0] != [0, 0]:
+         ann = prompt_process.point_prompt(
+             points=args_point_prompt, pointlabel=args_point_label
+         )
+         points = args_point_prompt
+         point_label = args_point_label
+     else:
+         ann = prompt_process.everything_prompt()
+     prompt_process.plot(
+         annotations=ann,
+         output_path="./output.jpg",
+         bboxes=bboxes,
+         points=points,
+         point_label=point_label,
+         withContours=False,
+         better_quality=False,
+     )
+     pil_image_output = Image.open('./output.jpg')
+     np_img_array = np.array(pil_image_output)
+     return np_img_array
+
+ demo = gr.Interface(fn=gradio_fn,
+                     inputs=gr.Image(type="pil"),
+                     outputs="image",
+                     title="FAST-SAM Segment Everything",
+                     description="- **FastSAM** model that returns segmented RGB image of given input image.\n"
+                                 "- **Credits** :\n"
+                                 "  - https://huggingface.co/An-619\n"
+                                 "  - https://github.com/CASIA-IVA-Lab/FastSAM")
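
Note: as committed, app.py stops after defining the `demo` interface and never calls `launch()`; whether the Space runtime serves the interface anyway depends on how it executes the script. A minimal, hedged sketch of an explicit launch, assuming only the `demo` object defined above:

    # Hypothetical addition (not part of this commit): serve the Gradio UI
    # when app.py is run directly as a script.
    if __name__ == "__main__":
        demo.launch()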
requirements.txt ADDED
@@ -0,0 +1,22 @@
+ # Base-----------------------------------
+ matplotlib>=3.2.2
+ opencv-python>=4.6.0
+ Pillow>=7.1.2
+ PyYAML>=5.3.1
+ requests>=2.23.0
+ scipy>=1.4.1
+ torch>=1.7.0
+ torchvision>=0.8.1
+ tqdm>=4.64.0
+
+ pandas>=1.1.4
+ seaborn>=0.11.0
+
+ gradio==3.35.2
+
+ # Ultralytics-----------------------------------
+ ultralytics==8.0.120
+
+ # clip----
+ git+https://github.com/openai/CLIP.git
+
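
Note: app.py loads its checkpoint from ./weights/FastSAM.pt, and neither file in this commit downloads or ships those weights. Below is a hedged sketch of a one-time download helper using the requests dependency already listed above; the checkpoint URL is a placeholder assumption, not taken from the commit.

    import os
    import requests

    # Placeholder URL (assumption): point this at the published FastSAM.pt checkpoint.
    WEIGHTS_URL = "https://example.com/FastSAM.pt"
    WEIGHTS_PATH = "./weights/FastSAM.pt"

    def ensure_weights(url=WEIGHTS_URL, path=WEIGHTS_PATH):
        """Download the FastSAM checkpoint once and reuse it on later runs."""
        if not os.path.exists(path):
            os.makedirs(os.path.dirname(path), exist_ok=True)
            resp = requests.get(url, timeout=120)
            resp.raise_for_status()
            with open(path, "wb") as f:
                f.write(resp.content)
        return path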