admin committed
Commit 837bdb5 · Parent: 5ac967b
app.py
CHANGED
@@ -6,15 +6,14 @@ from torchvision.transforms import transforms
 from modelscope import snapshot_download
 
 MODEL_DIR = snapshot_download("Genius-Society/HEp2", cache_dir="./__pycache__")
-TRANSLATE = {
-    "Centromere"
-    "Golgi"
-    "Homogeneous"
-    "NuMem"
-    "Nucleolar"
-    "Speckled"
-}
-CLASSES = list(TRANSLATE.keys())
+CLASSES = [
+    "Centromere",
+    "Golgi",
+    "Homogeneous",
+    "NuMem",
+    "Nucleolar",
+    "Speckled",
+]
 
 
 def embeding(img_path: str):
@@ -34,13 +33,13 @@ def embeding(img_path: str):
 def infer(target: str):
     model = torch.load(f"{MODEL_DIR}/save.pt", map_location=torch.device("cpu"))
     if not target:
-        return None, "
+        return None, "Please upload a cell picture!"
 
     torch.cuda.empty_cache()
     input: torch.Tensor = embeding(target)
     output: torch.Tensor = model(input.unsqueeze(0))
     predict = torch.max(output.data, 1)[1]
-    return os.path.basename(target),
+    return os.path.basename(target), CLASSES[predict]
 
 
 if __name__ == "__main__":
@@ -51,14 +50,12 @@ if __name__ == "__main__":
     with gr.Blocks() as demo:
         gr.Interface(
             fn=infer,
-            inputs=gr.Image(
-                type="filepath", label="上传细胞图像 Upload a cell picture"
-            ),
+            inputs=gr.Image(type="filepath", label="Upload a cell picture"),
             outputs=[
-                gr.Textbox(label="
-                gr.Textbox(label="
+                gr.Textbox(label="Picture name", show_copy_button=True),
+                gr.Textbox(label="Recognition result", show_copy_button=True),
             ],
-            title="
+            title="It is recommended to upload HEp2 cell images in PNG format.",
             examples=example_imgs,
             flagging_mode="never",
             cache_examples=False,