A-PolarBear katielink committed
Commit edcffc5 (0 parents)

Duplicate from katielink/brain_tumor_segmentation

Co-authored-by: Katie Link <katielink@users.noreply.huggingface.co>

.gitattributes ADDED
@@ -0,0 +1,31 @@
+ *.7z filter=lfs diff=lfs merge=lfs -text
+ *.arrow filter=lfs diff=lfs merge=lfs -text
+ *.bin filter=lfs diff=lfs merge=lfs -text
+ *.bz2 filter=lfs diff=lfs merge=lfs -text
+ *.ftz filter=lfs diff=lfs merge=lfs -text
+ *.gz filter=lfs diff=lfs merge=lfs -text
+ *.h5 filter=lfs diff=lfs merge=lfs -text
+ *.joblib filter=lfs diff=lfs merge=lfs -text
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
+ *.model filter=lfs diff=lfs merge=lfs -text
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
+ *.npy filter=lfs diff=lfs merge=lfs -text
+ *.npz filter=lfs diff=lfs merge=lfs -text
+ *.onnx filter=lfs diff=lfs merge=lfs -text
+ *.ot filter=lfs diff=lfs merge=lfs -text
+ *.parquet filter=lfs diff=lfs merge=lfs -text
+ *.pickle filter=lfs diff=lfs merge=lfs -text
+ *.pkl filter=lfs diff=lfs merge=lfs -text
+ *.pb filter=lfs diff=lfs merge=lfs -text
+ *.pt filter=lfs diff=lfs merge=lfs -text
+ *.pth filter=lfs diff=lfs merge=lfs -text
+ *.rar filter=lfs diff=lfs merge=lfs -text
+ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
+ *.tar.* filter=lfs diff=lfs merge=lfs -text
+ *.tflite filter=lfs diff=lfs merge=lfs -text
+ *.tgz filter=lfs diff=lfs merge=lfs -text
+ *.wasm filter=lfs diff=lfs merge=lfs -text
+ *.xz filter=lfs diff=lfs merge=lfs -text
+ *.zip filter=lfs diff=lfs merge=lfs -text
+ *.zstandard filter=lfs diff=lfs merge=lfs -text
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,16 @@
+ ---
+ title: Brain Tumor Segmentation
+ emoji: 🧠
+ colorFrom: indigo
+ colorTo: red
+ sdk: gradio
+ sdk_version: 3.1.1
+ app_file: app.py
+ pinned: false
+ license: other
+ tags:
+ - monai
+ duplicated_from: katielink/brain_tumor_segmentation
+ ---
+
+ Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
app.py ADDED
@@ -0,0 +1,167 @@
+ import os
+ import gradio as gr
+ import torch
+ from monai import bundle
+ from monai.transforms import (
+     Compose,
+     LoadImaged,
+     EnsureChannelFirstd,
+     Orientationd,
+     NormalizeIntensityd,
+     Activationsd,
+     AsDiscreted,
+     ScaleIntensityd,
+ )
+
+ # Define the bundle name and path for downloading
+ BUNDLE_NAME = 'spleen_ct_segmentation_v0.1.0'
+ BUNDLE_PATH = os.path.join(torch.hub.get_dir(), 'bundle', BUNDLE_NAME)
+
+ # Title and description
+ title = '<h1 style="text-align: center;">Segment Brain Tumors with MONAI! 🧠 </h1>'
+ description = """
+ ## 🚀 To run
+
+ Upload a brain MRI image file, or try out one of the examples below!
+ If you want to see a different slice, update the slider.
+
+ More details on the model can be found [here!](https://huggingface.co/katielink/brats_mri_segmentation_v0.1.0)
+
+ ## ⚠️ Disclaimer
+
+ This is an example, not to be used for diagnostic purposes.
+ """
+
+ references = """
+ ## 👀 References
+
+ 1. Myronenko, Andriy. "3D MRI brain tumor segmentation using autoencoder regularization." International MICCAI Brainlesion Workshop. Springer, Cham, 2018. https://arxiv.org/abs/1810.11654
+ 2. Menze BH, et al. "The Multimodal Brain Tumor Image Segmentation Benchmark (BRATS)." IEEE Transactions on Medical Imaging 34(10), 1993-2024 (2015). DOI: 10.1109/TMI.2014.2377694
+ 3. Bakas S, et al. "Advancing The Cancer Genome Atlas glioma MRI collections with expert segmentation labels and radiomic features." Nature Scientific Data, 4:170117 (2017). DOI: 10.1038/sdata.2017.117
+ """
+
+ examples = [
+     ['examples/BRATS_485.nii.gz', 65],
+     ['examples/BRATS_486.nii.gz', 80]
+ ]
+
+ # Load the MONAI pretrained model from Hugging Face Hub
+ model, _, _ = bundle.load(
+     name=BUNDLE_NAME,
+     source='huggingface_hub',
+     repo='katielink/brats_mri_segmentation_v0.1.0',
+     load_ts_module=True,
+ )
+
+ # Use GPU if available
+ device = "cuda:0" if torch.cuda.is_available() else "cpu"
+
+ # Load the parser from the MONAI bundle's inference config
+ parser = bundle.load_bundle_config(BUNDLE_PATH, 'inference.json')
+
+ # Compose the preprocessing transforms
+ preproc_transforms = Compose(
+     [
+         LoadImaged(keys=["image"]),
+         EnsureChannelFirstd(keys="image"),
+         Orientationd(keys=["image"], axcodes="RAS"),
+         NormalizeIntensityd(keys="image", nonzero=True, channel_wise=True),
+     ]
+ )
+
+ # Get the inferer from the bundle's inference config
+ inferer = parser.get_parsed_content(
+     'inferer',
+     lazy=True, eval_expr=True, instantiate=True
+ )
+
+ # Compose the postprocessing transforms
+ post_transforms = Compose(
+     [
+         Activationsd(keys='pred', sigmoid=True),
+         AsDiscreted(keys='pred', threshold=0.5),
+         ScaleIntensityd(keys='image', minv=0., maxv=1.)
+     ]
+ )
+
+
+ # Define the predict function for the demo
+ def predict(input_file, z_axis, model=model, device=device):
+     # Load and process data in MONAI format
+     data = {'image': [input_file.name]}
+     data = preproc_transforms(data)
+
+     # Run inference and post-process predicted labels
+     model.to(device)
+     model.eval()
+     with torch.no_grad():
+         inputs = data['image'].to(device)
+         data['pred'] = inferer(inputs=inputs[None, ...], network=model)
+         data = post_transforms(data)
+
+     # Convert tensors back to numpy arrays
+     data['image'] = data['image'].numpy()
+     data['pred'] = data['pred'].cpu().detach().numpy()
+
+     # Magnetic resonance imaging sequences
+     t1c = data['image'][0, :, :, z_axis]  # T1-weighted, post contrast
+     t1 = data['image'][1, :, :, z_axis]  # T1-weighted, pre contrast
+     t2 = data['image'][2, :, :, z_axis]  # T2-weighted
+     flair = data['image'][3, :, :, z_axis]  # FLAIR
+
+     # BraTS labels
+     tc = data['pred'][0, 0, :, :, z_axis]  # Tumor core
+     wt = data['pred'][0, 1, :, :, z_axis]  # Whole tumor
+     et = data['pred'][0, 2, :, :, z_axis]  # Enhancing tumor
+
+     return [t1c, t1, t2, flair], [tc, wt, et]
+
+
+ # Use blocks to set up a more complex demo
+ with gr.Blocks() as demo:
+
+     # Show title and description
+     gr.Markdown(title)
+     gr.Markdown(description)
+
+     with gr.Row():
+         # Get the input file and slice slider as inputs
+         input_file = gr.File(label='input file')
+         z_axis = gr.Slider(0, 200, label='slice', value=50)
+
+     with gr.Row():
+         # Show the button with custom label
+         button = gr.Button("Segment Tumor!")
+
+     with gr.Row():
+         with gr.Column():
+             # Show the input image with different MR sequences
+             input_image = gr.Gallery(label='input MRI sequences (T1+, T1, T2, FLAIR)')
+
+         with gr.Column():
+             # Show the segmentation labels
+             output_segmentation = gr.Gallery(label='output segmentations (TC, WT, ET)')
+
+
+     # Run prediction on button click
+     button.click(
+         predict,
+         inputs=[input_file, z_axis],
+         outputs=[input_image, output_segmentation]
+     )
+
+     # Have some examples for the user to try out
+     examples = gr.Examples(
+         examples=examples,
+         inputs=[input_file, z_axis],
+         outputs=[input_image, output_segmentation],
+         fn=predict,
+         cache_examples=False
+     )
+
+     # Show references at the bottom of the demo
+     gr.Markdown(references)
+
+
+ # Launch the demo
+ demo.launch()
examples/BRATS_485.nii.gz ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1de6be1eeb49c788baa286a21d71546b2974bc300d5bc6ce4541e41854a0fefb
+ size 8327084
examples/BRATS_486.nii.gz ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e8957d67a50b39afd8210f3ca51a20c77ef1c92642800f91b50f16b27778f2b2
+ size 11111216
requirements.txt ADDED
@@ -0,0 +1,2 @@
+ git+https://github.com/katielink/MONAI.git@4042-download-hf-hub-bundle
+ nibabel