mmrech committed on
Commit
b674bdd
Β·
verified Β·
1 Parent(s): 94edc4e

Upload app.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. app.py +453 -31
app.py CHANGED
@@ -1,16 +1,18 @@
1
  """
2
  Image Processing Studio + NPH Neuroimaging Analysis
3
- Unified Gradio app with filters, ML models, and NPH-specific segmentation.
 
4
  """
5
 
6
  import gradio as gr
7
  import numpy as np
8
- from PIL import Image, ImageFilter, ImageEnhance, ImageOps
9
  from transformers import pipeline
10
  import cv2
11
- import json
12
  import tempfile
13
  import os
 
 
14
 
15
  from segment_neuroimaging import (
16
  segment_nph, segment_ventricles, compute_evans_index,
@@ -22,21 +24,120 @@ from segment_neuroimaging import (
22
  CSFAppearance, COLORS
23
  )
24
 
 
 
 
25
  # ---- Load ML models (cached on first use) ----
26
  classifier = pipeline("image-classification", model="google/vit-base-patch16-224")
27
  detector = pipeline("object-detection", model="facebook/detr-resnet-50")
28
  segmenter = pipeline("image-segmentation", model="facebook/detr-resnet-50-panoptic")
29
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
30
 
31
  # ===========================================================================
32
- # Tab 1: NPH Neuroimaging Analysis (Enhanced)
33
  # ===========================================================================
34
 
35
  def analyze_nph(image, modality, sensitivity, overlay_alpha, pixel_spacing_str):
36
  if image is None:
37
  raise gr.Error("Please upload a brain MRI or CT image first.")
38
 
39
- # Save temp file for the pipeline
40
  with tempfile.NamedTemporaryFile(suffix=".png", delete=False) as f:
41
  Image.fromarray(image).save(f.name)
42
  temp_path = f.name
@@ -55,7 +156,6 @@ def analyze_nph(image, modality, sensitivity, overlay_alpha, pixel_spacing_str):
55
  mod = Modality[mod_key]
56
  is_coronal = "Coronal" in modality
57
 
58
- # Parse pixel spacing
59
  pixel_spacing = None
60
  if pixel_spacing_str and pixel_spacing_str.strip():
61
  try:
@@ -63,13 +163,11 @@ def analyze_nph(image, modality, sensitivity, overlay_alpha, pixel_spacing_str):
63
  except ValueError:
64
  pass
65
 
66
- # Load and preprocess
67
  img_rgb, gray, _ = preprocess_image(temp_path)
68
  h, w = gray.shape[:2]
69
  blurred = cv2.GaussianBlur(gray, (5, 5), 0)
70
  roi_mask = create_roi_mask(blurred, threshold=30)
71
 
72
- # Sensitivity-adjusted thresholds
73
  orig_thresh = dict(VENTRICLE_THRESHOLDS[mod])
74
  sens_adj = (sensitivity - 50) / 50.0
75
 
@@ -81,33 +179,26 @@ def analyze_nph(image, modality, sensitivity, overlay_alpha, pixel_spacing_str):
81
  custom_thresholds["csf_low"] = max(100, min(220,
82
  int(orig_thresh["csf_low"] - sens_adj * 30)))
83
 
84
- # Segment ventricles
85
  vent_mask = segment_ventricles(gray, mod, roi_mask, custom_thresholds=custom_thresholds)
86
 
87
- # Default pixel spacing estimate for non-DICOM
88
  if pixel_spacing is None:
89
  pixel_spacing = round(180.0 / max(w, 256), 2)
90
 
91
- # Compute all biomarkers
92
  ei_data = compute_evans_index(vent_mask, image_width=w, pixel_spacing_mm=pixel_spacing)
93
  th_data = compute_temporal_horn_width(vent_mask, pixel_spacing)
94
  tv_data = compute_third_ventricle_width(vent_mask, pixel_spacing)
95
  desh_data = assess_desh(vent_mask, gray, roi_mask, mod, pixel_spacing)
96
 
97
- # PVH (FLAIR only)
98
  pvh_data = None
99
  if mod == Modality.FLAIR:
100
  pvh_data = score_pvh(gray, vent_mask)
101
 
102
- # Callosal angle
103
  ca_data = compute_callosal_angle(vent_mask) if is_coronal else {}
104
 
105
- # Ventricle stats
106
  vent_area = int((vent_mask > 0).sum())
107
  brain_area = int((roi_mask > 0).sum())
108
  vent_brain_ratio = round(vent_area / brain_area, 4) if brain_area > 0 else 0
109
 
110
- # Build display masks
111
  display_masks = {"ventricles": vent_mask}
112
  parenchyma = cv2.bitwise_and(roi_mask, cv2.bitwise_not(vent_mask))
113
  display_masks["parenchyma"] = parenchyma
@@ -119,7 +210,6 @@ def analyze_nph(image, modality, sensitivity, overlay_alpha, pixel_spacing_str):
119
  if "convexity_mask" in desh_data:
120
  display_masks["high_convexity_sulci"] = desh_data["convexity_mask"]
121
 
122
- # Visualization
123
  overlay = create_overlay(img_rgb, display_masks, alpha=overlay_alpha)
124
 
125
  biomarkers_for_annotation = dict(ei_data)
@@ -136,7 +226,6 @@ def analyze_nph(image, modality, sensitivity, overlay_alpha, pixel_spacing_str):
136
  biomarkers_for_annotation,
137
  )
138
 
139
- # Draw Evans' index measurement line
140
  row = ei_data.get("measurement_row", 0)
141
  if row > 0:
142
  cols = np.where(vent_mask[row, :] > 0)[0]
@@ -151,7 +240,6 @@ def analyze_nph(image, modality, sensitivity, overlay_alpha, pixel_spacing_str):
151
 
152
  comparison = create_comparison(img_rgb, annotated, f"{modality} -- NPH Analysis")
153
 
154
- # Build report
155
  report_lines = ["## NPH Biomarker Report\n"]
156
 
157
  ei = ei_data.get("evans_index", 0)
@@ -207,7 +295,241 @@ def analyze_nph(image, modality, sensitivity, overlay_alpha, pixel_spacing_str):
207
 
208
 
209
  # ===========================================================================
210
- # Tab 2: Filters & Effects
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
211
  # ===========================================================================
212
 
213
  def apply_filter(image, effect, intensity):
@@ -252,10 +574,6 @@ def apply_filter(image, effect, intensity):
252
  return np.array(filtered)
253
 
254
 
255
- # ===========================================================================
256
- # Tab 3-5: ML Models
257
- # ===========================================================================
258
-
259
  def classify_image(image):
260
  if image is None:
261
  raise gr.Error("Please upload an image first.")
@@ -299,11 +617,12 @@ css = """
299
  with gr.Blocks(theme=gr.themes.Soft(), css=css) as demo:
300
  gr.Markdown("# Image Processing Studio", elem_classes="main-title")
301
  gr.Markdown(
302
- "Filters, classification, object detection, panoptic segmentation, and **NPH neuroimaging analysis** -- all in one place.",
 
303
  elem_classes="subtitle"
304
  )
305
 
306
- # ── NPH Analysis Tab ──
307
  with gr.Tab("NPH Analysis"):
308
  gr.Markdown(
309
  "### Normal Pressure Hydrocephalus -- Segmentation & Biomarkers\n"
@@ -368,7 +687,110 @@ with gr.Blocks(theme=gr.themes.Soft(), css=css) as demo:
368
  "For clinical use, provide pixel spacing from the DICOM header.*"
369
  )
370
 
371
- # ── Client-Side NPH Detector Tab ──
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
372
  with gr.Tab("NPH Detector (Browser)"):
373
  gr.Markdown(
374
  "### Client-Side NPH Detector\n"
@@ -383,7 +805,7 @@ with gr.Blocks(theme=gr.themes.Soft(), css=css) as demo:
383
  'style="border-radius: 12px; border: 1px solid #333;"></iframe>',
384
  )
385
 
386
- # ── Video Demo Tab ──
387
  with gr.Tab("Video Demo"):
388
  gr.Markdown(
389
  "### Whole-Brain Segmentation Demo\n"
@@ -395,7 +817,7 @@ with gr.Blocks(theme=gr.themes.Soft(), css=css) as demo:
395
  autoplay=False,
396
  )
397
 
398
- # ── Filters Tab ──
399
  with gr.Tab("Filters & Effects"):
400
  with gr.Row():
401
  with gr.Column():
@@ -411,7 +833,7 @@ with gr.Blocks(theme=gr.themes.Soft(), css=css) as demo:
411
  filter_output = gr.Image(label="Result", type="numpy")
412
  filter_btn.click(fn=apply_filter, inputs=[filter_input, filter_effect, filter_intensity], outputs=filter_output)
413
 
414
- # ── Classification Tab ──
415
  with gr.Tab("Image Classification"):
416
  with gr.Row():
417
  with gr.Column():
@@ -421,7 +843,7 @@ with gr.Blocks(theme=gr.themes.Soft(), css=css) as demo:
421
  cls_output = gr.Label(label="Predictions", num_top_classes=5)
422
  cls_btn.click(fn=classify_image, inputs=cls_input, outputs=cls_output)
423
 
424
- # ── Object Detection Tab ──
425
  with gr.Tab("Object Detection"):
426
  with gr.Row():
427
  with gr.Column():
@@ -432,7 +854,7 @@ with gr.Blocks(theme=gr.themes.Soft(), css=css) as demo:
432
  det_output = gr.AnnotatedImage(label="Detections")
433
  det_btn.click(fn=detect_objects, inputs=[det_input, det_threshold], outputs=det_output)
434
 
435
- # ── Segmentation Tab ──
436
  with gr.Tab("Segmentation"):
437
  with gr.Row():
438
  with gr.Column():
 
1
  """
2
  Image Processing Studio + NPH Neuroimaging Analysis
3
+ Unified Gradio app with filters, ML models, YOLO NPH detection, clinical scoring,
4
+ and intensity-based NPH segmentation.
5
  """
6
 
7
  import gradio as gr
8
  import numpy as np
9
+ from PIL import Image, ImageFilter, ImageEnhance, ImageOps, ImageDraw, ImageFont
10
  from transformers import pipeline
11
  import cv2
 
12
  import tempfile
13
  import os
14
+ import threading
15
+ import logging
16
 
17
  from segment_neuroimaging import (
18
  segment_nph, segment_ventricles, compute_evans_index,
 
24
  CSFAppearance, COLORS
25
  )
26
 
27
+ logging.basicConfig(level=logging.INFO)
28
+ logger = logging.getLogger(__name__)
29
+
30
  # ---- Load ML models (cached on first use) ----
31
  classifier = pipeline("image-classification", model="google/vit-base-patch16-224")
32
  detector = pipeline("object-detection", model="facebook/detr-resnet-50")
33
  segmenter = pipeline("image-segmentation", model="facebook/detr-resnet-50-panoptic")
34
 
35
+ # ---- YOLO model for NPH structure detection ----
36
+ YOLO_MODEL_PATH = "best.pt"
37
+ _yolo_model = None
38
+ _yolo_lock = threading.Lock()
39
+
40
+ # Class-specific colors for YOLO detections (BGR for OpenCV, RGB for display)
41
+ YOLO_COLORS = {
42
+ "ventricle": (0, 150, 255), # bright blue
43
+ "sylvian_fissure": (200, 100, 255), # purple
44
+ "tight_convexity": (255, 150, 100), # orange
45
+ "pvh": (255, 200, 0), # yellow
46
+ "skull_inner": (200, 200, 200), # gray
47
+ }
48
+
49
+
50
def _get_yolo_model():
    """Return the cached YOLOv8 detector, loading it on first use.

    Thread-safe lazy initialization: the weights at ``YOLO_MODEL_PATH`` are
    loaded at most once. Returns ``None`` when the weight file is absent or
    loading fails, so callers must handle a missing model.
    """
    global _yolo_model

    # Fast path: already loaded, no locking needed.
    if _yolo_model is not None:
        return _yolo_model

    with _yolo_lock:
        # Re-check under the lock: another thread may have loaded it already.
        if _yolo_model is None and os.path.exists(YOLO_MODEL_PATH):
            try:
                from ultralytics import YOLO
                _yolo_model = YOLO(YOLO_MODEL_PATH)
                logger.info("YOLO model loaded from %s", YOLO_MODEL_PATH)
            except Exception as e:
                logger.error("Failed to load YOLO model: %s", e)
    return _yolo_model
63
+
64
+
65
+ def _compute_nph_score(data: dict) -> dict:
66
+ """
67
+ Compute NPH probability score from structured metrics.
68
+ Weighted formula: VSR(40%) + Evans Index(25%) + Callosal Angle(20%) + DESH(10%) + Sylvian(5%)
69
+ With triad bonus (+15%) and cortical atrophy penalty (-30%).
70
+ """
71
+ score = 0.0
72
+ evans = data.get("evansIndex") or 0.0
73
+ callosal = data.get("callosalAngle")
74
+ desh = data.get("deshScore") or 0
75
+ sylvian = bool(data.get("sylvianDilation"))
76
+ vsr = data.get("vsr")
77
+ triad = data.get("triad") or []
78
+ atrophy = data.get("corticalAtrophy") or "unknown"
79
+
80
+ has_vsr = vsr is not None
81
+ has_callosal = callosal is not None
82
+
83
+ if has_vsr:
84
+ if vsr > 2.0:
85
+ score += 40 * min((vsr - 2.0) / 2.0, 1)
86
+ if evans > 0.3:
87
+ score += 25 * min((evans - 0.3) / 0.15, 1)
88
+ if has_callosal and callosal < 90:
89
+ score += 20 * min((90 - callosal) / 50, 1)
90
+ score += (desh / 3) * 10
91
+ if sylvian:
92
+ score += 5
93
+ else:
94
+ # Redistribute VSR weight across remaining criteria
95
+ scale = 100 / 60
96
+ if evans > 0.3:
97
+ score += 25 * scale * min((evans - 0.3) / 0.15, 1)
98
+ if has_callosal and callosal < 90:
99
+ score += 20 * scale * min((90 - callosal) / 50, 1)
100
+ score += (desh / 3) * 10 * scale
101
+ if sylvian:
102
+ score += 5 * scale
103
+
104
+ triad_count = sum(1 for v in triad if v)
105
+ if triad_count == 3:
106
+ score = min(score * 1.15, 100)
107
+ elif triad_count == 2:
108
+ score = min(score * 1.05, 100)
109
+
110
+ if atrophy == "significant":
111
+ score *= 0.7
112
+ elif atrophy == "moderate":
113
+ score *= 0.85
114
+
115
+ score = int(round(min(score, 100)))
116
+
117
+ if score >= 75:
118
+ label = "Probable NPH"
119
+ recommendation = "Strongly consider CSF tap test and neurosurgical referral for VP shunt evaluation."
120
+ elif score >= 50:
121
+ label = "Possible NPH"
122
+ recommendation = "CSF tap test recommended. Consider supplementary MRI for DESH confirmation."
123
+ elif score >= 30:
124
+ label = "Low Suspicion"
125
+ recommendation = "NPH less likely. Consider alternative diagnoses. Follow-up imaging in 6 months if clinical concern persists."
126
+ else:
127
+ label = "Unlikely NPH"
128
+ recommendation = "Ventriculomegaly likely ex-vacuo or other etiology. Investigate alternative causes of symptoms."
129
+
130
+ return {"score": score, "label": label, "recommendation": recommendation}
131
+
132
 
133
  # ===========================================================================
134
+ # Tab 1: NPH Neuroimaging Analysis (Intensity-based Segmentation)
135
  # ===========================================================================
136
 
137
  def analyze_nph(image, modality, sensitivity, overlay_alpha, pixel_spacing_str):
138
  if image is None:
139
  raise gr.Error("Please upload a brain MRI or CT image first.")
140
 
 
141
  with tempfile.NamedTemporaryFile(suffix=".png", delete=False) as f:
142
  Image.fromarray(image).save(f.name)
143
  temp_path = f.name
 
156
  mod = Modality[mod_key]
157
  is_coronal = "Coronal" in modality
158
 
 
159
  pixel_spacing = None
160
  if pixel_spacing_str and pixel_spacing_str.strip():
161
  try:
 
163
  except ValueError:
164
  pass
165
 
 
166
  img_rgb, gray, _ = preprocess_image(temp_path)
167
  h, w = gray.shape[:2]
168
  blurred = cv2.GaussianBlur(gray, (5, 5), 0)
169
  roi_mask = create_roi_mask(blurred, threshold=30)
170
 
 
171
  orig_thresh = dict(VENTRICLE_THRESHOLDS[mod])
172
  sens_adj = (sensitivity - 50) / 50.0
173
 
 
179
  custom_thresholds["csf_low"] = max(100, min(220,
180
  int(orig_thresh["csf_low"] - sens_adj * 30)))
181
 
 
182
  vent_mask = segment_ventricles(gray, mod, roi_mask, custom_thresholds=custom_thresholds)
183
 
 
184
  if pixel_spacing is None:
185
  pixel_spacing = round(180.0 / max(w, 256), 2)
186
 
 
187
  ei_data = compute_evans_index(vent_mask, image_width=w, pixel_spacing_mm=pixel_spacing)
188
  th_data = compute_temporal_horn_width(vent_mask, pixel_spacing)
189
  tv_data = compute_third_ventricle_width(vent_mask, pixel_spacing)
190
  desh_data = assess_desh(vent_mask, gray, roi_mask, mod, pixel_spacing)
191
 
 
192
  pvh_data = None
193
  if mod == Modality.FLAIR:
194
  pvh_data = score_pvh(gray, vent_mask)
195
 
 
196
  ca_data = compute_callosal_angle(vent_mask) if is_coronal else {}
197
 
 
198
  vent_area = int((vent_mask > 0).sum())
199
  brain_area = int((roi_mask > 0).sum())
200
  vent_brain_ratio = round(vent_area / brain_area, 4) if brain_area > 0 else 0
201
 
 
202
  display_masks = {"ventricles": vent_mask}
203
  parenchyma = cv2.bitwise_and(roi_mask, cv2.bitwise_not(vent_mask))
204
  display_masks["parenchyma"] = parenchyma
 
210
  if "convexity_mask" in desh_data:
211
  display_masks["high_convexity_sulci"] = desh_data["convexity_mask"]
212
 
 
213
  overlay = create_overlay(img_rgb, display_masks, alpha=overlay_alpha)
214
 
215
  biomarkers_for_annotation = dict(ei_data)
 
226
  biomarkers_for_annotation,
227
  )
228
 
 
229
  row = ei_data.get("measurement_row", 0)
230
  if row > 0:
231
  cols = np.where(vent_mask[row, :] > 0)[0]
 
240
 
241
  comparison = create_comparison(img_rgb, annotated, f"{modality} -- NPH Analysis")
242
 
 
243
  report_lines = ["## NPH Biomarker Report\n"]
244
 
245
  ei = ei_data.get("evans_index", 0)
 
295
 
296
 
297
  # ===========================================================================
298
+ # Tab 2: YOLO NPH Detection
299
+ # ===========================================================================
300
+
301
def yolo_detect_nph(image, conf_threshold):
    """Run the YOLO model on a brain scan to detect NPH structures.

    Args:
        image: Input scan as a numpy array (H x W [x C]), as supplied by the
            Gradio Image component.
        conf_threshold: Minimum detection confidence (0-1); boxes below this
            are discarded.

    Returns:
        A 3-tuple for the Gradio outputs:
        - (image, annotations) for gr.AnnotatedImage, where annotations is a
          list of ((x1, y1, x2, y2), label) pairs;
        - the image with boxes and labels drawn on it (numpy array);
        - a markdown report with per-box details, derived metrics, and the
          clinical NPH score.

    Raises:
        gr.Error: if no image was provided or the YOLO model is unavailable.
    """
    if image is None:
        raise gr.Error("Please upload a brain CT or MRI image first.")

    model = _get_yolo_model()
    if model is None:
        raise gr.Error(
            "YOLO model (best.pt) not available. "
            "Make sure the model file is in the Space repository."
        )

    # The model is invoked on a file path, so write the array to a temp PNG.
    # delete=False because the file must survive past the `with` block;
    # cleanup happens in the `finally` below.
    with tempfile.NamedTemporaryFile(suffix=".png", delete=False) as f:
        Image.fromarray(image).save(f.name)
        temp_path = f.name

    try:
        # Single-image inference; [0] takes the sole Results object.
        results = model(temp_path, verbose=False)[0]

        h, w = image.shape[:2]
        annotated_img = image.copy()
        boxes_data = []

        for box in results.boxes:
            conf = float(box.conf[0])
            if conf < conf_threshold:
                continue
            x1, y1, x2, y2 = [int(round(v)) for v in box.xyxy[0].tolist()]
            cls_id = int(box.cls[0])
            cls_name = model.names.get(cls_id, str(cls_id))
            # Unknown classes fall back to white.
            color = YOLO_COLORS.get(cls_name, (255, 255, 255))

            boxes_data.append({
                "class": cls_name,
                "x1": x1, "y1": y1, "x2": x2, "y2": y2,
                "confidence": round(conf, 4),
            })

            # Draw bounding box
            cv2.rectangle(annotated_img, (x1, y1), (x2, y2), color, 2)

            # Label background (filled bar above the box, then black text)
            label = f"{cls_name} {conf:.0%}"
            (lw, lh), _ = cv2.getTextSize(label, cv2.FONT_HERSHEY_SIMPLEX, 0.5, 1)
            cv2.rectangle(annotated_img, (x1, y1 - lh - 8), (x1 + lw + 4, y1), color, -1)
            cv2.putText(annotated_img, label, (x1 + 2, y1 - 4),
                        cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 0), 1, cv2.LINE_AA)

        # Compute metrics (Evans' index, DESH count, etc.) from detected boxes
        metrics = _derive_yolo_metrics(boxes_data, w, h)

        # Build Gradio annotations for the AnnotatedImage output
        annotations = []
        for b in boxes_data:
            annotations.append((
                (b["x1"], b["y1"], b["x2"], b["y2"]),
                f"{b['class']} ({b['confidence']:.0%})"
            ))

        # Build markdown report
        report_lines = ["## YOLO NPH Detection Report\n"]
        report_lines.append(f"**Detections:** {len(boxes_data)} structures found at {conf_threshold:.0%} confidence\n")

        for b in boxes_data:
            bw = b["x2"] - b["x1"]
            bh = b["y2"] - b["y1"]
            report_lines.append(f"- **{b['class']}**: {b['confidence']:.1%} confidence, {bw}x{bh} px at ({b['x1']},{b['y1']})")

        report_lines.append(f"\n### Derived Metrics")
        ei = metrics.get("evans_index", 0)
        ei_status = "ABNORMAL (>0.3)" if ei > 0.3 else "Normal"
        report_lines.append(f"**Evans' Index:** {ei:.3f} -- {ei_status}")
        report_lines.append(f"**DESH Score:** {metrics.get('desh_score', 0)}/3")
        report_lines.append(f"**Sylvian Dilation:** {'Yes' if metrics.get('sylvian_dilation') else 'No'}")
        report_lines.append(f"**PVH Detected:** {'Yes' if metrics.get('periventricular_changes') else 'No'}")

        prob = metrics.get("nph_probability", 0)
        report_lines.append(f"**NPH Probability:** {prob:.0%}")

        # Auto-compute the clinical NPH score from YOLO metrics.
        # Triad is empty here: clinical symptoms are unknown from imaging alone.
        score_input = {
            "evansIndex": metrics["evans_index"],
            "callosalAngle": metrics.get("callosal_angle"),
            "deshScore": metrics.get("desh_score", 0),
            "sylvianDilation": metrics.get("sylvian_dilation", False),
            "vsr": metrics.get("vsr"),
            "triad": [],
            "corticalAtrophy": metrics.get("cortical_atrophy", "unknown"),
        }
        nph_score = _compute_nph_score(score_input)

        report_lines.append(f"\n### Clinical NPH Score: **{nph_score['score']}/100** -- {nph_score['label']}")
        report_lines.append(f"*{nph_score['recommendation']}*")

        report = "\n".join(report_lines)

        return (image, annotations), annotated_img, report

    finally:
        # Always remove the temp PNG, even if inference raised.
        os.unlink(temp_path)
401
+
402
+
403
+ def _derive_yolo_metrics(boxes, image_width, image_height):
404
+ """Compute NPH metrics from YOLO detection boxes."""
405
+ ventricle = next((b for b in boxes if b["class"] == "ventricle"), None)
406
+ skull = next((b for b in boxes if b["class"] == "skull_inner"), None)
407
+
408
+ if ventricle and skull:
409
+ vent_w = ventricle["x2"] - ventricle["x1"]
410
+ skull_w = skull["x2"] - skull["x1"]
411
+ evans_index = round(vent_w / skull_w, 4) if skull_w > 0 else 0.0
412
+ elif ventricle:
413
+ vent_w = ventricle["x2"] - ventricle["x1"]
414
+ evans_index = round(vent_w / image_width, 4) if image_width > 0 else 0.0
415
+ else:
416
+ evans_index = 0.0
417
+
418
+ desh_classes = {"tight_convexity", "sylvian_fissure", "pvh"}
419
+ detected_desh = {b["class"] for b in boxes if b["class"] in desh_classes}
420
+ desh_score = len(detected_desh)
421
+
422
+ sylvian_dilation = any(b["class"] == "sylvian_fissure" for b in boxes)
423
+ periventricular_changes = any(b["class"] == "pvh" for b in boxes)
424
+
425
+ return {
426
+ "evans_index": evans_index,
427
+ "callosal_angle": None,
428
+ "desh_score": desh_score,
429
+ "sylvian_dilation": sylvian_dilation,
430
+ "vsr": None,
431
+ "periventricular_changes": periventricular_changes,
432
+ "cortical_atrophy": "unknown",
433
+ "nph_probability": round(sum([evans_index > 0.3, desh_score >= 2, sylvian_dilation]) / 3, 4),
434
+ }
435
+
436
+
437
+ # ===========================================================================
438
+ # Tab 3: NPH Clinical Scoring Calculator
439
+ # ===========================================================================
440
+
441
def compute_clinical_score(
    evans_index, callosal_angle_str, desh_score,
    sylvian_dilation, vsr_str,
    gait, cognition, urinary,
    cortical_atrophy
):
    """Build a markdown NPH score report from manually entered findings.

    Optional textbox values (callosal angle, VSR) are parsed leniently:
    blank or non-numeric input is treated as "not measured". The weighted
    score itself is delegated to _compute_nph_score; this function formats
    the inputs, the weight table, and any bonus/penalty notes.
    """
    def _parse_optional_float(raw):
        # Blank or unparseable input means the measurement was not provided.
        if raw and raw.strip():
            try:
                return float(raw.strip())
            except ValueError:
                pass
        return None

    callosal = _parse_optional_float(callosal_angle_str)
    vsr = _parse_optional_float(vsr_str)
    triad = [gait, cognition, urinary]
    triad_count = sum(triad)
    desh = int(desh_score)

    atrophy_key = {
        "None/Mild": "none",
        "Moderate": "moderate",
        "Significant": "significant",
    }.get(cortical_atrophy, "unknown")

    result = _compute_nph_score({
        "evansIndex": evans_index,
        "callosalAngle": callosal,
        "deshScore": desh,
        "sylvianDilation": sylvian_dilation,
        "vsr": vsr,
        "triad": triad,
        "corticalAtrophy": atrophy_key,
    })

    # ---- Headline + input summary ----
    out = [
        f"# NPH Score: {result['score']}/100",
        f"## {result['label']}\n",
        f"{result['recommendation']}\n",
        "---\n### Input Summary\n",
    ]

    ei_note = " (>0.3 = abnormal)" if evans_index > 0.3 else ""
    out.append(f"- **Evans' Index:** {evans_index:.3f}" + ei_note)
    if callosal is None:
        out.append("- **Callosal Angle:** Not provided")
    else:
        ca_note = " (<90 = suggestive)" if callosal < 90 else ""
        out.append(f"- **Callosal Angle:** {callosal:.1f} deg" + ca_note)
    out.append(f"- **DESH Score:** {desh}/3")
    out.append(f"- **Sylvian Dilation:** {'Yes' if sylvian_dilation else 'No'}")
    if vsr is None:
        out.append("- **VSR:** Not available")
    else:
        vsr_note = " (>2.0 = strong NPH indicator)" if vsr > 2.0 else ""
        out.append(f"- **VSR:** {vsr:.2f}" + vsr_note)
    out.append(
        f"- **Hakim Triad:** {triad_count}/3 (Gait: {'Yes' if gait else 'No'}, "
        f"Cognition: {'Yes' if cognition else 'No'}, Urinary: {'Yes' if urinary else 'No'})"
    )
    out.append(f"- **Cortical Atrophy:** {cortical_atrophy}")

    # ---- Weight table (adjusted when VSR is missing) ----
    out.append("\n---\n### Scoring Weights\n")
    ei_status = 'Contributing' if evans_index > 0.3 else 'Not met'
    ca_status = 'Contributing' if callosal and callosal < 90 else 'N/A' if callosal is None else 'Not met'
    syl_status = 'Contributing' if sylvian_dilation else 'Not met'
    if vsr is not None:
        out.extend([
            "| Component | Weight | Status |",
            "|---|---|---|",
            f"| VSR | 40% | {'Contributing' if vsr and vsr > 2.0 else 'Not met'} |",
            f"| Evans Index | 25% | {ei_status} |",
            f"| Callosal Angle | 20% | {ca_status} |",
            f"| DESH Pattern | 10% | {desh}/3 |",
            f"| Sylvian Fissure | 5% | {syl_status} |",
        ])
    else:
        out.extend([
            "*VSR not available -- weights redistributed across remaining criteria.*\n",
            "| Component | Weight (adjusted) | Status |",
            "|---|---|---|",
            f"| Evans Index | 41.7% | {ei_status} |",
            f"| Callosal Angle | 33.3% | {ca_status} |",
            f"| DESH Pattern | 16.7% | {desh}/3 |",
            f"| Sylvian Fissure | 8.3% | {syl_status} |",
        ])

    # ---- Bonus / penalty footnotes ----
    if triad_count >= 2:
        bonus = 15 if triad_count == 3 else 5
        coverage = 'complete' if triad_count == 3 else 'partial'
        out.append(f"\n**Triad Bonus:** +{bonus}% (Hakim triad {coverage})")
    if cortical_atrophy in ("Moderate", "Significant"):
        penalty = 30 if cortical_atrophy == "Significant" else 15
        out.append(f"\n**Atrophy Penalty:** -{penalty}% (suggests ex-vacuo component)")

    return "\n".join(out)
529
+
530
+
531
+ # ===========================================================================
532
+ # Tabs 4-8: Filters & ML Models
533
  # ===========================================================================
534
 
535
  def apply_filter(image, effect, intensity):
 
574
  return np.array(filtered)
575
 
576
 
 
 
 
 
577
  def classify_image(image):
578
  if image is None:
579
  raise gr.Error("Please upload an image first.")
 
617
  with gr.Blocks(theme=gr.themes.Soft(), css=css) as demo:
618
  gr.Markdown("# Image Processing Studio", elem_classes="main-title")
619
  gr.Markdown(
620
+ "Filters, classification, object detection, panoptic segmentation, **YOLO NPH detection**, "
621
+ "clinical NPH scoring, and **intensity-based NPH analysis** -- all in one place.",
622
  elem_classes="subtitle"
623
  )
624
 
625
+ # ── Tab 1: NPH Analysis (Intensity-based) ──
626
  with gr.Tab("NPH Analysis"):
627
  gr.Markdown(
628
  "### Normal Pressure Hydrocephalus -- Segmentation & Biomarkers\n"
 
687
  "For clinical use, provide pixel spacing from the DICOM header.*"
688
  )
689
 
690
+ # ── Tab 2: YOLO NPH Detection ──
691
+ with gr.Tab("YOLO NPH Detection"):
692
+ gr.Markdown(
693
+ "### Deep Learning NPH Structure Detection\n"
694
+ "Uses a trained YOLOv8 model to detect NPH-related structures on brain CT/MRI slices: "
695
+ "**ventricle**, **sylvian fissure**, **tight convexity**, **PVH**, and **skull inner boundary**.\n\n"
696
+ "The model outputs bounding boxes with confidence scores, computes Evans' Index from "
697
+ "detected structures, and generates an overall NPH clinical score."
698
+ )
699
+ with gr.Row():
700
+ with gr.Column(scale=1):
701
+ yolo_input = gr.Image(label="Upload Brain Scan", type="numpy")
702
+ yolo_conf = gr.Slider(
703
+ minimum=0.1, maximum=0.95, value=0.25, step=0.05,
704
+ label="Confidence Threshold"
705
+ )
706
+ yolo_btn = gr.Button("Detect NPH Structures", variant="primary", size="lg")
707
+
708
+ with gr.Column(scale=2):
709
+ yolo_annotated = gr.AnnotatedImage(label="Detected Structures")
710
+ yolo_overlay = gr.Image(label="Annotated Image", type="numpy")
711
+
712
+ yolo_report = gr.Markdown(label="YOLO Detection Report")
713
+
714
+ yolo_btn.click(
715
+ fn=yolo_detect_nph,
716
+ inputs=[yolo_input, yolo_conf],
717
+ outputs=[yolo_annotated, yolo_overlay, yolo_report]
718
+ )
719
+
720
+ with gr.Accordion("YOLO Model Details", open=False):
721
+ gr.Markdown(
722
+ "**Model:** YOLOv8 fine-tuned on NPH brain CT/MRI dataset\n\n"
723
+ "**Detected Classes:**\n\n"
724
+ "| Class | Description | Color |\n"
725
+ "|---|---|---|\n"
726
+ "| ventricle | Lateral ventricles | Blue |\n"
727
+ "| sylvian_fissure | Sylvian fissures (bilateral) | Purple |\n"
728
+ "| tight_convexity | Tight high-convexity sulci | Orange |\n"
729
+ "| pvh | Periventricular hyperintensities | Yellow |\n"
730
+ "| skull_inner | Inner skull boundary | Gray |\n\n"
731
+ "**Evans' Index** is computed from the ventricle and skull inner boundary boxes. "
732
+ "If no skull boundary is detected, the image width is used as fallback.\n\n"
733
+ "**NPH Score** is computed using the weighted formula: "
734
+ "VSR (40%) + Evans Index (25%) + Callosal Angle (20%) + DESH (10%) + Sylvian (5%), "
735
+ "with bonuses for Hakim triad and penalties for cortical atrophy."
736
+ )
737
+
738
+ # ── Tab 3: NPH Clinical Scoring Calculator ──
739
+ with gr.Tab("NPH Score Calculator"):
740
+ gr.Markdown(
741
+ "### Clinical NPH Scoring Calculator\n"
742
+ "Enter imaging biomarkers and clinical findings to compute a weighted NPH probability score.\n\n"
743
+ "This calculator uses the same scoring formula as the YOLO detection tab but lets you "
744
+ "input values manually -- useful for combining measurements from different imaging studies."
745
+ )
746
+ with gr.Row():
747
+ with gr.Column():
748
+ gr.Markdown("#### Imaging Biomarkers")
749
+ calc_evans = gr.Slider(
750
+ minimum=0.0, maximum=0.6, value=0.30, step=0.01,
751
+ label="Evans' Index"
752
+ )
753
+ calc_callosal = gr.Textbox(
754
+ label="Callosal Angle (degrees)",
755
+ placeholder="e.g. 85 (leave blank if not measured)",
756
+ value=""
757
+ )
758
+ calc_desh = gr.Slider(
759
+ minimum=0, maximum=3, value=0, step=1,
760
+ label="DESH Score (0-3)"
761
+ )
762
+ calc_sylvian = gr.Checkbox(label="Sylvian Fissure Dilation", value=False)
763
+ calc_vsr = gr.Textbox(
764
+ label="VSR (Ventricle-to-SAS Ratio)",
765
+ placeholder="e.g. 2.5 (leave blank if not measured)",
766
+ value=""
767
+ )
768
+
769
+ with gr.Column():
770
+ gr.Markdown("#### Clinical Findings (Hakim Triad)")
771
+ calc_gait = gr.Checkbox(label="Gait disturbance", value=False)
772
+ calc_cognition = gr.Checkbox(label="Cognitive impairment", value=False)
773
+ calc_urinary = gr.Checkbox(label="Urinary incontinence", value=False)
774
+
775
+ gr.Markdown("#### Modifiers")
776
+ calc_atrophy = gr.Radio(
777
+ choices=["None/Mild", "Moderate", "Significant"],
778
+ value="None/Mild",
779
+ label="Cortical Atrophy"
780
+ )
781
+
782
+ calc_btn = gr.Button("Calculate NPH Score", variant="primary", size="lg")
783
+
784
+ calc_report = gr.Markdown(label="NPH Score Report")
785
+
786
+ calc_btn.click(
787
+ fn=compute_clinical_score,
788
+ inputs=[calc_evans, calc_callosal, calc_desh, calc_sylvian, calc_vsr,
789
+ calc_gait, calc_cognition, calc_urinary, calc_atrophy],
790
+ outputs=calc_report
791
+ )
792
+
793
+ # ── Tab 4: Client-Side NPH Detector ──
794
  with gr.Tab("NPH Detector (Browser)"):
795
  gr.Markdown(
796
  "### Client-Side NPH Detector\n"
 
805
  'style="border-radius: 12px; border: 1px solid #333;"></iframe>',
806
  )
807
 
808
+ # ── Tab 5: Video Demo ──
809
  with gr.Tab("Video Demo"):
810
  gr.Markdown(
811
  "### Whole-Brain Segmentation Demo\n"
 
817
  autoplay=False,
818
  )
819
 
820
+ # ── Tab 6: Filters ──
821
  with gr.Tab("Filters & Effects"):
822
  with gr.Row():
823
  with gr.Column():
 
833
  filter_output = gr.Image(label="Result", type="numpy")
834
  filter_btn.click(fn=apply_filter, inputs=[filter_input, filter_effect, filter_intensity], outputs=filter_output)
835
 
836
+ # ── Tab 7: Classification ──
837
  with gr.Tab("Image Classification"):
838
  with gr.Row():
839
  with gr.Column():
 
843
  cls_output = gr.Label(label="Predictions", num_top_classes=5)
844
  cls_btn.click(fn=classify_image, inputs=cls_input, outputs=cls_output)
845
 
846
+ # ── Tab 8: Object Detection ──
847
  with gr.Tab("Object Detection"):
848
  with gr.Row():
849
  with gr.Column():
 
854
  det_output = gr.AnnotatedImage(label="Detections")
855
  det_btn.click(fn=detect_objects, inputs=[det_input, det_threshold], outputs=det_output)
856
 
857
+ # ── Tab 9: Segmentation ──
858
  with gr.Tab("Segmentation"):
859
  with gr.Row():
860
  with gr.Column():