sjtans commited on
Commit
d7e453e
·
verified ·
1 Parent(s): 506daf6

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +23 -11
app.py CHANGED
@@ -566,6 +566,12 @@ with gr.Blocks(title = "Hello",
566
 
567
  send_btn = gr.Button("Run cell detection")
568
 
 
 
 
 
 
 
569
 
570
  #gr.Examples(sample_list, fn = update_with_example, inputs=input_image, outputs = [input_image, up_btn, output_image], examples_per_page=50, label = "Click on an example to try it")
571
  example_image = gr.Image(visible=False, type='filepath')
@@ -588,11 +594,7 @@ with gr.Blocks(title = "Hello",
588
  down_btn = gr.DownloadButton("Download distance map (.tif)", visible=True)
589
  down_btn2 = gr.DownloadButton("Download cell detections (.csv)", visible=True)
590
 
591
- sample_list = os.listdir("./gradio_examples/jpegs")
592
- #sample_list = [ ("./gradio_examples/jpegs/"+sample, [((5, 5, 10, 10), 'nothing')]) for sample in sample_list]
593
-
594
- print(sample_list)
595
- sample_list = [ "./gradio_examples/jpegs/"+sample for sample in sample_list]
596
  #sample_list = []
597
  #for j in range(23):
598
  # sample_list.append("samples/img%0.2d.png"%j)
@@ -603,16 +605,26 @@ with gr.Blocks(title = "Hello",
603
  #down_btn.click(download_function, None, [down_btn, down_btn2])
604
 
605
  gr.HTML("""<h4 style="color:white;"> Notes:<br>
606
- <li>you can load and process 3D, single-channel tifs (Z,Y,X).
607
- <li>withour GPU access, cell detection might take ~120 seconds.
608
- <li>locally OrganoidTracker wil run much faste even on CPU.
609
  </h4>""")
610
 
611
  gr.HTML("""<h4 style="color:white;"> Caveats:<br>
612
- <li>for this demo, an agressive background subtraction step is implemented before prediction, which we find benefits most usecases. For transperency, users have to preprocess the data themselves in OrganoidTracker 2.0.
613
- <li>withour GPU access, cell detection might take ~120 seconds.
614
- <li>locally OrganoidTracker wil run much faste even on CPU.
615
  </h4>""")
616
 
 
 
 
 
 
 
 
 
 
 
 
617
 
618
  demo.queue().launch()
 
566
 
567
  send_btn = gr.Button("Run cell detection")
568
 
569
+
570
+ sample_list = os.listdir("./gradio_examples/jpegs")
571
+ #sample_list = [ ("./gradio_examples/jpegs/"+sample, [((5, 5, 10, 10), 'nothing')]) for sample in sample_list]
572
+
573
+ print(sample_list)
574
+ sample_list = [ "./gradio_examples/jpegs/"+sample for sample in sample_list]
575
 
576
  #gr.Examples(sample_list, fn = update_with_example, inputs=input_image, outputs = [input_image, up_btn, output_image], examples_per_page=50, label = "Click on an example to try it")
577
  example_image = gr.Image(visible=False, type='filepath')
 
594
  down_btn = gr.DownloadButton("Download distance map (.tif)", visible=True)
595
  down_btn2 = gr.DownloadButton("Download cell detections (.csv)", visible=True)
596
 
597
+
 
 
 
 
598
  #sample_list = []
599
  #for j in range(23):
600
  # sample_list.append("samples/img%0.2d.png"%j)
 
605
  #down_btn.click(download_function, None, [down_btn, down_btn2])
606
 
607
  gr.HTML("""<h4 style="color:white;"> Notes:<br>
608
+ <li>You can load and process 3D tifs in the following dimensions: (T),Z,(C),Y,X. We automatically pick the first timepoint.
609
+ <li>Without GPU access, cell detection might take ~30 seconds.
610
+ <li>Locally OrganoidTracker will run faster: ~2 seconds per frame on a dedicated GPU, ~10 seconds on a CPU.
611
  </h4>""")
612
 
613
  gr.HTML("""<h4 style="color:white;"> Caveats:<br>
614
+ <li>For this demo, an aggressive background subtraction step is implemented before prediction, which we find benefits most use cases. For transparency, users have to preprocess the data themselves in OrganoidTracker 2.0.
615
+ <li>Because of incompatibilities between TensorFlow and HuggingFace, the models here are trained with the upcoming PyTorch version of OrganoidTracker (currently in beta). There might be performance differences when using the TensorFlow versions presented in our paper.
 
616
  </h4>""")
617
 
618
+ gr.HTML("""<h4 style="color:white;"> References:<br>
619
+ <li>The blastocyst sample data is from the BlastoSPIM dataset (Nunley et al., Development, 2024):
620
+ <a href="https://blastospim.flatironinstitute.org/html/index1.html" target="_blank">[website]</a>,
621
+ <a href="https://journals.biologists.com/dev/article/151/21/dev202817/362603/Nuclear-instance-segmentation-and-tracking-for" target="_blank">[paper]</a>
622
+
623
+ <li>The C. elegans sample data is from the Cell Tracking Challenge (Murray et al., Nature Methods, 2008):
624
+ <a href="https://celltrackingchallenge.net/3d-datasets/" target="_blank">[website]</a>,
625
+ <a href="https://www.nature.com/articles/nmeth.1228" target="_blank">[paper]</a>
626
+ </h4>""")
627
+
628
+
629
 
630
  demo.queue().launch()