Abubakar Abid committed
Commit 96dcbb5
1 Parent(s): b9e6b57

Update app.py

Files changed (1)
app.py (+5 -4)
app.py CHANGED
@@ -82,9 +82,10 @@ i = gr.inputs.Image(shape=(112, 112))
 o = gr.outputs.Image()
 
 examples = [["img1.jpg"], ["img2.jpg"]]
-title = "Left Ventricle Segmentation"
-description = "This semantic segmentation model identifies the left ventricle in echocardiogram videos. Accurate evaluation of the motion and size of the left ventricle is crucial for the assessment of cardiac function and ejection fraction. In this interface, the user inputs apical-4-chamber images from echocardiography videos and the model will output a prediction of the localization of the left ventricle in blue. This model was trained on the publicly released EchoNet-Dynamic dataset of 10k echocardiogram videos with 20k expert annotations of the left ventricle and published as part of ‘Video-based AI for beat-to-beat assessment of cardiac function’ by Ouyang et al. in Nature, 2020."
+title = None #"Left Ventricle Segmentation"
+description = "This semantic segmentation model identifies the left ventricle in echocardiogram images."
+# videos. Accurate evaluation of the motion and size of the left ventricle is crucial for the assessment of cardiac function and ejection fraction. In this interface, the user inputs apical-4-chamber images from echocardiography videos and the model will output a prediction of the localization of the left ventricle in blue. This model was trained on the publicly released EchoNet-Dynamic dataset of 10k echocardiogram videos with 20k expert annotations of the left ventricle and published as part of ‘Video-based AI for beat-to-beat assessment of cardiac function’ by Ouyang et al. in Nature, 2020."
 thumbnail = "https://raw.githubusercontent.com/gradio-app/hub-echonet/master/thumbnail.png"
-
-gr.Interface(segment, i, o, examples=examples, allow_flagging=False, analytics_enabled=False,
+css = ".footer {display:none !important}"
+gr.Interface(segment, i, o, examples=examples, css=css, allow_flagging=False, analytics_enabled=False,
 title=title, description=description, thumbnail=thumbnail).launch()
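For reference, below is a minimal sketch of how app.py reads after this commit. It assumes an older Gradio release in which the gr.inputs / gr.outputs namespaces and the thumbnail argument are still available (they are deprecated in Gradio 3+), and the segment() body is a stand-in, not the Space's actual EchoNet inference code.

# Sketch of app.py after this commit; segment() is a placeholder.
import gradio as gr

def segment(image):
    # Placeholder: return the input unchanged. The real app overlays the
    # predicted left-ventricle mask on the echocardiogram frame here.
    return image

i = gr.inputs.Image(shape=(112, 112))
o = gr.outputs.Image()

# img1.jpg and img2.jpg are expected to exist in the Space's repository.
examples = [["img1.jpg"], ["img2.jpg"]]
title = None  # "Left Ventricle Segmentation"
description = "This semantic segmentation model identifies the left ventricle in echocardiogram images."
thumbnail = "https://raw.githubusercontent.com/gradio-app/hub-echonet/master/thumbnail.png"
css = ".footer {display:none !important}"  # intended to hide the Gradio footer

gr.Interface(segment, i, o, examples=examples, css=css, allow_flagging=False, analytics_enabled=False,
             title=title, description=description, thumbnail=thumbnail).launch()

The css string passed to gr.Interface is what this commit uses to hide the footer element; blanking the title and trimming the description are cosmetic changes to how the interface renders.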