Commit · 30b7c80 · 1 Parent(s): bbdbef6

Update app.py
app.py CHANGED
@@ -1,5 +1,3 @@
-
-
 import streamlit as st
 
 import tensorflow as tf
@@ -11,7 +9,7 @@ model=tf.keras.models.load_model("dental_xray_seg.h5")
 
 st.header("Segmentation of Teeth in Panoramic X-ray Image Using UNet")
 
-
+examples=["107.png","108.png","109.png"]
 link='Check Out Our Github Repo ! [link](https://github.com/SerdarHelli/Segmentation-of-Teeth-in-Panoramic-X-ray-Image-Using-U-Net)'
 st.markdown(link,unsafe_allow_html=True)
 
@@ -28,13 +26,42 @@ def convert_one_channel(img):
     else:
         return img
 
+def convert_rgb(img):
+    #some images have 3 channels , although they are grayscale image
+    if len(img.shape)==2:
+        img= cv2.cvtColor(img,cv2.COLOR_GRAY2RGB)
+        return img
+    else:
+        return img
+
+
 st.subheader("Upload Dental Panoramic X-ray Image Image")
 image_file = st.file_uploader("Upload Images", type=["png","jpg","jpeg"])
-
+
+
+col1, col2, col3 = st.columns(3)
+with col1:
+    ex=load_image(examples[0])
+    st.image(ex,width=200)
+    if st.button('Example 1'):
+        image_file=examples[0]
+
+with col2:
+    ex1=load_image(examples[1])
+    st.image(ex1,width=200)
+    if st.button('Example 2'):
+        image_file=examples[1]
+
+
+with col3:
+    ex2=load_image(examples[2])
+    st.image(ex2,width=200)
+    if st.button('Example 3'):
+        image_file=examples[2]
+
+
 if image_file is not None:
-
-    "filesize":image_file.size}
-    st.write(file_details)
+
     img=load_image(image_file)
 
     st.text("Making A Prediction ....")
@@ -51,14 +78,17 @@ if image_file is not None:
     predicted=prediction[0]
     predicted = cv2.resize(predicted, (img.shape[1],img.shape[0]), interpolation=cv2.INTER_LANCZOS4)
     mask=np.uint8(predicted*255)#
-    _, mask = cv2.threshold(mask, thresh=
+    _, mask = cv2.threshold(mask, thresh=0, maxval=255, type=cv2.THRESH_BINARY+cv2.THRESH_OTSU)
+    kernel =( np.ones((5,5), dtype=np.float32))
+    mask=cv2.morphologyEx(mask, cv2.MORPH_OPEN, kernel,iterations=1 )
+    mask=cv2.morphologyEx(mask, cv2.MORPH_CLOSE, kernel,iterations=1 )
     cnts,hieararch=cv2.findContours(mask,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)
-    output = cv2.drawContours(
+    output = cv2.drawContours(convert_rgb(img), cnts, -1, (255, 0, 0) , 3)
+
 
     if output is not None :
-        st.subheader("Predicted Image")
+        st.subheader("Predicted Image")
+        st.write(output.shape)
         st.image(output,width=850)
 
         st.text("DONE ! ....")
-
-
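Note on the example picker added in this commit: st.button returns True only on the script rerun triggered by its click, so choosing an example simply rebinds image_file to a bundled file name for that run. A minimal, self-contained sketch of the same pattern, not the committed code itself; the file names and the final st.write are placeholders:

import streamlit as st
from PIL import Image

# Placeholder example images assumed to ship alongside the app.
examples = ["sample_a.png", "sample_b.png", "sample_c.png"]

image_file = st.file_uploader("Upload an image", type=["png", "jpg", "jpeg"])

cols = st.columns(len(examples))
for col, path in zip(cols, examples):
    with col:
        st.image(Image.open(path), width=200)
        # True only on the rerun caused by this click.
        if st.button(f"Use {path}"):
            image_file = path  # a path string instead of an UploadedFile

if image_file is not None:
    st.write("Selected:", image_file)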
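Note on the new post-processing block in the last hunk: the predicted mask is scaled to 0-255, binarized with Otsu's threshold, cleaned with a 5x5 morphological open and close, and the resulting contours are drawn onto a 3-channel copy of the input. The sketch below restates that pipeline in isolation; it assumes OpenCV 4.x and a single-channel probability map already resized to the source image, and the helper name overlay_prediction and the uint8 kernel are choices made here, not taken from the commit:

import cv2
import numpy as np

def overlay_prediction(prob: np.ndarray, img: np.ndarray) -> np.ndarray:
    """Draw (255, 0, 0) outlines of the predicted regions on the input image.

    prob: HxW float array in [0, 1], already resized to match img.
    img:  the original X-ray, grayscale or 3-channel.
    """
    mask = np.uint8(prob * 255)
    # Otsu picks the threshold automatically; the thresh=0 argument is ignored in that mode.
    _, mask = cv2.threshold(mask, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)
    # A 5x5 open removes small speckles, the close fills small holes.
    kernel = np.ones((5, 5), np.uint8)
    mask = cv2.morphologyEx(mask, cv2.MORPH_OPEN, kernel, iterations=1)
    mask = cv2.morphologyEx(mask, cv2.MORPH_CLOSE, kernel, iterations=1)
    # findContours returns (contours, hierarchy) in OpenCV 4.x.
    cnts, _ = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    # Ensure a 3-channel canvas so the coloured outline is visible.
    canvas = cv2.cvtColor(img, cv2.COLOR_GRAY2RGB) if img.ndim == 2 else img.copy()
    return cv2.drawContours(canvas, cnts, -1, (255, 0, 0), 3)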