oral_cancer / app.py
import pathlib

import gradio as gr
import tensorflow as tf
from tensorflow import keras
from skimage.transform import resize  # only used if the commented-out resize in classify_image is re-enabled

# Resolve all file paths relative to this script so it works regardless of the working directory.
current_dir = pathlib.Path(__file__).parent
# Earlier placeholder examples:
# images = [str(current_dir / "cheetah1.jpeg"), str(current_dir / "cheetah1.jpg"), str(current_dir / "lion.jpg")]

# Example images shown under the input widget (three benign, three malignant).
images = [
    str(current_dir / "data/benign/benign_4.jpg"),
    str(current_dir / "data/benign/benign_5.jpg"),
    str(current_dir / "data/benign/benign_6.jpg"),
    str(current_dir / "data/malignant/malignant_4.jpg"),
    str(current_dir / "data/malignant/malignant_5.jpg"),
    str(current_dir / "data/malignant/malignant_6.jpg"),
]
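
# Optional sketch (commented out, not part of the app): check that the example files
# above actually exist before the demo starts, so a missing image fails loudly rather
# than rendering a broken thumbnail. Uses only pathlib from the standard library.
# missing = [p for p in images if not pathlib.Path(p).exists()]
# if missing:
#     raise FileNotFoundError(f"Missing example images: {missing}")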
# Previous prototype (kept for reference): a ViT-based classifier and a second
# interface that reuses it as a function, combined in a tabbed demo.
# img_classifier = gr.Interface.load(
#     "models/google/vit-base-patch16-224", examples=images, cache_examples=False
# )
#
# def func(img, text):
#     return img_classifier(img), text
#
# using_img_classifier_as_function = gr.Interface(
#     func,
#     [gr.Image(type="filepath"), "text"],
#     ["label", "text"],
#     examples=[
#         [str(current_dir / "cheetah1.jpeg"), None],
#         [str(current_dir / "cheetah1.jpg"), "cheetah"],
#         [str(current_dir / "lion.jpg"), "lion"],
#     ],
#     cache_examples=False,
# )
#
# demo = gr.TabbedInterface([using_img_classifier_as_function, img_classifier])
# if __name__ == "__main__":
#     demo.launch()
# Initial "hello world" sanity check (kept for reference):
# def greet(name):
#     return "Hello " + name + "!!"
# iface = gr.Interface(fn=greet, inputs="text", outputs="text")
# iface.launch()

# Earlier model checkpoint (not loaded):
# oc_resnet50_model1 = keras.models.load_model('./models/oc_model.h5')
print("current_dir", current_dir)
oc_resnet50_model2 = keras.models.load_model(f"{current_dir}/models/mendeley_oc_model_v2.h5")
labels = ['Benign Lesion', 'Malignant Lesion']
def classify_image(inp):
    """Return per-class confidences for a single 300x300 RGB image."""
    # inp = resize(inp, (300, 300, 3))
    inp = inp.reshape((-1, 300, 300, 3))  # add a batch dimension
    # inp = tf.keras.applications.mobilenet_v2.preprocess_input(inp)
    inp = tf.keras.applications.resnet50.preprocess_input(inp)
    prediction = oc_resnet50_model2.predict(inp).flatten()
    confidences = {labels[i]: float(prediction[i]) for i in range(2)}
    return confidences
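
# Quick local sanity check (a sketch, not part of the deployed interface): run
# classify_image on the first bundled example. Assumes Pillow and NumPy are
# installed; the resize to 300x300 roughly mirrors what gr.Image(shape=(300, 300)) does.
# from PIL import Image
# import numpy as np
# _sample = np.array(Image.open(images[0]).convert("RGB").resize((300, 300)))
# print(classify_image(_sample))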
gr.Interface(
    fn=classify_image,
    inputs=gr.Image(shape=(300, 300)),
    outputs=gr.Label(num_top_classes=2),
    examples=images,
    cache_examples=False,
    # interpretation="shap", num_shap=5
).launch()