# PlantDoc — plant-disease classifier + Gemini chat (Hugging Face Space).
# Dependency install is a notebook-only step; run it manually or move the
# package to requirements.txt:
#   !pip install -q -U google-generativeai
import os
import time

import tensorflow as tf
import numpy as np
from PIL import Image
from numpy import asarray
import gradio as gr
import google.generativeai as genai
#import cv2
print("Dependency Imported Successfully!")
# --- Gemini (chat) and Keras (image classifier) setup ----------------------
# SECURITY: the original committed a live Google API key in source. Keys in
# source leak forever (VCS history, scrapes); the exposed key must be revoked.
# Read the key from the environment instead.
GOOGLE_API_KEY = os.environ.get("GOOGLE_API_KEY", "")
genai.configure(api_key=GOOGLE_API_KEY)
model = genai.GenerativeModel('gemini-pro')

# Directory holding the six per-crop .h5 classifiers.
# (Save the models in your drive and point this path at them.)
model_path = "./Models/"
sugarcane_model = tf.keras.models.load_model(f"{model_path}sugracane.h5")  # NOTE: file is spelled "sugracane" on disk
tomato_model = tf.keras.models.load_model(f"{model_path}Tomato.h5")
corn_model = tf.keras.models.load_model(f"{model_path}Corn.h5")
potato_model = tf.keras.models.load_model(f"{model_path}Potato.h5")
rice_model = tf.keras.models.load_model(f"{model_path}Rice.h5")
wheat_model = tf.keras.models.load_model(f"{model_path}Wheat.h5")
print("Models Imported Successfully!")

# Most recent predicted class label; predict() overwrites this global.
pred_class = "Plant"
def predict(model, class_name, img):
    """Classify a single leaf image with `model`.

    Resizes the image to the 256x256 input the classifiers expect, runs one
    batched prediction, and records the winning label in the module-level
    `pred_class` global (read later by the chat side).

    Returns:
        (predicted_label, confidence_string) where the confidence is a
        percentage formatted like "97.53 %".
    """
    global pred_class
    resized = tf.image.resize(img, (256, 256))
    # Single image -> batch of one for model.predict().
    batch = tf.expand_dims(tf.keras.preprocessing.image.img_to_array(resized), 0)
    scores = model.predict(batch)[0]
    label = class_name[np.argmax(scores)]
    confidence = round(100 * (np.max(scores)), 2)
    pred_class = label
    return label, f"{confidence} %"
def generate_output(crop, Leaf_Image):
    """Route the uploaded leaf image to the classifier for the chosen crop.

    Args:
        crop: crop name selected in the UI radio ("Potato", "Tomato", ...).
        Leaf_Image: numpy image array from the Gradio Image component.

    Returns:
        (predicted_class, confidence_string) from predict(), or None when the
        crop name is not recognised (same fall-through as the original
        if-chain).
    """
    # One table instead of six duplicated if-branches; class lists must stay
    # in the exact order each model was trained with.
    crop_table = {
        "Sugarcane": (sugarcane_model, ['Sugarcane___Bacterial_Blight', 'Sugarcane___Healthy', 'Sugarcane___Red_Rot']),
        "Tomato": (tomato_model, ['Tomato___Bacterial_spot', 'Tomato___Early_blight', 'Tomato___Late_blight', 'Tomato___healthy']),
        "Corn": (corn_model, ['Corn_(maize)___Cercospora_leaf_spot Gray_leaf_spot', 'Corn_(maize)___Common_rust_', 'Corn_(maize)___Northern_Leaf_Blight', 'Corn_(maize)___healthy']),
        "Potato": (potato_model, ['Potato___Early_blight', 'Potato___Late_blight', 'Potato___healthy']),
        "Rice": (rice_model, ['Rice___Brown_Spot', 'Rice___Healthy', 'Rice___Hispa', 'Rice___Leaf_Blast']),
        "Wheat": (wheat_model, ['Wheat___Brown_Rust', 'Wheat___Healthy', 'Wheat___Yellow_Rust']),
    }
    entry = crop_table.get(crop)
    if entry is None:
        return None
    chosen_model, classes = entry
    return predict(chosen_model, classes, Leaf_Image)
def transform_history(history):
    """Convert Gradio chat history into the Gemini `chat.history` format.

    Prepends a fixed two-turn priming exchange that sets the PlantDoc persona,
    then maps each Gradio (user_message, bot_message) pair to a user/model
    turn in Gemini's {"parts": [{"text": ...}], "role": ...} shape.

    Args:
        history: list of (user_text, bot_text) pairs as kept by Gradio.

    Returns:
        A new list of Gemini-format history entries.
    """
    new_history = [
        {"parts": [{"text": "You are a Agriculture experts named PlantDoc, that specializes in Plant Diseases"}], "role": "user"},
        {"parts": [{"text": "ok"}], "role": "model"},
    ]
    # Fix: removed the stray debug print(history) left in the original.
    for user_text, bot_text in history:
        new_history.append({"parts": [{"text": user_text}], "role": "user"})
        new_history.append({"parts": [{"text": bot_text}], "role": "model"})
    return new_history
def response(message, history):
    """Stream the Gemini reply one character at a time (typewriter effect).

    Fixes: the original called time.sleep without importing `time`
    (NameError on the first chat turn), and shadowed this function's own
    name with a local variable `response`.

    Args:
        message: the new user message from the ChatInterface.
        history: Gradio chat history of (user, bot) pairs.

    Yields:
        Progressively longer prefixes of the model's reply text.
    """
    global chat
    # Rebuild the Gemini-side history from Gradio's, so the UI's 'Undo' and
    # 'Clear' buttons keep the model's context in sync.
    chat.history = transform_history(history)
    reply = chat.send_message(message)
    reply.resolve()
    # Reveal the answer one character per tick.
    for i in range(len(reply.text)):
        time.sleep(0.01)
        yield reply.text[: i + 1]
# --- Gradio UI --------------------------------------------------------------
# Top row: image upload (left) + crop selector and prediction readouts (right).
# Bottom row: Predict button wired to generate_output, then the Gemini chat.
css = """
#textbox {height: 700px;}
"""
with gr.Blocks(css=css) as demo:
    with gr.Row():
        with gr.Column():
            leaf_img = gr.Image(type="numpy", label="Upload Plant's Leaf Image")
        with gr.Column():
            crop = gr.Radio(["Potato", "Tomato", "Wheat", "Rice", "Corn", "Sugarcane"], label="Crop", info="Please Select a Crop?")
            disease = gr.Textbox(label="Disease Predicted :- ")
            # Fix: label typo "Condidence" -> "Confidence".
            confi = gr.Textbox(label="Level of Confidence:- ")
    with gr.Row():
        with gr.Column(elem_id="textbox"):
            greet_btn = gr.Button("Predict")
            greet_btn.click(fn=generate_output, inputs=[crop, leaf_img], outputs=[disease, confi], api_name="predict")
    gr.ChatInterface(fn=response, examples=["Give me more Info about this disease!", "How to treat this Plant disease!"], title="Talk to PlantDoc(AI Expert).")

if __name__ == "__main__":
    # The chat session is module-global so response() can reach it.
    chat = model.start_chat(history=[])
    demo.launch()