# -*- coding: utf-8 -*-
"""shadman_Image_classification.ipynb

Automatically generated by Colaboratory.

Original file is located at
    https://colab.research.google.com/drive/1DCVsfqR-kOz3CHdB99IT75IHBzUtIf3M
"""

import tensorflow as tf
import PIL.Image  # import the Image submodule explicitly so PIL.Image.open works outside Colab
import matplotlib.pyplot as plt
from tensorflow.keras import layers
import os

import pathlib

# Download and unpack the TensorFlow flowers dataset (one sub-directory per class).
flower_dataset = "https://storage.googleapis.com/download.tensorflow.org/example_images/flower_photos.tgz"
dataset_path = tf.keras.utils.get_file('flower_photos', origin=flower_dataset, untar=True)
dataset_path = pathlib.Path(dataset_path)
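
# Quick sanity check (not part of the original notebook): count the downloaded
# images; the flowers archive contains roughly 3,670 JPEGs.
image_count = len(list(dataset_path.glob('*/*.jpg')))
print(image_count)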

# Collect the file paths of two classes and open a sample image from each.
roses = list(dataset_path.glob('roses/*'))
daisy = list(dataset_path.glob('daisy/*'))
print(roses[1])

# In a Colab cell only the last expression is rendered inline; call .show()
# on these when running this as a plain script.
PIL.Image.open(roses[10])
PIL.Image.open(daisy[10])

# Build the training split (75% of the images, given validation_split=0.25).

training_images = tf.keras.preprocessing.image_dataset_from_directory(
    dataset_path,
    subset="training",
    validation_split=0.25,
    seed=123,
    image_size=(180, 180),
    batch_size=32
)

# Build the validation split (the remaining 25%), using the same seed so the
# two splits do not overlap.

validation_images = tf.keras.preprocessing.image_dataset_from_directory(
    dataset_path,
    subset="validation",
    validation_split=0.25,
    seed=123,
    image_size=(180, 180),
    batch_size=32
)

# Class names are inferred from the sub-directory names.
flower_classes = training_images.class_names
print(flower_classes)
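
# Optional tf.data tuning (not in the original notebook): cache decoded images
# and prefetch batches so the input pipeline keeps up with training. Safe to
# omit if memory is tight.
AUTOTUNE = tf.data.AUTOTUNE
training_images = training_images.cache().shuffle(1000).prefetch(buffer_size=AUTOTUNE)
validation_images = validation_images.cache().prefetch(buffer_size=AUTOTUNE)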

# The flowers dataset has 5 classes: daisy, dandelion, roses, sunflowers, tulips.
dataset_classes = len(flower_classes)

from tensorflow.keras.models import Sequential

model = Sequential([
    # Rescale pixel values from [0, 255] to [0, 1].
    layers.Rescaling(1./255, input_shape=(180, 180, 3)),
    layers.Conv2D(16, 3, padding='same', activation='relu'),
    layers.MaxPooling2D(),
    layers.Conv2D(32, 3, padding='same', activation='relu'),
    layers.MaxPooling2D(),
    layers.Conv2D(64, 3, padding='same', activation='relu'),
    layers.MaxPooling2D(),
    layers.Flatten(),
    layers.Dense(128, activation='relu'),
    # Softmax output, so the model emits class probabilities directly.
    layers.Dense(dataset_classes, activation='softmax')
])

model.compile(
    optimizer='adam',
    # The output layer already applies softmax, so the loss must be told it
    # receives probabilities rather than logits.
    loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False),
    metrics=['accuracy'])
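
# Optional: print the layer-by-layer architecture and parameter counts.
model.summary()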

# fit returns a History object with per-epoch loss and accuracy.
history = model.fit(
    training_images,
    validation_data=validation_images,
    epochs=10
)
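
# Sketch (not in the original notebook): matplotlib is imported above but never
# used, so plot the accuracy curves recorded by fit. Assumes the run above
# completed all 10 epochs.
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
epochs_range = range(len(acc))

plt.plot(epochs_range, acc, label='training accuracy')
plt.plot(epochs_range, val_acc, label='validation accuracy')
plt.xlabel('epoch')
plt.ylabel('accuracy')
plt.legend()
plt.show()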

import gradio as gr

def predict_input_image(img):
    # Add a batch dimension; the model's Rescaling layer handles the 0-255
    # pixel range delivered by Gradio.
    img_4d = img.reshape(-1, 180, 180, 3)
    prediction = model.predict(img_4d)[0]
    # Map each class name to its predicted probability for the Label widget.
    return {flower_classes[i]: float(prediction[i]) for i in range(dataset_classes)}

# Note: gr.inputs / gr.outputs and the interpretation argument are the legacy
# Gradio API (removed in Gradio 4), so this cell expects an older gradio release.
image = gr.inputs.Image(shape=(180, 180))
label = gr.outputs.Label(num_top_classes=5)

gr.Interface(fn=predict_input_image, inputs=image, outputs=label,
             interpretation='default').launch(debug=True)