# Train a simple image classifier on the TensorFlow Datasets Food101 split.
import tensorflow as tf
import tensorflow_datasets as tfds
from tensorflow import keras
from tensorflow.keras import layers

# Load Food101 (101 food classes) as (image, label) pairs, along with dataset metadata.
(train_data, test_data), dataset_info = tfds.load(
    name='food101', split=['train', 'validation'],
    shuffle_files=True, as_supervised=True, with_info=True)

labels = dataset_info.features['label'].names
num_labels = len(labels)
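# Optional sanity check (illustrative, not required for training): Food101 defines
# 101 classes; print the count and a few label names to confirm the mapping loaded.
print(num_labels, labels[:5])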

IMG_SIZE = 224
BATCH_SIZE = 32

def preprocess_img(image, label, img_size=IMG_SIZE):
    # Resize to a fixed size, cast to float32, and scale pixel values to [0, 1].
    image = tf.image.resize(image, [img_size, img_size])
    image = tf.cast(image, tf.float32) / 255.0
    return image, label

# Batch by a fixed batch size (not by the number of classes) and prefetch for throughput.
train_data = train_data.map(preprocess_img, num_parallel_calls=tf.data.AUTOTUNE).batch(BATCH_SIZE).prefetch(tf.data.AUTOTUNE)
test_data = test_data.map(preprocess_img, num_parallel_calls=tf.data.AUTOTUNE).batch(BATCH_SIZE).prefetch(tf.data.AUTOTUNE)
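# Optional sanity check (illustrative): one batch should yield float32 images of shape
# (BATCH_SIZE, 224, 224, 3) and integer labels of shape (BATCH_SIZE,).
for images, batch_labels in train_data.take(1):
    print(images.shape, batch_labels.shape, images.dtype)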


# Simple baseline: global average pooling over the raw pixels feeding a softmax classifier.
model = keras.Sequential([
    layers.Input(shape=(IMG_SIZE, IMG_SIZE, 3)),
    layers.GlobalAveragePooling2D(),
    layers.Dense(num_labels),                        # one logit per class (no relu before the softmax)
    layers.Activation('softmax', dtype=tf.float32),  # output probabilities in float32
])

model.compile(
    loss=keras.losses.SparseCategoricalCrossentropy(from_logits=False),  # model outputs probabilities
    optimizer=keras.optimizers.Adam(learning_rate=0.001),
    metrics=['accuracy'],
)

model.summary()

# The datasets are already batched, so fit/evaluate must not be given a batch_size argument.
model.fit(train_data, epochs=5, verbose=2)
model.evaluate(test_data)

model.save("foodClassifierModel.h5", save_format="h5")
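
# Illustrative usage sketch: reload the saved model and classify one validation batch;
# the file name matches the save above, and labels[] maps the top-1 index to a class name.
reloaded = keras.models.load_model("foodClassifierModel.h5")
for images, _ in test_data.take(1):
    probs = reloaded.predict(images)
    print("predicted:", labels[int(tf.argmax(probs[0]))])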