import json

import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
import tensorflow_datasets as tfds
from tensorflow import keras
from tensorflow.keras import layers

# Food-101: 101 food categories with 750 training and 250 validation images per
# class (75,750 / 25,250 images in total).
(train_data, test_data), dataset_info = tfds.load(
    name='food101',
    split=['train', 'validation'],
    shuffle_files=True,
    as_supervised=True,
    with_info=True,
)

labels = dataset_info.features['label'].names
num_labels = len(labels)

def preprocess_img(image, label, img_size=224):
    """Resize an image to img_size x img_size and cast it to float32 (pixel values stay in [0, 255])."""
    image = tf.image.resize(image, [img_size, img_size])
    image = tf.cast(image, tf.float32)
    return image, label

# The batch size is independent of the number of classes; 32 is a common default.
batch_size = 32
train_data = train_data.map(preprocess_img, num_parallel_calls=tf.data.AUTOTUNE).batch(batch_size).prefetch(tf.data.AUTOTUNE)
test_data = test_data.map(preprocess_img, num_parallel_calls=tf.data.AUTOTUNE).batch(batch_size).prefetch(tf.data.AUTOTUNE)
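
# Optional sanity check (a minimal sketch, not part of the original pipeline):
# plot one resized training example to confirm the preprocessing looks right.
for sample_images, sample_labels in train_data.take(1):
    plt.imshow(sample_images[0].numpy().astype(np.uint8))
    plt.title(labels[sample_labels[0].numpy()])
    plt.axis('off')
    plt.show()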


model = keras.Sequential()

# A simple baseline: global-average-pool the raw pixels, then classify with a
# single dense layer. The Dense layer is linear (no ReLU) so that the final
# softmax receives unmodified logits.
inputs = layers.Input(shape=(224, 224, 3))
layer1 = layers.GlobalAveragePooling2D()
layer2 = layers.Dense(num_labels)
layer3 = layers.Activation('softmax', dtype=tf.float32)

model.add(inputs)
model.add(layer1)
model.add(layer2)
model.add(layer3)

model.compile(
    loss=keras.losses.SparseCategoricalCrossentropy(from_logits=False),
    optimizer=keras.optimizers.legacy.Adam(learning_rate=0.001),
    metrics=['accuracy'],
)

model.summary()

# The datasets are already batched, so batch_size is not passed to fit()/evaluate()
# (Keras raises an error when batch_size is combined with a tf.data.Dataset).
model.fit(train_data, epochs=5, verbose=2)
model.evaluate(test_data)
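
# A minimal sketch (not in the original script) of turning the model's softmax
# output for one test batch back into human-readable class names.
for batch_images, batch_labels in test_data.take(1):
    probs = model.predict(batch_images)
    for pred, true in zip(np.argmax(probs, axis=-1)[:5], batch_labels.numpy()[:5]):
        print(f"predicted: {labels[pred]:<20} actual: {labels[true]}")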

# Save the full model (SavedModel format), the weights on their own, and the
# architecture config, then reload the saved model and re-check its accuracy.
model.save("./foodTrain.pd")
model.save_weights("./foodTrainWeights.h5")
loaded_saved_model = tf.keras.models.load_model("./foodTrain.pd")

results_loaded_saved_model = loaded_saved_model.evaluate(test_data)
print(results_loaded_saved_model)

with open("config.json", "w") as outfile:
    json.dump(model.get_config(), outfile)
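
# A minimal sketch (assumption: run in the same session as the script above) of how
# the dumped config.json and the saved weights could be used to rebuild the model
# without the SavedModel directory.
with open("config.json") as infile:
    rebuilt_config = json.load(infile)
rebuilt_model = keras.Sequential.from_config(rebuilt_config)
rebuilt_model.load_weights("./foodTrainWeights.h5")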