import datetime

import matplotlib.pyplot as plt
import tensorflow as tf
from tensorflow.keras.callbacks import ModelCheckpoint


# Preprocessing of the training set images
training_set = tf.keras.utils.image_dataset_from_directory(
    'FruitTrainingDataset/train',
    labels="inferred",
    label_mode="categorical",
    class_names=None,
    color_mode="rgb",
    batch_size=32,
    image_size=(64, 64),
    shuffle=True,
    seed=None,
    validation_split=None,
    subset=None,
    interpolation="bilinear",
    follow_links=False,
    crop_to_aspect_ratio=False
)

# Preprocessing of the validation set images
validation_set = tf.keras.utils.image_dataset_from_directory(
    'FruitTrainingDataset/validation',
    labels="inferred",
    label_mode="categorical",
    class_names=None,
    color_mode="rgb",
    batch_size=32,
    image_size=(64, 64),
    shuffle=True,
    seed=None,
    validation_split=None,
    subset=None,
    interpolation="bilinear",
    follow_links=False,
    crop_to_aspect_ratio=False
)
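
# --- Optional sketch: input pipeline performance ------------------------------
# Not part of the original script: caching and prefetching the tf.data pipelines
# is a common way to keep the accelerator fed during training. Uncomment to try
# it; tf.data.AUTOTUNE lets TensorFlow pick the buffer size.
# training_set = training_set.cache().prefetch(buffer_size=tf.data.AUTOTUNE)
# validation_set = validation_set.cache().prefetch(buffer_size=tf.data.AUTOTUNE)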

model = tf.keras.models.Sequential()

# First convolutional block: two 3x3 convolutions followed by 2x2 max pooling
model.add(tf.keras.layers.Conv2D(filters=32, kernel_size=3, padding='same', activation='relu', input_shape=[64, 64, 3]))
model.add(tf.keras.layers.Conv2D(filters=32, kernel_size=3, activation='relu'))
model.add(tf.keras.layers.MaxPool2D(pool_size=2, strides=2))
model.add(tf.keras.layers.Dropout(0.25))

# Second convolutional block: two 3x3 convolutions followed by 2x2 max pooling
model.add(tf.keras.layers.Conv2D(filters=64, kernel_size=3, padding='same', activation='relu'))
model.add(tf.keras.layers.Conv2D(filters=64, kernel_size=3, activation='relu'))
model.add(tf.keras.layers.MaxPool2D(pool_size=2, strides=2))
model.add(tf.keras.layers.Dropout(0.25))

# Classifier head: flatten the feature maps and pass them through two dense layers
model.add(tf.keras.layers.Flatten())
model.add(tf.keras.layers.Dense(units=512, activation='relu'))
model.add(tf.keras.layers.Dense(units=256, activation='relu'))
model.add(tf.keras.layers.Dropout(0.5))  # To avoid overfitting

# Output layer: one unit per class (36 classes), softmax probabilities
model.add(tf.keras.layers.Dense(units=36, activation='softmax'))

model.compile(optimizer='adam', loss='categorical_crossentropy',
              metrics=['accuracy', 'mean_absolute_error', 'Precision', 'Recall', tf.keras.metrics.AUC()])

# Resume training from the last stored epoch using the initial_epoch parameter
# (epochs is the final epoch index, so it must be greater than initial_epoch).
history = model.fit(x=training_set, validation_data=validation_set, epochs=15, initial_epoch=10)
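
# --- Sketch: saving and restoring checkpoints ---------------------------------
# ModelCheckpoint is imported at the top but never used, so nothing is actually
# persisted between runs. A minimal, assumed sketch (the 'checkpoints/' path and
# the monitored metric are illustrative, not part of the original run):
#
# checkpoint_path = 'checkpoints/fruit_cnn_{:%Y%m%d-%H%M%S}.keras'.format(datetime.datetime.now())
# checkpoint_cb = ModelCheckpoint(filepath=checkpoint_path, monitor='val_accuracy', save_best_only=True)
# history = model.fit(x=training_set, validation_data=validation_set,
#                     epochs=15, initial_epoch=10, callbacks=[checkpoint_cb])
#
# A later run could then reload the saved model before resuming:
# model = tf.keras.models.load_model(checkpoint_path)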

# Training set accuracy
# evaluate() returns the loss followed by every compiled metric, so unpack only
# the first two values and discard the rest.
train_loss, train_acc, *_ = model.evaluate(training_set)
print('Training accuracy:', train_acc)

# Validation set accuracy
val_loss, val_acc, *_ = model.evaluate(validation_set)
print('Validation accuracy:', val_acc)
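
# --- Sketch: plotting the training history -------------------------------------
# matplotlib.pyplot is imported at the top but never used. A minimal sketch of
# visualising the accuracy curves recorded in `history`; the title and labels
# are illustrative assumptions.
plt.plot(history.history['accuracy'], label='train accuracy')
plt.plot(history.history['val_accuracy'], label='validation accuracy')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.title('Fruit classifier training history')
plt.legend()
plt.show()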