# dogs-cats / main.py
# Press Shift+F10 to execute it or replace it with your code.
# Press Double Shift to search everywhere for classes, files, tool windows, actions, and settings.
import os
import cv2
import random
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
from tensorflow.keras.callbacks import TensorBoard
from tensorflow.keras.preprocessing.image import ImageDataGenerator
training_images = './Dataset/training'
validation_images = './Dataset/validation'
training_images_list = os.listdir(training_images)
validation_images_list = os.listdir(validation_images)
IMAGE_SIZE = 100
# Get images from dataset
def get_dataset_image(is_training_data):
images = []
tags = []
data = []
count = 0
image_list = validation_images_list
image_rute = validation_images
if is_training_data:
image_list = training_images_list
image_rute = training_images
for dir_name in image_list:
name = image_rute + '/' + dir_name
for file_name in os.listdir(name):
tags.append(count)
img = cv2.imread(name + '/' + file_name, 0)
if img is None:
print('Wrong path:', name + '/' + file_name)
else:
img = cv2.resize(img, (IMAGE_SIZE, IMAGE_SIZE), interpolation=cv2.INTER_CUBIC)
img = img.reshape(IMAGE_SIZE, IMAGE_SIZE, 1)
data.append([img, count])
images.append(img)
count = count + 1
return images, tags, data, count
# Normalize images - white and black
def normalize_images(images):
new_images = np.array(images).astype(float) / 255
return new_images
# Avoid over fitting
def avoid_over_fitting(images, tags):
rotation_range = random.randint(0, 90)
width_shift_range = random.uniform(0, 1)
height_shift_range = random.uniform(0, 1)
shear_range = random.randint(0, 25)
img_train_gen = ImageDataGenerator(
rotation_range=rotation_range,
width_shift_range=width_shift_range,
height_shift_range=height_shift_range,
shear_range=shear_range,
zoom_range=[0.5, 1.5],
vertical_flip=True,
horizontal_flip=True
)
img_train_gen.fit(images)
img_train = img_train_gen.flow(images, tags, batch_size=38)
return img_train
# Training lists
train_images, train_tags, train_data, train_count = get_dataset_image(True)
# Validation lists
val_images, val_tags, val_data, val_count = get_dataset_image(False)
print('Read images finalized!')
# Normalize
train_images = normalize_images(train_images)
val_images = normalize_images(val_images)
train_tags = np.array(train_tags)
val_tags = np.array(val_tags)
img_to_train = avoid_over_fitting(train_images, train_tags)
# Set layers and config CNN
CNN_model = tf.keras.models.Sequential([
tf.keras.layers.Conv2D(32, (3, 3), activation='relu', input_shape=(IMAGE_SIZE, IMAGE_SIZE, 1)),
tf.keras.layers.MaxPooling2D(2, 2),
tf.keras.layers.Conv2D(64, (3, 3), activation='relu'),
tf.keras.layers.MaxPooling2D(2, 2),
tf.keras.layers.Conv2D(64, (3, 3), activation='relu'),
tf.keras.layers.MaxPooling2D(2, 2),
tf.keras.layers.Conv2D(128, (3, 3), activation='relu'),
tf.keras.layers.MaxPooling2D(2, 2),
# Clasification dense layers
tf.keras.layers.Dropout(0.2),
tf.keras.layers.Flatten(),
tf.keras.layers.Dense(250, activation='relu'),
tf.keras.layers.Dense(1, activation='sigmoid')
])
CNN_model.compile(
optimizer='adam',
loss='binary_crossentropy',
metrics=['accuracy']
)
# Train model
BoardCNN = TensorBoard(log_dir='./logs/cnn')
CNN_model.fit(
img_to_train,
batch_size=38,
validation_data=(val_images, val_tags),
epochs=500,
callbacks=[BoardCNN],
steps_per_epoch=int(np.ceil(len(train_images)/float(38))),
validation_steps=int(np.ceil(len(val_images)/float(38)))
)
# Save model
CNN_model.save('./saves/dogs-cats.h5')
CNN_model.save_weights('./saves/wights-dogs-cats.h5')
print("Finish!")