{ "cells": [ { "cell_type": "code", "execution_count": 10, "id": "aac52aaf", "metadata": {}, "outputs": [], "source": [ "import tensorflow as tf\n", "from tensorflow.keras.applications import InceptionV3\n", "from tensorflow.keras.models import Model\n", "from tensorflow.keras.layers import Dropout,Input,Flatten,Dense,MaxPooling2D\n", "from tensorflow.keras.optimizers import Adam\n", "from tensorflow.keras.preprocessing.image import ImageDataGenerator #data Augmentation" ] }, { "cell_type": "code", "execution_count": 11, "id": "54ba6829", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "WARNING:tensorflow:From C:\\Users\\Ashu\\AppData\\Local\\Temp/ipykernel_948/337460670.py:1: is_gpu_available (from tensorflow.python.framework.test_util) is deprecated and will be removed in a future version.\n", "Instructions for updating:\n", "Use `tf.config.list_physical_devices('GPU')` instead.\n" ] }, { "data": { "text/plain": [ "False" ] }, "execution_count": 11, "metadata": {}, "output_type": "execute_result" } ], "source": [ "tf.test.is_gpu_available()" ] }, { "cell_type": "code", "execution_count": 54, "id": "5d25419e", "metadata": {}, "outputs": [], "source": [ "\n", "batchsize=8" ] }, { "cell_type": "code", "execution_count": 24, "id": "97b30652", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Found 61128 images belonging to 2 classes.\n", "Found 15281 images belonging to 2 classes.\n" ] } ], "source": [ "train_datagen = ImageDataGenerator(rescale=1./255,rotation_range=0.2,shear_range=0.2,zoom_range=0.2,width_shift_range=0.2,height_shift_range=0.2\n", " ,validation_split=0.2)\n", "train_data = train_datagen.flow_from_directory(r'E:\\study\\Sem-7\\finalProject\\Drivers Drowsiness Detection using Deep Learning\\mrlEyes_2018_01\\Prepared_Data\\Train'\n", " ,target_size=(80,80),batch_size=8,class_mode='categorical',subset='training')\n", "validation_data = train_datagen.flow_from_directory(r'E:\\study\\Sem-7\\finalProject\\Drivers Drowsiness Detection using Deep Learning\\mrlEyes_2018_01\\Prepared_Data\\Train'\n", " ,target_size=(80,80),batch_size=8,class_mode='categorical'\n", " ,subset='validation')" ] }, { "cell_type": "code", "execution_count": 34, "id": "f6f3df3a", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Found 8489 images belonging to 2 classes.\n" ] } ], "source": [ "test_datagen = ImageDataGenerator(rescale=1./255)\n", "test_data = train_datagen.flow_from_directory(r'E:\\study\\Sem-7\\finalProject\\Drivers Drowsiness Detection using Deep Learning\\mrlEyes_2018_01\\Prepared_Data\\Test'\n", " ,target_size=(80,80),batch_size=8,class_mode='categorical')\n" ] }, { "cell_type": "code", "execution_count": 35, "id": "3e3d4286", "metadata": {}, "outputs": [], "source": [ "bmodel = InceptionV3(include_top = False, weights = 'imagenet',input_tensor = Input(shape = (80,80,3),batch_size=8 ))\n", "hmodel = bmodel.output" ] }, { "cell_type": "code", "execution_count": 41, "id": "022a0cde", "metadata": {}, "outputs": [], "source": [ "hmodel = Flatten()(hmodel)\n", "hmodel = Dense(64,activation='relu')(hmodel)\n", "hmodel = Dropout(0.5)(hmodel)\n", "hmodel = Dense(2,activation= 'Softmax')(hmodel)\n", "\n", "model = Model(inputs = bmodel.input, outputs=hmodel)\n", "for layer in bmodel.layers:\n", " layer.trainable = False" ] }, { "cell_type": "code", "execution_count": 42, "id": "ebfe3cd9", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Model: 
\"model_1\"\n", "__________________________________________________________________________________________________\n", "Layer (type) Output Shape Param # Connected to \n", "==================================================================================================\n", "input_5 (InputLayer) [(8, 80, 80, 3)] 0 \n", "__________________________________________________________________________________________________\n", "conv2d_376 (Conv2D) (8, 39, 39, 32) 864 input_5[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_376 (BatchN (8, 39, 39, 32) 96 conv2d_376[0][0] \n", "__________________________________________________________________________________________________\n", "activation_376 (Activation) (8, 39, 39, 32) 0 batch_normalization_376[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_377 (Conv2D) (8, 37, 37, 32) 9216 activation_376[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_377 (BatchN (8, 37, 37, 32) 96 conv2d_377[0][0] \n", "__________________________________________________________________________________________________\n", "activation_377 (Activation) (8, 37, 37, 32) 0 batch_normalization_377[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_378 (Conv2D) (8, 37, 37, 64) 18432 activation_377[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_378 (BatchN (8, 37, 37, 64) 192 conv2d_378[0][0] \n", "__________________________________________________________________________________________________\n", "activation_378 (Activation) (8, 37, 37, 64) 0 batch_normalization_378[0][0] \n", "__________________________________________________________________________________________________\n", "max_pooling2d_16 (MaxPooling2D) (8, 18, 18, 64) 0 activation_378[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_379 (Conv2D) (8, 18, 18, 80) 5120 max_pooling2d_16[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_379 (BatchN (8, 18, 18, 80) 240 conv2d_379[0][0] \n", "__________________________________________________________________________________________________\n", "activation_379 (Activation) (8, 18, 18, 80) 0 batch_normalization_379[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_380 (Conv2D) (8, 16, 16, 192) 138240 activation_379[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_380 (BatchN (8, 16, 16, 192) 576 conv2d_380[0][0] \n", "__________________________________________________________________________________________________\n", "activation_380 (Activation) (8, 16, 16, 192) 0 batch_normalization_380[0][0] \n", "__________________________________________________________________________________________________\n", "max_pooling2d_17 (MaxPooling2D) (8, 7, 7, 192) 0 activation_380[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_384 (Conv2D) (8, 7, 7, 64) 12288 max_pooling2d_17[0][0] \n", 
"__________________________________________________________________________________________________\n", "batch_normalization_384 (BatchN (8, 7, 7, 64) 192 conv2d_384[0][0] \n", "__________________________________________________________________________________________________\n", "activation_384 (Activation) (8, 7, 7, 64) 0 batch_normalization_384[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_382 (Conv2D) (8, 7, 7, 48) 9216 max_pooling2d_17[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_385 (Conv2D) (8, 7, 7, 96) 55296 activation_384[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_382 (BatchN (8, 7, 7, 48) 144 conv2d_382[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_385 (BatchN (8, 7, 7, 96) 288 conv2d_385[0][0] \n", "__________________________________________________________________________________________________\n", "activation_382 (Activation) (8, 7, 7, 48) 0 batch_normalization_382[0][0] \n", "__________________________________________________________________________________________________\n", "activation_385 (Activation) (8, 7, 7, 96) 0 batch_normalization_385[0][0] \n", "__________________________________________________________________________________________________\n", "average_pooling2d_36 (AveragePo (8, 7, 7, 192) 0 max_pooling2d_17[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_381 (Conv2D) (8, 7, 7, 64) 12288 max_pooling2d_17[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_383 (Conv2D) (8, 7, 7, 64) 76800 activation_382[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_386 (Conv2D) (8, 7, 7, 96) 82944 activation_385[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_387 (Conv2D) (8, 7, 7, 32) 6144 average_pooling2d_36[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_381 (BatchN (8, 7, 7, 64) 192 conv2d_381[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_383 (BatchN (8, 7, 7, 64) 192 conv2d_383[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_386 (BatchN (8, 7, 7, 96) 288 conv2d_386[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_387 (BatchN (8, 7, 7, 32) 96 conv2d_387[0][0] \n", "__________________________________________________________________________________________________\n", "activation_381 (Activation) (8, 7, 7, 64) 0 batch_normalization_381[0][0] \n", "__________________________________________________________________________________________________\n", "activation_383 (Activation) (8, 7, 7, 64) 0 batch_normalization_383[0][0] \n", "__________________________________________________________________________________________________\n", "activation_386 (Activation) (8, 7, 7, 96) 0 batch_normalization_386[0][0] 
\n", "__________________________________________________________________________________________________\n", "activation_387 (Activation) (8, 7, 7, 32) 0 batch_normalization_387[0][0] \n", "__________________________________________________________________________________________________\n", "mixed0 (Concatenate) (8, 7, 7, 256) 0 activation_381[0][0] \n", " activation_383[0][0] \n", " activation_386[0][0] \n", " activation_387[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_391 (Conv2D) (8, 7, 7, 64) 16384 mixed0[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_391 (BatchN (8, 7, 7, 64) 192 conv2d_391[0][0] \n", "__________________________________________________________________________________________________\n", "activation_391 (Activation) (8, 7, 7, 64) 0 batch_normalization_391[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_389 (Conv2D) (8, 7, 7, 48) 12288 mixed0[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_392 (Conv2D) (8, 7, 7, 96) 55296 activation_391[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_389 (BatchN (8, 7, 7, 48) 144 conv2d_389[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_392 (BatchN (8, 7, 7, 96) 288 conv2d_392[0][0] \n", "__________________________________________________________________________________________________\n", "activation_389 (Activation) (8, 7, 7, 48) 0 batch_normalization_389[0][0] \n", "__________________________________________________________________________________________________\n", "activation_392 (Activation) (8, 7, 7, 96) 0 batch_normalization_392[0][0] \n", "__________________________________________________________________________________________________\n", "average_pooling2d_37 (AveragePo (8, 7, 7, 256) 0 mixed0[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_388 (Conv2D) (8, 7, 7, 64) 16384 mixed0[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_390 (Conv2D) (8, 7, 7, 64) 76800 activation_389[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_393 (Conv2D) (8, 7, 7, 96) 82944 activation_392[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_394 (Conv2D) (8, 7, 7, 64) 16384 average_pooling2d_37[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_388 (BatchN (8, 7, 7, 64) 192 conv2d_388[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_390 (BatchN (8, 7, 7, 64) 192 conv2d_390[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_393 (BatchN (8, 7, 7, 96) 288 conv2d_393[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_394 (BatchN (8, 7, 7, 
64) 192 conv2d_394[0][0] \n", "__________________________________________________________________________________________________\n", "activation_388 (Activation) (8, 7, 7, 64) 0 batch_normalization_388[0][0] \n", "__________________________________________________________________________________________________\n", "activation_390 (Activation) (8, 7, 7, 64) 0 batch_normalization_390[0][0] \n", "__________________________________________________________________________________________________\n", "activation_393 (Activation) (8, 7, 7, 96) 0 batch_normalization_393[0][0] \n", "__________________________________________________________________________________________________\n", "activation_394 (Activation) (8, 7, 7, 64) 0 batch_normalization_394[0][0] \n", "__________________________________________________________________________________________________\n", "mixed1 (Concatenate) (8, 7, 7, 288) 0 activation_388[0][0] \n", " activation_390[0][0] \n", " activation_393[0][0] \n", " activation_394[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_398 (Conv2D) (8, 7, 7, 64) 18432 mixed1[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_398 (BatchN (8, 7, 7, 64) 192 conv2d_398[0][0] \n", "__________________________________________________________________________________________________\n", "activation_398 (Activation) (8, 7, 7, 64) 0 batch_normalization_398[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_396 (Conv2D) (8, 7, 7, 48) 13824 mixed1[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_399 (Conv2D) (8, 7, 7, 96) 55296 activation_398[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_396 (BatchN (8, 7, 7, 48) 144 conv2d_396[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_399 (BatchN (8, 7, 7, 96) 288 conv2d_399[0][0] \n", "__________________________________________________________________________________________________\n", "activation_396 (Activation) (8, 7, 7, 48) 0 batch_normalization_396[0][0] \n", "__________________________________________________________________________________________________\n", "activation_399 (Activation) (8, 7, 7, 96) 0 batch_normalization_399[0][0] \n", "__________________________________________________________________________________________________\n", "average_pooling2d_38 (AveragePo (8, 7, 7, 288) 0 mixed1[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_395 (Conv2D) (8, 7, 7, 64) 18432 mixed1[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_397 (Conv2D) (8, 7, 7, 64) 76800 activation_396[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_400 (Conv2D) (8, 7, 7, 96) 82944 activation_399[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_401 (Conv2D) (8, 7, 7, 64) 18432 average_pooling2d_38[0][0] \n", 
"__________________________________________________________________________________________________\n", "batch_normalization_395 (BatchN (8, 7, 7, 64) 192 conv2d_395[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_397 (BatchN (8, 7, 7, 64) 192 conv2d_397[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_400 (BatchN (8, 7, 7, 96) 288 conv2d_400[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_401 (BatchN (8, 7, 7, 64) 192 conv2d_401[0][0] \n", "__________________________________________________________________________________________________\n", "activation_395 (Activation) (8, 7, 7, 64) 0 batch_normalization_395[0][0] \n", "__________________________________________________________________________________________________\n", "activation_397 (Activation) (8, 7, 7, 64) 0 batch_normalization_397[0][0] \n", "__________________________________________________________________________________________________\n", "activation_400 (Activation) (8, 7, 7, 96) 0 batch_normalization_400[0][0] \n", "__________________________________________________________________________________________________\n", "activation_401 (Activation) (8, 7, 7, 64) 0 batch_normalization_401[0][0] \n", "__________________________________________________________________________________________________\n", "mixed2 (Concatenate) (8, 7, 7, 288) 0 activation_395[0][0] \n", " activation_397[0][0] \n", " activation_400[0][0] \n", " activation_401[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_403 (Conv2D) (8, 7, 7, 64) 18432 mixed2[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_403 (BatchN (8, 7, 7, 64) 192 conv2d_403[0][0] \n", "__________________________________________________________________________________________________\n", "activation_403 (Activation) (8, 7, 7, 64) 0 batch_normalization_403[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_404 (Conv2D) (8, 7, 7, 96) 55296 activation_403[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_404 (BatchN (8, 7, 7, 96) 288 conv2d_404[0][0] \n", "__________________________________________________________________________________________________\n", "activation_404 (Activation) (8, 7, 7, 96) 0 batch_normalization_404[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_402 (Conv2D) (8, 3, 3, 384) 995328 mixed2[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_405 (Conv2D) (8, 3, 3, 96) 82944 activation_404[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_402 (BatchN (8, 3, 3, 384) 1152 conv2d_402[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_405 (BatchN (8, 3, 3, 96) 288 conv2d_405[0][0] \n", "__________________________________________________________________________________________________\n", 
"activation_402 (Activation) (8, 3, 3, 384) 0 batch_normalization_402[0][0] \n", "__________________________________________________________________________________________________\n", "activation_405 (Activation) (8, 3, 3, 96) 0 batch_normalization_405[0][0] \n", "__________________________________________________________________________________________________\n", "max_pooling2d_18 (MaxPooling2D) (8, 3, 3, 288) 0 mixed2[0][0] \n", "__________________________________________________________________________________________________\n", "mixed3 (Concatenate) (8, 3, 3, 768) 0 activation_402[0][0] \n", " activation_405[0][0] \n", " max_pooling2d_18[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_410 (Conv2D) (8, 3, 3, 128) 98304 mixed3[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_410 (BatchN (8, 3, 3, 128) 384 conv2d_410[0][0] \n", "__________________________________________________________________________________________________\n", "activation_410 (Activation) (8, 3, 3, 128) 0 batch_normalization_410[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_411 (Conv2D) (8, 3, 3, 128) 114688 activation_410[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_411 (BatchN (8, 3, 3, 128) 384 conv2d_411[0][0] \n", "__________________________________________________________________________________________________\n", "activation_411 (Activation) (8, 3, 3, 128) 0 batch_normalization_411[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_407 (Conv2D) (8, 3, 3, 128) 98304 mixed3[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_412 (Conv2D) (8, 3, 3, 128) 114688 activation_411[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_407 (BatchN (8, 3, 3, 128) 384 conv2d_407[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_412 (BatchN (8, 3, 3, 128) 384 conv2d_412[0][0] \n", "__________________________________________________________________________________________________\n", "activation_407 (Activation) (8, 3, 3, 128) 0 batch_normalization_407[0][0] \n", "__________________________________________________________________________________________________\n", "activation_412 (Activation) (8, 3, 3, 128) 0 batch_normalization_412[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_408 (Conv2D) (8, 3, 3, 128) 114688 activation_407[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_413 (Conv2D) (8, 3, 3, 128) 114688 activation_412[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_408 (BatchN (8, 3, 3, 128) 384 conv2d_408[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_413 (BatchN (8, 3, 3, 128) 384 conv2d_413[0][0] \n", 
"__________________________________________________________________________________________________\n", "activation_408 (Activation) (8, 3, 3, 128) 0 batch_normalization_408[0][0] \n", "__________________________________________________________________________________________________\n", "activation_413 (Activation) (8, 3, 3, 128) 0 batch_normalization_413[0][0] \n", "__________________________________________________________________________________________________\n", "average_pooling2d_39 (AveragePo (8, 3, 3, 768) 0 mixed3[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_406 (Conv2D) (8, 3, 3, 192) 147456 mixed3[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_409 (Conv2D) (8, 3, 3, 192) 172032 activation_408[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_414 (Conv2D) (8, 3, 3, 192) 172032 activation_413[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_415 (Conv2D) (8, 3, 3, 192) 147456 average_pooling2d_39[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_406 (BatchN (8, 3, 3, 192) 576 conv2d_406[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_409 (BatchN (8, 3, 3, 192) 576 conv2d_409[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_414 (BatchN (8, 3, 3, 192) 576 conv2d_414[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_415 (BatchN (8, 3, 3, 192) 576 conv2d_415[0][0] \n", "__________________________________________________________________________________________________\n", "activation_406 (Activation) (8, 3, 3, 192) 0 batch_normalization_406[0][0] \n", "__________________________________________________________________________________________________\n", "activation_409 (Activation) (8, 3, 3, 192) 0 batch_normalization_409[0][0] \n", "__________________________________________________________________________________________________\n", "activation_414 (Activation) (8, 3, 3, 192) 0 batch_normalization_414[0][0] \n", "__________________________________________________________________________________________________\n", "activation_415 (Activation) (8, 3, 3, 192) 0 batch_normalization_415[0][0] \n", "__________________________________________________________________________________________________\n", "mixed4 (Concatenate) (8, 3, 3, 768) 0 activation_406[0][0] \n", " activation_409[0][0] \n", " activation_414[0][0] \n", " activation_415[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_420 (Conv2D) (8, 3, 3, 160) 122880 mixed4[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_420 (BatchN (8, 3, 3, 160) 480 conv2d_420[0][0] \n", "__________________________________________________________________________________________________\n", "activation_420 (Activation) (8, 3, 3, 160) 0 batch_normalization_420[0][0] \n", 
"__________________________________________________________________________________________________\n", "conv2d_421 (Conv2D) (8, 3, 3, 160) 179200 activation_420[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_421 (BatchN (8, 3, 3, 160) 480 conv2d_421[0][0] \n", "__________________________________________________________________________________________________\n", "activation_421 (Activation) (8, 3, 3, 160) 0 batch_normalization_421[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_417 (Conv2D) (8, 3, 3, 160) 122880 mixed4[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_422 (Conv2D) (8, 3, 3, 160) 179200 activation_421[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_417 (BatchN (8, 3, 3, 160) 480 conv2d_417[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_422 (BatchN (8, 3, 3, 160) 480 conv2d_422[0][0] \n", "__________________________________________________________________________________________________\n", "activation_417 (Activation) (8, 3, 3, 160) 0 batch_normalization_417[0][0] \n", "__________________________________________________________________________________________________\n", "activation_422 (Activation) (8, 3, 3, 160) 0 batch_normalization_422[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_418 (Conv2D) (8, 3, 3, 160) 179200 activation_417[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_423 (Conv2D) (8, 3, 3, 160) 179200 activation_422[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_418 (BatchN (8, 3, 3, 160) 480 conv2d_418[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_423 (BatchN (8, 3, 3, 160) 480 conv2d_423[0][0] \n", "__________________________________________________________________________________________________\n", "activation_418 (Activation) (8, 3, 3, 160) 0 batch_normalization_418[0][0] \n", "__________________________________________________________________________________________________\n", "activation_423 (Activation) (8, 3, 3, 160) 0 batch_normalization_423[0][0] \n", "__________________________________________________________________________________________________\n", "average_pooling2d_40 (AveragePo (8, 3, 3, 768) 0 mixed4[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_416 (Conv2D) (8, 3, 3, 192) 147456 mixed4[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_419 (Conv2D) (8, 3, 3, 192) 215040 activation_418[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_424 (Conv2D) (8, 3, 3, 192) 215040 activation_423[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_425 (Conv2D) (8, 3, 3, 192) 147456 average_pooling2d_40[0][0] \n", 
"__________________________________________________________________________________________________\n", "batch_normalization_416 (BatchN (8, 3, 3, 192) 576 conv2d_416[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_419 (BatchN (8, 3, 3, 192) 576 conv2d_419[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_424 (BatchN (8, 3, 3, 192) 576 conv2d_424[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_425 (BatchN (8, 3, 3, 192) 576 conv2d_425[0][0] \n", "__________________________________________________________________________________________________\n", "activation_416 (Activation) (8, 3, 3, 192) 0 batch_normalization_416[0][0] \n", "__________________________________________________________________________________________________\n", "activation_419 (Activation) (8, 3, 3, 192) 0 batch_normalization_419[0][0] \n", "__________________________________________________________________________________________________\n", "activation_424 (Activation) (8, 3, 3, 192) 0 batch_normalization_424[0][0] \n", "__________________________________________________________________________________________________\n", "activation_425 (Activation) (8, 3, 3, 192) 0 batch_normalization_425[0][0] \n", "__________________________________________________________________________________________________\n", "mixed5 (Concatenate) (8, 3, 3, 768) 0 activation_416[0][0] \n", " activation_419[0][0] \n", " activation_424[0][0] \n", " activation_425[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_430 (Conv2D) (8, 3, 3, 160) 122880 mixed5[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_430 (BatchN (8, 3, 3, 160) 480 conv2d_430[0][0] \n", "__________________________________________________________________________________________________\n", "activation_430 (Activation) (8, 3, 3, 160) 0 batch_normalization_430[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_431 (Conv2D) (8, 3, 3, 160) 179200 activation_430[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_431 (BatchN (8, 3, 3, 160) 480 conv2d_431[0][0] \n", "__________________________________________________________________________________________________\n", "activation_431 (Activation) (8, 3, 3, 160) 0 batch_normalization_431[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_427 (Conv2D) (8, 3, 3, 160) 122880 mixed5[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_432 (Conv2D) (8, 3, 3, 160) 179200 activation_431[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_427 (BatchN (8, 3, 3, 160) 480 conv2d_427[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_432 (BatchN (8, 3, 3, 160) 480 conv2d_432[0][0] \n", 
"__________________________________________________________________________________________________\n", "activation_427 (Activation) (8, 3, 3, 160) 0 batch_normalization_427[0][0] \n", "__________________________________________________________________________________________________\n", "activation_432 (Activation) (8, 3, 3, 160) 0 batch_normalization_432[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_428 (Conv2D) (8, 3, 3, 160) 179200 activation_427[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_433 (Conv2D) (8, 3, 3, 160) 179200 activation_432[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_428 (BatchN (8, 3, 3, 160) 480 conv2d_428[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_433 (BatchN (8, 3, 3, 160) 480 conv2d_433[0][0] \n", "__________________________________________________________________________________________________\n", "activation_428 (Activation) (8, 3, 3, 160) 0 batch_normalization_428[0][0] \n", "__________________________________________________________________________________________________\n", "activation_433 (Activation) (8, 3, 3, 160) 0 batch_normalization_433[0][0] \n", "__________________________________________________________________________________________________\n", "average_pooling2d_41 (AveragePo (8, 3, 3, 768) 0 mixed5[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_426 (Conv2D) (8, 3, 3, 192) 147456 mixed5[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_429 (Conv2D) (8, 3, 3, 192) 215040 activation_428[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_434 (Conv2D) (8, 3, 3, 192) 215040 activation_433[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_435 (Conv2D) (8, 3, 3, 192) 147456 average_pooling2d_41[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_426 (BatchN (8, 3, 3, 192) 576 conv2d_426[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_429 (BatchN (8, 3, 3, 192) 576 conv2d_429[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_434 (BatchN (8, 3, 3, 192) 576 conv2d_434[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_435 (BatchN (8, 3, 3, 192) 576 conv2d_435[0][0] \n", "__________________________________________________________________________________________________\n", "activation_426 (Activation) (8, 3, 3, 192) 0 batch_normalization_426[0][0] \n", "__________________________________________________________________________________________________\n", "activation_429 (Activation) (8, 3, 3, 192) 0 batch_normalization_429[0][0] \n", "__________________________________________________________________________________________________\n", "activation_434 (Activation) (8, 3, 3, 192) 0 
batch_normalization_434[0][0] \n", "__________________________________________________________________________________________________\n", "activation_435 (Activation) (8, 3, 3, 192) 0 batch_normalization_435[0][0] \n", "__________________________________________________________________________________________________\n", "mixed6 (Concatenate) (8, 3, 3, 768) 0 activation_426[0][0] \n", " activation_429[0][0] \n", " activation_434[0][0] \n", " activation_435[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_440 (Conv2D) (8, 3, 3, 192) 147456 mixed6[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_440 (BatchN (8, 3, 3, 192) 576 conv2d_440[0][0] \n", "__________________________________________________________________________________________________\n", "activation_440 (Activation) (8, 3, 3, 192) 0 batch_normalization_440[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_441 (Conv2D) (8, 3, 3, 192) 258048 activation_440[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_441 (BatchN (8, 3, 3, 192) 576 conv2d_441[0][0] \n", "__________________________________________________________________________________________________\n", "activation_441 (Activation) (8, 3, 3, 192) 0 batch_normalization_441[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_437 (Conv2D) (8, 3, 3, 192) 147456 mixed6[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_442 (Conv2D) (8, 3, 3, 192) 258048 activation_441[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_437 (BatchN (8, 3, 3, 192) 576 conv2d_437[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_442 (BatchN (8, 3, 3, 192) 576 conv2d_442[0][0] \n", "__________________________________________________________________________________________________\n", "activation_437 (Activation) (8, 3, 3, 192) 0 batch_normalization_437[0][0] \n", "__________________________________________________________________________________________________\n", "activation_442 (Activation) (8, 3, 3, 192) 0 batch_normalization_442[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_438 (Conv2D) (8, 3, 3, 192) 258048 activation_437[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_443 (Conv2D) (8, 3, 3, 192) 258048 activation_442[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_438 (BatchN (8, 3, 3, 192) 576 conv2d_438[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_443 (BatchN (8, 3, 3, 192) 576 conv2d_443[0][0] \n", "__________________________________________________________________________________________________\n", "activation_438 (Activation) (8, 3, 3, 192) 0 batch_normalization_438[0][0] \n", 
"__________________________________________________________________________________________________\n", "activation_443 (Activation) (8, 3, 3, 192) 0 batch_normalization_443[0][0] \n", "__________________________________________________________________________________________________\n", "average_pooling2d_42 (AveragePo (8, 3, 3, 768) 0 mixed6[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_436 (Conv2D) (8, 3, 3, 192) 147456 mixed6[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_439 (Conv2D) (8, 3, 3, 192) 258048 activation_438[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_444 (Conv2D) (8, 3, 3, 192) 258048 activation_443[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_445 (Conv2D) (8, 3, 3, 192) 147456 average_pooling2d_42[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_436 (BatchN (8, 3, 3, 192) 576 conv2d_436[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_439 (BatchN (8, 3, 3, 192) 576 conv2d_439[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_444 (BatchN (8, 3, 3, 192) 576 conv2d_444[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_445 (BatchN (8, 3, 3, 192) 576 conv2d_445[0][0] \n", "__________________________________________________________________________________________________\n", "activation_436 (Activation) (8, 3, 3, 192) 0 batch_normalization_436[0][0] \n", "__________________________________________________________________________________________________\n", "activation_439 (Activation) (8, 3, 3, 192) 0 batch_normalization_439[0][0] \n", "__________________________________________________________________________________________________\n", "activation_444 (Activation) (8, 3, 3, 192) 0 batch_normalization_444[0][0] \n", "__________________________________________________________________________________________________\n", "activation_445 (Activation) (8, 3, 3, 192) 0 batch_normalization_445[0][0] \n", "__________________________________________________________________________________________________\n", "mixed7 (Concatenate) (8, 3, 3, 768) 0 activation_436[0][0] \n", " activation_439[0][0] \n", " activation_444[0][0] \n", " activation_445[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_448 (Conv2D) (8, 3, 3, 192) 147456 mixed7[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_448 (BatchN (8, 3, 3, 192) 576 conv2d_448[0][0] \n", "__________________________________________________________________________________________________\n", "activation_448 (Activation) (8, 3, 3, 192) 0 batch_normalization_448[0][0] \n", "__________________________________________________________________________________________________\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "conv2d_449 (Conv2D) (8, 3, 3, 192) 258048 activation_448[0][0] \n", 
"__________________________________________________________________________________________________\n", "batch_normalization_449 (BatchN (8, 3, 3, 192) 576 conv2d_449[0][0] \n", "__________________________________________________________________________________________________\n", "activation_449 (Activation) (8, 3, 3, 192) 0 batch_normalization_449[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_446 (Conv2D) (8, 3, 3, 192) 147456 mixed7[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_450 (Conv2D) (8, 3, 3, 192) 258048 activation_449[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_446 (BatchN (8, 3, 3, 192) 576 conv2d_446[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_450 (BatchN (8, 3, 3, 192) 576 conv2d_450[0][0] \n", "__________________________________________________________________________________________________\n", "activation_446 (Activation) (8, 3, 3, 192) 0 batch_normalization_446[0][0] \n", "__________________________________________________________________________________________________\n", "activation_450 (Activation) (8, 3, 3, 192) 0 batch_normalization_450[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_447 (Conv2D) (8, 1, 1, 320) 552960 activation_446[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_451 (Conv2D) (8, 1, 1, 192) 331776 activation_450[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_447 (BatchN (8, 1, 1, 320) 960 conv2d_447[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_451 (BatchN (8, 1, 1, 192) 576 conv2d_451[0][0] \n", "__________________________________________________________________________________________________\n", "activation_447 (Activation) (8, 1, 1, 320) 0 batch_normalization_447[0][0] \n", "__________________________________________________________________________________________________\n", "activation_451 (Activation) (8, 1, 1, 192) 0 batch_normalization_451[0][0] \n", "__________________________________________________________________________________________________\n", "max_pooling2d_19 (MaxPooling2D) (8, 1, 1, 768) 0 mixed7[0][0] \n", "__________________________________________________________________________________________________\n", "mixed8 (Concatenate) (8, 1, 1, 1280) 0 activation_447[0][0] \n", " activation_451[0][0] \n", " max_pooling2d_19[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_456 (Conv2D) (8, 1, 1, 448) 573440 mixed8[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_456 (BatchN (8, 1, 1, 448) 1344 conv2d_456[0][0] \n", "__________________________________________________________________________________________________\n", "activation_456 (Activation) (8, 1, 1, 448) 0 batch_normalization_456[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_453 (Conv2D) 
(8, 1, 1, 384) 491520 mixed8[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_457 (Conv2D) (8, 1, 1, 384) 1548288 activation_456[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_453 (BatchN (8, 1, 1, 384) 1152 conv2d_453[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_457 (BatchN (8, 1, 1, 384) 1152 conv2d_457[0][0] \n", "__________________________________________________________________________________________________\n", "activation_453 (Activation) (8, 1, 1, 384) 0 batch_normalization_453[0][0] \n", "__________________________________________________________________________________________________\n", "activation_457 (Activation) (8, 1, 1, 384) 0 batch_normalization_457[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_454 (Conv2D) (8, 1, 1, 384) 442368 activation_453[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_455 (Conv2D) (8, 1, 1, 384) 442368 activation_453[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_458 (Conv2D) (8, 1, 1, 384) 442368 activation_457[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_459 (Conv2D) (8, 1, 1, 384) 442368 activation_457[0][0] \n", "__________________________________________________________________________________________________\n", "average_pooling2d_43 (AveragePo (8, 1, 1, 1280) 0 mixed8[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_452 (Conv2D) (8, 1, 1, 320) 409600 mixed8[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_454 (BatchN (8, 1, 1, 384) 1152 conv2d_454[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_455 (BatchN (8, 1, 1, 384) 1152 conv2d_455[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_458 (BatchN (8, 1, 1, 384) 1152 conv2d_458[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_459 (BatchN (8, 1, 1, 384) 1152 conv2d_459[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_460 (Conv2D) (8, 1, 1, 192) 245760 average_pooling2d_43[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_452 (BatchN (8, 1, 1, 320) 960 conv2d_452[0][0] \n", "__________________________________________________________________________________________________\n", "activation_454 (Activation) (8, 1, 1, 384) 0 batch_normalization_454[0][0] \n", "__________________________________________________________________________________________________\n", "activation_455 (Activation) (8, 1, 1, 384) 0 batch_normalization_455[0][0] \n", "__________________________________________________________________________________________________\n", "activation_458 (Activation) (8, 1, 
1, 384) 0 batch_normalization_458[0][0] \n", "__________________________________________________________________________________________________\n", "activation_459 (Activation) (8, 1, 1, 384) 0 batch_normalization_459[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_460 (BatchN (8, 1, 1, 192) 576 conv2d_460[0][0] \n", "__________________________________________________________________________________________________\n", "activation_452 (Activation) (8, 1, 1, 320) 0 batch_normalization_452[0][0] \n", "__________________________________________________________________________________________________\n", "mixed9_0 (Concatenate) (8, 1, 1, 768) 0 activation_454[0][0] \n", " activation_455[0][0] \n", "__________________________________________________________________________________________________\n", "concatenate_8 (Concatenate) (8, 1, 1, 768) 0 activation_458[0][0] \n", " activation_459[0][0] \n", "__________________________________________________________________________________________________\n", "activation_460 (Activation) (8, 1, 1, 192) 0 batch_normalization_460[0][0] \n", "__________________________________________________________________________________________________\n", "mixed9 (Concatenate) (8, 1, 1, 2048) 0 activation_452[0][0] \n", " mixed9_0[0][0] \n", " concatenate_8[0][0] \n", " activation_460[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_465 (Conv2D) (8, 1, 1, 448) 917504 mixed9[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_465 (BatchN (8, 1, 1, 448) 1344 conv2d_465[0][0] \n", "__________________________________________________________________________________________________\n", "activation_465 (Activation) (8, 1, 1, 448) 0 batch_normalization_465[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_462 (Conv2D) (8, 1, 1, 384) 786432 mixed9[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_466 (Conv2D) (8, 1, 1, 384) 1548288 activation_465[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_462 (BatchN (8, 1, 1, 384) 1152 conv2d_462[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_466 (BatchN (8, 1, 1, 384) 1152 conv2d_466[0][0] \n", "__________________________________________________________________________________________________\n", "activation_462 (Activation) (8, 1, 1, 384) 0 batch_normalization_462[0][0] \n", "__________________________________________________________________________________________________\n", "activation_466 (Activation) (8, 1, 1, 384) 0 batch_normalization_466[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_463 (Conv2D) (8, 1, 1, 384) 442368 activation_462[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_464 (Conv2D) (8, 1, 1, 384) 442368 activation_462[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_467 (Conv2D) (8, 1, 1, 384) 442368 activation_466[0][0] \n", 
"__________________________________________________________________________________________________\n", "conv2d_468 (Conv2D) (8, 1, 1, 384) 442368 activation_466[0][0] \n", "__________________________________________________________________________________________________\n", "average_pooling2d_44 (AveragePo (8, 1, 1, 2048) 0 mixed9[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_461 (Conv2D) (8, 1, 1, 320) 655360 mixed9[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_463 (BatchN (8, 1, 1, 384) 1152 conv2d_463[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_464 (BatchN (8, 1, 1, 384) 1152 conv2d_464[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_467 (BatchN (8, 1, 1, 384) 1152 conv2d_467[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_468 (BatchN (8, 1, 1, 384) 1152 conv2d_468[0][0] \n", "__________________________________________________________________________________________________\n", "conv2d_469 (Conv2D) (8, 1, 1, 192) 393216 average_pooling2d_44[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_461 (BatchN (8, 1, 1, 320) 960 conv2d_461[0][0] \n", "__________________________________________________________________________________________________\n", "activation_463 (Activation) (8, 1, 1, 384) 0 batch_normalization_463[0][0] \n", "__________________________________________________________________________________________________\n", "activation_464 (Activation) (8, 1, 1, 384) 0 batch_normalization_464[0][0] \n", "__________________________________________________________________________________________________\n", "activation_467 (Activation) (8, 1, 1, 384) 0 batch_normalization_467[0][0] \n", "__________________________________________________________________________________________________\n", "activation_468 (Activation) (8, 1, 1, 384) 0 batch_normalization_468[0][0] \n", "__________________________________________________________________________________________________\n", "batch_normalization_469 (BatchN (8, 1, 1, 192) 576 conv2d_469[0][0] \n", "__________________________________________________________________________________________________\n", "activation_461 (Activation) (8, 1, 1, 320) 0 batch_normalization_461[0][0] \n", "__________________________________________________________________________________________________\n", "mixed9_1 (Concatenate) (8, 1, 1, 768) 0 activation_463[0][0] \n", " activation_464[0][0] \n", "__________________________________________________________________________________________________\n", "concatenate_9 (Concatenate) (8, 1, 1, 768) 0 activation_467[0][0] \n", " activation_468[0][0] \n", "__________________________________________________________________________________________________\n", "activation_469 (Activation) (8, 1, 1, 192) 0 batch_normalization_469[0][0] \n", "__________________________________________________________________________________________________\n", "mixed10 (Concatenate) (8, 1, 1, 2048) 0 activation_461[0][0] \n", " mixed9_1[0][0] \n", " concatenate_9[0][0] \n", " activation_469[0][0] \n", 
"__________________________________________________________________________________________________\n", "flatten_2 (Flatten) (8, 2048) 0 mixed10[0][0] \n", "__________________________________________________________________________________________________\n", "dense_4 (Dense) (8, 64) 131136 flatten_2[0][0] \n", "__________________________________________________________________________________________________\n", "dropout_2 (Dropout) (8, 64) 0 dense_4[0][0] \n", "__________________________________________________________________________________________________\n", "dense_5 (Dense) (8, 2) 130 dropout_2[0][0] \n", "__________________________________________________________________________________________________\n", "flatten_3 (Flatten) (8, 2) 0 dense_5[0][0] \n", "__________________________________________________________________________________________________\n", "dense_6 (Dense) (8, 64) 192 flatten_3[0][0] \n", "__________________________________________________________________________________________________\n", "dropout_3 (Dropout) (8, 64) 0 dense_6[0][0] \n", "__________________________________________________________________________________________________\n", "dense_7 (Dense) (8, 2) 130 dropout_3[0][0] \n", "__________________________________________________________________________________________________\n", "flatten_4 (Flatten) (8, 2) 0 dense_7[0][0] \n", "__________________________________________________________________________________________________\n", "dense_8 (Dense) (8, 64) 192 flatten_4[0][0] \n", "__________________________________________________________________________________________________\n", "dropout_4 (Dropout) (8, 64) 0 dense_8[0][0] \n", "__________________________________________________________________________________________________\n", "dense_9 (Dense) (8, 2) 130 dropout_4[0][0] \n", "__________________________________________________________________________________________________\n", "flatten_5 (Flatten) (8, 2) 0 dense_9[0][0] \n", "__________________________________________________________________________________________________\n", "dense_10 (Dense) (8, 64) 192 flatten_5[0][0] \n", "__________________________________________________________________________________________________\n", "dropout_5 (Dropout) (8, 64) 0 dense_10[0][0] \n", "__________________________________________________________________________________________________\n", "dense_11 (Dense) (8, 2) 130 dropout_5[0][0] \n", "__________________________________________________________________________________________________\n", "flatten_6 (Flatten) (8, 2) 0 dense_11[0][0] \n", "__________________________________________________________________________________________________\n", "dense_12 (Dense) (8, 64) 192 flatten_6[0][0] \n", "__________________________________________________________________________________________________\n", "dropout_6 (Dropout) (8, 64) 0 dense_12[0][0] \n", "__________________________________________________________________________________________________\n", "dense_13 (Dense) (8, 2) 130 dropout_6[0][0] \n", "__________________________________________________________________________________________________\n", "flatten_7 (Flatten) (8, 2) 0 dense_13[0][0] \n", "__________________________________________________________________________________________________\n", "dense_14 (Dense) (8, 64) 192 flatten_7[0][0] \n", "__________________________________________________________________________________________________\n", "dropout_7 (Dropout) (8, 64) 0 dense_14[0][0] \n", 
"__________________________________________________________________________________________________\n", "dense_15 (Dense) (8, 2) 130 dropout_7[0][0] \n", "==================================================================================================\n", "Total params: 21,935,660\n", "Trainable params: 132,876\n", "Non-trainable params: 21,802,784\n", "__________________________________________________________________________________________________\n" ] } ], "source": [ "model.summary()" ] }, { "cell_type": "code", "execution_count": 43, "id": "225b6a54", "metadata": {}, "outputs": [], "source": [ "from tensorflow.keras.callbacks import ModelCheckpoint,EarlyStopping, ReduceLROnPlateau" ] }, { "cell_type": "code", "execution_count": 47, "id": "c0fbd7f6", "metadata": {}, "outputs": [], "source": [ "checkpoint = ModelCheckpoint(r'E:\\study\\Sem-7\\finalProject\\Drivers Drowsiness Detection using Deep Learning\\Models',\n", " monitor = 'val_loss',save_best_only = True,verbose = 3)" ] }, { "cell_type": "code", "execution_count": 53, "id": "5c998a2c", "metadata": {}, "outputs": [], "source": [ "earlystop = EarlyStopping(monitor = 'val_loss' , patience=7,verbose = 3, restore_best_weights=True)\n", "learning_rate= ReduceLROnPlateau(monitor='val_loss',patience=3,verbose=3)" ] }, { "cell_type": "code", "execution_count": 55, "id": "e658c8c1", "metadata": {}, "outputs": [], "source": [ "callbacks=[checkpoint,earlystop,learning_rate]\n" ] }, { "cell_type": "code", "execution_count": 56, "id": "99ab0e6b", "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "C:\\ProgramData\\Anaconda3\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py:1940: UserWarning: `Model.fit_generator` is deprecated and will be removed in a future version. Please use `Model.fit`, which supports generators.\n", " warnings.warn('`Model.fit_generator` is deprecated and '\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1/50\n", "7641/7641 [==============================] - 2656s 347ms/step - loss: 0.4920 - accuracy: 0.7714 - val_loss: 0.4017 - val_accuracy: 0.8552\n", "\n", "Epoch 00001: val_loss improved from inf to 0.40173, saving model to E:\\study\\Sem-7\\finalProject\\Drivers Drowsiness Detection using Deep Learning\\Models\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "WARNING:absl:Found untraced functions such as softmax_2_layer_call_fn, softmax_2_layer_call_and_return_conditional_losses, softmax_3_layer_call_fn, softmax_3_layer_call_and_return_conditional_losses, softmax_4_layer_call_fn while saving (showing 5 of 30). 
These functions will not be directly callable after loading.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "INFO:tensorflow:Assets written to: E:\\study\\Sem-7\\finalProject\\Drivers Drowsiness Detection using Deep Learning\\Models\\assets\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "INFO:tensorflow:Assets written to: E:\\study\\Sem-7\\finalProject\\Drivers Drowsiness Detection using Deep Learning\\Models\\assets\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 2/50\n", "7641/7641 [==============================] - 2455s 321ms/step - loss: 0.4202 - accuracy: 0.8507 - val_loss: 0.4206 - val_accuracy: 0.8512\n", "\n", "Epoch 00002: val_loss did not improve from 0.40173\n", "Epoch 3/50\n", "7641/7641 [==============================] - 3070s 402ms/step - loss: 0.3943 - accuracy: 0.8605 - val_loss: 0.3772 - val_accuracy: 0.8706\n", "\n", "Epoch 00003: val_loss improved from 0.40173 to 0.37724, saving model to E:\\study\\Sem-7\\finalProject\\Drivers Drowsiness Detection using Deep Learning\\Models\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "WARNING:absl:Found untraced functions such as softmax_2_layer_call_fn, softmax_2_layer_call_and_return_conditional_losses, softmax_3_layer_call_fn, softmax_3_layer_call_and_return_conditional_losses, softmax_4_layer_call_fn while saving (showing 5 of 30). These functions will not be directly callable after loading.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "INFO:tensorflow:Assets written to: E:\\study\\Sem-7\\finalProject\\Drivers Drowsiness Detection using Deep Learning\\Models\\assets\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "INFO:tensorflow:Assets written to: E:\\study\\Sem-7\\finalProject\\Drivers Drowsiness Detection using Deep Learning\\Models\\assets\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 4/50\n", " 577/7641 [=>............................] 
- ETA: 59:35 - loss: 0.3983 - accuracy: 0.8681" ] }, { "ename": "KeyboardInterrupt", "evalue": "", "output_type": "error", "traceback": [ "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", "\u001b[1;32m~\\AppData\\Local\\Temp/ipykernel_948/567684715.py\u001b[0m in \u001b[0;36m\u001b[1;34m\u001b[0m\n\u001b[0;32m 1\u001b[0m \u001b[0mmodel\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mcompile\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0moptimizer\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;34m'Adam'\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mloss\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;34m'categorical_crossentropy'\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mmetrics\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;34m'accuracy'\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 2\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m----> 3\u001b[1;33m model.fit_generator(train_data,steps_per_epoch=train_data.samples//batchsize,\n\u001b[0m\u001b[0;32m 4\u001b[0m \u001b[0mvalidation_data\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mvalidation_data\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 5\u001b[0m \u001b[0mvalidation_steps\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mvalidation_data\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0msamples\u001b[0m\u001b[1;33m//\u001b[0m\u001b[0mbatchsize\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", "\u001b[1;32mC:\\ProgramData\\Anaconda3\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py\u001b[0m in \u001b[0;36mfit_generator\u001b[1;34m(self, generator, steps_per_epoch, epochs, verbose, callbacks, validation_data, validation_steps, validation_freq, class_weight, max_queue_size, workers, use_multiprocessing, shuffle, initial_epoch)\u001b[0m\n\u001b[0;32m 1941\u001b[0m \u001b[1;34m'will be removed in a future version. 
'\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 1942\u001b[0m 'Please use `Model.fit`, which supports generators.')\n\u001b[1;32m-> 1943\u001b[1;33m return self.fit(\n\u001b[0m\u001b[0;32m 1944\u001b[0m \u001b[0mgenerator\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 1945\u001b[0m \u001b[0msteps_per_epoch\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0msteps_per_epoch\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", "\u001b[1;32mC:\\ProgramData\\Anaconda3\\lib\\site-packages\\tensorflow\\python\\keras\\engine\\training.py\u001b[0m in \u001b[0;36mfit\u001b[1;34m(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)\u001b[0m\n\u001b[0;32m 1181\u001b[0m _r=1):\n\u001b[0;32m 1182\u001b[0m \u001b[0mcallbacks\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mon_train_batch_begin\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mstep\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m-> 1183\u001b[1;33m \u001b[0mtmp_logs\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mtrain_function\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0miterator\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 1184\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0mdata_handler\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mshould_sync\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 1185\u001b[0m \u001b[0mcontext\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0masync_wait\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", "\u001b[1;32mC:\\ProgramData\\Anaconda3\\lib\\site-packages\\tensorflow\\python\\eager\\def_function.py\u001b[0m in \u001b[0;36m__call__\u001b[1;34m(self, *args, **kwds)\u001b[0m\n\u001b[0;32m 887\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 888\u001b[0m \u001b[1;32mwith\u001b[0m \u001b[0mOptionalXlaContext\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_jit_compile\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 889\u001b[1;33m \u001b[0mresult\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_call\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m*\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m**\u001b[0m\u001b[0mkwds\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 890\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 891\u001b[0m \u001b[0mnew_tracing_count\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mexperimental_get_tracing_count\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", "\u001b[1;32mC:\\ProgramData\\Anaconda3\\lib\\site-packages\\tensorflow\\python\\eager\\def_function.py\u001b[0m in \u001b[0;36m_call\u001b[1;34m(self, *args, **kwds)\u001b[0m\n\u001b[0;32m 915\u001b[0m \u001b[1;31m# In this case we have created variables on the first call, so we run the\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 916\u001b[0m \u001b[1;31m# defunned version which is 
guaranteed to never create variables.\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 917\u001b[1;33m \u001b[1;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_stateless_fn\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m*\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;33m**\u001b[0m\u001b[0mkwds\u001b[0m\u001b[1;33m)\u001b[0m \u001b[1;31m# pylint: disable=not-callable\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 918\u001b[0m \u001b[1;32melif\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_stateful_fn\u001b[0m \u001b[1;32mis\u001b[0m \u001b[1;32mnot\u001b[0m \u001b[1;32mNone\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 919\u001b[0m \u001b[1;31m# Release the lock early so that multiple threads can perform the call\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", "\u001b[1;32mC:\\ProgramData\\Anaconda3\\lib\\site-packages\\tensorflow\\python\\eager\\function.py\u001b[0m in \u001b[0;36m__call__\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m 3021\u001b[0m (graph_function,\n\u001b[0;32m 3022\u001b[0m filtered_flat_args) = self._maybe_define_function(args, kwargs)\n\u001b[1;32m-> 3023\u001b[1;33m return graph_function._call_flat(\n\u001b[0m\u001b[0;32m 3024\u001b[0m filtered_flat_args, captured_inputs=graph_function.captured_inputs) # pylint: disable=protected-access\n\u001b[0;32m 3025\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n", "\u001b[1;32mC:\\ProgramData\\Anaconda3\\lib\\site-packages\\tensorflow\\python\\eager\\function.py\u001b[0m in \u001b[0;36m_call_flat\u001b[1;34m(self, args, captured_inputs, cancellation_manager)\u001b[0m\n\u001b[0;32m 1958\u001b[0m and executing_eagerly):\n\u001b[0;32m 1959\u001b[0m \u001b[1;31m# No tape is watching; skip to running the function.\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m-> 1960\u001b[1;33m return self._build_call_outputs(self._inference_function.call(\n\u001b[0m\u001b[0;32m 1961\u001b[0m ctx, args, cancellation_manager=cancellation_manager))\n\u001b[0;32m 1962\u001b[0m forward_backward = self._select_forward_and_backward_functions(\n", "\u001b[1;32mC:\\ProgramData\\Anaconda3\\lib\\site-packages\\tensorflow\\python\\eager\\function.py\u001b[0m in \u001b[0;36mcall\u001b[1;34m(self, ctx, args, cancellation_manager)\u001b[0m\n\u001b[0;32m 589\u001b[0m \u001b[1;32mwith\u001b[0m \u001b[0m_InterpolateFunctionError\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 590\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0mcancellation_manager\u001b[0m \u001b[1;32mis\u001b[0m \u001b[1;32mNone\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 591\u001b[1;33m outputs = execute.execute(\n\u001b[0m\u001b[0;32m 592\u001b[0m \u001b[0mstr\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0msignature\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mname\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 593\u001b[0m \u001b[0mnum_outputs\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_num_outputs\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", 
"\u001b[1;32mC:\\ProgramData\\Anaconda3\\lib\\site-packages\\tensorflow\\python\\eager\\execute.py\u001b[0m in \u001b[0;36mquick_execute\u001b[1;34m(op_name, num_outputs, inputs, attrs, ctx, name)\u001b[0m\n\u001b[0;32m 57\u001b[0m \u001b[1;32mtry\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 58\u001b[0m \u001b[0mctx\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mensure_initialized\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 59\u001b[1;33m tensors = pywrap_tfe.TFE_Py_Execute(ctx._handle, device_name, op_name,\n\u001b[0m\u001b[0;32m 60\u001b[0m inputs, attrs, num_outputs)\n\u001b[0;32m 61\u001b[0m \u001b[1;32mexcept\u001b[0m \u001b[0mcore\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_NotOkStatusException\u001b[0m \u001b[1;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", "\u001b[1;31mKeyboardInterrupt\u001b[0m: " ] } ], "source": [ "model.compile(optimizer='Adam', loss='categorical_crossentropy',metrics=['accuracy'])\n", "\n", "model.fit_generator(train_data,steps_per_epoch=train_data.samples//batchsize,\n", " validation_data=validation_data,\n", " validation_steps=validation_data.samples//batchsize,\n", " callbacks=callbacks,\n", " epochs=50)" ] }, { "cell_type": "code", "execution_count": null, "id": "87271003", "metadata": {}, "outputs": [], "source": [] }, { "cell_type": "code", "execution_count": null, "id": "07d5b272", "metadata": {}, "outputs": [], "source": [] } ], "metadata": { "kernelspec": { "display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.9.7" } }, "nbformat": 4, "nbformat_minor": 5 }