eddydecena committed on
Commit
dbeb7c3
1 Parent(s): 2da2830

First model version

Browse files
Files changed (28) hide show
  1. .gitattributes +1 -1
  2. .gitignore +244 -0
  3. examples/cat1.jpg +0 -0
  4. examples/cat2.jpg +0 -0
  5. examples/dog1.jpeg +0 -0
  6. examples/dog2.jpeg +0 -0
  7. inference.py +26 -0
  8. requirements.txt +6 -0
  9. server.py +37 -0
  10. src/config.py +15 -0
  11. src/draw.py +21 -0
  12. src/models.py +77 -0
  13. src/preprocessing.py +29 -0
  14. train.py +62 -0
  15. tuner_model/cat-vs-dog/oracle.json +3 -0
  16. tuner_model/cat-vs-dog/trial_0484d8d758a5ef7b91ca97d334ba7870/checkpoints/epoch_0/checkpoint +3 -0
  17. tuner_model/cat-vs-dog/trial_0484d8d758a5ef7b91ca97d334ba7870/checkpoints/epoch_0/checkpoint.data-00000-of-00001 +3 -0
  18. tuner_model/cat-vs-dog/trial_0484d8d758a5ef7b91ca97d334ba7870/checkpoints/epoch_0/checkpoint.index +3 -0
  19. tuner_model/cat-vs-dog/trial_0484d8d758a5ef7b91ca97d334ba7870/trial.json +3 -0
  20. tuner_model/cat-vs-dog/trial_7d8a24b4163e3b3211079dbc5be02dac/checkpoints/epoch_0/checkpoint +3 -0
  21. tuner_model/cat-vs-dog/trial_7d8a24b4163e3b3211079dbc5be02dac/checkpoints/epoch_0/checkpoint.data-00000-of-00001 +3 -0
  22. tuner_model/cat-vs-dog/trial_7d8a24b4163e3b3211079dbc5be02dac/checkpoints/epoch_0/checkpoint.index +3 -0
  23. tuner_model/cat-vs-dog/trial_7d8a24b4163e3b3211079dbc5be02dac/trial.json +3 -0
  24. tuner_model/cat-vs-dog/trial_ee38b0cfcac1da6bbf8baa912585407e/checkpoints/epoch_0/checkpoint +3 -0
  25. tuner_model/cat-vs-dog/trial_ee38b0cfcac1da6bbf8baa912585407e/checkpoints/epoch_0/checkpoint.data-00000-of-00001 +3 -0
  26. tuner_model/cat-vs-dog/trial_ee38b0cfcac1da6bbf8baa912585407e/checkpoints/epoch_0/checkpoint.index +3 -0
  27. tuner_model/cat-vs-dog/trial_ee38b0cfcac1da6bbf8baa912585407e/trial.json +3 -0
  28. tuner_model/cat-vs-dog/tuner0.json +3 -0
.gitattributes CHANGED
@@ -17,7 +17,7 @@
17
  *.pt filter=lfs diff=lfs merge=lfs -text
18
  *.pth filter=lfs diff=lfs merge=lfs -text
19
  *.rar filter=lfs diff=lfs merge=lfs -text
20
- saved_model/**/* filter=lfs diff=lfs merge=lfs -text
21
  *.tar.* filter=lfs diff=lfs merge=lfs -text
22
  *.tflite filter=lfs diff=lfs merge=lfs -text
23
  *.tgz filter=lfs diff=lfs merge=lfs -text
 
17
  *.pt filter=lfs diff=lfs merge=lfs -text
18
  *.pth filter=lfs diff=lfs merge=lfs -text
19
  *.rar filter=lfs diff=lfs merge=lfs -text
20
+ tuner_model/**/* filter=lfs diff=lfs merge=lfs -text
21
  *.tar.* filter=lfs diff=lfs merge=lfs -text
22
  *.tflite filter=lfs diff=lfs merge=lfs -text
23
  *.tgz filter=lfs diff=lfs merge=lfs -text
.gitignore ADDED
@@ -0,0 +1,244 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ # Created by https://www.toptal.com/developers/gitignore/api/python,virtualenv,linux,windows,macos,git
3
+ # Edit at https://www.toptal.com/developers/gitignore?templates=python,virtualenv,linux,windows,macos,git
4
+
5
+ ### Git ###
6
+ # Created by git for backups. To disable backups in Git:
7
+ # $ git config --global mergetool.keepBackup false
8
+ *.orig
9
+
10
+ # Created by git when using merge tools for conflicts
11
+ *.BACKUP.*
12
+ *.BASE.*
13
+ *.LOCAL.*
14
+ *.REMOTE.*
15
+ *_BACKUP_*.txt
16
+ *_BASE_*.txt
17
+ *_LOCAL_*.txt
18
+ *_REMOTE_*.txt
19
+
20
+ ### Linux ###
21
+ *~
22
+
23
+ # temporary files which can be created if a process still has a handle open of a deleted file
24
+ .fuse_hidden*
25
+
26
+ # KDE directory preferences
27
+ .directory
28
+
29
+ # Linux trash folder which might appear on any partition or disk
30
+ .Trash-*
31
+
32
+ # .nfs files are created when an open file is removed but is still being accessed
33
+ .nfs*
34
+
35
+ ### macOS ###
36
+ # General
37
+ .DS_Store
38
+ .AppleDouble
39
+ .LSOverride
40
+
41
+ # Icon must end with two \r
42
+ Icon
43
+
44
+
45
+ # Thumbnails
46
+ ._*
47
+
48
+ # Files that might appear in the root of a volume
49
+ .DocumentRevisions-V100
50
+ .fseventsd
51
+ .Spotlight-V100
52
+ .TemporaryItems
53
+ .Trashes
54
+ .VolumeIcon.icns
55
+ .com.apple.timemachine.donotpresent
56
+
57
+ # Directories potentially created on remote AFP share
58
+ .AppleDB
59
+ .AppleDesktop
60
+ Network Trash Folder
61
+ Temporary Items
62
+ .apdisk
63
+
64
+ ### Python ###
65
+ # Byte-compiled / optimized / DLL files
66
+ __pycache__/
67
+ *.py[cod]
68
+ *$py.class
69
+
70
+ # C extensions
71
+ *.so
72
+
73
+ # Distribution / packaging
74
+ .Python
75
+ build/
76
+ develop-eggs/
77
+ dist/
78
+ downloads/
79
+ eggs/
80
+ .eggs/
81
+ lib/
82
+ lib64/
83
+ parts/
84
+ sdist/
85
+ var/
86
+ wheels/
87
+ share/python-wheels/
88
+ *.egg-info/
89
+ .installed.cfg
90
+ *.egg
91
+ MANIFEST
92
+
93
+ # PyInstaller
94
+ # Usually these files are written by a python script from a template
95
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
96
+ *.manifest
97
+ *.spec
98
+
99
+ # Installer logs
100
+ pip-log.txt
101
+ pip-delete-this-directory.txt
102
+
103
+ # Unit test / coverage reports
104
+ htmlcov/
105
+ .tox/
106
+ .nox/
107
+ .coverage
108
+ .coverage.*
109
+ .cache
110
+ nosetests.xml
111
+ coverage.xml
112
+ *.cover
113
+ *.py,cover
114
+ .hypothesis/
115
+ .pytest_cache/
116
+ cover/
117
+
118
+ # Translations
119
+ *.mo
120
+ *.pot
121
+
122
+ # Django stuff:
123
+ *.log
124
+ local_settings.py
125
+ db.sqlite3
126
+ db.sqlite3-journal
127
+
128
+ # Flask stuff:
129
+ instance/
130
+ .webassets-cache
131
+
132
+ # Scrapy stuff:
133
+ .scrapy
134
+
135
+ # Sphinx documentation
136
+ docs/_build/
137
+
138
+ # PyBuilder
139
+ .pybuilder/
140
+ target/
141
+
142
+ # Jupyter Notebook
143
+ .ipynb_checkpoints
144
+
145
+ # IPython
146
+ profile_default/
147
+ ipython_config.py
148
+
149
+ # pyenv
150
+ # For a library or package, you might want to ignore these files since the code is
151
+ # intended to run in multiple environments; otherwise, check them in:
152
+ # .python-version
153
+
154
+ # pipenv
155
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
156
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
157
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
158
+ # install all needed dependencies.
159
+ #Pipfile.lock
160
+
161
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow
162
+ __pypackages__/
163
+
164
+ # Celery stuff
165
+ celerybeat-schedule
166
+ celerybeat.pid
167
+
168
+ # SageMath parsed files
169
+ *.sage.py
170
+
171
+ # Environments
172
+ .env
173
+ .venv
174
+ env/
175
+ venv/
176
+ ENV/
177
+ env.bak/
178
+ venv.bak/
179
+
180
+ # Spyder project settings
181
+ .spyderproject
182
+ .spyproject
183
+
184
+ # Rope project settings
185
+ .ropeproject
186
+
187
+ # mkdocs documentation
188
+ /site
189
+
190
+ # mypy
191
+ .mypy_cache/
192
+ .dmypy.json
193
+ dmypy.json
194
+
195
+ # Pyre type checker
196
+ .pyre/
197
+
198
+ # pytype static type analyzer
199
+ .pytype/
200
+
201
+ # Cython debug symbols
202
+ cython_debug/
203
+
204
+ ### VirtualEnv ###
205
+ # Virtualenv
206
+ # http://iamzed.com/2009/05/07/a-primer-on-virtualenv/
207
+ [Bb]in
208
+ [Ii]nclude
209
+ [Ll]ib
210
+ [Ll]ib64
211
+ [Ll]ocal
212
+ [Ss]cripts
213
+ pyvenv.cfg
214
+ pip-selfcheck.json
215
+
216
+ ### Windows ###
217
+ # Windows thumbnail cache files
218
+ Thumbs.db
219
+ Thumbs.db:encryptable
220
+ ehthumbs.db
221
+ ehthumbs_vista.db
222
+
223
+ # Dump file
224
+ *.stackdump
225
+
226
+ # Folder config file
227
+ [Dd]esktop.ini
228
+
229
+ # Recycle Bin used on file shares
230
+ $RECYCLE.BIN/
231
+
232
+ # Windows Installer files
233
+ *.cab
234
+ *.msi
235
+ *.msix
236
+ *.msm
237
+ *.msp
238
+
239
+ # Windows shortcuts
240
+ *.lnk
241
+
242
+ # End of https://www.toptal.com/developers/gitignore/api/python,virtualenv,linux,windows,macos,git
243
+
244
+ data
examples/cat1.jpg ADDED
examples/cat2.jpg ADDED
examples/dog1.jpeg ADDED
examples/dog2.jpeg ADDED
inference.py ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import tensorflow as tf
2
+ from keras_tuner import HyperParameters
3
+ from src.models import MakeHyperModel
4
+ from src.preprocessing import get_data_augmentation
5
+ from src.config import IMAGE_SIZE
6
+
7
+ data_augmentation = get_data_augmentation()
8
+
9
+ img = tf.keras.preprocessing.image.load_img(
10
+ "examples/cat2.jpg", target_size=IMAGE_SIZE
11
+ )
12
+ img_array = tf.keras.preprocessing.image.img_to_array(img)
13
+ img_array = tf.expand_dims(img_array, 0)
14
+
15
+ latest = tf.train.latest_checkpoint('./tuner_model/cat-vs-dog/trial_0484d8d758a5ef7b91ca97d334ba7870/checkpoints/epoch_0')
16
+ hypermodel = MakeHyperModel(input_shape=IMAGE_SIZE + (3,), num_classes=2, data_augmentation=data_augmentation)
17
+ model = hypermodel.build(hp=HyperParameters())
18
+ model.load_weights(latest).expect_partial()
19
+
20
+
21
+ predictions = model.predict(img_array)
22
+ score = predictions[0]
23
+ print(
24
+ "This image is %.2f percent cat and %.2f percent dog."
25
+ % (100 * (1 - score), 100 * score)
26
+ )
requirements.txt ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ tensorflow==2.6.0
2
+ keras-tuner==1.0.4
3
+ matplotlib==3.4.3
4
+ pydot==1.4.2
5
+ pandas==1.3.4
6
+ gradio==2.4.5
server.py ADDED
@@ -0,0 +1,37 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ import tensorflow as tf
3
+ from keras_tuner import HyperParameters
4
+ from src.models import MakeHyperModel
5
+ from src.preprocessing import get_data_augmentation
6
+
7
+ from src.config import IMAGE_SIZE
8
+
9
+ data_augmentation = get_data_augmentation()
10
+
11
+ latest = tf.train.latest_checkpoint('./tuner_model/cat-vs-dog/trial_0484d8d758a5ef7b91ca97d334ba7870/checkpoints/epoch_0')
12
+ hypermodel = MakeHyperModel(input_shape=IMAGE_SIZE + (3,), num_classes=2, data_augmentation=data_augmentation)
13
+ model = hypermodel.build(hp=HyperParameters())
14
+ model.load_weights(latest).expect_partial()
15
+
16
+ def cat_vs_dog(image):
17
+ img_array = tf.constant(image, dtype=tf.float32)
18
+ img_array = tf.expand_dims(img_array, 0)
19
+ predictions = model.predict(img_array)
20
+ score = predictions[0]
21
+ return {'cat': float((1 - score)), 'dog': float(score)}
22
+
23
+ iface = gr.Interface(
24
+ cat_vs_dog,
25
+ gr.inputs.Image(shape=IMAGE_SIZE),
26
+ gr.outputs.Label(num_top_classes=2),
27
+ capture_session=True,
28
+ interpretation="default",
29
+ examples=[
30
+ ["examples/cat1.jpg"],
31
+ ["examples/cat2.jpg"],
32
+ ["examples/dog1.jpeg"],
33
+ ["examples/dog2.jpeg"]
34
+ ])
35
+
36
+ if __name__ == "__main__":
37
+ iface.launch()
src/config.py ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
"""Shared configuration for the cat-vs-dog training and serving code."""
import os

# Zip archive of the Kaggle "Cats vs Dogs" dataset hosted by Microsoft.
DATASET_URL = 'https://download.microsoft.com/download/3/E/1/3E1C3F21-ECDB-4869-8368-6DEBA77B919F/kagglecatsanddogs_3367a.zip'

# get_file() caches the download under CACHE_DIR/CACHE_SUBDIR.
CACHE_DIR = os.getcwd()
CACHE_SUBDIR = 'data'

# Create the cache directory up front. makedirs(..., exist_ok=True) is
# race-free and uses the same absolute path the rest of the code joins,
# instead of the original cwd-relative mkdir guarded by isdir().
os.makedirs(os.path.join(CACHE_DIR, CACHE_SUBDIR), exist_ok=True)

# The archive extracts into a top-level 'PetImages' folder.
DATASET_PATH = os.path.join(CACHE_DIR, CACHE_SUBDIR, 'PetImages')

IMAGE_SIZE = (180, 180)  # (height, width) fed to the network
BATCH_SIZE = 32
EPOCHS = 50
src/draw.py ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Optional
2
+
3
+ import tensorflow as tf
4
+ import matplotlib.pyplot as plt
5
+
6
+
7
def visualize_data(dataset: tf.data.Dataset, data_augmentation: Optional[tf.keras.Sequential] = None) -> None:
    """Plot a 3x3 grid of images from the first batch, each titled with its label.

    Args:
        dataset: batched image dataset yielding (images, labels) pairs;
            batches are assumed to hold at least 9 images.
        data_augmentation: optional augmentation pipeline; when given, the
            augmented version of each image is shown instead of the original.
    """
    plt.figure(figsize=(10, 10))
    for images, labels in dataset.take(1):
        # Augment the whole batch once instead of re-running it per subplot.
        if data_augmentation is not None:
            shown = data_augmentation(images)
        else:
            shown = images

        for i in range(9):
            _ = plt.subplot(3, 3, i + 1)
            # Bug fix: index with i so each image matches its title; the old
            # code always drew augmented image 0 while titling with labels[i].
            plt.imshow(shown[i].numpy().astype('uint8'))
            plt.title(int(labels[i]))
            plt.axis('off')
    plt.show()
src/models.py ADDED
@@ -0,0 +1,77 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Tuple
2
+ from typing import Optional
3
+
4
+ import tensorflow as tf
5
+ from tensorflow.keras import layers
6
+ from keras_tuner import HyperModel
7
+
8
class MakeHyperModel(HyperModel):
    """Keras Tuner hypermodel building a small Xception-style CNN classifier.

    The only tuned hyperparameter is the Adam learning rate; the architecture
    itself is fixed.
    """

    def __init__(self, input_shape: Tuple[int, int, int], num_classes: int, data_augmentation: Optional[tf.keras.Sequential] = None) -> None:
        # Bug fix: HyperModel.__init__ sets up the hypermodel's own state
        # (name/tunable flags) and was never invoked by the original code.
        super().__init__()
        self.input_shape = input_shape          # e.g. (180, 180, 3)
        self.num_classes = num_classes
        self.data_augmentation = data_augmentation  # optional preprocessing head

    def build(self, hp) -> tf.keras.Model:
        """Build and compile the model for one hyperparameter trial.

        Args:
            hp: keras_tuner.HyperParameters used to sample the learning rate.

        Returns:
            A compiled tf.keras.Model.
        """
        inputs = tf.keras.Input(shape=self.input_shape)

        # Optional augmentation runs inside the graph, before rescaling.
        if self.data_augmentation is not None:
            x = self.data_augmentation(inputs)
        else:
            x = inputs

        # Entry stem: rescale to [0, 1], then two plain conv blocks.
        x = layers.Rescaling(1.0/255)(x)
        x = layers.Conv2D(32, 3, strides=2, padding='same')(x)
        x = layers.BatchNormalization()(x)
        x = layers.Activation('relu')(x)

        x = layers.Conv2D(64, 3, padding='same')(x)
        x = layers.BatchNormalization()(x)
        x = layers.Activation('relu')(x)

        previous_block_activation = x

        # Downsampling separable-conv blocks with projected residual shortcuts.
        for size in [128, 256, 512, 728]:
            x = layers.Activation('relu')(x)
            x = layers.SeparableConv2D(size, 3, padding='same')(x)
            x = layers.BatchNormalization()(x)

            x = layers.Activation("relu")(x)
            x = layers.SeparableConv2D(size, 3, padding='same')(x)
            x = layers.BatchNormalization()(x)

            x = layers.MaxPooling2D(3, strides=2, padding='same')(x)

            # 1x1 strided conv matches the residual's shape to the pooled path.
            residual = layers.Conv2D(size, 1, strides=2, padding='same')(previous_block_activation)

            x = layers.add([x, residual])
            previous_block_activation = x

        x = layers.SeparableConv2D(1024, 3, padding='same')(x)
        x = layers.BatchNormalization()(x)
        x = layers.Activation('relu')(x)

        x = layers.GlobalAveragePooling2D()(x)

        # Binary problems use a single sigmoid unit; otherwise softmax over classes.
        if self.num_classes == 2:
            activation = 'sigmoid'
            loss_fn = 'binary_crossentropy'
            units = 1
        else:
            activation = 'softmax'
            loss_fn = 'categorical_crossentropy'
            units = self.num_classes

        x = layers.Dropout(0.5)(x)
        outputs = layers.Dense(units, activation=activation)(x)

        model = tf.keras.Model(inputs, outputs)

        model.compile(
            optimizer=tf.keras.optimizers.Adam(
                # The tuned hyperparameter: one of three learning rates.
                hp.Choice("learning_rate", values=[1e-2, 1e-3, 1e-4])
            ),
            loss=loss_fn,
            metrics=['accuracy']
        )

        return model
src/preprocessing.py ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ from typing import Tuple
3
+
4
+ import tensorflow as tf
5
+
6
def delete_corrupted_image(dataset_path: str, categories: Tuple[str]) -> int:
    """Delete image files whose header lacks the JFIF marker.

    Args:
        dataset_path: root folder containing one sub-folder per category.
        categories: sub-folder names to scan (e.g. ('Cat', 'Dog')).

    Returns:
        The number of files removed.
    """
    num_skipped = 0

    for folder_name in categories:
        folder_path = os.path.join(dataset_path, folder_name)
        for fname in os.listdir(folder_path):
            fpath = os.path.join(folder_path, fname)
            # Bug fix: the old try/finally raised NameError on 'fobj' when
            # open() itself failed; 'with' closes the file on every path.
            with open(fpath, 'rb') as fobj:
                # b"JFIF" sits at byte offset 6 of a valid JPEG/JFIF header;
                # peek() inspects the buffered prefix without consuming it.
                # (tf.compat.as_bytes("JFIF") was just a roundabout b"JFIF".)
                is_jfif = b"JFIF" in fobj.peek(10)

            if not is_jfif:
                num_skipped += 1
                os.remove(fpath)

    return num_skipped
24
+
25
def get_data_augmentation() -> tf.keras.Sequential:
    """Build the training-time augmentation pipeline.

    Returns:
        A Sequential applying a random horizontal flip followed by a random
        rotation of up to 0.1 * 2*pi radians.
    """
    augmentation_layers = [
        tf.keras.layers.RandomFlip('horizontal'),
        tf.keras.layers.RandomRotation(0.1),
    ]
    return tf.keras.Sequential(augmentation_layers)
train.py ADDED
@@ -0,0 +1,62 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
"""Download the cats-vs-dogs dataset and tune the classifier with RandomSearch."""
from tensorflow.keras.utils import get_file
import tensorflow as tf
from keras_tuner import RandomSearch
from keras_tuner import Objective

from src.preprocessing import delete_corrupted_image
from src.draw import visualize_data
from src.preprocessing import get_data_augmentation
from src.models import MakeHyperModel

from src.config import DATASET_URL
from src.config import CACHE_DIR
from src.config import CACHE_SUBDIR
from src.config import DATASET_PATH
from src.config import IMAGE_SIZE
from src.config import BATCH_SIZE
from src.config import EPOCHS

# Download/extract the archive into the cache dir, then drop files whose
# header has no JFIF marker (the dataset contains corrupted images).
get_file(origin=DATASET_URL, extract=True, cache_dir=CACHE_DIR, cache_subdir=CACHE_SUBDIR)
print(delete_corrupted_image(DATASET_PATH, ('Cat', 'Dog')))

# 80/20 train/validation split; the shared seed keeps the two subsets disjoint.
train_ds = tf.keras.preprocessing.image_dataset_from_directory(
    DATASET_PATH,
    validation_split=0.2,
    subset='training',
    seed=1337,
    image_size=IMAGE_SIZE,
    batch_size=BATCH_SIZE
)

val_ds = tf.keras.preprocessing.image_dataset_from_directory(
    DATASET_PATH,
    validation_split=0.2,
    subset='validation',
    seed=1337,
    image_size=IMAGE_SIZE,
    batch_size=BATCH_SIZE
)

# Overlap input loading with training.
train_ds = train_ds.prefetch(buffer_size=BATCH_SIZE)
val_ds = val_ds.prefetch(buffer_size=BATCH_SIZE)

data_augmentation = get_data_augmentation()

# Sanity-check a batch of (augmented) training images before searching.
visualize_data(train_ds, data_augmentation=data_augmentation)

hypermodel = MakeHyperModel(input_shape=IMAGE_SIZE + (3,), num_classes=2, data_augmentation=data_augmentation)
tuner = RandomSearch(
    hypermodel,
    objective=Objective("val_accuracy", direction="max"),
    max_trials=3,
    executions_per_trial=2,
    overwrite=True,
    directory='tuner_model',
    project_name='cat-vs-dog'
)

tuner.search_space_summary()

tuner.search(train_ds, epochs=EPOCHS, validation_data=val_ds)

# Bug fix: the original called get_best_hyperparameters() and silently
# discarded the result; report the winning configuration instead.
best_hps = tuner.get_best_hyperparameters()[0]
print('Best hyperparameters:', best_hps.values)
tuner_model/cat-vs-dog/oracle.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e3c510f2e84cb8c3a174dc13dc711552e795b4ffcd4d471c089c35b24d5ac740
3
+ size 397
tuner_model/cat-vs-dog/trial_0484d8d758a5ef7b91ca97d334ba7870/checkpoints/epoch_0/checkpoint ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:02988027faf3f16b4088ee83c2ade14098e8ffb325c23a576cc639dae48aa936
3
+ size 77
tuner_model/cat-vs-dog/trial_0484d8d758a5ef7b91ca97d334ba7870/checkpoints/epoch_0/checkpoint.data-00000-of-00001 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:de31100bc5b28acb3fd12d6dee73ceb96ef500f28182f8d9c38b1fa4010ee607
3
+ size 33354255
tuner_model/cat-vs-dog/trial_0484d8d758a5ef7b91ca97d334ba7870/checkpoints/epoch_0/checkpoint.index ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4a1d98098058e8affa700b53eddef713ad9972bbd8e402c4790abcce74020e40
3
+ size 15366
tuner_model/cat-vs-dog/trial_0484d8d758a5ef7b91ca97d334ba7870/trial.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:baba34ec678fb7cc21b79b825ec6860dd6005acfaf2fc2d0b1b408a918bf8264
3
+ size 739
tuner_model/cat-vs-dog/trial_7d8a24b4163e3b3211079dbc5be02dac/checkpoints/epoch_0/checkpoint ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:02988027faf3f16b4088ee83c2ade14098e8ffb325c23a576cc639dae48aa936
3
+ size 77
tuner_model/cat-vs-dog/trial_7d8a24b4163e3b3211079dbc5be02dac/checkpoints/epoch_0/checkpoint.data-00000-of-00001 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4a05b0504c5858cbeb7d2ea62da18609806d4a401a548f4279569b0fced62bff
3
+ size 33354255
tuner_model/cat-vs-dog/trial_7d8a24b4163e3b3211079dbc5be02dac/checkpoints/epoch_0/checkpoint.index ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d987efbc7b9c1ef4e26b0dd054afeb5a2b1c93450bd3904d1301ab51b8d91ec2
3
+ size 15366
tuner_model/cat-vs-dog/trial_7d8a24b4163e3b3211079dbc5be02dac/trial.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:eeb6aaf581a6dd09cb4343a00792274e425f7cc666c2213839a16ae36cf47673
3
+ size 738
tuner_model/cat-vs-dog/trial_ee38b0cfcac1da6bbf8baa912585407e/checkpoints/epoch_0/checkpoint ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:02988027faf3f16b4088ee83c2ade14098e8ffb325c23a576cc639dae48aa936
3
+ size 77
tuner_model/cat-vs-dog/trial_ee38b0cfcac1da6bbf8baa912585407e/checkpoints/epoch_0/checkpoint.data-00000-of-00001 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8583811f52f9fe6579853e104a56031c8692e11c2fe3b3fe913453f63634287f
3
+ size 33354255
tuner_model/cat-vs-dog/trial_ee38b0cfcac1da6bbf8baa912585407e/checkpoints/epoch_0/checkpoint.index ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ca0876c4482e218982c25ab694ed94901bc7d2e9278719a1f5714cb6eb6523e9
3
+ size 15366
tuner_model/cat-vs-dog/trial_ee38b0cfcac1da6bbf8baa912585407e/trial.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b3a35ec123a58b1af226a01c82051049340f712d85cda8fc6f595449598c2e87
3
+ size 743
tuner_model/cat-vs-dog/tuner0.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:44136fa355b3678a1146ad16f7e8649e94fb4fc21fe77e8310c060f61caaff8a
3
+ size 2