Commit: e4497d1
Parent(s): 74501a0
Add: Source
Files changed:
- src/__pycache__/config.cpython-38.pyc +0 -0
- src/__pycache__/draw.cpython-38.pyc +0 -0
- src/__pycache__/models.cpython-38.pyc +0 -0
- src/__pycache__/preprocessing.cpython-38.pyc +0 -0
- src/config.py +15 -0
- src/draw.py +21 -0
- src/models.py +77 -0
- src/preprocessing.py +29 -0
src/__pycache__/config.cpython-38.pyc
ADDED
Binary file (508 Bytes).

src/__pycache__/draw.cpython-38.pyc
ADDED
Binary file (820 Bytes).

src/__pycache__/models.cpython-38.pyc
ADDED
Binary file (2.11 kB).

src/__pycache__/preprocessing.cpython-38.pyc
ADDED
Binary file (985 Bytes).
src/config.py
ADDED
@@ -0,0 +1,15 @@
+import os
+
+DATASET_URL = 'https://download.microsoft.com/download/3/E/1/3E1C3F21-ECDB-4869-8368-6DEBA77B919F/kagglecatsanddogs_3367a.zip'
+
+CACHE_DIR = os.getcwd()
+CACHE_SUBDIR = 'data'
+
+if not os.path.isdir(CACHE_SUBDIR):
+    os.mkdir(CACHE_SUBDIR)
+
+DATASET_PATH = os.path.join(CACHE_DIR, CACHE_SUBDIR, 'PetImages')
+
+IMAGE_SIZE = (180, 180)
+BATCH_SIZE = 32
+EPOCHS = 50
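
The commit itself never downloads the archive; the sketch below shows one plausible way these constants could be consumed. The download call, the local file name, and the src.config import path are assumptions, not part of this commit, and the exact extraction layout of get_file can vary between Keras releases.

# Assumed download step (not in this commit): fetch and unpack the dataset so
# that DATASET_PATH (CACHE_DIR/data/PetImages) exists before training.
import tensorflow as tf
from src.config import DATASET_URL, CACHE_DIR, CACHE_SUBDIR, DATASET_PATH

tf.keras.utils.get_file(
    fname='kagglecatsanddogs.zip',   # hypothetical local file name
    origin=DATASET_URL,
    extract=True,                    # unpack the zip after download
    cache_dir=CACHE_DIR,
    cache_subdir=CACHE_SUBDIR,
)
print('Expecting images under:', DATASET_PATH)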
src/draw.py
ADDED
@@ -0,0 +1,21 @@
+from typing import Optional
+
+import tensorflow as tf
+import matplotlib.pyplot as plt
+
+
+def visualize_data(dataset: tf.data.Dataset, data_augmentation: Optional[tf.keras.Sequential] = None) -> None:
+    plt.figure(figsize=(10, 10))
+    for images, labels in dataset.take(1):
+        for i in range(9):
+            _ = plt.subplot(3, 3, i + 1)
+
+            if data_augmentation is not None:
+                augmented_image = data_augmentation(images)
+                plt.imshow(augmented_image[0].numpy().astype('uint8'))
+            else:
+                plt.imshow(images[i].numpy().astype('uint8'))
+
+            plt.title(int(labels[i]))
+            plt.axis('off')
+    plt.show()
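
A plausible call site for visualize_data is sketched below. This usage is assumed rather than shown in the commit: the split parameters are illustrative, and on older TensorFlow releases image_dataset_from_directory lives under tf.keras.preprocessing instead of tf.keras.utils.

# Assumed usage sketch: build a training split and preview raw vs. augmented batches.
import tensorflow as tf
from src.config import DATASET_PATH, IMAGE_SIZE, BATCH_SIZE
from src.draw import visualize_data
from src.preprocessing import get_data_augmentation

train_ds = tf.keras.utils.image_dataset_from_directory(
    DATASET_PATH,
    validation_split=0.2,
    subset='training',
    seed=1337,
    image_size=IMAGE_SIZE,
    batch_size=BATCH_SIZE,
)

visualize_data(train_ds)                            # first nine raw images
visualize_data(train_ds, get_data_augmentation())   # augmented views of one image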
src/models.py
ADDED
@@ -0,0 +1,77 @@
+from typing import Tuple
+from typing import Optional
+
+import tensorflow as tf
+from tensorflow.keras import layers
+from keras_tuner import HyperModel
+
+class MakeHyperModel(HyperModel):
+    def __init__(self, input_shape: Tuple[int, int, int], num_classes: int, data_augmentation: Optional[tf.keras.Sequential] = None) -> None:
+        self.input_shape = input_shape
+        self.num_classes = num_classes
+        self.data_augmentation = data_augmentation
+
+    def build(self, hp) -> tf.keras.Model:
+        inputs = tf.keras.Input(shape=self.input_shape)
+
+        if self.data_augmentation is not None:
+            x = self.data_augmentation(inputs)
+        else:
+            x = inputs
+
+        x = layers.Rescaling(1.0 / 255)(x)
+        x = layers.Conv2D(32, 3, strides=2, padding='same')(x)
+        x = layers.BatchNormalization()(x)
+        x = layers.Activation('relu')(x)
+
+        x = layers.Conv2D(64, 3, padding='same')(x)
+        x = layers.BatchNormalization()(x)
+        x = layers.Activation('relu')(x)
+
+        previous_block_activation = x
+
+        for size in [128, 256, 512, 728]:
+            x = layers.Activation('relu')(x)
+            x = layers.SeparableConv2D(size, 3, padding='same')(x)
+            x = layers.BatchNormalization()(x)
+
+            x = layers.Activation('relu')(x)
+            x = layers.SeparableConv2D(size, 3, padding='same')(x)
+            x = layers.BatchNormalization()(x)
+
+            x = layers.MaxPooling2D(3, strides=2, padding='same')(x)
+
+            residual = layers.Conv2D(size, 1, strides=2, padding='same')(previous_block_activation)
+
+            x = layers.add([x, residual])
+            previous_block_activation = x
+
+        x = layers.SeparableConv2D(1024, 3, padding='same')(x)
+        x = layers.BatchNormalization()(x)
+        x = layers.Activation('relu')(x)
+
+        x = layers.GlobalAveragePooling2D()(x)
+
+        if self.num_classes == 2:
+            activation = 'sigmoid'
+            loss_fn = 'binary_crossentropy'
+            units = 1
+        else:
+            activation = 'softmax'
+            loss_fn = 'categorical_crossentropy'
+            units = self.num_classes
+
+        x = layers.Dropout(0.5)(x)
+        outputs = layers.Dense(units, activation=activation)(x)
+
+        model = tf.keras.Model(inputs, outputs)
+
+        model.compile(
+            optimizer=tf.keras.optimizers.Adam(
+                hp.Choice('learning_rate', values=[1e-2, 1e-3, 1e-4])
+            ),
+            loss=loss_fn,
+            metrics=['accuracy']
+        )
+
+        return model
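
A sketch of how MakeHyperModel might be handed to KerasTuner follows. This is assumed usage, not code from the commit: the RandomSearch settings, output directory, and the commented search call are illustrative only.

# Assumed tuning sketch: search over the learning-rate Choice declared in build().
import keras_tuner as kt
from src.models import MakeHyperModel
from src.config import IMAGE_SIZE
from src.preprocessing import get_data_augmentation

hypermodel = MakeHyperModel(
    input_shape=IMAGE_SIZE + (3,),             # 180x180 RGB inputs
    num_classes=2,                             # cats vs. dogs -> sigmoid head
    data_augmentation=get_data_augmentation(),
)

tuner = kt.RandomSearch(
    hypermodel,
    objective='val_accuracy',
    max_trials=3,
    directory='tuning',                        # hypothetical output directory
    project_name='cats_vs_dogs',
)

# With train_ds / val_ds built via image_dataset_from_directory:
# tuner.search(train_ds, validation_data=val_ds, epochs=EPOCHS)  # EPOCHS from src.config
# best_model = tuner.get_best_models(num_models=1)[0]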
src/preprocessing.py
ADDED
@@ -0,0 +1,29 @@
+import os
+from typing import Tuple
+
+import tensorflow as tf
+
+def delete_corrupted_image(dataset_path: str, categories: Tuple[str, ...]) -> int:
+    num_skipped = 0
+
+    for folder_name in categories:
+        folder_path = os.path.join(dataset_path, folder_name)
+        for fname in os.listdir(folder_path):
+            fpath = os.path.join(folder_path, fname)
+            try:
+                fobj = open(fpath, 'rb')
+                is_jfif = tf.compat.as_bytes('JFIF') in fobj.peek(10)
+            finally:
+                fobj.close()
+
+            if not is_jfif:
+                num_skipped += 1
+                os.remove(fpath)
+
+    return num_skipped
+
+def get_data_augmentation() -> tf.keras.Sequential:
+    return tf.keras.Sequential([
+        tf.keras.layers.RandomFlip('horizontal'),
+        tf.keras.layers.RandomRotation(0.1)
+    ])
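
A likely call site for these helpers is sketched below. It is assumed rather than shown in the commit; the 'Cat' and 'Dog' folder names follow the PetImages layout of the downloaded archive and are not declared anywhere in these files.

# Assumed cleanup step: drop files without a JFIF header before building datasets.
from src.config import DATASET_PATH
from src.preprocessing import delete_corrupted_image, get_data_augmentation

num_skipped = delete_corrupted_image(DATASET_PATH, ('Cat', 'Dog'))
print(f'Deleted {num_skipped} corrupted images')

augmenter = get_data_augmentation()   # RandomFlip + RandomRotation pipeline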