Kasamuday committed
Commit ab3475f
1 Parent(s): c357cc2

Upload Furnitre_tf.ipynb

Files changed (1)
  1. Furnitre_tf.ipynb +574 -0
Furnitre_tf.ipynb ADDED
@@ -0,0 +1,574 @@
+ {
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "id": "146BB11JpfDA"
+ },
+ "outputs": [],
+ "source": [
+ "import os"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "id": "42hJEdo_pfDB"
+ },
+ "outputs": [],
+ "source": [
+ "CUSTOM_MODEL_NAME = 'my_ssd_mobnet' \n",
+ "PRETRAINED_MODEL_NAME = 'ssd_mobilenet_v2_fpnlite_320x320_coco17_tpu-8'\n",
+ "PRETRAINED_MODEL_URL = 'http://download.tensorflow.org/models/object_detection/tf2/20200711/ssd_mobilenet_v2_fpnlite_320x320_coco17_tpu-8.tar.gz'\n",
+ "TF_RECORD_SCRIPT_NAME = 'generate_tfrecord.py'\n",
+ "LABEL_MAP_NAME = 'label_map.pbtxt'"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "id": "hbPhYVy_pfDB"
+ },
+ "outputs": [],
+ "source": [
+ "paths = {\n",
+ " 'WORKSPACE_PATH': os.path.join('Tensorflow', 'workspace'),\n",
+ " 'SCRIPTS_PATH': os.path.join('Tensorflow','scripts'),\n",
+ " 'APIMODEL_PATH': os.path.join('Tensorflow','models'),\n",
+ " 'ANNOTATION_PATH': os.path.join('Tensorflow', 'workspace','annotations'),\n",
+ " 'IMAGE_PATH': os.path.join('Tensorflow', 'workspace','images'),\n",
+ " 'MODEL_PATH': os.path.join('Tensorflow', 'workspace','models'),\n",
+ " 'PRETRAINED_MODEL_PATH': os.path.join('Tensorflow', 'workspace','pre-trained-models'),\n",
+ " 'CHECKPOINT_PATH': os.path.join('Tensorflow', 'workspace','models',CUSTOM_MODEL_NAME), \n",
+ " 'OUTPUT_PATH': os.path.join('Tensorflow', 'workspace','models',CUSTOM_MODEL_NAME, 'export'), \n",
+ " 'TFJS_PATH':os.path.join('Tensorflow', 'workspace','models',CUSTOM_MODEL_NAME, 'tfjsexport'), \n",
+ " 'TFLITE_PATH':os.path.join('Tensorflow', 'workspace','models',CUSTOM_MODEL_NAME, 'tfliteexport'), \n",
+ " 'PROTOC_PATH':os.path.join('Tensorflow','protoc')\n",
+ " }"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "id": "LwhWZMI0pfDC"
+ },
+ "outputs": [],
+ "source": [
+ "files = {\n",
+ " 'PIPELINE_CONFIG':os.path.join('Tensorflow', 'workspace','models', CUSTOM_MODEL_NAME, 'pipeline.config'),\n",
+ " 'TF_RECORD_SCRIPT': os.path.join(paths['SCRIPTS_PATH'], TF_RECORD_SCRIPT_NAME), \n",
+ " 'LABELMAP': os.path.join(paths['ANNOTATION_PATH'], LABEL_MAP_NAME)\n",
+ "}"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "id": "HR-TfDGrpfDC"
+ },
+ "outputs": [],
+ "source": [
+ "for path in paths.values():\n",
+ " if not os.path.exists(path):\n",
+ " if os.name == 'posix':\n",
+ " !mkdir -p {path}\n",
+ " if os.name == 'nt':\n",
+ " !mkdir {path}"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "id": "K-Cmz2edpfDE",
+ "scrolled": true
+ },
+ "outputs": [],
+ "source": [
+ "if os.name=='nt':\n",
+ " !pip install wget\n",
+ " import wget"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "id": "iA1DIq5OpfDE"
+ },
+ "outputs": [],
+ "source": [
+ "if not os.path.exists(os.path.join(paths['APIMODEL_PATH'], 'research', 'object_detection')):\n",
+ " !git clone https://github.com/tensorflow/models {paths['APIMODEL_PATH']}"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "id": "rJjMHbnDs3Tv"
+ },
+ "outputs": [],
+ "source": [
+ "# Install Tensorflow Object Detection \n",
+ "if os.name=='posix': \n",
+ " !apt-get install protobuf-compiler\n",
+ " !cd Tensorflow/models/research && protoc object_detection/protos/*.proto --python_out=. && cp object_detection/packages/tf2/setup.py . && python -m pip install . \n",
+ " \n",
+ "if os.name=='nt':\n",
+ " url=\"https://github.com/protocolbuffers/protobuf/releases/download/v3.15.6/protoc-3.15.6-win64.zip\"\n",
+ " wget.download(url)\n",
+ " !move protoc-3.15.6-win64.zip {paths['PROTOC_PATH']}\n",
+ " !cd {paths['PROTOC_PATH']} && tar -xf protoc-3.15.6-win64.zip\n",
+ " os.environ['PATH'] += os.pathsep + os.path.abspath(os.path.join(paths['PROTOC_PATH'], 'bin')) \n",
+ " !cd Tensorflow/models/research && protoc object_detection/protos/*.proto --python_out=. && copy object_detection\\\\packages\\\\tf2\\\\setup.py setup.py && python setup.py build && python setup.py install\n",
+ " !cd Tensorflow/models/research/slim && pip install -e . "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "scrolled": true
+ },
+ "outputs": [],
+ "source": [
+ "VERIFICATION_SCRIPT = os.path.join(paths['APIMODEL_PATH'], 'research', 'object_detection', 'builders', 'model_builder_tf2_test.py')\n",
+ "# Verify Installation\n",
+ "!python {VERIFICATION_SCRIPT}"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import object_detection"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "scrolled": true
+ },
+ "outputs": [],
+ "source": [
+ "!pip list"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "id": "csofht2npfDE",
+ "outputId": "ff5471b2-bed2-43f2-959c-327a706527b6"
+ },
+ "outputs": [],
+ "source": [
+ "if os.name =='posix':\n",
+ " !wget {PRETRAINED_MODEL_URL}\n",
+ " !mv {PRETRAINED_MODEL_NAME+'.tar.gz'} {paths['PRETRAINED_MODEL_PATH']}\n",
+ " !cd {paths['PRETRAINED_MODEL_PATH']} && tar -zxvf {PRETRAINED_MODEL_NAME+'.tar.gz'}\n",
+ "if os.name == 'nt':\n",
+ " wget.download(PRETRAINED_MODEL_URL)\n",
+ " !move {PRETRAINED_MODEL_NAME+'.tar.gz'} {paths['PRETRAINED_MODEL_PATH']}\n",
+ " !cd {paths['PRETRAINED_MODEL_PATH']} && tar -zxvf {PRETRAINED_MODEL_NAME+'.tar.gz'}"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "id": "p1BVDWo7pfDC"
+ },
+ "outputs": [],
+ "source": [
+ "labels = [{'name':'Density1Benign', 'id':1}, {'name':'Density1Malignant', 'id':2}]\n",
+ "\n",
+ "with open(files['LABELMAP'], 'w') as f:\n",
+ " for label in labels:\n",
+ " f.write('item { \\n')\n",
+ " f.write('\\tname:\\'{}\\'\\n'.format(label['name']))\n",
+ " f.write('\\tid:{}\\n'.format(label['id']))\n",
+ " f.write('}\\n')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "id": "KWpb_BVUpfDD",
+ "outputId": "56ce2a3f-3933-4ee6-8a9d-d5ec65f7d73c"
+ },
+ "outputs": [],
+ "source": [
+ "if not os.path.exists(files['TF_RECORD_SCRIPT']):\n",
+ " !git clone https://github.com/nicknochnack/GenerateTFRecord {paths['SCRIPTS_PATH']}"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "id": "UPFToGZqpfDD",
+ "outputId": "0ebb456f-aadc-4a1f-96e6-fbfec1923e1c"
+ },
+ "outputs": [],
+ "source": [
+ "!python {files['TF_RECORD_SCRIPT']} -x {os.path.join(paths['IMAGE_PATH'], 'train')} -l {files['LABELMAP']} -o {os.path.join(paths['ANNOTATION_PATH'], 'train.record')} \n",
+ "!python {files['TF_RECORD_SCRIPT']} -x {os.path.join(paths['IMAGE_PATH'], 'test')} -l {files['LABELMAP']} -o {os.path.join(paths['ANNOTATION_PATH'], 'test.record')} "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "id": "cOjuTFbwpfDF"
+ },
+ "outputs": [],
+ "source": [
+ "if os.name =='posix':\n",
+ " !cp {os.path.join(paths['PRETRAINED_MODEL_PATH'], PRETRAINED_MODEL_NAME, 'pipeline.config')} {os.path.join(paths['CHECKPOINT_PATH'])}\n",
+ "if os.name == 'nt':\n",
+ " !copy {os.path.join(paths['PRETRAINED_MODEL_PATH'], PRETRAINED_MODEL_NAME, 'pipeline.config')} {os.path.join(paths['CHECKPOINT_PATH'])}"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "id": "Z9hRrO_ppfDF"
+ },
+ "outputs": [],
+ "source": [
+ "import tensorflow as tf\n",
+ "from object_detection.utils import config_util\n",
+ "from object_detection.protos import pipeline_pb2\n",
+ "from google.protobuf import text_format"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "id": "c2A0mn4ipfDF"
+ },
+ "outputs": [],
+ "source": [
+ "config = config_util.get_configs_from_pipeline_file(files['PIPELINE_CONFIG'])"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "id": "uQA13-afpfDF",
+ "outputId": "907496a4-a39d-4b13-8c2c-e5978ecb1f10"
+ },
+ "outputs": [],
+ "source": [
+ "config"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "id": "9vK5lotDpfDF"
+ },
+ "outputs": [],
+ "source": [
+ "pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()\n",
+ "with tf.io.gfile.GFile(files['PIPELINE_CONFIG'], \"r\") as f: \n",
+ " proto_str = f.read() \n",
+ " text_format.Merge(proto_str, pipeline_config) "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "id": "rP43Ph0JpfDG"
+ },
+ "outputs": [],
+ "source": [
+ "pipeline_config.model.ssd.num_classes = len(labels)\n",
+ "pipeline_config.train_config.batch_size = 4\n",
+ "pipeline_config.train_config.fine_tune_checkpoint = os.path.join(paths['PRETRAINED_MODEL_PATH'], PRETRAINED_MODEL_NAME, 'checkpoint', 'ckpt-0')\n",
+ "pipeline_config.train_config.fine_tune_checkpoint_type = \"detection\"\n",
+ "pipeline_config.train_input_reader.label_map_path= files['LABELMAP']\n",
+ "pipeline_config.train_input_reader.tf_record_input_reader.input_path[:] = [os.path.join(paths['ANNOTATION_PATH'], 'train.record')]\n",
+ "pipeline_config.eval_input_reader[0].label_map_path = files['LABELMAP']\n",
+ "pipeline_config.eval_input_reader[0].tf_record_input_reader.input_path[:] = [os.path.join(paths['ANNOTATION_PATH'], 'test.record')]"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "id": "oJvfgwWqpfDG"
+ },
+ "outputs": [],
+ "source": [
+ "config_text = text_format.MessageToString(pipeline_config) \n",
+ "with tf.io.gfile.GFile(files['PIPELINE_CONFIG'], \"wb\") as f: \n",
+ " f.write(config_text) "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "id": "B-Y2UQmQpfDG"
+ },
+ "outputs": [],
+ "source": [
+ "TRAINING_SCRIPT = os.path.join(paths['APIMODEL_PATH'], 'research', 'object_detection', 'model_main_tf2.py')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "id": "jMP2XDfQpfDH"
+ },
+ "outputs": [],
+ "source": [
+ "command = \"python {} --model_dir={} --pipeline_config_path={} --num_train_steps=2000\".format(TRAINING_SCRIPT, paths['CHECKPOINT_PATH'],files['PIPELINE_CONFIG'])"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "id": "A4OXXi-ApfDH",
+ "outputId": "117a0e83-012b-466e-b7a6-ccaa349ac5ab"
+ },
+ "outputs": [],
+ "source": [
+ "print(command)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "id": "i3ZsJR-qpfDH",
+ "outputId": "cabec5e1-45e6-4f2f-d9cf-297d9c1d0225"
+ },
+ "outputs": [],
+ "source": [
+ "!{command}"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "id": "80L7-fdPpfDH"
+ },
+ "outputs": [],
+ "source": [
+ "command = \"python {} --model_dir={} --pipeline_config_path={} --checkpoint_dir={}\".format(TRAINING_SCRIPT, paths['CHECKPOINT_PATH'],files['PIPELINE_CONFIG'], paths['CHECKPOINT_PATH'])"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ },
+ "id": "lYsgEPx9pfDH",
+ "outputId": "8632d48b-91d2-45d9-bcb8-c1b172bf6eed"
+ },
+ "outputs": [],
+ "source": [
+ "print(command)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "id": "lqTV2jGBpfDH"
+ },
+ "outputs": [],
+ "source": [
+ "!{command}"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "id": "8TYk4_oIpfDI"
+ },
+ "outputs": [],
+ "source": [
+ "import os\n",
+ "import tensorflow as tf\n",
+ "from object_detection.utils import label_map_util\n",
+ "from object_detection.utils import visualization_utils as viz_utils\n",
+ "from object_detection.builders import model_builder\n",
+ "from object_detection.utils import config_util"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "id": "tDnQg-cYpfDI"
+ },
+ "outputs": [],
+ "source": [
+ "# Load pipeline config and build a detection model\n",
+ "configs = config_util.get_configs_from_pipeline_file(files['PIPELINE_CONFIG'])\n",
+ "detection_model = model_builder.build(model_config=configs['model'], is_training=False)\n",
+ "\n",
+ "# Restore checkpoint\n",
+ "ckpt = tf.compat.v2.train.Checkpoint(model=detection_model)\n",
+ "ckpt.restore(os.path.join(paths['CHECKPOINT_PATH'], 'ckpt-9')).expect_partial()\n",
+ "\n",
+ "@tf.function\n",
+ "def detect_fn(image):\n",
+ " image, shapes = detection_model.preprocess(image)\n",
+ " prediction_dict = detection_model.predict(image, shapes)\n",
+ " detections = detection_model.postprocess(prediction_dict, shapes)\n",
+ " return detections"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "id": "Y_MKiuZ4pfDI"
+ },
+ "outputs": [],
+ "source": [
+ "import cv2 \n",
+ "import numpy as np\n",
+ "from matplotlib import pyplot as plt\n",
+ "%matplotlib inline"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "id": "cBDbIhNapfDI"
+ },
+ "outputs": [],
+ "source": [
+ "category_index = label_map_util.create_category_index_from_labelmap(files['LABELMAP'])"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "id": "Lx3crOhOzITB"
+ },
+ "outputs": [],
+ "source": [
+ "IMAGE_PATH = os.path.join(paths['IMAGE_PATH'], 'test', '20587612 (36).png')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 269
+ },
+ "id": "Tpzn1SMry1yK",
+ "outputId": "c392a2c5-10fe-4fc4-9998-a1d4c7db2bd3"
+ },
+ "outputs": [],
+ "source": [
+ "img = cv2.imread(IMAGE_PATH)\n",
+ "image_np = np.array(img)\n",
+ "\n",
+ "input_tensor = tf.convert_to_tensor(np.expand_dims(image_np, 0), dtype=tf.float32)\n",
+ "detections = detect_fn(input_tensor)\n",
+ "\n",
+ "num_detections = int(detections.pop('num_detections'))\n",
+ "detections = {key: value[0, :num_detections].numpy()\n",
+ " for key, value in detections.items()}\n",
+ "detections['num_detections'] = num_detections\n",
+ "\n",
+ "# detection_classes should be ints.\n",
+ "detections['detection_classes'] = detections['detection_classes'].astype(np.int64)\n",
+ "\n",
+ "label_id_offset = 1\n",
+ "image_np_with_detections = image_np.copy()\n",
+ "\n",
+ "viz_utils.visualize_boxes_and_labels_on_image_array(\n",
+ " image_np_with_detections,\n",
+ " detections['detection_boxes'],\n",
+ " detections['detection_classes']+label_id_offset,\n",
+ " detections['detection_scores'],\n",
+ " category_index,\n",
+ " use_normalized_coordinates=True,\n",
+ " max_boxes_to_draw=5,\n",
+ " min_score_thresh=.2,\n",
+ " agnostic_mode=False)\n",
+ "\n",
+ "plt.imshow(cv2.cvtColor(image_np_with_detections, cv2.COLOR_BGR2RGB))\n",
+ "plt.show()"
+ ]
+ }
+ ],
+ "metadata": {
+ "accelerator": "GPU",
+ "colab": {
+ "name": "3. Training and Detection.ipynb",
+ "provenance": []
+ },
+ "kernelspec": {
+ "display_name": "hamza1",
+ "language": "python",
+ "name": "hamza1"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.8.0"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
+ }
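
Note: the notebook creates paths['OUTPUT_PATH'] for an exported model but stops after evaluation. The snippet below is a minimal follow-up sketch, not part of the committed notebook, assuming the tensorflow/models checkout cloned above (which provides object_detection/exporter_main_v2.py in the TF2 Object Detection API); it reuses the same command-string pattern the notebook already uses for training.

# Hypothetical extra cell (illustration only, not in the committed notebook):
# export the fine-tuned checkpoint as a SavedModel with the TF2 OD API exporter script.
FREEZE_SCRIPT = os.path.join(paths['APIMODEL_PATH'], 'research', 'object_detection', 'exporter_main_v2.py')
command = "python {} --input_type=image_tensor --pipeline_config_path={} --trained_checkpoint_dir={} --output_directory={}".format(
    FREEZE_SCRIPT, files['PIPELINE_CONFIG'], paths['CHECKPOINT_PATH'], paths['OUTPUT_PATH'])
print(command)
# !{command}   # run from a notebook cell, as with the training command above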