iBrokeTheCode committed
Commit 67919d4 · 1 Parent(s): 6ab520d

chore: Add model service files

.devcontainer/api/devcontainer.json ADDED
@@ -0,0 +1,12 @@
+ {
+   "name": "ML Project - API",
+   "dockerComposeFile": "../../docker-compose-dev.yml",
+   "service": "api",
+   "workspaceFolder": "/src",
+   "customizations": {
+     "vscode": {
+       "extensions": ["ms-python.python"]
+     }
+   },
+   "shutdownAction": "none"
+ }
.devcontainer/model/devcontainer.json ADDED
@@ -0,0 +1,12 @@
+ {
+   "name": "ML Project - Model",
+   "dockerComposeFile": "../../docker-compose-dev.yml",
+   "service": "model",
+   "workspaceFolder": "/src",
+   "customizations": {
+     "vscode": {
+       "extensions": ["ms-python.python"]
+     }
+   },
+   "shutdownAction": "none"
+ }
.devcontainer/ui/devcontainer.json ADDED
@@ -0,0 +1,12 @@
+ {
+   "name": "ML Project - UI",
+   "dockerComposeFile": "../../docker-compose-dev.yml",
+   "service": "ui",
+   "workspaceFolder": "/src",
+   "customizations": {
+     "vscode": {
+       "extensions": ["ms-python.python"]
+     }
+   },
+   "shutdownAction": "none"
+ }
.github/workflows/main.yml ADDED
@@ -0,0 +1,35 @@
+ # This is a basic workflow to help you get started with Actions
+
+ name: CI
+
+ # Controls when the workflow will run
+ on:
+   # Triggers the workflow on push or pull request events but only for the "master" branch
+   push:
+     branches: [ "master" ]
+   pull_request:
+     branches: [ "master" ]
+
+   # Allows you to run this workflow manually from the Actions tab
+   workflow_dispatch:
+
+ # A workflow run is made up of one or more jobs that can run sequentially or in parallel
+ jobs:
+   # This workflow contains a single job called "build"
+   build:
+     # The type of runner that the job will run on
+     runs-on: ubuntu-latest
+
+     # Steps represent a sequence of tasks that will be executed as part of the job
+     steps:
+       # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
+       - uses: actions/checkout@v3
+
+       - name: Black Check
+         uses: RojerGS/python-black-check@master
+         with:
+           line-length: '88'
+
+       # Run all the containers specified in the docker-compose.yml file
+       - name: Test containers build
+         run: docker-compose up -d --build
model/Dockerfile ADDED
@@ -0,0 +1,19 @@
+ FROM python:3.8.13 AS base
+
+ ENV PYTHONPATH=$PYTHONPATH:/src/
+
+ ADD requirements.txt .
+ RUN pip3 install -r requirements.txt
+
+ ENV PYTHONPATH=$PYTHONPATH:/src/
+
+ COPY ./ /src/
+
+ WORKDIR /src
+
+ FROM base AS test
+ RUN ["pytest", "-v", "/src/tests"]
+
+ FROM base AS build
+ ENTRYPOINT ["python3", "/src/ml_service.py"]
+
model/__init__.py ADDED
File without changes
model/ml_service.py ADDED
@@ -0,0 +1,107 @@
+ import json
+ import os
+ import time
+
+ import numpy as np
+ import redis
+ import settings
+ from tensorflow.keras.applications import ResNet50
+ from tensorflow.keras.applications.resnet50 import decode_predictions, preprocess_input
+ from tensorflow.keras.preprocessing import image
+
+ # Connect to Redis and assign the client to the variable db
+ db = redis.Redis(
+     host=settings.REDIS_IP, port=settings.REDIS_PORT, db=settings.REDIS_DB_ID
+ )
+
+ # Load ML model
+ model = ResNet50(include_top=True, weights="imagenet")
+
+
+ def predict(image_name):
+     """
+     Load the image from the corresponding folder based on the image name
+     received, then run our ML model to get predictions.
+
+     Parameters
+     ----------
+     image_name : str
+         Image filename.
+
+     Returns
+     -------
+     class_name, pred_probability : tuple(str, float)
+         Model predicted class as a string and the corresponding confidence
+         score as a number.
+     """
+     class_name = None
+     pred_probability = None
+
+     # Get image path
+     image_path = os.path.join(settings.UPLOAD_FOLDER, image_name)
+
+     # Load image
+     img = image.load_img(image_path, target_size=(224, 224))
+
+     # Apply preprocessing: convert to a NumPy array, match the model input
+     # dimensions (including the batch dimension) and use the ResNet50 preprocessing
+     x = image.img_to_array(img)
+
+     # Add an extra dimension because the model expects a batch of images as input
+     x_batch = np.expand_dims(x, axis=0)
+
+     # Scale pixel values
+     x_batch = preprocess_input(x_batch)
+
+     # Make predictions
+     predictions = model.predict(x_batch)
+
+     # Decode predictions using the resnet50 decode_predictions helper
+     top_pred = decode_predictions(predictions, top=1)[0][0]  # imagenet_id, label, score
+     _, class_name, pred_probability = top_pred
+
+     # Convert the probability to float and round it
+     pred_probability = round(float(pred_probability), 4)
+
+     return class_name, pred_probability
+
+
+ def classify_process():
+     """
+     Loop indefinitely asking Redis for new jobs.
+     When a new job arrives, take it from the Redis queue, use the loaded ML
+     model to get predictions and store the results back in Redis using
+     the original job ID so other services can see it was processed and access
+     the results.
+     """
+     while True:
+         # Take a new job from Redis
+         q = db.brpop(settings.REDIS_QUEUE)[1]
+
+         # Decode the JSON data for the given job
+         q = json.loads(q.decode("utf-8"))
+
+         # Important! Get and keep the original job ID
+         job_id = q["id"]
+
+         # Run the loaded ML model (use the predict() function)
+         prediction, score = predict(q["image_name"])  # 👈 verify the image name
+
+         # Prepare a new JSON with the results
+         output = {"prediction": prediction, "score": score}
+
+         # Store the job results on Redis using the original
+         # job ID as the key
+         db.set(job_id, json.dumps(output))
+
+         # Sleep for a bit
+         time.sleep(settings.SERVER_SLEEP)
+
+
+ if __name__ == "__main__":
+     # Now launch the process
+     print("Launching ML service...")
+     classify_process()
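
For context, here is a minimal, hypothetical client-side sketch (not part of this commit) showing how another service, such as the API container, might enqueue a job that classify_process() picks up and then poll Redis for the stored result. The "id" and "image_name" fields and the "service_queue" name mirror ml_service.py and settings.py above; the use of lpush, a uuid-based job ID, and the polling loop are illustrative assumptions.

import json
import time
import uuid

import redis

# Assumed to match the defaults in model/settings.py
db = redis.Redis(host="redis", port=6379, db=0)

# Build a job with the fields classify_process() expects: "id" and "image_name"
job_id = str(uuid.uuid4())
job = {"id": job_id, "image_name": "dog.jpeg"}  # the image is assumed to already be in the uploads folder

# Push the job onto the queue the model service consumes with BRPOP
db.lpush("service_queue", json.dumps(job))

# Poll until the model service stores the result under the job ID
while True:
    result = db.get(job_id)
    if result is not None:
        output = json.loads(result)
        print(output["prediction"], output["score"])
        break
    time.sleep(0.05)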
model/requirements.txt ADDED
@@ -0,0 +1,5 @@
+ Pillow==9.0.1
+ pytest==7.1.1
+ redis==4.1.4
+ tensorflow==2.8.0
+ protobuf==3.20.0
model/settings.py ADDED
@@ -0,0 +1,18 @@
+ import os
+
+ # We will store images uploaded by the user in this folder
+ UPLOAD_FOLDER = "uploads/"
+ os.makedirs(UPLOAD_FOLDER, exist_ok=True)
+
+ # REDIS
+
+ # Queue name
+ REDIS_QUEUE = "service_queue"
+ # Port
+ REDIS_PORT = 6379
+ # DB Id
+ REDIS_DB_ID = 0
+ # Host IP
+ REDIS_IP = os.getenv("REDIS_IP", "redis")
+ # Sleep parameter that manages the interval between requests to our Redis queue
+ SERVER_SLEEP = 0.05
model/tests/__init__.py ADDED
File without changes
model/tests/dog.jpeg ADDED

Git LFS Details

  • SHA256: 8fb0318ed02f8a28133664be7faa884475cbb961c8f1ee1ff46410e34e38b8b3
  • Pointer size: 130 Bytes
  • Size of remote file: 34.6 kB
model/tests/test_model.py ADDED
@@ -0,0 +1,18 @@
+ import unittest
+
+ import ml_service
+
+
+ # 💡 NOTE: run the tests with:
+ #   - python3 -m unittest -vvv tests.test_model
+ #   - python3 tests/test_model.py
+ class TestMLService(unittest.TestCase):
+     def test_predict(self):
+         ml_service.settings.UPLOAD_FOLDER = "tests"
+         class_name, pred_probability = ml_service.predict("dog.jpeg")
+         self.assertEqual(class_name, "Eskimo_dog")
+         self.assertAlmostEqual(pred_probability, 0.9346, 5)
+
+
+ if __name__ == "__main__":
+     unittest.main(verbosity=2)