tebakaja committed
Commit b6f0f70
0 Parent(s)

migrate: GCP to Hugging Face

This view is limited to 50 files because the commit contains too many changes. See the raw diff for the full changeset.
.dockerignore ADDED
@@ -0,0 +1,27 @@
+ # Development
+ /venv
+
+ Makefile
+ .env-example
+ Docpet_Backend.code-workspace
+
+ # Git & Github
+ /.github
+
+ README.md
+ .gitignore
+
+ # Terraform
+ /.terraform
+ /modules
+
+ main.tf
+ modules.tf
+ variables.tf
+ .terraform.lock.hcl
+
+ # Docker
+ /deployment
+
+ .dockerignore
+ docker-compose.yml
.env-example ADDED
@@ -0,0 +1,15 @@
+ DATABASE_PORT=6500
+ POSTGRES_PASSWORD=password123
+ POSTGRES_USER=postgres
+ POSTGRES_DB=docpet
+ POSTGRES_HOST=postgres
+ POSTGRES_HOSTNAME=127.0.0.1
+
+ ACCESS_TOKEN_EXPIRES_IN=15
+ REFRESH_TOKEN_EXPIRES_IN=60
+ JWT_ALGORITHM=RS256
+
+ CLIENT_ORIGIN=http://localhost:3000
+
+ JWT_PRIVATE_KEY=LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlCT2dJQkFBSkJBSSs3QnZUS0FWdHVQYzEzbEFkVk94TlVmcWxzMm1SVmlQWlJyVFpjd3l4RVhVRGpNaFZuCi9KVHRsd3h2a281T0pBQ1k3dVE0T09wODdiM3NOU3ZNd2xNQ0F3RUFBUUpBYm5LaENOQ0dOSFZGaHJPQ0RCU0IKdmZ2ckRWUzVpZXAwd2h2SGlBUEdjeWV6bjd0U2RweUZ0NEU0QTNXT3VQOXhqenNjTFZyb1pzRmVMUWlqT1JhUwp3UUloQU84MWl2b21iVGhjRkltTFZPbU16Vk52TGxWTW02WE5iS3B4bGh4TlpUTmhBaUVBbWRISlpGM3haWFE0Cm15QnNCeEhLQ3JqOTF6bVFxU0E4bHUvT1ZNTDNSak1DSVFEbDJxOUdtN0lMbS85b0EyaCtXdnZabGxZUlJPR3oKT21lV2lEclR5MUxaUVFJZ2ZGYUlaUWxMU0tkWjJvdXF4MHdwOWVEejBEWklLVzVWaSt6czdMZHRDdUVDSUVGYwo3d21VZ3pPblpzbnU1clBsTDJjZldLTGhFbWwrUVFzOCtkMFBGdXlnCi0tLS0tRU5EIFJTQSBQUklWQVRFIEtFWS0tLS0t
+ JWT_PUBLIC_KEY=LS0tLS1CRUdJTiBQVUJMSUMgS0VZLS0tLS0KTUZ3d0RRWUpLb1pJaHZjTkFRRUJCUUFEU3dBd1NBSkJBSSs3QnZUS0FWdHVQYzEzbEFkVk94TlVmcWxzMm1SVgppUFpSclRaY3d5eEVYVURqTWhWbi9KVHRsd3h2a281T0pBQ1k3dVE0T09wODdiM3NOU3ZNd2xNQ0F3RUFBUT09Ci0tLS0tRU5EIFBVQkxJQyBLRVktLS0tLQ==
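The two `JWT_*` values above are base64-encoded RSA PEM keys; `app/oauth2.py` decodes them with `base64.b64decode` before handing them to `fastapi_jwt_auth`. A minimal sketch of how such values could be produced from locally generated PEM files (the `private.pem`/`public.pem` filenames are assumptions, not part of this commit):

```python
import base64

# Hypothetical helper: encode locally generated RS256 PEM key files so they
# can be pasted into .env as JWT_PRIVATE_KEY / JWT_PUBLIC_KEY.
for name in ("private.pem", "public.pem"):
    with open(name, "rb") as pem:
        print(name, "->", base64.b64encode(pem.read()).decode("utf-8"))
```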
.gitattributes ADDED
@@ -0,0 +1,35 @@
+ *.7z filter=lfs diff=lfs merge=lfs -text
+ *.arrow filter=lfs diff=lfs merge=lfs -text
+ *.bin filter=lfs diff=lfs merge=lfs -text
+ *.bz2 filter=lfs diff=lfs merge=lfs -text
+ *.ckpt filter=lfs diff=lfs merge=lfs -text
+ *.ftz filter=lfs diff=lfs merge=lfs -text
+ *.gz filter=lfs diff=lfs merge=lfs -text
+ *.h5 filter=lfs diff=lfs merge=lfs -text
+ *.joblib filter=lfs diff=lfs merge=lfs -text
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
+ *.mlmodel filter=lfs diff=lfs merge=lfs -text
+ *.model filter=lfs diff=lfs merge=lfs -text
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
+ *.npy filter=lfs diff=lfs merge=lfs -text
+ *.npz filter=lfs diff=lfs merge=lfs -text
+ *.onnx filter=lfs diff=lfs merge=lfs -text
+ *.ot filter=lfs diff=lfs merge=lfs -text
+ *.parquet filter=lfs diff=lfs merge=lfs -text
+ *.pb filter=lfs diff=lfs merge=lfs -text
+ *.pickle filter=lfs diff=lfs merge=lfs -text
+ *.pkl filter=lfs diff=lfs merge=lfs -text
+ *.pt filter=lfs diff=lfs merge=lfs -text
+ *.pth filter=lfs diff=lfs merge=lfs -text
+ *.rar filter=lfs diff=lfs merge=lfs -text
+ *.safetensors filter=lfs diff=lfs merge=lfs -text
+ ml_models/*.h5 filter=lfs diff=lfs merge=lfs -text
+ *.tar.* filter=lfs diff=lfs merge=lfs -text
+ *.tar filter=lfs diff=lfs merge=lfs -text
+ *.tflite filter=lfs diff=lfs merge=lfs -text
+ *.tgz filter=lfs diff=lfs merge=lfs -text
+ *.wasm filter=lfs diff=lfs merge=lfs -text
+ *.xz filter=lfs diff=lfs merge=lfs -text
+ *.zip filter=lfs diff=lfs merge=lfs -text
+ *.zst filter=lfs diff=lfs merge=lfs -text
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
.github/workflows/pipeline.yaml ADDED
@@ -0,0 +1,50 @@
+ name: pipeline
+
+ on:
+   push:
+     branches:
+       - main
+     tags:
+       - '*'
+
+ jobs:
+   backend_deployment:
+     name: Backend Deployment
+     runs-on: ubuntu-latest
+     environment: Production
+
+     env:
+       HF_TOKEN: ${{ secrets.HF_TOKEN }}
+       SPACE_NAME: docpet_backend_service
+       HF_USERNAME: qywok
+
+     steps:
+       - name: Set global directory
+         run: git config --global --add safe.directory /github/workspace
+
+       - uses: actions/checkout@v3
+         with:
+           persist-credentials: false
+           fetch-depth: 1000
+
+       - name: Check git status
+         run: git status
+
+       - name: Configure git
+         run: |
+           git config --local user.email "alfariqyraihan@gmail.com"
+           git config --local user.name "qywok"
+
+       - name: Pull changes from remote
+         run: |
+           git pull https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME main || \
+           (git merge --strategy-option theirs)
+
+       - name: Add and commit changes
+         run: |
+           git add -A
+           git diff-index --quiet HEAD || git commit -m "Backend Deployment"
+
+       - name: Push to Hugging Face
+         run: |
+           git push https://$HF_USERNAME:$HF_TOKEN@huggingface.co/spaces/$HF_USERNAME/$SPACE_NAME main --force
.github/workflows/production.yaml ADDED
@@ -0,0 +1,140 @@
+ name: Production Pipeline
+
+ on:
+   push:
+     branches:
+       - aiueo
+     tags:
+       - '*'
+
+ permissions:
+   contents: read
+
+ jobs:
+   # Build
+   build:
+     name: Build
+     runs-on: ubuntu-latest
+     environment: Production
+
+     env:
+       service: docpet-backend-service
+       project_id: docpet-project-ch2-ps068
+
+     steps:
+       - name: Checkout
+         uses: actions/checkout@v3
+
+       - id: 'auth'
+         uses: 'google-github-actions/auth@v1'
+         with:
+           credentials_json: ${{ secrets.CONTAINER_REGISTRY_CREDENTIAL }}
+
+       - name: Setup Google Cloud
+         uses: google-github-actions/setup-gcloud@v1
+
+       - name: Setup Docker
+         run: gcloud auth configure-docker --quiet
+
+       # Push Docker Image to Google Container Registry
+       # --- DEPRECATED ---
+       # Container Registry is deprecated. After May 15, 2024,
+       # Artifact Registry will host images for the gcr.io domain
+       # in projects without previous Container Registry usage.
+       - name: Push Docker Image to Google Container Registry
+         run: |
+           git clone https://github.com/CH2-PS068/Docpet_Backend
+           cd Docpet_Backend
+
+           echo '${{ secrets.GOOGLE_BUCKET_CREDENTIAL }}' > sa_bucket.json
+
+           sed -i 's/docpet-project-ch2-ps068-a53f10c5c713.json/sa_bucket.json/g' app/gcs.py
+
+           docker build --tag $service:latest \
+             --file deployment/production.dockerfile .
+
+           docker tag $service:latest \
+             gcr.io/$project_id/$service:latest
+
+           docker push gcr.io/$project_id/$service:latest
+
+       # - name: Push Docker Image to Google Artifact Registry
+       #   env:
+       #     GOOGLE_PROJECT: ${{ secrets.GOOGLE_PROJECT }}
+       #   run: |
+       #     gcloud auth configure-docker asia-southeast2-docker.pkg.dev
+       #     docker build -t asia-southeast2-docker.pkg.dev/$GOOGLE_PROJECT/capstone/nginx:latest .
+       #     docker push asia-southeast2-docker.pkg.dev/$GOOGLE_PROJECT/capstone/nginx:latest
+
+   # Deploy
+   deploy:
+     name: Deploy
+     runs-on: ubuntu-latest
+     needs: build
+
+     env:
+       service: docpet-backend-service
+       project_id: docpet-project-ch2-ps068
+
+     steps:
+       - name: Checkout
+         uses: actions/checkout@v3
+
+       - id: 'auth'
+         uses: 'google-github-actions/auth@v1'
+         with:
+           credentials_json: ${{ secrets.CLOUD_RUN_CREDENTIAL }}
+
+       - name: Setup Google Cloud
+         uses: google-github-actions/setup-gcloud@v1
+
+       # Deploy to Google Cloud Run
+       - name: Deploy to Google Cloud Run
+         run: |
+           gcloud run services delete docpet-backend-service --platform=managed \
+             --region=asia-southeast2 --quiet
+
+           gcloud secrets delete postgres-password-secret \
+             --project=docpet-project-ch2-ps068 --quiet
+
+           gcloud secrets delete private-key-secret \
+             --project=docpet-project-ch2-ps068 --quiet
+
+           gcloud secrets delete public-key-secret \
+             --project=docpet-project-ch2-ps068 --quiet
+
+           git clone https://github.com/CH2-PS068/Docpet_Backend
+           cd Docpet_Backend
+
+           terraform init
+
+           terraform apply -no-color -auto-approve \
+             -var="service_account=${{ secrets.SERVICE_ACCOUNT }}" \
+             -var="service_container=gcr.io/$project_id/$service:latest" \
+             -var="vpc_access_connector=test" \
+             -var="postgres_password=${{ secrets.POSTGRES_PASSWORD }}" \
+             -var="postgres_port=${{ secrets.POSTGRES_PORT }}" \
+             -var="postgres_user=${{ secrets.POSTGRES_USER }}" \
+             -var="postgres_db=${{ secrets.POSTGRES_DB }}" \
+             -var="postgres_host=${{ secrets.POSTGRES_HOST }}" \
+             -var="postgres_hostname=${{ secrets.POSTGRES_HOSTNAME }}" \
+             -var="access_token_expires=${{ secrets.ACCESS_TOKEN_EXP }}" \
+             -var="refresh_token_expires=${{ secrets.REFRESH_TOKEN_EXP }}" \
+             -var="jwt_algorithm=${{ secrets.JWT_ALGORITHM }}" \
+             -var="jwt_public_key=${{ secrets.JWT_PUBLIC_KEY }}" \
+             -var="jwt_private_key=${{ secrets.JWT_PRIVATE_KEY }}"
+
+       # Push the latest Info to Discord
+       - name: Push the latest Info to Discord
+         run: |
+           ENDPOINT=$(gcloud run services describe docpet-backend-service \
+             --platform=managed --region=asia-southeast2 \
+             --format="value(status.url)")
+
+           wget https://raw.githubusercontent.com/CH2-PS068/Docpet_Infra/main/services/deploy_info_service/push_notif.lua
+
+           sudo apt install -y lua5.4
+           sudo apt install -y luarocks
+           sudo luarocks install dkjson
+
+           lua push_notif.lua "${{ secrets.DISCORD_ID }}" "${{ secrets.DISCORD_TOKEN }}" "${{ github.event.head_commit.message }}" "${{ github.event.head_commit.timestamp }}" "$ENDPOINT"
.github/workflows/staging.yaml ADDED
@@ -0,0 +1,147 @@
+ name: Staging Pipeline
+
+ on:
+   push:
+     branches:
+       - aiueo
+     tags:
+       - '*'
+
+ permissions:
+   contents: read
+
+ jobs:
+   # Testing
+   testing:
+     name: Testing
+     runs-on: ubuntu-latest
+     environment: testing
+
+     steps:
+       - name: Checkout
+         uses: actions/checkout@v3
+
+       - name: Unit Testing
+         run: |
+           pip3 install nose2 nose2[coverage_plugin] scikit-learn \
+             numpy keras tensorflow keras_preprocessing
+
+           nose2 --start-dir tests \
+             --verbose \
+             --pretty-assert \
+             --with-coverage
+
+   # Build
+   build:
+     name: Build
+     runs-on: ubuntu-latest
+     environment: build
+     needs: testing
+
+     env:
+       version: latest
+       service: ml-service
+       region: asia-southeast2
+       project_id: ${{ secrets.GOOGLE_PROJECT }}
+
+     steps:
+       - name: Checkout
+         uses: actions/checkout@v3
+
+       - id: 'auth'
+         uses: 'google-github-actions/auth@v1'
+         with:
+           credentials_json: ${{ secrets.CONTAINER_REGISTRY_CREDENTIAL }}
+
+       - name: Setup Google Cloud
+         uses: google-github-actions/setup-gcloud@v1
+
+       - name: Setup Docker
+         run: gcloud auth configure-docker --quiet
+
+       # Push Docker Image to Google Container Registry
+       # --- DEPRECATED ---
+       # Container Registry is deprecated. After May 15, 2024,
+       # Artifact Registry will host images for the gcr.io domain
+       # in projects without previous Container Registry usage.
+       - name: Push Docker Image to Google Container Registry
+         run: |
+           git clone https://github.com/belajarqywok/fastapi-tensorflow-jaeger
+           cd fastapi-tensorflow-jaeger
+
+           docker build --tag $service:$version \
+             --file deployment/development.dockerfile .
+
+           docker tag $service:$version \
+             gcr.io/$project_id/$service:$version
+
+           docker push gcr.io/$project_id/$service:$version
+
+       # - name: Push Docker Image to Google Artifact Registry
+       #   env:
+       #     GOOGLE_PROJECT: ${{ secrets.GOOGLE_PROJECT }}
+       #   run: |
+       #     gcloud auth configure-docker asia-southeast2-docker.pkg.dev
+       #     docker build -t asia-southeast2-docker.pkg.dev/$GOOGLE_PROJECT/capstone/nginx:latest .
+       #     docker push asia-southeast2-docker.pkg.dev/$GOOGLE_PROJECT/capstone/nginx:latest
+
+   # Deploy
+   deploy:
+     name: Deploy
+     runs-on: ubuntu-latest
+     environment: deploy
+     needs: build
+
+     steps:
+       - name: Checkout
+         uses: actions/checkout@v3
+
+       - id: 'auth'
+         uses: 'google-github-actions/auth@v1'
+         with:
+           credentials_json: ${{ secrets.CLOUD_RUN_CREDENTIAL }}
+
+       - name: Setup Google Cloud
+         uses: google-github-actions/setup-gcloud@v1
+
+       # Deploy to Google Cloud Run
+       - name: Deploy to Google Cloud Run
+         uses: 'google-github-actions/deploy-cloudrun@v1'
+         id: deploy-cloud-run
+         with:
+           service: ml-service
+           image: gcr.io/${{ secrets.GOOGLE_PROJECT }}/ml-service:latest
+           region: asia-southeast2
+           flags: |
+             --cpu=4
+             --port=80
+             --cpu-boost
+             --memory=4Gi
+             --timeout=800
+             --concurrency=80
+             --min-instances=0
+             --max-instances=13
+             --project=${{ secrets.GOOGLE_PROJECT }}
+             --allow-unauthenticated
+             --vpc-egress=private-ranges-only
+             --vpc-connector=projects/${{ secrets.GOOGLE_PROJECT }}/locations/asia-southeast2/connectors/ml-service-connector
+             --service-account=${{ secrets.CLOUD_RUN_SA }}
+
+           env_vars: |
+             VERSION=v1
+             JWT_ACCESS_TOKEN_SECRET=${{ secrets.JWT_ACCESS_TOKEN_SECRET }}
+             JWT_REFRESH_TOKEN_SECRET=${{ secrets.JWT_REFRESH_TOKEN_SECRET }}
+             JWT_ALGORITHM=${{ secrets.JWT_ALGORITHM }}
+             JWT_ACCESS_TOKEN_EXPIRE=${{ secrets.JWT_ACCESS_TOKEN_EXPIRE }}
+             JWT_REFRESH_TOKEN_EXPIRE=${{ secrets.JWT_REFRESH_TOKEN_EXPIRE }}
+             POSTGRES_HOST=${{ secrets.POSTGRES_HOST }}
+             POSTGRES_PORT=${{ secrets.POSTGRES_PORT }}
+             POSTGRES_USER=${{ secrets.POSTGRES_USER }}
+             POSTGRES_PASS=${{ secrets.POSTGRES_PASS }}
+             POSTGRES_DB=${{ secrets.POSTGRES_DB }}
+
+       - name: Cloud Run URL Output
+         run: echo "${{ steps.deploy-cloud-run.outputs.url }}"
+
+       - name: Send Discord Notification
+         run: echo "send notification"
.gitignore ADDED
@@ -0,0 +1,141 @@
+ # Byte-compiled / optimized / DLL files
+ __pycache__/
+ *.py[cod]
+ *$py.class
+
+ # C extensions
+ *.so
+
+ # Distribution / packaging
+ .Python
+ build/
+ develop-eggs/
+ dist/
+ downloads/
+ eggs/
+ .eggs/
+ lib/
+ lib64/
+ parts/
+ sdist/
+ var/
+ wheels/
+ pip-wheel-metadata/
+ share/python-wheels/
+ *.egg-info/
+ .installed.cfg
+ *.egg
+ MANIFEST
+
+ # PyInstaller
+ # Usually these files are written by a python script from a template
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
+ *.manifest
+ *.spec
+
+ # Installer logs
+ pip-log.txt
+ pip-delete-this-directory.txt
+
+ # Unit test / coverage reports
+ htmlcov/
+ .tox/
+ .nox/
+ .coverage
+ .coverage.*
+ .cache
+ nosetests.xml
+ coverage.xml
+ *.cover
+ *.py,cover
+ .hypothesis/
+ .pytest_cache/
+
+ # Translations
+ *.mo
+ *.pot
+
+ # Django stuff:
+ *.log
+ local_settings.py
+ db.sqlite3
+ db.sqlite3-journal
+
+ # Flask stuff:
+ instance/
+ .webassets-cache
+
+ # Scrapy stuff:
+ .scrapy
+
+ # Sphinx documentation
+ docs/_build/
+
+ # PyBuilder
+ target/
+
+ # Jupyter Notebook
+ .ipynb_checkpoints
+
+ # IPython
+ profile_default/
+ ipython_config.py
+
+ # pyenv
+ .python-version
+
+ # pipenv
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
+ # install all needed dependencies.
+ #Pipfile.lock
+
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow
+ __pypackages__/
+
+ # Celery stuff
+ celerybeat-schedule
+ celerybeat.pid
+
+ # SageMath parsed files
+ *.sage.py
+
+ # Environments
+ .env
+ .venv
+ env/
+ venv/
+ ENV/
+ env.bak/
+ venv.bak/
+
+ # Spyder project settings
+ .spyderproject
+ .spyproject
+
+ # Rope project settings
+ .ropeproject
+
+ # mkdocs documentation
+ /site
+
+ # mypy
+ .mypy_cache/
+ .dmypy.json
+ dmypy.json
+
+ # Pyre type checker
+ .pyre/
+
+ # Terraform
+ /.terraform/providers/registry.terraform.io
+
+ # Credentials
+ docpet-project-ch2-ps068-a53f10c5c713.json
+ container-registry-sa.json
+ cloudrun-sa.json
+
+ # Development
+ envtest.bat
+ activate.bat
.terraform.lock.hcl ADDED
@@ -0,0 +1,22 @@
+ # This file is maintained automatically by "terraform init".
+ # Manual edits may be lost in future updates.
+
+ provider "registry.terraform.io/hashicorp/google" {
+   version     = "5.6.0"
+   constraints = "5.6.0"
+   hashes = [
+     "h1:9eXuahX5uApOxDb/3ywewY9Eja2MAvyb9bplbyBW218=",
+     "zh:102b6a2672fade82114eb14ed46923fb1b74be2aaca3a50b4f35f7057a9a94b9",
+     "zh:1a56b63175068c67efbe7d130986ba2839a938f5ffc96a14fd450153174dbfa3",
+     "zh:1ba1c5e0c86e8aaa8037406390846e78c89b63faf9e527c7874641f35d436e1b",
+     "zh:3f7161b9288b47cbe89d2f9675f78d83b58ad5880c793b01f50a71ee2583844b",
+     "zh:66912d6e4180dac37185d17424b345a9d4e3c3c791d45e0737b35e32c9536b35",
+     "zh:6f06f56e9fac2e55b50e74ffac42d9522bb379394e51dca1eddd4c3b7a68545c",
+     "zh:8741861ebfa13bb1ed74ea7f4865388a0725ca3a781b6d873ce45e6a4630fe41",
+     "zh:ae89a9c538665fbc30bb83aa3b13acb18d8380e551ccf242e1c0ab4d626089ab",
+     "zh:c510f8321c7599aa601b1870fdc0c76cbad3054ed5cc70fe8e37a13a8046a71f",
+     "zh:cf143a53d5a25c6216d09a9c0b115bb473ffcebd5c4c62b2b2594b1ebc13e662",
+     "zh:de05b957e5dfdbaf92db47cd9b3ef46a0f8d94599eea6d472928f33058856add",
+     "zh:f569b65999264a9416862bca5cd2a6177d94ccb0424f3a4ef424428912b9cb3c",
+   ]
+ }
.terraform/modules/modules.json ADDED
@@ -0,0 +1 @@
+ {"Modules":[{"Key":"","Source":"","Dir":"."},{"Key":"access_token_secret","Source":"./modules/secrets","Dir":"modules/secrets"},{"Key":"db_password_secret","Source":"./modules/secrets","Dir":"modules/secrets"},{"Key":"docpet_service_cloud_run","Source":"./modules/cloud_run","Dir":"modules/cloud_run"},{"Key":"pgsql_password_secret","Source":"./modules/secrets","Dir":"modules/secrets"},{"Key":"postgres_password_secret","Source":"./modules/secrets","Dir":"modules/secrets"},{"Key":"private_key_secret","Source":"./modules/secrets","Dir":"modules/secrets"},{"Key":"public_key_secret","Source":"./modules/secrets","Dir":"modules/secrets"},{"Key":"refresh_token_secret","Source":"./modules/secrets","Dir":"modules/secrets"}]}
.terraform/terraform.tfstate ADDED
@@ -0,0 +1,30 @@
+ {
+   "version": 3,
+   "serial": 1,
+   "lineage": "481f8758-12d3-9684-b4c4-6c3284d1905a",
+   "backend": {
+     "type": "gcs",
+     "config": {
+       "access_token": null,
+       "bucket": "docpet-terraform-state-store",
+       "credentials": null,
+       "encryption_key": null,
+       "impersonate_service_account": null,
+       "impersonate_service_account_delegates": null,
+       "kms_encryption_key": null,
+       "prefix": null,
+       "storage_custom_endpoint": null
+     },
+     "hash": 18446744071998496525
+   },
+   "modules": [
+     {
+       "path": [
+         "root"
+       ],
+       "outputs": {},
+       "resources": {},
+       "depends_on": []
+     }
+   ]
+ }
Dockerfile ADDED
@@ -0,0 +1,30 @@
+ FROM python:3.9-bullseye
+
+ LABEL organization="docpet"
+ LABEL team="cloud-computing-team"
+ LABEL backend_engineer="Putu Wira Pratama Putra"
+ LABEL Infra_engineer="Al-Fariqy Raihan Azhwar"
+
+ ENV PYTHONDONTWRITEBYTECODE 1
+ ENV PYTHONUNBUFFERED 1
+
+ RUN useradd -m -u 1000 user
+ USER user
+
+ WORKDIR /etc/docpet_backend_service
+
+ RUN python3 -m pip install --upgrade pip
+
+ COPY --chown=user requirements/linux.requirements.txt \
+   /etc/docpet_backend_service/
+
+ RUN pip3 install -r linux.requirements.txt
+
+ COPY --chown=user . /etc/docpet_backend_service/
+
+ RUN wget -O /etc/docpet_backend_service/ml_models/model.h5 \
+   https://github.com/belajarqywok/mbadhog/raw/main/model.h5
+
+ EXPOSE 7860
+
+ CMD [ "/bin/bash", "-c", "alembic upgrade head && uvicorn app:app --host 0.0.0.0 --port 7860 --workers 30" ]
Docpet_Backend.code-workspace ADDED
@@ -0,0 +1,8 @@
+ {
+   "folders": [
+     {
+       "path": "."
+     }
+   ],
+   "settings": {}
+ }
Makefile ADDED
@@ -0,0 +1,5 @@
+ dev:
+ 	docker-compose up -d
+
+ dev-down:
+ 	docker-compose down
README.md ADDED
@@ -0,0 +1,96 @@
+ ---
+ title: Docpet Backend Service
+ emoji: 👨‍⚕️
+ colorFrom: purple
+ colorTo: green
+ sdk: docker
+ python_version: "3.9"
+ app_file: app.py
+ pinned: false
+ ---
+
+ # Backend with FastAPI Docker Setup Guide
+
+ This guide walks you through setting up a FastAPI project with PostgreSQL using Docker and integrating Alembic for database migrations.
+
+ ## Prerequisites
+
+ - Docker installed on your machine
+ - Python and pip installed, along with the project dependencies:
+ ```bash
+ pip install -r requirements.txt
+ ```
+
+ ## Step 1: Initialize and Set Up the Project
+
+ Run the following command in your terminal to start the containers (and `docker-compose down` to stop them when finished):
+
+ ```bash
+ docker-compose up -d
+ docker-compose down
+ ```
+
+ ## Step 2: Install FastAPI and the Database Dependencies
+ ```bash
+ pip install fastapi[all]
+ pip install sqlalchemy psycopg2
+ ```
+
+ ## Step 3: Start the FastAPI Server
+ ```bash
+ uvicorn app.main:app --host localhost --port 8000 --reload
+ ```
+
+ Make a GET request to http://localhost:8000/api/checker in Postman or any API testing tool to verify the response:
+ ```json
+ {
+     "message": "Hello Docpet!"
+ }
+ ```
+ ## Step 4: Access the PostgreSQL Command Line in the Docker Container, Create an Admin Role, and Grant Privileges
+ ```bash
+ docker exec -it postgres psql -U postgres
+
+ CREATE USER admin WITH PASSWORD 'your_password';
+
+ GRANT ALL PRIVILEGES ON DATABASE docpet TO admin;
+ ```
+
+ ## Step 5: Access the PostgreSQL Docker Container Shell
+ ```bash
+ docker exec -it <container name> bash
+ ```
+
+ Access the running Postgres database with the command:
+ ```bash
+ psql -U admin <database name>
+ ```
+
+ ## Step 6: Install the uuid-ossp Extension
+ Execute the following SQL commands to list the available extensions and install uuid-ossp:
+ ```bash
+ select * from pg_available_extensions;
+
+ CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
+ ```
+
+ ## Step 7: Initialize Alembic for Database Migrations
+ Install Alembic and initialize its migration environment:
+ ```bash
+ pip install alembic
+ alembic init alembic
+ ```
+
+ ## Step 8: Create a Revision File for Database Changes
+ ```bash
+ alembic revision --autogenerate -m "create users table"
+ ```
+
+ ## Step 9: Apply Database Changes
+ ```bash
+ alembic upgrade head
+ ```
+
+ ## Step 10: View the Documentation
+ FastAPI automatically generates API documentation complying with OpenAPI standards.
+ Visit http://localhost:8000/docs to explore the API documentation.
alembic.ini ADDED
@@ -0,0 +1,105 @@
+ # A generic, single database configuration.
+
+ [alembic]
+ # path to migration scripts
+ script_location = alembic
+
+ # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
+ # Uncomment the line below if you want the files to be prepended with date and time
+ # see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
+ # for all available tokens
+ # file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
+
+ # sys.path path, will be prepended to sys.path if present.
+ # defaults to the current working directory.
+ prepend_sys_path = .
+
+ # timezone to use when rendering the date within the migration file
+ # as well as the filename.
+ # If specified, requires the python-dateutil library that can be
+ # installed by adding `alembic[tz]` to the pip requirements
+ # string value is passed to dateutil.tz.gettz()
+ # leave blank for localtime
+ # timezone =
+
+ # max length of characters to apply to the
+ # "slug" field
+ # truncate_slug_length = 40
+
+ # set to 'true' to run the environment during
+ # the 'revision' command, regardless of autogenerate
+ # revision_environment = false
+
+ # set to 'true' to allow .pyc and .pyo files without
+ # a source .py file to be detected as revisions in the
+ # versions/ directory
+ # sourceless = false
+
+ # version location specification; This defaults
+ # to alembic/versions. When using multiple version
+ # directories, initial revisions must be specified with --version-path.
+ # The path separator used here should be the separator specified by "version_path_separator" below.
+ # version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
+
+ # version path separator; As mentioned above, this is the character used to split
+ # version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
+ # If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
+ # Valid values for version_path_separator are:
+ #
+ # version_path_separator = :
+ # version_path_separator = ;
+ # version_path_separator = space
+ version_path_separator = os  # Use os.pathsep. Default configuration used for new projects.
+
+ # the output encoding used when revision files
+ # are written from script.py.mako
+ # output_encoding = utf-8
+
+ sqlalchemy.url = driver://user:pass@localhost/dbname
+
+
+ [post_write_hooks]
+ # post_write_hooks defines scripts or Python functions that are run
+ # on newly generated revision scripts. See the documentation for further
+ # detail and examples
+
+ # format using "black" - use the console_scripts runner, against the "black" entrypoint
+ # hooks = black
+ # black.type = console_scripts
+ # black.entrypoint = black
+ # black.options = -l 79 REVISION_SCRIPT_FILENAME
+
+ # Logging configuration
+ [loggers]
+ keys = root,sqlalchemy,alembic
+
+ [handlers]
+ keys = console
+
+ [formatters]
+ keys = generic
+
+ [logger_root]
+ level = WARN
+ handlers = console
+ qualname =
+
+ [logger_sqlalchemy]
+ level = WARN
+ handlers =
+ qualname = sqlalchemy.engine
+
+ [logger_alembic]
+ level = INFO
+ handlers =
+ qualname = alembic
+
+ [handler_console]
+ class = StreamHandler
+ args = (sys.stderr,)
+ level = NOTSET
+ formatter = generic
+
+ [formatter_generic]
+ format = %(levelname)-5.5s [%(name)s] %(message)s
+ datefmt = %H:%M:%S
alembic/README ADDED
@@ -0,0 +1 @@
+ Generic single-database configuration.
alembic/env.py ADDED
@@ -0,0 +1,86 @@
+ from logging.config import fileConfig
+
+ from sqlalchemy import engine_from_config
+ from sqlalchemy import pool
+
+ from alembic import context
+
+ from app.config import settings
+ from app.models import Base
+
+ # this is the Alembic Config object, which provides
+ # access to the values within the .ini file in use.
+ config = context.config
+
+ config.set_main_option(
+     "sqlalchemy.url",
+     f"postgresql+psycopg2://{settings.POSTGRES_USER}:{settings.POSTGRES_PASSWORD}@{settings.POSTGRES_HOSTNAME}:{settings.DATABASE_PORT}/{settings.POSTGRES_DB}"
+ )
+
+ # Interpret the config file for Python logging.
+ # This line sets up loggers basically.
+ if config.config_file_name is not None:
+     fileConfig(config.config_file_name)
+
+ # add your model's MetaData object here
+ # for 'autogenerate' support
+ # from myapp import mymodel
+ # target_metadata = mymodel.Base.metadata
+ target_metadata = Base.metadata
+
+ # other values from the config, defined by the needs of env.py,
+ # can be acquired:
+ # my_important_option = config.get_main_option("my_important_option")
+ # ... etc.
+
+
+ def run_migrations_offline() -> None:
+     """Run migrations in 'offline' mode.
+
+     This configures the context with just a URL
+     and not an Engine, though an Engine is acceptable
+     here as well. By skipping the Engine creation
+     we don't even need a DBAPI to be available.
+
+     Calls to context.execute() here emit the given string to the
+     script output.
+
+     """
+     url = config.get_main_option("sqlalchemy.url")
+     context.configure(
+         url=url,
+         target_metadata=target_metadata,
+         literal_binds=True,
+         dialect_opts={"paramstyle": "named"},
+     )
+
+     with context.begin_transaction():
+         context.run_migrations()
+
+
+ def run_migrations_online() -> None:
+     """Run migrations in 'online' mode.
+
+     In this scenario we need to create an Engine
+     and associate a connection with the context.
+
+     """
+     connectable = engine_from_config(
+         config.get_section(config.config_ini_section),
+         prefix="sqlalchemy.",
+         poolclass=pool.NullPool,
+     )
+
+     with connectable.connect() as connection:
+         context.configure(
+             connection=connection, target_metadata=target_metadata
+         )
+
+         with context.begin_transaction():
+             context.run_migrations()
+
+
+ if context.is_offline_mode():
+     run_migrations_offline()
+ else:
+     run_migrations_online()
alembic/script.py.mako ADDED
@@ -0,0 +1,24 @@
+ """${message}
+
+ Revision ID: ${up_revision}
+ Revises: ${down_revision | comma,n}
+ Create Date: ${create_date}
+
+ """
+ from alembic import op
+ import sqlalchemy as sa
+ ${imports if imports else ""}
+
+ # revision identifiers, used by Alembic.
+ revision = ${repr(up_revision)}
+ down_revision = ${repr(down_revision)}
+ branch_labels = ${repr(branch_labels)}
+ depends_on = ${repr(depends_on)}
+
+
+ def upgrade() -> None:
+     ${upgrades if upgrades else "pass"}
+
+
+ def downgrade() -> None:
+     ${downgrades if downgrades else "pass"}
alembic/versions/64174712dd01_migrate.py ADDED
@@ -0,0 +1,44 @@
+ """migrate
+
+ Revision ID: 64174712dd01
+ Revises: d4b392539c2c
+ Create Date: 2023-12-08 04:14:27.348117
+
+ """
+ from alembic import op
+ import sqlalchemy as sa
+ from sqlalchemy.dialects import postgresql
+
+ # revision identifiers, used by Alembic.
+ revision = '64174712dd01'
+ down_revision = 'd4b392539c2c'
+ branch_labels = None
+ depends_on = None
+
+
+ def upgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     op.create_table('doctors',
+         sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
+         sa.Column('name', sa.String(), nullable=False),
+         sa.Column('price', sa.String(), nullable=False),
+         sa.Column('rating', sa.String(), nullable=False),
+         sa.Column('role', sa.String(), server_default='Hewan', nullable=False),
+         sa.Column('role_detail', sa.String(), nullable=False),
+         sa.Column('day', sa.String(), nullable=True),
+         sa.Column('time', sa.String(), nullable=False),
+         sa.Column('location', sa.String(), nullable=False),
+         sa.Column('university', sa.String(), nullable=False),
+         sa.Column('yearofexperience', sa.String(), nullable=False),
+         sa.Column('created_at', sa.TIMESTAMP(timezone=True), server_default=sa.text('now()'), nullable=False),
+         sa.Column('updated_at', sa.TIMESTAMP(timezone=True), server_default=sa.text('now()'), nullable=False),
+         sa.PrimaryKeyConstraint('id'),
+         sa.UniqueConstraint('name')
+     )
+     # ### end Alembic commands ###
+
+
+ def downgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     op.drop_table('doctors')
+     # ### end Alembic commands ###
alembic/versions/af0bc5bbaf5d_initial_migration.py ADDED
@@ -0,0 +1,49 @@
+ """Initial migration
+
+ Revision ID: af0bc5bbaf5d
+ Revises: 64174712dd01
+ Create Date: 2024-07-18 03:46:35.639043
+
+ """
+ from alembic import op
+ import sqlalchemy as sa
+ from sqlalchemy.dialects import postgresql
+
+ # revision identifiers, used by Alembic.
+ revision = 'af0bc5bbaf5d'
+ down_revision = '64174712dd01'
+ branch_labels = None
+ depends_on = None
+
+
+ def upgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     op.create_table('pets',
+         sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
+         sa.Column('name', sa.String(), nullable=False),
+         sa.Column('age', sa.String(), nullable=False),
+         sa.Column('gender', sa.Enum('Jantan', 'Betina', name='gender'), nullable=False),
+         sa.Column('created_at', sa.TIMESTAMP(timezone=True), server_default=sa.text('now()'), nullable=False),
+         sa.Column('updated_at', sa.TIMESTAMP(timezone=True), server_default=sa.text('now()'), nullable=False),
+         sa.PrimaryKeyConstraint('id')
+     )
+     op.create_table('users',
+         sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
+         sa.Column('name', sa.String(), nullable=False),
+         sa.Column('email', sa.String(), nullable=False),
+         sa.Column('password', sa.String(), nullable=False),
+         sa.Column('verified', sa.Boolean(), server_default='False', nullable=False),
+         sa.Column('role', sa.String(), server_default='user', nullable=False),
+         sa.Column('created_at', sa.TIMESTAMP(timezone=True), server_default=sa.text('now()'), nullable=False),
+         sa.Column('updated_at', sa.TIMESTAMP(timezone=True), server_default=sa.text('now()'), nullable=False),
+         sa.PrimaryKeyConstraint('id'),
+         sa.UniqueConstraint('email')
+     )
+     # ### end Alembic commands ###
+
+
+ def downgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     op.drop_table('users')
+     op.drop_table('pets')
+     # ### end Alembic commands ###
alembic/versions/d4b392539c2c_create_users_table.py ADDED
@@ -0,0 +1,27 @@
+ """create users table
+
+ Revision ID: d4b392539c2c
+ Create Date: 2023-12-06 06:52:25.020699
+
+ """
+ from alembic import op
+ import sqlalchemy as sa
+
+
+ # revision identifiers, used by Alembic.
+ revision = 'd4b392539c2c'
+ down_revision = None
+ branch_labels = None
+ depends_on = None
+
+
+ def upgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     pass
+     # ### end Alembic commands ###
+
+
+ def downgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     pass
+     # ### end Alembic commands ###
app.py ADDED
@@ -0,0 +1,71 @@
+ from fastapi import FastAPI
+ from app.config import settings
+ from fastapi.responses import RedirectResponse
+ from fastapi.middleware.cors import CORSMiddleware
+ from app.routers import user, auth, upload, pet, disease_detection, doctor
+
+ app = FastAPI()
+
+ origins = [
+     settings.CLIENT_ORIGIN,
+ ]
+
+ app.add_middleware(
+     middleware_class = CORSMiddleware,
+     allow_origins = ["*"],
+     allow_methods = ["*"],
+     allow_headers = ["*"],
+     allow_credentials = True
+ )
+
+ # Auth Routes Group
+ app.include_router(
+     tags = ['Auth'],
+     router = auth.router,
+     prefix = '/api/auth'
+ )
+
+ # User Routes Group
+ app.include_router(
+     tags = ['Users'],
+     router = user.router,
+     prefix = '/api/users'
+ )
+
+ # Upload Routes Group
+ app.include_router(
+     tags = ['Upload'],
+     router = upload.router,
+     prefix = '/api/upload'
+ )
+
+ # Pet Routes Group
+ app.include_router(
+     tags = ['Pets'],
+     router = pet.router,
+     prefix = '/api/pets'
+ )
+
+ # Doctor Routes Group
+ app.include_router(
+     tags = ['Doctors'],
+     router = doctor.router,
+     prefix = '/api/doctors'
+ )
+
+ # Disease Detection Routes Group
+ app.include_router(
+     tags = ['Disease Detection'],
+     router = disease_detection.router,
+     prefix = '/api/disease_detection'
+ )
+
+ # Main Routes Group
+ @app.get('/api/checker', tags = ['Main'])
+ def checker():
+     return {'message': 'Hello Docpet!'}
+
+ @app.get("/", tags = ['Main'])
+ def root():
+     return RedirectResponse(url="/docs")
+
app/__init__.py ADDED
File without changes
app/config.py ADDED
@@ -0,0 +1,22 @@
+ from pydantic import BaseSettings, EmailStr
+
+ class Settings(BaseSettings):
+     DATABASE_PORT: int
+     POSTGRES_PASSWORD: str
+     POSTGRES_USER: str
+     POSTGRES_DB: str
+     POSTGRES_HOST: str
+     POSTGRES_HOSTNAME: str
+
+     JWT_PUBLIC_KEY: str
+     JWT_PRIVATE_KEY: str
+     REFRESH_TOKEN_EXPIRES_IN: int
+     ACCESS_TOKEN_EXPIRES_IN: int
+     JWT_ALGORITHM: str
+
+     CLIENT_ORIGIN: str
+
+     class Config:
+         env_file = './.env'
+
+ settings = Settings()
app/database.py ADDED
@@ -0,0 +1,21 @@
+ from sqlalchemy import create_engine
+ from sqlalchemy.ext.declarative import declarative_base
+ from sqlalchemy.orm import sessionmaker
+ from .config import settings
+
+ SQLALCHEMY_DATABASE_URL = f"postgresql://{settings.POSTGRES_USER}:{settings.POSTGRES_PASSWORD}@{settings.POSTGRES_HOSTNAME}:{settings.DATABASE_PORT}/{settings.POSTGRES_DB}"
+
+ engine = create_engine(
+     SQLALCHEMY_DATABASE_URL
+ )
+ SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
+
+ Base = declarative_base()
+
+
+ def get_db():
+     db = SessionLocal()
+     try:
+         yield db
+     finally:
+         db.close()
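`get_db` is written as a generator so FastAPI can open one session per request and close it in the `finally` block. A minimal sketch of how a router would consume it, assuming the app is running against the configured database (the `/ping` route is hypothetical, not part of this commit):

```python
from fastapi import APIRouter, Depends
from sqlalchemy import text
from sqlalchemy.orm import Session

from app.database import get_db

router = APIRouter()

@router.get("/ping")
def ping(db: Session = Depends(get_db)):
    # The session is injected per request and closed by get_db afterwards.
    return {"db_alive": db.execute(text("SELECT 1")).scalar() == 1}
```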
app/gcs.py ADDED
@@ -0,0 +1,19 @@
+ from google.cloud import storage
+
+ # def create_gcs_client():
+ #     return storage.Client.from_service_account_json(
+ #         json_credentials_path = 'docpet-project-ch2-ps068-a53f10c5c713.json'
+ #     )
+
+ def upload_blob(bucket_name, source_file_name, destination_blob_name):
+     print("iwan fals: 'dar der dor suara senapan...'")
+     # """Uploads a file to the bucket."""
+     # storage_client = create_gcs_client()
+     # bucket = storage_client.bucket(bucket_name)
+     # blob = bucket.blob(destination_blob_name)
+
+     # blob.upload_from_filename(source_file_name)
+
+     # print('File {} uploaded to {}.'.format(
+     #     source_file_name,
+     #     destination_blob_name))
app/main.py ADDED
@@ -0,0 +1,71 @@
+ from fastapi import FastAPI
+ from app.config import settings
+ from fastapi.responses import RedirectResponse
+ from fastapi.middleware.cors import CORSMiddleware
+ from app.routers import user, auth, upload, pet, disease_detection, doctor
+
+ app = FastAPI()
+
+ origins = [
+     settings.CLIENT_ORIGIN,
+ ]
+
+ app.add_middleware(
+     middleware_class = CORSMiddleware,
+     allow_origins = ["*"],
+     allow_methods = ["*"],
+     allow_headers = ["*"],
+     allow_credentials = True
+ )
+
+ # Auth Routes Group
+ app.include_router(
+     tags = ['Auth'],
+     router = auth.router,
+     prefix = '/api/auth'
+ )
+
+ # User Routes Group
+ app.include_router(
+     tags = ['Users'],
+     router = user.router,
+     prefix = '/api/users'
+ )
+
+ # Upload Routes Group
+ app.include_router(
+     tags = ['Upload'],
+     router = upload.router,
+     prefix = '/api/upload'
+ )
+
+ # Pet Routes Group
+ app.include_router(
+     tags = ['Pets'],
+     router = pet.router,
+     prefix = '/api/pets'
+ )
+
+ # Doctor Routes Group
+ app.include_router(
+     tags = ['Doctors'],
+     router = doctor.router,
+     prefix = '/api/doctors'
+ )
+
+ # Disease Detection Routes Group
+ app.include_router(
+     tags = ['Disease Detection'],
+     router = disease_detection.router,
+     prefix = '/api/disease_detection'
+ )
+
+ # Main Routes Group
+ @app.get('/api/checker', tags = ['Main'])
+ def checker():
+     return {'message': 'Hello Docpet!'}
+
+ @app.get("/", tags = ['Main'])
+ def root():
+     return RedirectResponse(url="/docs")
+
app/models.py ADDED
@@ -0,0 +1,50 @@
+ import uuid  # uuid is used to generate a random unique id for each user
+ from .database import Base
+ from sqlalchemy import TIMESTAMP, Column, String, Boolean, text, Enum
+ from sqlalchemy.dialects.postgresql import UUID
+
+ # User model
+ class User(Base):
+     __tablename__ = 'users'
+     id = Column(UUID(as_uuid=True), primary_key=True, nullable=False,
+                 default=uuid.uuid4)
+     name = Column(String, nullable=False)
+     email = Column(String, unique=True, nullable=False)
+     password = Column(String, nullable=False)
+     verified = Column(Boolean, nullable=False, server_default='False')
+     role = Column(String, server_default='user', nullable=False)
+     created_at = Column(TIMESTAMP(timezone=True),
+                         nullable=False, server_default=text("now()"))
+     updated_at = Column(TIMESTAMP(timezone=True),
+                         nullable=False, server_default=text("now()"))
+
+ class Pet(Base):
+     __tablename__ = 'pets'
+     id = Column(UUID(as_uuid=True), primary_key=True, nullable=False,
+                 default=uuid.uuid4)
+     name = Column(String, nullable=False)
+     age = Column(String, nullable=False)
+     gender = Column(Enum('Jantan', 'Betina', name='gender'), nullable=False)
+     created_at = Column(TIMESTAMP(timezone=True),
+                         nullable=False, server_default=text("now()"))
+     updated_at = Column(TIMESTAMP(timezone=True),
+                         nullable=False, server_default=text("now()"))
+
+ class Doctor(Base):
+     __tablename__ = 'doctors'
+     id = Column(UUID(as_uuid=True), primary_key=True, nullable=False,
+                 default=uuid.uuid4)
+     name = Column(String, unique=True, nullable=False)
+     price = Column(String, nullable=False)
+     rating = Column(String, nullable=False)
+     role = Column(String, server_default='Hewan', nullable=False)
+     role_detail = Column(String, nullable=False)
+     day = Column(String, nullable=True)
+     time = Column(String, nullable=False)
+     location = Column(String, nullable=False)
+     university = Column(String, nullable=False)
+     yearofexperience = Column(String, nullable=False)
+     created_at = Column(TIMESTAMP(timezone=True),
+                         nullable=False, server_default=text("now()"))
+     updated_at = Column(TIMESTAMP(timezone=True),
+                         nullable=False, server_default=text("now()"))
app/oauth2.py ADDED
@@ -0,0 +1,65 @@
+ import base64
+ from typing import List
+ from fastapi import Depends, HTTPException, status
+ from fastapi_jwt_auth import AuthJWT
+ from pydantic import BaseModel
+
+ from . import models
+ from .database import get_db
+ from sqlalchemy.orm import Session
+ from .config import settings
+
+
+ class Settings(BaseModel):
+     authjwt_algorithm: str = settings.JWT_ALGORITHM
+     authjwt_decode_algorithms: List[str] = [settings.JWT_ALGORITHM]
+     authjwt_token_location: set = {'cookies', 'headers'}
+     authjwt_access_cookie_key: str = 'access_token'
+     authjwt_refresh_cookie_key: str = 'refresh_token'
+     authjwt_cookie_csrf_protect: bool = False
+     authjwt_public_key: str = base64.b64decode(
+         settings.JWT_PUBLIC_KEY).decode('utf-8')
+     authjwt_private_key: str = base64.b64decode(
+         settings.JWT_PRIVATE_KEY).decode('utf-8')
+
+
+ @AuthJWT.load_config
+ def get_config():
+     return Settings()
+
+
+ class NotVerified(Exception):
+     pass
+
+
+ class UserNotFound(Exception):
+     pass
+
+
+ def require_user(db: Session = Depends(get_db), Authorize: AuthJWT = Depends()):
+     try:
+         Authorize.jwt_required()
+         user_id = Authorize.get_jwt_subject()
+         user = db.query(models.User).filter(models.User.id == user_id).first()
+
+         if not user:
+             raise UserNotFound('User no longer exists')
+
+         if not user.verified:
+             raise NotVerified('You are not verified')
+
+     except Exception as e:
+         error = e.__class__.__name__
+         print(error)
+         if error == 'MissingTokenError':
+             raise HTTPException(
+                 status_code=status.HTTP_401_UNAUTHORIZED, detail='You are not logged in')
+         if error == 'UserNotFound':
+             raise HTTPException(
+                 status_code=status.HTTP_401_UNAUTHORIZED, detail='User no longer exists')
+         if error == 'NotVerified':
+             raise HTTPException(
+                 status_code=status.HTTP_401_UNAUTHORIZED, detail='Please verify your account')
+         raise HTTPException(
+             status_code=status.HTTP_401_UNAUTHORIZED, detail='Token is invalid or has expired')
+     return user_id
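`require_user` is itself a dependency, so protecting any route is one `Depends` away; `app/routers/auth.py` below uses it this way for `/logout`. A minimal sketch with a hypothetical `/me` route:

```python
from fastapi import APIRouter, Depends

from app.oauth2 import require_user

router = APIRouter()

@router.get("/me")
def read_me(user_id: str = Depends(require_user)):
    # require_user returns the JWT subject (the user's UUID) or raises 401.
    return {"user_id": user_id}
```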
app/routers/auth.py ADDED
@@ -0,0 +1,116 @@
+ from datetime import timedelta
+ from fastapi import APIRouter, Request, Response, status, Depends, HTTPException
+ from pydantic import EmailStr
+
+ from app import oauth2
+ from .. import schemas, models, utils
+ from sqlalchemy.orm import Session
+ from ..database import get_db
+ from app.oauth2 import AuthJWT
+ from ..config import settings
+
+ router = APIRouter()
+ ACCESS_TOKEN_EXPIRES_IN = settings.ACCESS_TOKEN_EXPIRES_IN
+ REFRESH_TOKEN_EXPIRES_IN = settings.REFRESH_TOKEN_EXPIRES_IN
+
+ @router.post('/register', status_code=status.HTTP_201_CREATED, response_model=schemas.UserResponse)
+ async def create_user(payload: schemas.CreateUserSchema, db: Session = Depends(get_db)):
+     # Check if the user already exists
+     user = db.query(models.User).filter(
+         models.User.email == EmailStr(payload.email.lower())).first()
+     if user:
+         raise HTTPException(status_code=status.HTTP_409_CONFLICT,
+                             detail='Account already exists')
+     # Compare password and passwordConfirm
+     if payload.password != payload.passwordConfirm:
+         raise HTTPException(
+             status_code=status.HTTP_400_BAD_REQUEST, detail='Passwords do not match')
+     # Hash the password
+     payload.password = utils.hash_password(payload.password)
+     del payload.passwordConfirm
+     payload.role = 'user'
+     payload.verified = True
+     payload.email = EmailStr(payload.email.lower())
+     new_user = models.User(**payload.dict())
+     db.add(new_user)
+     db.commit()
+     db.refresh(new_user)
+     return new_user
+
+
+ @router.post('/login')
+ def login(payload: schemas.LoginUserSchema, response: Response, db: Session = Depends(get_db), Authorize: AuthJWT = Depends()):
+     # Check if the user exists
+     user = db.query(models.User).filter(
+         models.User.email == EmailStr(payload.email.lower())).first()
+     if not user:
+         raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
+                             detail='Incorrect Email or Password')
+
+     # Check if the user has verified their email
+     if not user.verified:
+         raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED,
+                             detail='Please verify your email address')
+
+     # Check if the password is valid
+     if not utils.verify_password(payload.password, user.password):
+         raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
+                             detail='Incorrect Email or Password')
+
+     # Create access token
+     access_token = Authorize.create_access_token(
+         subject=str(user.id), expires_time=timedelta(minutes=ACCESS_TOKEN_EXPIRES_IN))
+
+     # Create refresh token
+     refresh_token = Authorize.create_refresh_token(
+         subject=str(user.id), expires_time=timedelta(minutes=REFRESH_TOKEN_EXPIRES_IN))
+
+     # Store refresh and access tokens in cookies
+     response.set_cookie('access_token', access_token, ACCESS_TOKEN_EXPIRES_IN * 60,
+                         ACCESS_TOKEN_EXPIRES_IN * 60, '/', None, False, True, 'lax')
+     response.set_cookie('refresh_token', refresh_token,
+                         REFRESH_TOKEN_EXPIRES_IN * 60, REFRESH_TOKEN_EXPIRES_IN * 60, '/', None, False, True, 'lax')
+     response.set_cookie('logged_in', 'True', ACCESS_TOKEN_EXPIRES_IN * 60,
+                         ACCESS_TOKEN_EXPIRES_IN * 60, '/', None, False, False, 'lax')
+
+     # Send the access token back to the client
+     return {'status': 'success', 'access_token': access_token}
+
+
+ @router.get('/refresh')
+ def refresh_token(response: Response, request: Request, Authorize: AuthJWT = Depends(), db: Session = Depends(get_db)):
+     try:
+         print(Authorize._refresh_cookie_key)
+         Authorize.jwt_refresh_token_required()
+
+         user_id = Authorize.get_jwt_subject()
+         if not user_id:
+             raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED,
+                                 detail='Could not refresh access token')
+         user = db.query(models.User).filter(models.User.id == user_id).first()
+         if not user:
+             raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED,
+                                 detail='The user belonging to this token no longer exists')
+         access_token = Authorize.create_access_token(
+             subject=str(user.id), expires_time=timedelta(minutes=ACCESS_TOKEN_EXPIRES_IN))
+     except Exception as e:
+         error = e.__class__.__name__
+         if error == 'MissingTokenError':
+             raise HTTPException(
+                 status_code=status.HTTP_400_BAD_REQUEST, detail='Please provide a refresh token')
+         raise HTTPException(
+             status_code=status.HTTP_400_BAD_REQUEST, detail=error)
+
+     response.set_cookie('access_token', access_token, ACCESS_TOKEN_EXPIRES_IN * 60,
+                         ACCESS_TOKEN_EXPIRES_IN * 60, '/', None, False, True, 'lax')
+     response.set_cookie('logged_in', 'True', ACCESS_TOKEN_EXPIRES_IN * 60,
+                         ACCESS_TOKEN_EXPIRES_IN * 60, '/', None, False, False, 'lax')
+     return {'access_token': access_token}
+
+
+ @router.get('/logout', status_code=status.HTTP_200_OK)
+ def logout(response: Response, Authorize: AuthJWT = Depends(), user_id: str = Depends(oauth2.require_user)):
+     Authorize.unset_jwt_cookies()
+     response.set_cookie('logged_in', '', -1)
+
+     return {'status': 'success'}
app/routers/disease_detection.py ADDED
@@ -0,0 +1,90 @@
+ import os
+ import time
+ import string
+ import secrets
+ from http import HTTPStatus
+ from app.gcs import upload_blob
+ from app.oauth2 import require_user
+ from app.utils import image_prediction
+ from fastapi.responses import JSONResponse
+ from fastapi import UploadFile, File, Depends, APIRouter
+
+ router = APIRouter()
+
+ @router.post("/")
+ async def disease_detection(
+     file: UploadFile = File(...),
+     # _: str = Depends(require_user)
+ ) -> JSONResponse:
+     try:
+         file.file.seek(0, 2)
+         file_size = file.file.tell()
+
+         await file.seek(0)
+
+         # 1 kb = 1024 bytes
+         # 1 mb = 1024 kb
+         if file_size > 10 * 1024 * 1024:
+             # if more than 10 mb
+             return JSONResponse(
+                 content = {
+                     'message': 'file too large (MAX: 10 MB)',
+                     'status_code': HTTPStatus.BAD_REQUEST,
+                     'data': None
+                 },
+                 status_code = HTTPStatus.BAD_REQUEST
+             )
+
+         content_type = file.content_type
+         if content_type not in ["image/jpeg", "image/jpg", "image/png"]:
+             return JSONResponse(
+                 content = {
+                     'message': 'invalid file type',
+                     'status_code': HTTPStatus.BAD_REQUEST,
+                     'data': None
+                 },
+                 status_code = HTTPStatus.BAD_REQUEST
+             )
+
+         file_location = f"temp/{file.filename}"
+         os.makedirs(os.path.dirname(file_location), exist_ok=True)
+
+         with open(file_location, "wb+") as file_object:
+             file_object.write(file.file.read())
+
+         # Prediction Result
+         predict_result = image_prediction(file_location)
+
+         timestamp = str(int(time.time()))
+         random_string = ''.join(
+             secrets.choice(string.ascii_letters + string.digits) for _ in range(64)
+         )
+
+         # Upload to Google Cloud Storage
+         upload_blob(
+             bucket_name = "docpet-dev-test",
+             source_file_name = file_location,
+             destination_blob_name = f"{timestamp}-{random_string}.jpeg"
+         )
+
+         os.remove(file_location)
+
+         return JSONResponse(
+             content = {
+                 'message': 'prediction success',
+                 'status_code': HTTPStatus.OK,
+                 'data': predict_result
+             },
+             status_code = HTTPStatus.OK
+         )
+
+     except Exception as e:
+         print(e)
+         return JSONResponse(
+             content = {
+                 'message': 'Internal Server Error',
+                 'status_code': HTTPStatus.INTERNAL_SERVER_ERROR,
+                 'data': None
+             },
+             status_code = HTTPStatus.INTERNAL_SERVER_ERROR
+         )
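A minimal client-side sketch for exercising this endpoint with the `requests` library; the host and port assume the uvicorn invocation from the README, and `cat.jpeg` is a placeholder filename. The endpoint enforces the 10 MB limit and the jpeg/jpg/png content types checked above:

```python
import requests

# Hypothetical local file; any jpeg/jpg/png under 10 MB is accepted.
with open("cat.jpeg", "rb") as image:
    response = requests.post(
        "http://localhost:8000/api/disease_detection/",
        files={"file": ("cat.jpeg", image, "image/jpeg")},
    )
print(response.status_code, response.json())
```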
app/routers/doctor.py ADDED
@@ -0,0 +1,66 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
+ import uuid
+ from fastapi import APIRouter, HTTPException
+ from app.schemas import CreateDoctorSchema, UpdateDoctorSchema, DoctorResponse
+ from app.models import Doctor
+ from app.database import SessionLocal
+
+ router = APIRouter()
+
+ @router.get("/")
+ async def get_doctors():
+     db = SessionLocal()
+     try:
+         doctors = db.query(Doctor).all()
+         # lowercase loop variable so the Doctor model class is not shadowed
+         return [
+             {
+                 "id": doctor.id,
+                 "name": doctor.name,
+                 "role_detail": doctor.role_detail,
+                 "rating": doctor.rating,
+                 "price": doctor.price,
+                 "university": doctor.university,
+                 "yearofexperience": doctor.yearofexperience
+             }
+             for doctor in doctors
+         ]
+     except Exception as e:
+         db.rollback()
+         raise HTTPException(status_code=400, detail=str(e))
+     finally:
+         db.close()
+
+ @router.get("/{doctor_id}")
+ async def get_doctor(doctor_id: uuid.UUID):
+     db = SessionLocal()
+     try:
+         doctor = db.query(Doctor).filter(Doctor.id == doctor_id).first()
+         if not doctor:
+             raise HTTPException(status_code=404, detail="Doctor not found")
+         return doctor
+     except HTTPException:
+         # let the 404 above pass through instead of re-wrapping it as a 400
+         raise
+     except Exception as e:
+         db.rollback()
+         raise HTTPException(status_code=400, detail=str(e))
+     finally:
+         db.close()
+
+ @router.post("/")
+ async def create_doctor(doctor: CreateDoctorSchema):
+     db = SessionLocal()
+     try:
+         new_doctor = Doctor(**doctor.dict())
+         db.add(new_doctor)
+         db.commit()
+         db.refresh(new_doctor)
+         return {"message": "Doctor created successfully", "doctor": new_doctor.__dict__}
+     except Exception as e:
+         db.rollback()
+         raise HTTPException(status_code=400, detail=str(e))
+     finally:
+         db.close()
+
+ @router.put("/{doctor_id}")
+ async def update_doctor(doctor_id: uuid.UUID, doctor: UpdateDoctorSchema):
+     db = SessionLocal()
+     try:
+         existing_doctor = db.query(Doctor).filter(Doctor.id == doctor_id).first()
+         if not existing_doctor:
+             raise HTTPException(status_code=404, detail="Doctor not found")
+         for attr, value in doctor.dict().items():
+             setattr(existing_doctor, attr, value)
+         db.commit()
+         return {"message": "Doctor updated successfully", "doctor": existing_doctor.__dict__}
+     except HTTPException:
+         raise
+     except Exception as e:
+         db.rollback()
+         raise HTTPException(status_code=400, detail=str(e))
+     finally:
+         db.close()
+
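
Note: the handlers above take their payloads from CreateDoctorSchema / UpdateDoctorSchema (defined in app/schemas.py further down this diff, where every field is a string). A hedged sketch of creating a doctor, again assuming a /api/doctors mount on localhost:8000:

    import requests

    payload = {  # illustrative values only
        "name": "drh. Example", "price": "50000", "rating": "4.8",
        "role": "Veterinarian", "role_detail": "Eye specialist",
        "day": "Mon-Fri", "time": "09:00-17:00", "location": "Jakarta",
        "university": "IPB", "yearofexperience": "5",
    }
    print(requests.post("http://localhost:8000/api/doctors/", json=payload).json())
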
app/routers/pet.py ADDED
@@ -0,0 +1,50 @@
+ import uuid
+ from fastapi import APIRouter, HTTPException
+ from app.schemas import CreatePetSchema, UpdatePetSchema
+ from app.models import Pet
+ from app.database import SessionLocal
+
+ router = APIRouter()
+
+ @router.get("/")
+ async def get_pets():
+     db = SessionLocal()
+     try:
+         pets = db.query(Pet).all()
+         return [{"id": pet.id, "name": pet.name, "age": pet.age} for pet in pets]
+     except Exception as e:
+         db.rollback()
+         raise HTTPException(status_code=400, detail=str(e))
+     finally:
+         db.close()
+
+ @router.post("/")
+ async def create_pet(pet: CreatePetSchema):
+     db = SessionLocal()
+     try:
+         new_pet = Pet(**pet.dict())
+         db.add(new_pet)
+         db.commit()
+         return {"id": new_pet.id, "name": new_pet.name, "age": new_pet.age}
+     except Exception as e:
+         db.rollback()
+         raise HTTPException(status_code=400, detail=str(e))
+     finally:
+         db.close()
+
+ # no response_model here: UpdatePetSchema has no "id" field, so declaring it
+ # as the response model would strip the id from the returned payload
+ @router.put("/{pet_id}")
+ async def update_pet(pet_id: uuid.UUID, pet: UpdatePetSchema):
+     db = SessionLocal()
+     try:
+         existing_pet = db.query(Pet).filter(Pet.id == pet_id).first()
+         if not existing_pet:
+             raise HTTPException(status_code=404, detail="Pet not found")
+         for attr, value in pet.dict().items():
+             setattr(existing_pet, attr, value)
+         db.commit()
+         return {"id": existing_pet.id, "name": existing_pet.name, "age": existing_pet.age, "gender": existing_pet.gender}
+     except HTTPException:
+         # let the 404 pass through instead of re-wrapping it as a 400
+         raise
+     except Exception as e:
+         db.rollback()
+         raise HTTPException(status_code=400, detail=str(e))
+     finally:
+         db.close()
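
Note: updating a pet follows the same pattern; gender must be one of the GenderEnum values ('Jantan' or 'Betina') from app/schemas.py. A sketch assuming a /api/pets mount and an existing pet id:

    import requests

    pet_id = "00000000-0000-0000-0000-000000000000"  # placeholder UUID
    body = {"name": "Milo", "gender": "Jantan", "age": "2 tahun"}
    print(requests.put(f"http://localhost:8000/api/pets/{pet_id}", json=body).json())
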
app/routers/upload.py ADDED
@@ -0,0 +1,17 @@
+ from fastapi import UploadFile, File, APIRouter
+ from app.gcs import upload_blob
+ import os
+
+ router = APIRouter()
+
+ @router.post("/")
+ async def upload_file(file: UploadFile = File(...)):
+     try:
+         file_location = f"temp/{file.filename}"
+         os.makedirs(os.path.dirname(file_location), exist_ok=True)
+         with open(file_location, "wb+") as file_object:
+             file_object.write(file.file.read())
+         upload_blob("docpet-dev-test", file_location, file.filename)
+         os.remove(file_location)  # clean up the local temp copy once uploaded
+         return {"success": True, "filename": file.filename, "message": "File uploaded successfully"}
+     except Exception as e:
+         return {"success": False, "message": str(e)}
app/routers/user.py ADDED
@@ -0,0 +1,12 @@
+ from fastapi import APIRouter, Depends
+ from ..database import get_db
+ from sqlalchemy.orm import Session
+ from .. import models, schemas, oauth2
+
+ router = APIRouter()
+
+
+ @router.get('/me', response_model=schemas.UserResponse)
+ def get_me(db: Session = Depends(get_db), user_id: str = Depends(oauth2.require_user)):
+     user = db.query(models.User).filter(models.User.id == user_id).first()
+     return user
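
Note: /me is guarded by oauth2.require_user, so a client has to log in first and carry the JWT cookie along. A sketch under the assumption that the auth router from earlier in this diff is mounted at /api/auth and sets the access-token cookie on login (both paths are assumptions):

    import requests

    session = requests.Session()  # keeps the auth cookie between calls
    session.post("http://localhost:8000/api/auth/login",
                 json={"email": "a@example.com", "password": "password123"})
    print(session.get("http://localhost:8000/api/users/me").json())
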
app/schemas.py ADDED
@@ -0,0 +1,67 @@
+ from datetime import datetime
+ import uuid
+ from pydantic import BaseModel, EmailStr, constr, Field
+ from enum import Enum
+
+ class UserBaseSchema(BaseModel):
+     name: str
+     email: EmailStr
+
+     class Config:
+         orm_mode = True
+
+
+ class CreateUserSchema(UserBaseSchema):
+     password: constr(min_length=8)
+     passwordConfirm: str
+     role: str = 'user'
+     verified: bool = False
+
+
+ class LoginUserSchema(BaseModel):
+     email: EmailStr
+     password: constr(min_length=8)
+
+
+ class UserResponse(UserBaseSchema):
+     id: uuid.UUID
+     created_at: datetime
+     updated_at: datetime
+
+ class GenderEnum(str, Enum):
+     Jantan = 'Jantan'
+     Betina = 'Betina'
+
+ class PetBaseSchema(BaseModel):
+     name: constr(max_length=20) = Field(..., description="Pet's name")
+     gender: GenderEnum = Field(..., description="Pet's gender")
+     age: str = Field(..., description="Pet's age")
+
+ class CreatePetSchema(PetBaseSchema):
+     pass
+
+ class UpdatePetSchema(PetBaseSchema):
+     pass
+
+ class DoctorBaseSchema(BaseModel):
+     name: constr(max_length=20) = Field(..., description="Doctor's name")
+     price: str = Field(..., description="Doctor's price")
+     rating: str = Field(..., description="Doctor's rating")
+     role: str = Field(..., description="Doctor's role")
+     role_detail: str = Field(..., description="Doctor's role detail")
+     day: str = Field(..., description="Doctor's consultation day")
+     time: str = Field(..., description="Doctor's consultation time")
+     location: str = Field(..., description="Doctor's location")
+     university: str = Field(..., description="Doctor's university")
+     yearofexperience: str = Field(..., description="Doctor's years of experience")
+
+ class CreateDoctorSchema(DoctorBaseSchema):
+     pass
+
+ class UpdateDoctorSchema(DoctorBaseSchema):
+     pass
+
+ class DoctorResponse(DoctorBaseSchema):
+     id: uuid.UUID
+     created_at: datetime
+     updated_at: datetime
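
Note: these Pydantic models carry the request/response validation; CreateUserSchema, for example, rejects passwords shorter than eight characters via constr(min_length=8). A quick sketch of that behavior:

    from pydantic import ValidationError
    from app.schemas import CreateUserSchema

    try:
        CreateUserSchema(name="a", email="a@example.com",
                         password="short", passwordConfirm="short")
    except ValidationError as err:
        print(err)  # reports that password fails the min_length=8 constraint
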
app/utils.py ADDED
@@ -0,0 +1,34 @@
+ import json
+ import numpy as np
+ from typing import Optional
+ from passlib.context import CryptContext
+ from tensorflow.keras.models import load_model
+ from tensorflow.keras.utils import load_img, img_to_array
+
+ pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
+
+ def hash_password(password: str):
+     return pwd_context.hash(password)
+
+ def verify_password(password: str, hashed_password: str):
+     return pwd_context.verify(password, hashed_password)
+
+ def image_prediction(image_location: str) -> Optional[dict]:
+     with open("./ml_models/labels.json") as labels_file:
+         labels = json.load(labels_file).get("disease_labels")
+
+     # NOTE: the model is reloaded from disk on every call
+     model = load_model('./ml_models/model.h5')
+
+     image = load_img(image_location, target_size = (224, 224))
+
+     x = np.expand_dims(a = img_to_array(image), axis = 0)
+
+     images = np.vstack(tup = [x])
+     classes = model.predict(x = images, batch_size = 32)
+
+     for idx_predict, class_value in enumerate(classes[0]):
+         if class_value == 1:
+             label = labels[idx_predict]
+             break
+     else:
+         # for/else: runs only when no class scored exactly 1
+         label = None
+
+     return label
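
Note: the bcrypt helpers above are a thin wrapper around passlib's CryptContext; hashing is one-way and verification compares a candidate password against the stored hash:

    from app.utils import hash_password, verify_password

    hashed = hash_password("hunter2-hunter2")
    assert verify_password("hunter2-hunter2", hashed)      # matches the original
    assert not verify_password("wrong-password", hashed)   # anything else fails
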
deployment/development.dockerfile ADDED
@@ -0,0 +1,25 @@
+ FROM python:3.9-bullseye
+
+ LABEL team="cloud-computing-team"
+
+ # Don't write .pyc files; keep stdout/stderr unbuffered
+ ENV PYTHONDONTWRITEBYTECODE=1
+ ENV PYTHONUNBUFFERED=1
+
+ # Set Working Directory
+ WORKDIR /etc/docpet_backend_service
+
+ # Upgrade PIP
+ RUN python3 -m pip install --upgrade pip
+
+ # Install the Requirements
+ COPY requirements/linux.requirements.txt \
+      /etc/docpet_backend_service/
+
+ RUN pip3 install -r linux.requirements.txt
+
+ COPY . /etc/docpet_backend_service/
+
+ EXPOSE 8000
+
+ CMD [ "/bin/bash", "-c", "alembic upgrade head && uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload" ]
deployment/production.dockerfile ADDED
@@ -0,0 +1,25 @@
+ FROM python:3.9-bullseye
+
+ LABEL team="cloud-computing-team"
+
+ # Don't write .pyc files; keep stdout/stderr unbuffered
+ ENV PYTHONDONTWRITEBYTECODE=1
+ ENV PYTHONUNBUFFERED=1
+
+ # Set Working Directory
+ WORKDIR /etc/docpet_backend_service
+
+ # Upgrade PIP
+ RUN python3 -m pip install --upgrade pip
+
+ # Install the Requirements
+ COPY requirements/linux.requirements.txt \
+      /etc/docpet_backend_service/
+
+ RUN pip3 install -r linux.requirements.txt
+
+ COPY . /etc/docpet_backend_service/
+
+ EXPOSE 80
+
+ CMD [ "/bin/bash", "-c", "alembic upgrade head && uvicorn app.main:app --host 0.0.0.0 --port 80 --workers 10" ]
docker-compose.yml ADDED
@@ -0,0 +1,15 @@
+ version: '3.3'
+ services:
+   postgres:
+     image: postgres
+     container_name: postgres
+     ports:
+       - '6500:5432'
+     restart: always
+     env_file:
+       - ./.env
+     volumes:
+       - postgres-db:/var/lib/postgresql/data
+
+ volumes:
+   postgres-db:
main.tf ADDED
@@ -0,0 +1,19 @@
+ terraform {
+   required_version = ">= 1.0"
+
+   required_providers {
+     google = {
+       source  = "hashicorp/google"
+       version = "5.6.0"
+     }
+   }
+
+   backend "gcs" {
+     bucket = "docpet-terraform-state-store"
+   }
+ }
+
+ provider "google" {
+   project = var.project_id
+   region  = var.region
+ }
ml_models/labels.json ADDED
@@ -0,0 +1,31 @@
+ {
+   "disease_labels": [
+     {
+       "name": "Mata Katarak",
+       "symptom": "Katarak dapat menunjukkan gejala seperti perubahan warna mata yang keruh atau kabur, matanya terlihat kebiruan atau putih, kehilangan kemampuan untuk melihat dengan jelas, hingga penurunan aktivitas atau ketidakmampuan untuk menangkap pergerakan dengan cepat. Hewan mungkin terlihat lebih tidak aktif atau cenderung menabrak benda-benda di sekitarnya. Gejala ini dapat berkembang secara perlahan dan bertahap. Penting untuk segera konsultasikan ke dokter hewan jika ada indikasi katarak, karena pengobatan dini dapat membantu mencegah kemungkinan penurunan penglihatan yang lebih lanjut.",
+       "treatment": "Katarak dapat diobati dengan pembedahan untuk mengangkat lensa mata yang keruh dan menggantinya dengan lensa buatan. Setelah operasi, penggunaan tetes mata antibiotik dan antiinflamasi diresepkan untuk mencegah infeksi dan mengurangi peradangan. Terapi medis atau perubahan pola makan juga dapat membantu jika katarak belum parah namun mulai mengganggu penglihatan. Penting untuk pemantauan berkala oleh dokter hewan untuk memastikan pemulihan yang optimal.",
+       "information": "Katarak pada hewan peliharaan dapat dipicu oleh faktor genetik, usia, cedera, atau kondisi medis tertentu seperti diabetes. Gejalanya bisa berkembang secara perlahan dan terkadang sulit untuk terdeteksi pada awalnya. Pemantauan secara rutin terhadap mata peliharaan serta kunjungan teratur ke dokter hewan dapat membantu mendeteksi katarak lebih awal dan mengurangi risiko penurunan penglihatan yang signifikan. Makanan khusus untuk kesehatan mata dan suplemen tertentu juga dapat direkomendasikan untuk membantu menjaga kesehatan mata hewan peliharaan."
+     },
+
+     {
+       "name": "Mata Glaukoma",
+       "symptom": "Glaukoma ditandai dengan gejala seperti pupil yang melebar, mata yang kemerahan, peningkatan tekanan bola mata, pembengkakan mata, dan kadang-kadang terlihat benjolan pada mata. Hewan peliharaan mungkin mengalami sakit atau tidak nyaman, dapat terjadi perubahan dalam perilaku mereka seperti menjadi lebih sensitif terhadap cahaya atau menggosok-gosokkan matanya secara berlebihan. Penglihatan hewan juga bisa terpengaruh, yang dapat dilihat dari perilaku mereka yang lebih waspada atau kesulitan melihat dengan jelas. Deteksi dini dan penanganan oleh dokter hewan penting untuk mencegah kerusakan permanen pada mata.",
+       "treatment": "Pengobatan glaukoma bergantung pada tingkat keparahan dan penyebabnya. Dokter hewan dapat meresepkan obat tetes mata untuk mengurangi tekanan bola mata. Dalam beberapa kasus, prosedur bedah seperti iridektomi (pembuatan lubang kecil pada iris) atau enukleasi (pengangkatan mata) mungkin diperlukan jika glaukoma sudah parah dan tidak merespons pengobatan medis. Perawatan tambahan seperti terapi penghilang rasa sakit juga sering direkomendasikan untuk mengurangi ketidaknyamanan hewan peliharaan. Penting untuk segera berkonsultasi dengan dokter hewan untuk penanganan yang tepat sesuai dengan kondisi individu hewan.",
+       "information": "Glaukoma pada hewan terjadi ketika tekanan di dalam bola mata meningkat secara signifikan, menyebabkan kerusakan pada saraf mata. Ini bisa bersifat primer (idiopatik) atau sekunder, terkait dengan cedera, infeksi, atau masalah mata lainnya. Glaukoma seringkali merupakan kondisi darurat karena dapat menyebabkan kebutaan dalam waktu singkat jika tidak ditangani dengan cepat. Pemantauan teratur tekanan bola mata, penggunaan obat tetes mata untuk mengontrol tekanan, serta pemeriksaan rutin oleh dokter hewan sangat penting untuk mengelola kondisi ini dan mencegah kerusakan mata yang parah."
+     },
+
+     {
+       "name": "Mata Konjungtivitis",
+       "symptom": "Konjungtivitis ditandai oleh mata yang merah, bengkak, dan mungkin mengeluarkan lebih banyak air mata atau sekresi yang tidak normal, seperti lendir atau nanah. Hewan tersebut mungkin merasa tidak nyaman, menunjukkan tanda-tanda gatal atau sensasi terbakar di sekitar mata, yang bisa menyebabkan mereka menggaruk atau menggosok area tersebut. Kasus yang lebih parah dapat menyebabkan kelopak mata menutup atau menempel bersama karena pembengkakan atau sekresi. Sensitivitas terhadap cahaya juga bisa menjadi gejala. Penting untuk membawa hewan peliharaan ke dokter hewan untuk diagnosis yang tepat dan perawatan yang sesuai, karena konjungtivitis bisa disebabkan oleh berbagai faktor seperti infeksi bakteri, virus, alergi, atau iritasi oleh benda asing. Dokter hewan akan dapat menentukan penyebabnya dan meresepkan pengobatan yang sesuai untuk mengatasi kondisi mata tersebut.",
+       "treatment": "Pengobatan untuk konjungtivitis pada umumnya melibatkan pendekatan yang spesifik terhadap penyebabnya. Dokter hewan sering meresepkan tetes mata atau salep mata yang mengandung antibiotik, antiviral, atau bahan yang sesuai dengan penyebab infeksi mata tersebut. Selain itu, pembersihan mata secara teratur dengan larutan khusus atau air steril juga direkomendasikan untuk menghilangkan sekresi dan bahan iritan dari mata. Terapi kompres hangat dengan lap bersih dapat membantu mengurangi pembengkakan serta meningkatkan kenyamanan hewan. Pengobatan pendukung seperti antihistamin atau perubahan lingkungan juga mungkin direkomendasikan jika konjungtivitis disebabkan oleh alergi. Perawatan tambahan seperti antibiotik atau prosedur medis lainnya bisa diperlukan tergantung pada keparahan infeksi atau komplikasi yang mungkin terjadi. Penting untuk mengikuti instruksi dokter hewan dengan teliti dan tidak menggunakan obat-obatan tanpa persetujuan dokter, serta memantau perkembangan gejala. Jika gejalanya tidak membaik atau memburuk, segera konsultasikan kembali dengan dokter hewan untuk penanganan lebih lanjut.",
+       "information": "Konjungtivitis adalah kondisi di mana mata menjadi merah, bengkak, dan sering kali mengeluarkan cairan yang berlebihan, seperti air mata, lendir, atau nanah. Gejalanya juga dapat mencakup rasa gatal atau terbakar di sekitar mata, kelopak mata yang mungkin sulit untuk dibuka, serta sensitivitas terhadap cahaya. Penyebabnya bisa bervariasi, mulai dari infeksi bakteri atau virus seperti Chlamydia atau herpes, reaksi alergi terhadap debu atau serbuk sari, hingga iritasi karena benda asing yang masuk ke mata. Pengobatan tergantung pada penyebabnya; untuk infeksi, dokter hewan mungkin meresepkan tetes atau salep mata dengan antibiotik atau antiviral. Sementara itu, dalam kasus alergi, manajemen lingkungan dan terapi antihistamin bisa direkomendasikan. Membersihkan mata secara teratur dan menjaga kebersihan lingkungan hewan peliharaan juga penting. Penting untuk mendapatkan diagnosis yang tepat dari dokter hewan dan mengikuti perawatan yang diresepkan dengan cermat untuk memastikan pemulihan yang optimal bagi hewan peliharaan Anda."
+     },
+
+     {
+       "name": "Mata Normal",
+       "symptom": "Mata normal biasanya memiliki pupil yang seragam dan responsif terhadap cahaya dengan baik. Kornea (bagian jernih di depan mata) bersih tanpa kekeruhan, memungkinkan cahaya masuk dengan baik. Iris (bagian berwarna di dalam mata) tampak normal, tidak ada perubahan warna atau bengkak yang mencolok. Sklera (bagian putih mata) tidak merah atau terlalu berwarna, menunjukkan kesehatan yang baik. Mata juga tidak mengalami air mata berlebihan atau keluarnya cairan yang tidak normal. Kucing dan anjing sehat biasanya memiliki pandangan yang jelas dan responsif terhadap lingkungan sekitarnya. Rutin pemeriksaan mata dapat menjaga kesehatan mata mereka.",
+       "treatment": "Untuk menjaga mata tetap sehat, pembersihan rutin dengan lap lembab untuk menghapus kotoran atau lendir yang terkumpul adalah penting. Tetes mata yang direkomendasikan oleh dokter hewan bisa digunakan untuk membersihkan mata atau mencegah iritasi. Diet sehat dengan nutrisi yang mencakup vitamin A, seperti wortel, juga membantu menjaga kesehatan mata. Pemeriksaan rutin oleh dokter hewan akan membantu mendeteksi masalah mata sejak dini. Jika ada kondisi seperti infeksi atau penyakit mata, pengobatan akan disesuaikan dengan diagnosa yang tepat, mungkin melalui obat tetes mata atau pengobatan oral, yang akan diresepkan oleh dokter hewan.",
+       "information": "Terkait perawatan mata, perhatikan perilaku mereka. Jika terjadi gosok-gosok mata berlebihan, mata merah, keluar cairan yang tidak normal, atau gejala lain seperti kepekaan terhadap cahaya atau penglihatan buram, segera konsultasikan dengan dokter hewan. Mata kucing dan anjing rentan terhadap infeksi, luka, atau kondisi medis lainnya. Jangan gunakan produk atau obat tanpa rekomendasi dokter hewan, karena dapat memperburuk masalah mata. Serta, hindari penggunaan cairan atau bahan yang tidak aman atau tajam di sekitar mata hewan peliharaan untuk mencegah cedera. Pemeliharaan rutin dan perhatian terhadap mata dapat mencegah masalah yang lebih serius."
+     }
+   ]
+ }
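
Note: image_prediction in app/utils.py indexes into this disease_labels array with the position of the first output class scoring 1, so the label order here must match the model's output layer. A sketch of the lookup:

    import json

    with open("./ml_models/labels.json") as labels_file:
        labels = json.load(labels_file)["disease_labels"]

    predicted_index = 2                     # illustrative model output index
    print(labels[predicted_index]["name"])  # -> "Mata Konjungtivitis"
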
modules.tf ADDED
@@ -0,0 +1,109 @@
+ # PostgreSQL Password Secret
+ module "postgres_password_secret" {
+   source            = "./modules/secrets"
+   secret_project_id = var.project_id
+   secret_id         = "postgres-password-secret"
+   secret_data       = var.postgres_password
+ }
+
+ # JWT Public Key Secret
+ module "public_key_secret" {
+   source            = "./modules/secrets"
+   secret_project_id = var.project_id
+   secret_id         = "public-key-secret"
+   secret_data       = var.jwt_public_key
+ }
+
+ # JWT Private Key Secret
+ module "private_key_secret" {
+   source            = "./modules/secrets"
+   secret_project_id = var.project_id
+   secret_id         = "private-key-secret"
+   secret_data       = var.jwt_private_key
+ }
+
+ # Docpet Backend Service (Cloud Run)
+ module "docpet_service_cloud_run" {
+   source         = "./modules/cloud_run"
+   cloud_run_name = var.service_name
+
+   cloud_run_project = var.project_id
+
+   cloud_run_description = <<-EOT
+     Docpet Backend Service
+   EOT
+
+   cloud_run_location = var.region
+   cloud_run_ingress  = "INGRESS_TRAFFIC_ALL"
+
+   # cloud_run_revision = var.revision_name
+
+   cloud_run_service_account = var.service_account
+
+   cloud_run_image = var.service_container
+   cloud_run_port  = 80
+
+   cloud_run_cpu       = "4.0"
+   cloud_run_memory    = "4Gi"
+   cloud_run_cpu_idle  = true
+   cloud_run_cpu_boost = true
+
+   cloud_run_startup_probe = {
+     http_path = "/api/checker"
+     http_port = 80
+
+     period_seconds        = 240
+     timeout_seconds       = 240
+     failure_threshold     = 20
+     initial_delay_seconds = 240
+   }
+
+   cloud_run_liveness_probe = {
+     http_path = "/api/checker"
+     http_port = 80
+
+     period_seconds        = 240
+     timeout_seconds       = 5
+     failure_threshold     = 5
+     initial_delay_seconds = 10
+   }
+
+   cloud_run_timeout                 = 800
+   cloud_run_max_instance_concurrent = 80
+   cloud_run_execution_environment   = "EXECUTION_ENVIRONMENT_GEN1"
+
+   cloud_run_min_instance = 0
+   cloud_run_max_instance = 15
+
+   cloud_run_traffic_percent = 100
+   cloud_run_traffic_type    = "TRAFFIC_TARGET_ALLOCATION_TYPE_LATEST"
+
+   cloud_run_vpc_access_connector = var.vpc_access_connector
+   cloud_run_vpc_access_egress    = "PRIVATE_RANGES_ONLY"
+
+   cloud_run_envars = {
+     DATABASE_PORT     = var.postgres_port
+     POSTGRES_USER     = var.postgres_user
+     POSTGRES_DB       = var.postgres_db
+     POSTGRES_HOST     = var.postgres_host
+     POSTGRES_HOSTNAME = var.postgres_hostname
+
+     ACCESS_TOKEN_EXPIRES_IN  = var.access_token_expires
+     REFRESH_TOKEN_EXPIRES_IN = var.refresh_token_expires
+     JWT_ALGORITHM            = var.jwt_algorithm
+     CLIENT_ORIGIN            = "*"
+
+     JWT_PUBLIC_KEY    = module.public_key_secret.secret_id_output
+     JWT_PRIVATE_KEY   = module.private_key_secret.secret_id_output
+     POSTGRES_PASSWORD = module.postgres_password_secret.secret_id_output
+   }
+ }
+
+ # Cloud Run Service IAM
+ resource "google_cloud_run_service_iam_binding" "cloud_run_service_iam" {
+   project  = var.project_id
+   location = var.region
+   service  = module.docpet_service_cloud_run.cloud_run_service_name_output
+   role     = "roles/run.invoker"
+   members  = ["allUsers"]
+ }
modules/cloud_run/cloud_run.tf ADDED
@@ -0,0 +1,167 @@
+ # Cloud Run Service
+ resource "google_cloud_run_v2_service" "cloud_run_service" {
+   name        = var.cloud_run_name
+   project     = var.cloud_run_project
+   description = var.cloud_run_description
+
+   location = var.cloud_run_location
+   ingress  = var.cloud_run_ingress
+
+   template {
+     # revision = var.cloud_run_revision
+     service_account = var.cloud_run_service_account
+
+     containers {
+       image = var.cloud_run_image
+
+       ports {
+         container_port = var.cloud_run_port
+       }
+
+       resources {
+         limits = {
+           cpu    = var.cloud_run_cpu
+           memory = var.cloud_run_memory
+         }
+
+         cpu_idle          = var.cloud_run_cpu_idle
+         startup_cpu_boost = var.cloud_run_cpu_boost
+       }
+
+       # startup_probe {
+       #   period_seconds        = var.cloud_run_startup_probe["period_seconds"]
+       #   timeout_seconds       = var.cloud_run_startup_probe["timeout_seconds"]
+       #   failure_threshold     = var.cloud_run_startup_probe["failure_threshold"]
+       #   initial_delay_seconds = var.cloud_run_startup_probe["initial_delay_seconds"]
+
+       #   http_get {
+       #     path = var.cloud_run_startup_probe["http_path"]
+       #     port = var.cloud_run_startup_probe["http_port"]
+       #   }
+       # }
+
+       liveness_probe {
+         period_seconds        = var.cloud_run_liveness_probe["period_seconds"]
+         timeout_seconds       = var.cloud_run_liveness_probe["timeout_seconds"]
+         failure_threshold     = var.cloud_run_liveness_probe["failure_threshold"]
+         initial_delay_seconds = var.cloud_run_liveness_probe["initial_delay_seconds"]
+
+         http_get {
+           path = var.cloud_run_liveness_probe["http_path"]
+           port = var.cloud_run_liveness_probe["http_port"]
+         }
+       }
+
+       # Environment Variables
+       env {
+         name  = "DATABASE_PORT"
+         value = var.cloud_run_envars["DATABASE_PORT"]
+       }
+
+       env {
+         name  = "POSTGRES_USER"
+         value = var.cloud_run_envars["POSTGRES_USER"]
+       }
+
+       env {
+         name  = "POSTGRES_DB"
+         value = var.cloud_run_envars["POSTGRES_DB"]
+       }
+
+       env {
+         name  = "POSTGRES_HOST"
+         value = var.cloud_run_envars["POSTGRES_HOST"]
+       }
+
+       env {
+         name  = "POSTGRES_HOSTNAME"
+         value = var.cloud_run_envars["POSTGRES_HOSTNAME"]
+       }
+
+       env {
+         name  = "ACCESS_TOKEN_EXPIRES_IN"
+         value = var.cloud_run_envars["ACCESS_TOKEN_EXPIRES_IN"]
+       }
+
+       env {
+         name  = "REFRESH_TOKEN_EXPIRES_IN"
+         value = var.cloud_run_envars["REFRESH_TOKEN_EXPIRES_IN"]
+       }
+
+       env {
+         name  = "JWT_ALGORITHM"
+         value = var.cloud_run_envars["JWT_ALGORITHM"]
+       }
+
+       env {
+         name  = "CLIENT_ORIGIN"
+         value = var.cloud_run_envars["CLIENT_ORIGIN"]
+       }
+
+
+       # Secrets
+       env {
+         name = "JWT_PUBLIC_KEY"
+         value_source {
+           secret_key_ref {
+             # secret = google_secret_manager_secret.secret.secret_id
+             secret  = var.cloud_run_envars["JWT_PUBLIC_KEY"]
+             version = "1"
+           }
+         }
+       }
+
+       env {
+         name = "JWT_PRIVATE_KEY"
+         value_source {
+           secret_key_ref {
+             # secret = google_secret_manager_secret.secret.secret_id
+             secret  = var.cloud_run_envars["JWT_PRIVATE_KEY"]
+             version = "1"
+           }
+         }
+       }
+
+       env {
+         name = "POSTGRES_PASSWORD"
+         value_source {
+           secret_key_ref {
+             # secret = google_secret_manager_secret.secret.secret_id
+             secret  = var.cloud_run_envars["POSTGRES_PASSWORD"]
+             version = "1"
+           }
+         }
+       }
+     }
+
+     timeout = "${var.cloud_run_timeout}s"
+
+     max_instance_request_concurrency = var.cloud_run_max_instance_concurrent
+     execution_environment            = var.cloud_run_execution_environment
+
+     scaling {
+       min_instance_count = var.cloud_run_min_instance
+       max_instance_count = var.cloud_run_max_instance
+     }
+
+     vpc_access {
+       # projects/{project}/locations/{location}/connectors/{connector}
+       connector = "projects/${var.cloud_run_project}/locations/${var.cloud_run_location}/connectors/${var.cloud_run_vpc_access_connector}"
+
+       egress = var.cloud_run_vpc_access_egress
+     }
+   }
+
+   traffic {
+     percent = var.cloud_run_traffic_percent
+     type    = var.cloud_run_traffic_type
+   }
+ }
modules/cloud_run/outputs.tf ADDED
@@ -0,0 +1,5 @@
+ # Cloud Run Service Name Output
+ output "cloud_run_service_name_output" {
+   value       = google_cloud_run_v2_service.cloud_run_service.name
+   description = "Cloud Run Service Name Output <type: String>"
+ }
modules/cloud_run/variables.tf ADDED
@@ -0,0 +1,256 @@
+ # Cloud Run Name
+ variable "cloud_run_name" {
+   type        = string
+   default     = "cloud_run_service"
+   description = "Cloud Run Name <type: String>"
+ }
+
+ # Cloud Run Project ID
+ variable "cloud_run_project" {
+   type        = string
+   description = "Cloud Run Project ID <type: String>"
+ }
+
+ # Cloud Run Description
+ variable "cloud_run_description" {
+   type    = string
+   default = <<-EOT
+     this is service for blablabla
+   EOT
+
+   description = "Cloud Run Description <type: String>"
+ }
+
+ # Cloud Run Location
+ variable "cloud_run_location" {
+   type        = string
+   default     = "asia-southeast2"
+   description = "Cloud Run Location <type: String>"
+ }
+
+ # Cloud Run Ingress
+ variable "cloud_run_ingress" {
+   type        = string
+   default     = "INGRESS_TRAFFIC_ALL"
+   description = "Cloud Run Ingress <type: String>"
+ }
+
+ # Cloud Run Revision
+ variable "cloud_run_revision" {
+   type        = string
+   default     = "revision-service-xxxxx"
+   description = "Cloud Run Revision <type: String>"
+ }
+
+ # Cloud Run Service Account
+ variable "cloud_run_service_account" {
+   type        = string
+   description = "Cloud Run Service Account <type: String>"
+ }
+
+ # Cloud Run Container Image
+ variable "cloud_run_image" {
+   type        = string
+   default     = "us-docker.pkg.dev/cloudrun/container/hello"
+   description = "Cloud Run Container Image <type: String>"
+ }
+
+ # Cloud Run Port
+ variable "cloud_run_port" {
+   type        = number
+   default     = 80
+   description = "Cloud Run Port <type: Number>"
+ }
+
+ # Cloud Run vCPU Limit (e.g. "1.0", "2.0", or millicpu such as "1000m")
+ variable "cloud_run_cpu" {
+   type        = string
+   default     = "2.0"
+   description = "Cloud Run vCPU Limit <type: String>"
+ }
+
+ # Cloud Run Memory Limit
+ variable "cloud_run_memory" {
+   type        = string
+   default     = "2Gi"
+   description = "Cloud Run Memory Limit <type: String>"
+ }
+
+ # Cloud Run CPU Idle
+ variable "cloud_run_cpu_idle" {
+   type        = bool
+   default     = true
+   description = "Cloud Run CPU Idle <type: Bool>"
+ }
+
+ # Cloud Run StartUp CPU Boost
+ variable "cloud_run_cpu_boost" {
+   type        = bool
+   default     = true
+   description = "Cloud Run StartUp CPU Boost <type: Bool>"
+ }
+
+ # Cloud Run StartUp Probe
+ variable "cloud_run_startup_probe" {
+   type = map(any)
+
+   default = {
+     http_path = "/"
+     http_port = 80
+
+     period_seconds        = 240
+     timeout_seconds       = 5
+     failure_threshold     = 5
+     initial_delay_seconds = 10
+   }
+
+   description = <<-EOT
+     Cloud Run StartUp Probe <type: Map(Any)>
+     Example:
+     cloud_run_startup_probe = {
+       http_path = "/"
+       http_port = 80
+
+       period_seconds        = 240
+       timeout_seconds       = 5
+       failure_threshold     = 5
+       initial_delay_seconds = 10
+     }
+   EOT
+ }
+
+ # Cloud Run Liveness Probe
+ variable "cloud_run_liveness_probe" {
+   type = map(any)
+
+   default = {
+     http_path = "/"
+     http_port = 80
+
+     period_seconds        = 240
+     timeout_seconds       = 5
+     failure_threshold     = 5
+     initial_delay_seconds = 10
+   }
+
+   description = <<-EOT
+     Cloud Run Liveness Probe <type: Map(Any)>
+     Example:
+     cloud_run_liveness_probe = {
+       http_path = "/"
+       http_port = 80
+
+       period_seconds        = 240
+       timeout_seconds       = 5
+       failure_threshold     = 5
+       initial_delay_seconds = 10
+     }
+   EOT
+ }
+
+ # Cloud Run Timeout
+ variable "cloud_run_timeout" {
+   type        = number
+   default     = 800
+   description = "Cloud Run Timeout <type: Number>"
+ }
+
+ # Cloud Run Max Instance Request Concurrency
+ variable "cloud_run_max_instance_concurrent" {
+   type        = number
+   default     = 80
+   description = "Cloud Run Max Instance Request Concurrency <type: Number>"
+ }
+
+ # Cloud Run Execution Environment
+ variable "cloud_run_execution_environment" {
+   type        = string
+   default     = "EXECUTION_ENVIRONMENT_GEN1"
+   description = "Cloud Run Execution Environment <type: String>"
+ }
+
+ # Cloud Run Min Instance
+ variable "cloud_run_min_instance" {
+   type        = number
+   default     = 0
+   description = "Cloud Run Min Instance <type: Number>"
+ }
+
+ # Cloud Run Max Instance
+ variable "cloud_run_max_instance" {
+   type        = number
+   default     = 15
+   description = "Cloud Run Max Instance <type: Number>"
+ }
+
+ # Cloud Run Traffic Percent
+ variable "cloud_run_traffic_percent" {
+   type        = number
+   default     = 100
+   description = "Cloud Run Traffic Percent <type: Number>"
+ }
+
+ # Cloud Run Traffic Type
+ variable "cloud_run_traffic_type" {
+   type        = string
+   default     = "TRAFFIC_TARGET_ALLOCATION_TYPE_LATEST"
+   description = "Cloud Run Traffic Type <type: String>"
+ }
+
+ # Cloud Run VPC Access Connector
+ variable "cloud_run_vpc_access_connector" {
+   type        = string
+   default     = "project-vpc-connector"
+   description = "Cloud Run VPC Access Connector <type: String>"
+ }
+
+ # Cloud Run VPC Access Egress
+ variable "cloud_run_vpc_access_egress" {
+   type        = string
+   default     = "ALL_TRAFFIC"
+   description = "Cloud Run VPC Access Egress <type: String>"
+ }
+
+ # Cloud Run Environment Variables
+ variable "cloud_run_envars" {
+   type      = map(string)
+   sensitive = true
+
+   default = {
+     DATABASE_PORT     = "5432"
+     POSTGRES_USER     = "postgres"
+     POSTGRES_DB       = "dbname"
+     POSTGRES_HOST     = "postgres"
+     POSTGRES_HOSTNAME = "127.0.0.1"
+
+     ACCESS_TOKEN_EXPIRES_IN  = "15"
+     REFRESH_TOKEN_EXPIRES_IN = "60"
+     JWT_ALGORITHM            = "RS256"
+     CLIENT_ORIGIN            = "*"
+
+     JWT_PUBLIC_KEY    = "<jwt public key>"
+     JWT_PRIVATE_KEY   = "<jwt private key>"
+     POSTGRES_PASSWORD = "secret value"
+   }
+
+   description = <<-EOT
+     Cloud Run Environment Variables <type: Map(String)>
+     Example:
+     cloud_run_envars = {
+       DATABASE_PORT     = "5432"
+       POSTGRES_USER     = "postgres"
+       POSTGRES_DB       = "docpet"
+       POSTGRES_HOST     = "postgres"
+       POSTGRES_HOSTNAME = "127.0.0.1"
+
+       ACCESS_TOKEN_EXPIRES_IN  = "15"
+       REFRESH_TOKEN_EXPIRES_IN = "60"
+       JWT_ALGORITHM            = "RS256"
+       CLIENT_ORIGIN            = "*"
+
+       JWT_PUBLIC_KEY    = "<jwt public key>"
+       JWT_PRIVATE_KEY   = "<jwt private key>"
+       POSTGRES_PASSWORD = "secret value"
+     }
+   EOT
+ }
modules/secrets/outputs.tf ADDED
@@ -0,0 +1,11 @@
+ # Secret ID Output
+ output "secret_id_output" {
+   value       = google_secret_manager_secret.secret_manager.secret_id
+   description = "Secret ID Output <type: String>"
+ }
+
+ # Secret Version Output
+ output "secret_version_output" {
+   value       = google_secret_manager_secret_version.secret_manager_version
+   description = "Secret Version Output <type: Resource>"
+ }
modules/secrets/secrets.tf ADDED
@@ -0,0 +1,28 @@
+ data "google_project" "project" {
+   project_id = var.secret_project_id
+ }
+
+ resource "google_secret_manager_secret" "secret_manager" {
+   secret_id = var.secret_id
+   replication {
+     auto {}
+   }
+ }
+
+ resource "google_secret_manager_secret_version" "secret_manager_version" {
+   secret      = google_secret_manager_secret.secret_manager.name
+   secret_data = var.secret_data
+ }
+
+ resource "google_secret_manager_secret_iam_member" "secret_manager_iam_member" {
+   secret_id = google_secret_manager_secret.secret_manager.id
+   member    = "serviceAccount:${data.google_project.project.number}-compute@developer.gserviceaccount.com"
+
+   role = "roles/secretmanager.secretAccessor"
+
+   depends_on = [
+     google_secret_manager_secret.secret_manager
+   ]
+ }
modules/secrets/variables.tf ADDED
@@ -0,0 +1,19 @@
+ # Secret Project ID
+ variable "secret_project_id" {
+   type        = string
+   description = "Secret Project ID <type: String>"
+ }
+
+ # Secret ID
+ variable "secret_id" {
+   type        = string
+   default     = "secret-aBcDeFg"
+   description = "Secret ID <type: String>"
+ }
+
+ # Secret Data (SENSITIVE)
+ variable "secret_data" {
+   type        = string
+   sensitive   = true
+   description = "Secret Data (SENSITIVE) <type: String>"
+ }
requirements/freebsd.requirements.txt ADDED
@@ -0,0 +1,90 @@
+ absl-py==2.0.0
+ aiosmtplib==1.1.7
+ alembic==1.9.0
+ anyio==3.6.2
+ astunparse==1.6.3
+ bcrypt==4.0.1
+ blinker==1.5
+ cachetools==5.3.2
+ certifi==2022.12.7
+ cffi==1.15.1
+ charset-normalizer==3.3.2
+ click==8.1.3
+ colorama==0.4.6
+ cryptography==3.4.8
+ dnspython==2.2.1
+ email-validator==1.3.0
+ fastapi==0.87.0
+ fastapi-jwt-auth==0.5.0
+ fastapi-mail==1.2.2
+ flatbuffers==23.5.26
+ gast==0.4.0
+ google-api-core==2.14.0
+ google-auth==2.25.0
+ google-auth-oauthlib==1.0.0
+ google-cloud-core==2.3.3
+ google-cloud-storage==2.13.0
+ google-crc32c==1.5.0
+ google-pasta==0.2.0
+ google-resumable-media==2.6.0
+ googleapis-common-protos==1.61.0
+ greenlet==2.0.1
+ grpcio==1.59.3
+ h11==0.14.0
+ h5py==3.10.0
+ httpcore==0.16.3
+ httptools==0.5.0
+ httpx==0.23.1
+ idna==3.4
+ importlib-metadata==7.0.0
+ itsdangerous==2.1.2
+ jax==0.4.21
+ Jinja2==3.1.2
+ keras==2.12.0
+ Keras-Preprocessing==1.1.2
+ libclang==16.0.6
+ Mako==1.2.4
+ Markdown==3.5.1
+ MarkupSafe==2.1.1
+ ml-dtypes==0.3.1
+ numpy==1.23.5
+ oauthlib==3.2.2
+ opt-einsum==3.3.0
+ orjson==3.8.3
+ packaging==23.2
+ passlib==1.7.4
+ Pillow==10.1.0
+ protobuf==4.25.1
+ psycopg2==2.9.5
+ pyasn1==0.5.1
+ pyasn1-modules==0.3.0
+ pycparser==2.21
+ pydantic==1.10.2
+ PyJWT==1.7.1
+ python-dotenv==0.21.0
+ python-multipart==0.0.5
+ PyYAML==6.0
+ requests==2.31.0
+ requests-oauthlib==1.3.1
+ rfc3986==1.5.0
+ rsa==4.9
+ scipy==1.11.4
+ six==1.16.0
+ sniffio==1.3.0
+ SQLAlchemy==1.4.45
+ starlette==0.21.0
+ tensorboard==2.12.3
+ tensorboard-data-server==0.7.2
+ tensorflow==2.12.0
+ tensorflow-estimator==2.12.0
+ tensorflow-io-gcs-filesystem==0.31.0
+ termcolor==2.4.0
+ typing_extensions==4.4.0
+ ujson==5.6.0
+ urllib3==2.1.0
+ uvicorn==0.20.0
+ watchfiles==0.18.1
+ websockets==10.4
+ Werkzeug==3.0.1
+ wrapt==1.14.1
+ zipp==3.17.0