feat: first working samgis version based on samgis_core and samgis_web
- README.md +12 -2
- app.py +191 -0
- poetry.lock +0 -0
- pyproject.toml +4 -14
- samgis/__init__.py +0 -25
- samgis/__version__.py +0 -8
- samgis/io/__init__.py +0 -1
- samgis/io/coordinates_pixel_conversion.py +0 -99
- samgis/io/geo_helpers.py +0 -91
- samgis/io/raster_helpers.py +0 -330
- samgis/io/tms2geotiff.py +0 -181
- samgis/io/wrappers_helpers.py +0 -224
- samgis/prediction_api/__init__.py +0 -1
- samgis/prediction_api/predictors.py +0 -92
- samgis/utilities/__init__.py +0 -1
- samgis/utilities/constants.py +0 -44
- samgis/utilities/type_hints.py +0 -103
- scripts/extract-openapi-fastapi.py +3 -2
- scripts/extract-openapi-lambda.py +4 -4
- static/src/App.vue +2 -2
- static/vite.config.ts +22 -11
- tests/__init__.py +2 -1
- tests/io/__init__.py +0 -0
- tests/io/test_coordinates_pixel_conversion.py +0 -27
- tests/io/test_geo_helpers.py +0 -104
- tests/io/test_raster_helpers.py +0 -254
- tests/io/test_tms2geotiff.py +0 -138
- tests/io/test_wrappers_helpers.py +0 -135
- tests/local_tiles_http_server.py +0 -46
- tests/prediction_api/__init__.py +0 -0
- tests/prediction_api/test_predictors.py +0 -81
- tests/{test_fastapi_app.py → test_app.py} +24 -50
- tests/test_lambda_app.py +0 -232
- wrappers/__init__.py +0 -0
- wrappers/fastapi_wrapper.py +0 -168
- wrappers/lambda_wrapper.py +0 -58
README.md CHANGED

@@ -17,6 +17,12 @@ I tested these instructions on MacOS, but should work on linux as well.
 It's possible to prepare the model files using <https://github.com/vietanhdev/samexporter/> or using the ones
 from <https://huggingface.co/aletrn/sam-quantized> (copy them within the folder `/machine_learning_models`).
 
+In this case, after cloning this repository, it's best to initialize the `sam-quantized` submodule:
+
+```bash
+git submodule update --init --recursive
+```
+
 ## SamGIS - HuggingFace version
 
 The SamGIS HuggingSpace url is <https://huggingface.co/spaces/aletrn/samgis>.

@@ -36,6 +42,7 @@ docker stop $(docker ps -a -q); docker rm $(docker ps -a -q)
   --build-arg DEPENDENCY_GROUP=fastapi \
   --build-arg VITE__MAP_DESCRIPTION=${VITE__MAP_DESCRIPTION} \
   --build-arg VITE__SAMGIS_SPACE=${VITE__SAMGIS_SPACE} \
+  --build-arg VITE__STATIC_INDEX_URL=${VITE__STATIC_INDEX_URL} \
   --tag registry.gitlab.com/aletrn/gis-prediction
 )
 

@@ -46,7 +53,10 @@ docker build . --tag registry.gitlab.com/aletrn/samgis-huggingface --progress=pl
 Run the container (keep it on background) and show logs
 
 ```bash
-docker run -d --name samgis-huggingface -p 7860:7860
+docker run -d --name samgis-huggingface -p 7860:7860 \
+  -e VITE__STATIC_INDEX_URL=${VITE__STATIC_INDEX_URL} \
+  -e VITE__INDEX_URL=${VITE__INDEX_URL} \
+  registry.gitlab.com/aletrn/samgis-huggingface; docker logs -f samgis-huggingface
 ```
 
 Test it with curl using a json payload:

@@ -103,7 +113,7 @@ If you need to use the SPA frontend follow the frontend instruction [here](/stat
 You can run the local server using this python command:
 
 ```python
-uvicorn
+uvicorn app:app --host 127.0.0.1 --port 7860
 ```
 
 Change the port and/or the host ip if needed. Test it with curl using a json payload:
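The README says twice to "test it with curl using a json payload", but the payload itself is elided in this diff. As a rough Python sketch of such a request (field names are inferred from `app.py` below and the removed `wrappers_helpers.py`; the authoritative schema is `ApiRequestBody` in `samgis_web`, so treat every field here as an assumption):

```python
import requests

# Hypothetical payload: bbox corners, one point prompt and a zoom level, shaped
# after the ApiRequestBody parsing visible in app.py and wrappers_helpers.py.
payload = {
    "bbox": {"ne": {"lat": 46.1, "lng": 9.3}, "sw": {"lat": 46.0, "lng": 9.2}},
    "prompt": [{"type": "point", "data": {"lat": 46.05, "lng": 9.25}, "label": 1}],
    "zoom": 13,
    "source_type": "OpenStreetMap",
}
response = requests.post("http://localhost:7860/infer_samgis", json=payload)
print(response.status_code, response.json())
```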
app.py ADDED

@@ -0,0 +1,191 @@
import json
import os
from pathlib import Path

import structlog.stdlib
import uvicorn
from asgi_correlation_id import CorrelationIdMiddleware
from dotenv import load_dotenv
from fastapi import FastAPI, HTTPException, Request
from fastapi.exceptions import RequestValidationError
from fastapi.responses import FileResponse, HTMLResponse
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
from pydantic import ValidationError
from samgis_core.utilities import create_folders_if_not_exists
from samgis_core.utilities.session_logger import setup_logging
from samgis_web.prediction_api.predictors import samexporter_predict
from samgis_web.utilities.frontend_builder import build_frontend
from samgis_web.utilities.type_hints import ApiRequestBody
from starlette.responses import JSONResponse


load_dotenv()
project_root_folder = Path(globals().get("__file__", "./_")).absolute().parent
workdir = os.getenv("WORKDIR", project_root_folder)
model_folder = Path(project_root_folder / "machine_learning_models")

log_level = os.getenv("LOG_LEVEL", "INFO")
setup_logging(log_level=log_level)
app_logger = structlog.stdlib.get_logger()
app_logger.info(f"PROJECT_ROOT_FOLDER:{project_root_folder}, WORKDIR:{workdir}.")

folders_map = os.getenv("FOLDERS_MAP", "{}")
markdown_text = os.getenv("MARKDOWN_TEXT", "")
examples_text_list = os.getenv("EXAMPLES_TEXT_LIST", "").split("\n")
example_body = json.loads(os.getenv("EXAMPLE_BODY", "{}"))
mount_gradio_app = bool(os.getenv("MOUNT_GRADIO_APP", ""))

static_dist_folder = Path(project_root_folder) / "static" / "dist"
input_css_path = os.getenv("INPUT_CSS_PATH", "src/input.css")
vite_gradio_url = os.getenv("VITE_GRADIO_URL", "/gradio")
vite_index_url = os.getenv("VITE_INDEX_URL", "/")
vite_samgis_url = os.getenv("VITE_SAMGIS_URL", "/samgis")
fastapi_title = "samgis"
app = FastAPI(title=fastapi_title, version="1.0")


@app.middleware("http")
async def request_middleware(request, call_next):
    from samgis_web.web.middlewares import logging_middleware

    return await logging_middleware(request, call_next)


@app.get("/health")
async def health() -> JSONResponse:
    from samgis_web.__version__ import __version__ as version_web
    from samgis_core.__version__ import __version__ as version_core

    app_logger.info(f"still alive, version_web:{version_web}, version_core:{version_core}.")
    return JSONResponse(status_code=200, content={"msg": "still alive..."})


def infer_samgis_fn(request_input: ApiRequestBody | str) -> str | JSONResponse:
    from samgis_web.web.web_helpers import get_source_name, get_parsed_bbox_points_with_dictlist_prompt

    app_logger.info("starting inference request...")
    try:
        import time

        time_start_run = time.time()
        body_request = get_parsed_bbox_points_with_dictlist_prompt(request_input)
        app_logger.info(f"body_request:{body_request}.")
        try:
            source_name = get_source_name(request_input.source_type)
            app_logger.info(f"source_name = {source_name}.")
            output = samexporter_predict(
                bbox=body_request["bbox"], prompt=body_request["prompt"], zoom=body_request["zoom"],
                source=body_request["source"], source_name=source_name, model_folder=model_folder
            )
            duration_run = time.time() - time_start_run
            app_logger.info(f"duration_run:{duration_run}.")
            body = {
                "duration_run": duration_run,
                "output": output
            }
            dumped = json.dumps(body)
            app_logger.info(f"json.dumps(body) type:{type(dumped)}, len:{len(dumped)}.")
            app_logger.debug(f"complete json.dumps(body):{dumped}.")
            return dumped
        except Exception as inference_exception:
            app_logger.error(f"inference_exception:{inference_exception}.")
            app_logger.error(f"inference_exception, request_input:{request_input}.")
            raise HTTPException(status_code=500, detail="Internal Server Error")
    except ValidationError as va1:
        app_logger.error(f"validation error: {str(va1)}.")
        app_logger.error(f"ValidationError, request_input:{request_input}.")
        raise RequestValidationError("Unprocessable Entity")


@app.post("/infer_samgis")
def infer_samgis(request_input: ApiRequestBody) -> JSONResponse:
    dumped = infer_samgis_fn(request_input=request_input)
    app_logger.info(f"json.dumps(body) type:{type(dumped)}, len:{len(dumped)}.")
    app_logger.debug(f"complete json.dumps(body):{dumped}.")
    return JSONResponse(status_code=200, content={"body": dumped})


@app.exception_handler(RequestValidationError)
def request_validation_exception_handler(request: Request, exc: RequestValidationError) -> JSONResponse:
    from samgis_web.web import exception_handlers

    return exception_handlers.request_validation_exception_handler(request, exc)


@app.exception_handler(HTTPException)
def http_exception_handler(request: Request, exc: HTTPException) -> JSONResponse:
    from samgis_web.web import exception_handlers

    return exception_handlers.http_exception_handler(request, exc)


create_folders_if_not_exists.folders_creation(folders_map)
write_tmp_on_disk = os.getenv("WRITE_TMP_ON_DISK", "")
app_logger.info(f"write_tmp_on_disk:{write_tmp_on_disk}.")
if bool(write_tmp_on_disk):
    try:
        assert Path(write_tmp_on_disk).is_dir()
        app.mount("/vis_output", StaticFiles(directory=write_tmp_on_disk), name="vis_output")
        templates = Jinja2Templates(directory=str(project_root_folder / "static"))


        @app.get("/vis_output", response_class=HTMLResponse)
        def list_files(request: Request):

            files = os.listdir(write_tmp_on_disk)
            files_paths = sorted([f"{request.url._url}/{f}" for f in files])
            print(files_paths)
            return templates.TemplateResponse(
                "list_files.html", {"request": request, "files": files_paths}
            )
    except (AssertionError, RuntimeError) as rerr:
        app_logger.error(f"{rerr} while loading the folder write_tmp_on_disk:{write_tmp_on_disk}...")
        raise rerr


build_frontend(
    project_root_folder=workdir,
    input_css_path=input_css_path,
    output_dist_folder=static_dist_folder
)
app_logger.info("build_frontend ok!")

app.mount("/static", StaticFiles(directory=static_dist_folder, html=True), name="static")
app.mount(vite_index_url, StaticFiles(directory=static_dist_folder, html=True), name="index")


@app.get(vite_index_url)
async def index() -> FileResponse:
    return FileResponse(path=static_dist_folder / "index.html", media_type="text/html")


app_logger.info(f"There is need to create and mount gradio app interface? {mount_gradio_app}...")
if mount_gradio_app:
    import gradio as gr
    from samgis_web.web.gradio_helpers import get_gradio_interface_geojson

    app_logger.info(f"creating gradio interface...")
    gr_interface = get_gradio_interface_geojson(
        infer_samgis_fn,
        markdown_text,
        examples_text_list,
        example_body
    )
    app_logger.info(
        f"gradio interface created, mounting gradio app on url {vite_gradio_url} within FastAPI...")
    app = gr.mount_gradio_app(app, gr_interface, path=vite_gradio_url)
    app_logger.info("mounted gradio app within fastapi")


# add the CorrelationIdMiddleware AFTER the @app.middleware("http") decorated function to avoid missing request id
app.add_middleware(CorrelationIdMiddleware)


if __name__ == '__main__':
    try:
        uvicorn.run("app:app", host="0.0.0.0", port=7860)
    except Exception as ex:
        app_logger.error(f"fastapi/gradio application {fastapi_title}, exception:{ex}.")
        print(f"fastapi/gradio application {fastapi_title}, exception:{ex}.")
        raise ex
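A quick smoke test for the `/health` route defined above (host and port assume the defaults in the `uvicorn.run` call at the bottom of `app.py`):

```python
import requests

# /health logs the samgis_web/samgis_core versions and answers with a fixed body
response = requests.get("http://localhost:7860/health")
assert response.status_code == 200
print(response.json())  # {'msg': 'still alive...'}
```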
poetry.lock CHANGED

The diff for this file is too large to render. See the raw diff.
pyproject.toml CHANGED

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "samgis"
-version = "1.5.
+version = "1.5.5"
 description = "A backend for machine learning instance segmentation on geospatial data even without dedicated graphics cards."
 authors = ["alessandro trinca tornidor <alessandro@trinca.tornidor.com>"]
 license = "MIT license"

@@ -8,7 +8,7 @@ readme = "README.md"
 
 [metadata]
 name = "samgis"
-version = "1.5.
+version = "1.5.5"
 
 [tool.poetry.urls]
 Source = "https://github.com/trincadev/samgis-be"

@@ -18,7 +18,6 @@ Demo = "https://huggingface.co/spaces/aletrn/samgis"
 bson = "^0.5.10"
 contextily = "^1.6.0"
 geopandas = "^1.0.1"
-loguru = "^0.7.2"
 numpy = [
     {version = "1.25.2", python = "~3.10"},
     {version = "^1.26", python = "~3.11"}

@@ -28,16 +27,8 @@ python = ">=3.10, <3.12"
 python-dotenv = "^1.0.1"
 rasterio = "^1.3.10"
 requests = "^2.32.3"
-samgis-core = "
-
-[tool.poetry.group.aws_lambda]
-optional = true
-
-[tool.poetry.group.aws_lambda.dependencies]
-aws-lambda-powertools = "^2.30.2"
-awslambdaric = "^2.0.10"
-jmespath = "^1.0.1"
-pydantic = "^2.8.2"
+samgis-core = "^3.0.2"
+samgis-web = "^1.0.4"
 
 [tool.poetry.group.test]
 optional = true

@@ -64,7 +55,6 @@ optional = true
 [tool.poetry.group.fastapi.dependencies]
 fastapi = "^0.111.0"
 jinja2 = "^3.1.4"
-loguru = "^0.7.2"
 pydantic = "^2.8.2"
 uvicorn = "^0.30.1"
 
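With the `aws_lambda` dependency group removed, a local development install presumably reduces to `poetry install --with fastapi,test` (assuming Poetry 1.2+ for dependency-group flags), since the `fastapi` and `test` groups remain optional.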
samgis/__init__.py DELETED

@@ -1,25 +0,0 @@
"""Get machine learning predictions from geodata raster images"""
import os

# not used here but contextily_tile is imported in samgis.io.tms2geotiff
from contextily import tile as contextily_tile
from pathlib import Path
from samgis.utilities.constants import SERVICE_NAME

PROJECT_ROOT_FOLDER = Path(globals().get("__file__", "./_")).absolute().parent.parent
WORKDIR = os.getenv("WORKDIR", PROJECT_ROOT_FOLDER)
MODEL_FOLDER = Path(PROJECT_ROOT_FOLDER / "machine_learning_models")

IS_AWS_LAMBDA = bool(os.getenv("IS_AWS_LAMBDA", ""))

if IS_AWS_LAMBDA:
    try:
        from aws_lambda_powertools import Logger

        app_logger = Logger(service=SERVICE_NAME)
    except ModuleNotFoundError:
        print("this should be AWS LAMBDA environment but we miss the required aws lambda powertools package")
else:
    from samgis_core.utilities.fastapi_logger import setup_logging

    app_logger = setup_logging(debug=True)
samgis/__version__.py DELETED

@@ -1,8 +0,0 @@
import importlib.metadata


try:
    __version__ = importlib.metadata.version(__package__ or __name__)
except importlib.metadata.PackageNotFoundError or ImportError as e:
    print(f"metadata::e: {type(e)}, {e}: package installed?")
    __version__ = "1.5.1"
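An aside on the removed fallback: `except PackageNotFoundError or ImportError` evaluates the `or` first, so only `PackageNotFoundError` was ever caught; catching either exception needs a tuple. A minimal corrected sketch:

```python
import importlib.metadata

try:
    __version__ = importlib.metadata.version(__package__ or __name__)
except (importlib.metadata.PackageNotFoundError, ImportError) as e:
    # the tuple form catches either exception; `or` would keep only the first
    print(f"metadata::e: {type(e)}, {e}: package installed?")
    __version__ = "1.5.1"
```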
samgis/io/__init__.py DELETED

@@ -1 +0,0 @@
"""input/output helpers functions"""
samgis/io/coordinates_pixel_conversion.py DELETED

@@ -1,99 +0,0 @@
"""functions useful to convert to/from latitude-longitude coordinates to pixel image coordinates"""
from samgis_core.utilities.type_hints import TupleFloat, TupleFloatAny

from samgis import app_logger
from samgis.utilities.constants import TILE_SIZE, EARTH_EQUATORIAL_RADIUS
from samgis.utilities.type_hints import ImagePixelCoordinates
from samgis.utilities.type_hints import LatLngDict


def _get_latlng2pixel_projection(latlng: LatLngDict) -> ImagePixelCoordinates:
    from math import log, pi, sin

    app_logger.debug(f"latlng: {type(latlng)}, value:{latlng}.")
    app_logger.debug(f'latlng lat: {type(latlng.lat)}, value:{latlng.lat}.')
    app_logger.debug(f'latlng lng: {type(latlng.lng)}, value:{latlng.lng}.')
    try:
        sin_y: float = sin(latlng.lat * pi / 180)
        app_logger.debug(f"sin_y, #1:{sin_y}.")
        sin_y = min(max(sin_y, -0.9999), 0.9999)
        app_logger.debug(f"sin_y, #2:{sin_y}.")
        x = TILE_SIZE * (0.5 + latlng.lng / 360)
        app_logger.debug(f"x:{x}.")
        y = TILE_SIZE * (0.5 - log((1 + sin_y) / (1 - sin_y)) / (4 * pi))
        app_logger.debug(f"y:{y}.")

        return {"x": x, "y": y}
    except Exception as e_get_latlng2pixel_projection:
        app_logger.error(f'args type:{type(latlng)}, {latlng}.')
        app_logger.exception(f'e_get_latlng2pixel_projection:{e_get_latlng2pixel_projection}.', exc_info=True)
        raise e_get_latlng2pixel_projection


def _get_point_latlng_to_pixel_coordinates(latlng: LatLngDict, zoom: int | float) -> ImagePixelCoordinates:
    from math import floor

    try:
        world_coordinate: ImagePixelCoordinates = _get_latlng2pixel_projection(latlng)
        app_logger.debug(f"world_coordinate:{world_coordinate}.")
        scale: int = pow(2, zoom)
        app_logger.debug(f"scale:{scale}.")
        return ImagePixelCoordinates(
            x=floor(world_coordinate["x"] * scale),
            y=floor(world_coordinate["y"] * scale)
        )
    except Exception as e_format_latlng_to_pixel_coordinates:
        app_logger.error(f'latlng type:{type(latlng)}, {latlng}.')
        app_logger.error(f'zoom type:{type(zoom)}, {zoom}.')
        app_logger.exception(f'e_format_latlng_to_pixel_coordinates:{e_format_latlng_to_pixel_coordinates}.',
                             exc_info=True)
        raise e_format_latlng_to_pixel_coordinates


def get_latlng_to_pixel_coordinates(
        latlng_origin_ne: LatLngDict,
        latlng_origin_sw: LatLngDict,
        latlng_current_point: LatLngDict,
        zoom: int | float,
        k: str
) -> ImagePixelCoordinates:
    """
    Parse the input request lambda event

    Args:
        latlng_origin_ne: NE latitude-longitude origin point
        latlng_origin_sw: SW latitude-longitude origin point
        latlng_current_point: latitude-longitude prompt point
        zoom: Level of detail
        k: prompt type

    Returns:
        ImagePixelCoordinates: pixel image coordinate point
    """
    app_logger.debug(f"latlng_origin - {k}: {type(latlng_origin_ne)}, value:{latlng_origin_ne}.")
    app_logger.debug(f"latlng_current_point - {k}: {type(latlng_current_point)}, value:{latlng_current_point}.")
    latlng_map_origin_ne = _get_point_latlng_to_pixel_coordinates(latlng_origin_ne, zoom)
    latlng_map_origin_sw = _get_point_latlng_to_pixel_coordinates(latlng_origin_sw, zoom)
    latlng_map_current_point = _get_point_latlng_to_pixel_coordinates(latlng_current_point, zoom)
    diff_coord_x = abs(latlng_map_origin_sw["x"] - latlng_map_current_point["x"])
    diff_coord_y = abs(latlng_map_origin_ne["y"] - latlng_map_current_point["y"])
    point = ImagePixelCoordinates(x=diff_coord_x, y=diff_coord_y)
    app_logger.debug(f"point type - {k}: {point}.")
    return point


def _from4326_to3857(lat: float, lon: float) -> TupleFloat or TupleFloatAny:
    from math import radians, log, tan

    x_tile: float = radians(lon) * EARTH_EQUATORIAL_RADIUS
    y_tile: float = log(tan(radians(45 + lat / 2.0))) * EARTH_EQUATORIAL_RADIUS
    return x_tile, y_tile


def _deg2num(lat: float, lon: float, zoom: int):
    from math import radians, pi, asinh, tan

    n = 2 ** zoom
    x_tile = ((lon + 180) / 360 * n)
    y_tile = (1 - asinh(tan(radians(lat))) / pi) * n / 2
    return x_tile, y_tile
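The removed projection math is the standard Web Mercator world-coordinate formula. A self-contained sketch, assuming `TILE_SIZE = 256` (the real constant lived in the removed `samgis/utilities/constants.py` and is not shown in this diff):

```python
# Standalone sketch of the removed latlng -> global pixel math.
from math import floor, log, pi, sin

TILE_SIZE = 256  # assumption: typical slippy-map tile size

def latlng_to_pixel(lat: float, lng: float, zoom: int) -> tuple[int, int]:
    # clamp sin(lat) exactly like _get_latlng2pixel_projection did
    sin_y = min(max(sin(lat * pi / 180), -0.9999), 0.9999)
    x = TILE_SIZE * (0.5 + lng / 360)
    y = TILE_SIZE * (0.5 - log((1 + sin_y) / (1 - sin_y)) / (4 * pi))
    scale = 2 ** zoom
    return floor(x * scale), floor(y * scale)

# global pixel coordinates for Milan at zoom 10
print(latlng_to_pixel(45.464, 9.190, 10))
```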
samgis/io/geo_helpers.py DELETED

@@ -1,91 +0,0 @@
"""handle geo-referenced raster images"""
from affine import Affine
from numpy import ndarray as np_ndarray

from samgis_core.utilities.type_hints import ListFloat, DictStrInt, TupleFloat
from samgis import app_logger


def load_affine_transformation_from_matrix(matrix_source_coefficients: ListFloat) -> Affine:
    """
    Wrapper for rasterio.Affine.from_gdal() method

    Args:
        matrix_source_coefficients: 6 floats ordered by GDAL.

    Returns:
        Affine transform
    """

    if len(matrix_source_coefficients) != 6:
        raise ValueError(f"Expected 6 coefficients, found {len(matrix_source_coefficients)}; "
                         f"argument type: {type(matrix_source_coefficients)}.")

    try:
        a, d, b, e, c, f = (float(x) for x in matrix_source_coefficients)
        center = tuple.__new__(Affine, [a, b, c, d, e, f, 0.0, 0.0, 1.0])
        return center * Affine.translation(-0.5, -0.5)
    except Exception as e:
        app_logger.exception(f"exception:{e}, check updates on https://github.com/rasterio/affine",
                             extra=e,
                             stack_info=True, exc_info=True)
        raise e


def get_affine_transform_from_gdal(matrix_source_coefficients: ListFloat or TupleFloat) -> Affine:
    """wrapper for rasterio Affine from_gdal method

    Args:
        matrix_source_coefficients: 6 floats ordered by GDAL.

    Returns:
        Affine transform
    """
    return Affine.from_gdal(*matrix_source_coefficients)


def get_vectorized_raster_as_geojson(mask: np_ndarray, transform: TupleFloat) -> DictStrInt:
    """
    Get shapes and values of connected regions in a dataset or array

    Args:
        mask: numpy mask
        transform: tuple of float to transform into an Affine transform

    Returns:
        dict containing the output geojson and the predictions number
    """
    try:
        from rasterio.features import shapes
        from geopandas import GeoDataFrame

        app_logger.debug(f"matrix to consume with rasterio.shapes: {type(transform)}, {transform}.")

        # old value for mask => band != 0
        shapes_generator = ({
            'properties': {'raster_val': v}, 'geometry': s}
            for i, (s, v)
            # instead of `enumerate(shapes(mask, mask=(band != 0), transform=rio_src.transform))`
            # use mask=None to avoid using source
            in enumerate(shapes(mask, mask=None, transform=transform))
        )
        app_logger.info("created shapes_generator, transform it to a polygon list...")
        shapes_list = list(shapes_generator)
        app_logger.info(f"created {len(shapes_list)} polygons.")
        gpd_polygonized_raster = GeoDataFrame.from_features(shapes_list, crs="EPSG:3857")
        app_logger.info("created a GeoDataFrame, export to geojson...")
        geojson = gpd_polygonized_raster.to_json(to_wgs84=True)
        app_logger.info("created geojson, preparing API response...")
        return {
            "geojson": geojson,
            "n_shapes_geojson": len(shapes_list)
        }
    except Exception as e_shape_band:
        try:
            app_logger.error(f"mask type:{type(mask)}.")
            app_logger.error(f"transform type:{type(transform)}, {transform}.")
            app_logger.error(f"mask shape:{mask.shape}, dtype:{mask.dtype}.")
        except Exception as e_shape_dtype:
            app_logger.exception(f"mask shape or dtype not found:{e_shape_dtype}.", exc_info=True)
        app_logger.exception(f"e_shape_band:{e_shape_band}.", exc_info=True)
        raise e_shape_band
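For reference, `Affine.from_gdal` (used by the removed `get_affine_transform_from_gdal`) expects the six GDAL geotransform coefficients in the order (c, a, b, f, d, e): x-offset, pixel width, row rotation, y-offset, column rotation, pixel height. A small sketch with made-up coefficients:

```python
from affine import Affine

# Made-up 30 m resolution, north-up geotransform: x-offset, pixel width,
# row rotation, y-offset, column rotation, pixel height (negative = north-up).
gdal_geotransform = (425000.0, 30.0, 0.0, 4580000.0, 0.0, -30.0)
transform = Affine.from_gdal(*gdal_geotransform)
print(transform.c, transform.a, transform.e)  # 425000.0 30.0 -30.0
```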
samgis/io/raster_helpers.py DELETED

@@ -1,330 +0,0 @@
"""helpers for computer vision duties"""
import numpy as np
from numpy import ndarray, bitwise_not
from rasterio import open as rasterio_open

from samgis import app_logger
from samgis.utilities.type_hints import XYZTerrainProvidersNames
from samgis.utilities.constants import OUTPUT_CRS_STRING


def get_nextzen_terrain_rgb_formula(red: ndarray, green: ndarray, blue: ndarray) -> ndarray:
    """
    Compute a 32-bits 2d digital elevation model from a nextzen 'terrarium' (terrain-rgb) raster.
    'Terrarium' format PNG tiles contain raw elevation data in meters, in Mercator projection (EPSG:3857).
    All values are positive with a 32,768 offset, split into the red, green, and blue channels,
    with 16 bits of integer and 8 bits of fraction. To decode:

        (red * 256 + green + blue / 256) - 32768

    More details on https://www.mapzen.com/blog/elevation/

    Args:
        red: red-valued channel image array
        green: green-valued channel image array
        blue: blue-valued channel image array

    Returns:
        ndarray: nextzen 'terrarium' 2d digital elevation model raster at 32 bits
    """
    return (red * 256 + green + blue / 256) - 32768


def get_mapbox__terrain_rgb_formula(red: ndarray, green: ndarray, blue: ndarray) -> ndarray:
    return ((red * 256 * 256 + green * 256 + blue) * 0.1) - 10000


providers_terrain_rgb_formulas = {
    XYZTerrainProvidersNames.MAPBOX_TERRAIN_TILES_NAME: get_mapbox__terrain_rgb_formula,
    XYZTerrainProvidersNames.NEXTZEN_TERRAIN_TILES_NAME: get_nextzen_terrain_rgb_formula
}


def _get_2d_array_from_3d(arr: ndarray) -> ndarray:
    return arr.reshape(arr.shape[0], arr.shape[1])


def _channel_split(arr: ndarray) -> list[ndarray]:
    from numpy import dsplit

    return dsplit(arr, arr.shape[-1])


def get_raster_terrain_rgb_like(arr: ndarray, xyz_provider_name, nan_value_int: int = -12000):
    """
    Compute a 32-bits 2d digital elevation model from a terrain-rgb raster.

    Args:
        arr: rgb raster
        xyz_provider_name: xyz provider
        nan_value_int: threshold int value to replace NaN

    Returns:
        ndarray: 2d digital elevation model raster at 32 bits
    """
    red, green, blue = _channel_split(arr)
    dem_rgb = providers_terrain_rgb_formulas[xyz_provider_name](red, green, blue)
    output = _get_2d_array_from_3d(dem_rgb)
    output[output < nan_value_int] = np.NaN
    return output


def get_rgb_prediction_image(raster_cropped: ndarray, slope_cellsize: int, invert_image: bool = True) -> ndarray:
    """
    Return an RGB image from input numpy array

    Args:
        raster_cropped: input numpy array
        slope_cellsize: window size to calculate slope and curvature (1st and 2nd degree array derivative)
        invert_image:

    Returns:
        tuple of str: image filename, image path (with filename)
    """
    from samgis.utilities.constants import CHANNEL_EXAGGERATIONS_LIST

    try:
        slope, curvature = get_slope_curvature(raster_cropped, slope_cellsize=slope_cellsize)

        channel0 = raster_cropped
        channel1 = normalize_array_list(
            [raster_cropped, slope, curvature], CHANNEL_EXAGGERATIONS_LIST, title="channel1_normlist")
        channel2 = curvature

        return get_rgb_image(channel0, channel1, channel2, invert_image=invert_image)
    except ValueError as ve_get_rgb_prediction_image:
        msg = f"ve_get_rgb_prediction_image:{ve_get_rgb_prediction_image}."
        app_logger.error(msg)
        raise ve_get_rgb_prediction_image


def get_rgb_image(arr_channel0: ndarray, arr_channel1: ndarray, arr_channel2: ndarray,
                  invert_image: bool = True) -> ndarray:
    """
    Return an RGB image from input R,G,B channel arrays

    Args:
        arr_channel0: channel image 0
        arr_channel1: channel image 1
        arr_channel2: channel image 2
        invert_image: invert the RGB image channel order

    Returns:
        ndarray: RGB image
    """
    try:
        # RED curvature, GREEN slope, BLUE dem, invert_image=True
        if len(arr_channel0.shape) != 2:
            msg = f"arr_size, wrong type:{type(arr_channel0)} or arr_size:{arr_channel0.shape}."
            app_logger.error(msg)
            raise ValueError(msg)
        data_rgb = np.zeros((arr_channel0.shape[0], arr_channel0.shape[1], 3), dtype=np.uint8)
        app_logger.debug(f"arr_container data_rgb, type:{type(data_rgb)}, arr_shape:{data_rgb.shape}.")
        data_rgb[:, :, 0] = normalize_array(
            arr_channel0.astype(float), high=1, norm_type="float", title="RGB:channel0") * 64
        data_rgb[:, :, 1] = normalize_array(
            arr_channel1.astype(float), high=1, norm_type="float", title="RGB:channel1") * 128
        data_rgb[:, :, 2] = normalize_array(
            arr_channel2.astype(float), high=1, norm_type="float", title="RGB:channel2") * 192
        if invert_image:
            app_logger.debug(f"data_rgb:{type(data_rgb)}, {data_rgb.dtype}.")
            data_rgb = bitwise_not(data_rgb)
        return data_rgb
    except ValueError as ve_get_rgb_image:
        msg = f"ve_get_rgb_image:{ve_get_rgb_image}."
        app_logger.error(msg)
        raise ve_get_rgb_image


def get_slope_curvature(dem: ndarray, slope_cellsize: int, title: str = "") -> tuple[ndarray, ndarray]:
    """
    Return a tuple of two numpy arrays representing slope and curvature (1st grade derivative and 2nd grade derivative)

    Args:
        dem: input numpy array
        slope_cellsize: window size to calculate slope and curvature
        title: array name

    Returns:
        tuple of ndarrays: slope image, curvature image
    """

    app_logger.info(f"dem shape:{dem.shape}, slope_cellsize:{slope_cellsize}.")

    try:
        dem = dem.astype(float)
        app_logger.debug("get_slope_curvature:: start")
        slope = calculate_slope(dem, slope_cellsize)
        app_logger.debug("get_slope_curvature:: created slope raster")
        s2c = calculate_slope(slope, slope_cellsize)
        curvature = normalize_array(s2c, norm_type="float", title=f"SC:curvature_{title}")
        app_logger.debug("get_slope_curvature:: created curvature raster")

        return slope, curvature
    except ValueError as ve_get_slope_curvature:
        msg = f"ve_get_slope_curvature:{ve_get_slope_curvature}."
        app_logger.error(msg)
        raise ve_get_slope_curvature


def calculate_slope(dem_array: ndarray, cell_size: int, calctype: str = "degree") -> ndarray:
    """
    Return a numpy array representing slope (1st grade derivative)

    Args:
        dem_array: input numpy array
        cell_size: window size to calculate slope
        calctype: calculus type

    Returns:
        ndarray: slope image
    """

    try:
        gradx, grady = np.gradient(dem_array, cell_size)
        dem_slope = np.sqrt(gradx ** 2 + grady ** 2)
        if calctype == "degree":
            dem_slope = np.degrees(np.arctan(dem_slope))
        app_logger.debug(f"extracted slope with calctype:{calctype}.")
        return dem_slope
    except ValueError as ve_calculate_slope:
        msg = f"ve_calculate_slope:{ve_calculate_slope}."
        app_logger.error(msg)
        raise ve_calculate_slope


def normalize_array(arr: ndarray, high: int = 255, norm_type: str = "float", invert: bool = False, title: str = "") -> ndarray:
    """
    Return normalized numpy array between 0 and 'high' value. Default normalization type is int

    Args:
        arr: input numpy array
        high: max value to use for normalization
        norm_type: type of normalization: could be 'float' or 'int'
        invert: bool to choose if invert the normalized numpy array
        title: array title name

    Returns:
        ndarray: normalized numpy array
    """
    np.seterr("raise")

    h_min_arr = np.nanmin(arr)
    h_arr_max = np.nanmax(arr)
    try:
        h_diff = h_arr_max - h_min_arr
        app_logger.debug(
            f"normalize_array:: '{title}',h_min_arr:{h_min_arr},h_arr_max:{h_arr_max},h_diff:{h_diff}, dtype:{arr.dtype}.")
    except Exception as e_h_diff:
        app_logger.error(f"e_h_diff:{e_h_diff}.")
        raise ValueError(e_h_diff)

    if check_empty_array(arr, high) or check_empty_array(arr, h_diff):
        msg_ve = f"normalize_array::empty array '{title}',h_min_arr:{h_min_arr},h_arr_max:{h_arr_max},h_diff:{h_diff}, dtype:{arr.dtype}."
        app_logger.error(msg_ve)
        raise ValueError(msg_ve)
    try:
        normalized = high * (arr - h_min_arr) / h_diff
        normalized = np.nanmax(normalized) - normalized if invert else normalized
        return normalized.astype(int) if norm_type == "int" else normalized
    except FloatingPointError as fe:
        msg = f"normalize_array::{title}:h_arr_max:{h_arr_max},h_min_arr:{h_min_arr},fe:{fe}."
        app_logger.error(msg)
        raise ValueError(msg)


def normalize_array_list(arr_list: list[ndarray], exaggerations_list: list[float] = None, title: str = "") -> ndarray:
    """
    Return a normalized numpy array from a list of numpy array and an optional list of exaggeration values.

    Args:
        arr_list: list of array to use for normalization
        exaggerations_list: list of exaggeration values
        title: array title name

    Returns:
        ndarray: normalized numpy array
    """

    if not arr_list:
        msg = f"input list can't be empty:{arr_list}."
        app_logger.error(msg)
        raise ValueError(msg)
    if exaggerations_list is None:
        exaggerations_list = list(np.ones(len(arr_list)))
    arr_tmp = np.zeros(arr_list[0].shape)
    for a, exaggeration in zip(arr_list, exaggerations_list):
        app_logger.debug(f"normalize_array_list::exaggeration:{exaggeration}.")
        arr_tmp += normalize_array(a, norm_type="float", title=f"ARRLIST:{title}.") * exaggeration
    return arr_tmp / len(arr_list)


def check_empty_array(arr: ndarray, val: float) -> bool:
    """
    Return True if the input numpy array is empty. Check if
    - all values are all the same value (0, 1 or given 'val' input float value)
    - all values that are not NaN are a given 'val' float value

    Args:
        arr: input numpy array
        val: value to use for check if array is empty

    Returns:
        bool: True if the input numpy array is empty, False otherwise
    """

    arr_check5_tmp = np.copy(arr)
    arr_size = arr.shape[0]
    arr_check3 = np.ones((arr_size, arr_size))
    check1 = np.array_equal(arr, arr_check3)
    check2 = np.array_equal(arr, np.zeros((arr_size, arr_size)))
    arr_check3 *= val
    check3 = np.array_equal(arr, arr_check3)
    arr[np.isnan(arr)] = 0
    check4 = np.array_equal(arr, np.zeros((arr_size, arr_size)))
    arr_check5 = np.ones((arr_size, arr_size)) * val
    arr_check5_tmp[np.isnan(arr_check5_tmp)] = val
    check5 = np.array_equal(arr_check5_tmp, arr_check5)
    app_logger.debug(f"array checks:{check1}, {check2}, {check3}, {check4}, {check5}.")
    return check1 or check2 or check3 or check4 or check5


def write_raster_png(arr, transform, prefix: str, suffix: str, folder_output_path="/tmp"):
    from pathlib import Path
    from rasterio.plot import reshape_as_raster

    output_filename = Path(folder_output_path) / f"{prefix}_{suffix}.png"

    with rasterio_open(
            output_filename, 'w', driver='PNG',
            height=arr.shape[0],
            width=arr.shape[1],
            count=3,
            dtype=str(arr.dtype),
            crs=OUTPUT_CRS_STRING,
            transform=transform) as dst:
        dst.write(reshape_as_raster(arr))
    app_logger.info(f"written:{output_filename} as PNG, use {OUTPUT_CRS_STRING} as CRS.")


def write_raster_tiff(arr, transform, prefix: str, suffix: str, folder_output_path="/tmp"):
    from pathlib import Path
    output_filename = Path(folder_output_path) / f"{prefix}_{suffix}.tiff"

    with rasterio_open(
            output_filename, 'w', driver='GTiff',
            height=arr.shape[0],
            width=arr.shape[1],
            count=1,
            dtype=str(arr.dtype),
            crs=OUTPUT_CRS_STRING,
            transform=transform) as dst:
        dst.write(arr, 1)
    app_logger.info(f"written:{output_filename} as TIFF, use {OUTPUT_CRS_STRING} as CRS.")
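Decoding one 'terrarium' pixel by hand with the nextzen formula quoted in the removed docstring makes the channel layout concrete (sample pixel values are made up):

```python
# One pixel of a nextzen 'terrarium' tile: 16 bits of integer in red/green,
# 8 bits of fraction in blue, all offset by 32768.
red, green, blue = 128, 29, 64
elevation = (red * 256 + green + blue / 256) - 32768
print(elevation)  # 128*256 = 32768, so this decodes to 29.25 meters
```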
samgis/io/tms2geotiff.py DELETED

@@ -1,181 +0,0 @@
import os

from numpy import ndarray
from samgis_core.utilities.type_hints import TupleFloat
from xyzservices import TileProvider

from samgis import app_logger
from samgis.utilities.constants import (OUTPUT_CRS_STRING, DRIVER_RASTERIO_GTIFF, N_MAX_RETRIES, N_CONNECTION, N_WAIT,
                                        ZOOM_AUTO, BOOL_USE_CACHE)
from samgis.utilities.type_hints import tuple_ndarray_transform


bool_use_cache = int(os.getenv("BOOL_USE_CACHE", BOOL_USE_CACHE))
n_connection = int(os.getenv("N_CONNECTION", N_CONNECTION))
n_max_retries = int(os.getenv("N_MAX_RETRIES", N_MAX_RETRIES))
n_wait = int(os.getenv("N_WAIT", N_WAIT))
zoom_auto_string = os.getenv("ZOOM_AUTO", ZOOM_AUTO)


def download_extent(w: float, s: float, e: float, n: float, zoom: int or str = zoom_auto_string,
                    source: TileProvider or str = None,
                    wait: int = n_wait, max_retries: int = n_max_retries, n_connections: int = n_connection,
                    use_cache: bool = bool_use_cache) -> tuple_ndarray_transform:
    """
    Download, merge and crop a list of tiles into a single geo-referenced image or a raster geodata

    Args:
        w: West edge
        s: South edge
        e: East edge
        n: North edge
        zoom: Level of detail
        source: The tile source: web tile provider or path to local file. The web tile provider can be in the form of
            a :class:`xyzservices.TileProvider` object or a URL. The placeholders for the XYZ in the URL need to be
            `{x}`, `{y}`, `{z}`, respectively. For local file paths, the file is read with `rasterio` and all bands are
            loaded into the basemap. IMPORTANT: tiles are assumed to be in the Spherical Mercator projection
            (EPSG:3857), unless the `crs` keyword is specified.
        wait: if the tile API is rate-limited, the number of seconds to wait
            between a failed request and the next try
        max_retries: total number of rejected requests allowed before contextily will stop trying to fetch more tiles
            from a rate-limited API.
        n_connections: Number of connections for downloading tiles in parallel. Be careful not to overload the tile
            server and to check the tile provider's terms of use before increasing this value. E.g., OpenStreetMap has
            a max. value of 2 (https://operations.osmfoundation.org/policies/tiles/). If allowed to download in
            parallel, a recommended value for n_connections is 16, and should never be larger than 64.
        use_cache: If False, caching of the downloaded tiles will be disabled. This can be useful in resource
            constrained environments, especially when using n_connections > 1, or when a tile provider's terms of use
            don't allow caching.

    Returns:
        parsed request input
    """
    try:
        from samgis import contextily_tile
        from samgis.io.coordinates_pixel_conversion import _from4326_to3857

        app_logger.info(f"connection number:{n_connections}, type:{type(n_connections)}.")
        app_logger.info(f"zoom:{zoom}, type:{type(zoom)}.")
        app_logger.debug(f"download raster from source:{source} with bounding box w:{w}, s:{s}, e:{e}, n:{n}.")
        app_logger.debug(f"types w:{type(w)}, s:{type(s)}, e:{type(e)}, n:{type(n)}.")
        downloaded_raster, bbox_raster = contextily_tile.bounds2img(
            w, s, e, n, zoom=zoom, source=source, ll=True, wait=wait, max_retries=max_retries,
            n_connections=n_connections, use_cache=use_cache)
        xp0, yp0 = _from4326_to3857(n, e)
        xp1, yp1 = _from4326_to3857(s, w)
        cropped_image_ndarray, cropped_transform = crop_raster(yp1, xp1, yp0, xp0, downloaded_raster, bbox_raster)
        return cropped_image_ndarray, cropped_transform
    except Exception as e_download_extent:
        app_logger.exception(f"e_download_extent:{e_download_extent}.", exc_info=True)
        raise e_download_extent


def crop_raster(w: float, s: float, e: float, n: float, raster: ndarray, raster_bbox: TupleFloat,
                crs: str = OUTPUT_CRS_STRING, driver: str = DRIVER_RASTERIO_GTIFF) -> tuple_ndarray_transform:
    """
    Crop a raster using given bounding box (w, s, e, n) values

    Args:
        w: cropping west edge
        s: cropping south edge
        e: cropping east edge
        n: cropping north edge
        raster: raster image to crop
        raster_bbox: bounding box of raster to crop
        crs: The coordinate reference system. Required in 'w' or 'w+' modes, it is ignored in 'r' or 'r+' modes.
        driver: A short format driver name (e.g. "GTiff" or "JPEG") or a list of such names (see GDAL docs at
            https://gdal.org/drivers/raster/index.html ). In 'w' or 'w+' modes a single name is required. In 'r' or 'r+'
            modes the driver can usually be omitted. Registered drivers will be tried sequentially until a match is
            found. When multiple drivers are available for a format such as JPEG2000, one of them can be selected by
            using this keyword argument.

    Returns:
        cropped raster with its Affine transform
    """
    try:
        from rasterio.io import MemoryFile
        from rasterio.mask import mask as rio_mask
        from shapely.geometry import Polygon
        from geopandas import GeoSeries

        app_logger.debug(f"raster: type {type(raster)}, raster_ext:{type(raster_bbox)}, {raster_bbox}.")
        img_to_save, transform = get_transform_raster(raster, raster_bbox)
        img_height, img_width, number_bands = img_to_save.shape
        # https://rasterio.readthedocs.io/en/latest/topics/memory-files.html
        with MemoryFile() as rio_mem_file:
            app_logger.debug("writing raster in-memory to crop it with rasterio.mask.mask()")
            with rio_mem_file.open(
                    driver=driver,
                    height=img_height,
                    width=img_width,
                    count=number_bands,
                    dtype=str(img_to_save.dtype.name),
                    crs=crs,
                    transform=transform,
            ) as src_raster_rw:
                for band in range(number_bands):
                    src_raster_rw.write(img_to_save[:, :, band], band + 1)
            app_logger.debug("cropping raster in-memory with rasterio.mask.mask()")
            with rio_mem_file.open() as src_raster_ro:
                shapes_crop_polygon = Polygon([(n, e), (s, e), (s, w), (n, w), (n, e)])
                shapes_crop = GeoSeries([shapes_crop_polygon])
                app_logger.debug(f"cropping with polygon::{shapes_crop_polygon}.")
                cropped_image, cropped_transform = rio_mask(src_raster_ro, shapes=shapes_crop, crop=True)
                cropped_image_ndarray = reshape_as_image(cropped_image)
        app_logger.info(f"cropped image::{cropped_image_ndarray.shape}.")
        return cropped_image_ndarray, cropped_transform
    except Exception as e_crop_raster:
        try:
            app_logger.error(f"raster type:{type(raster)}.")
            app_logger.error(f"raster shape:{raster.shape}, dtype:{raster.dtype}.")
        except Exception as e_shape_dtype:
            app_logger.exception(f"raster shape or dtype not found:{e_shape_dtype}.", exc_info=True)
        app_logger.exception(f"e_crop_raster:{e_crop_raster}.", exc_info=True)
        raise e_crop_raster


def get_transform_raster(raster: ndarray, raster_bbox: TupleFloat) -> tuple_ndarray_transform:
    """
    Convert the input raster image to RGB and extract the Affine

    Args:
        raster: raster image to geo-reference
        raster_bbox: bounding box of raster to crop

    Returns:
        rgb raster image and its Affine transform
    """
    try:
        from rasterio.transform import from_origin
        from numpy import array as np_array, linspace as np_linspace, uint8 as np_uint8
        from PIL.Image import fromarray

        app_logger.debug(f"raster: type {type(raster)}, raster_ext:{type(raster_bbox)}, {raster_bbox}.")
        rgb = fromarray(np_uint8(raster)).convert('RGB')
        np_rgb = np_array(rgb)
        img_height, img_width, _ = np_rgb.shape

        min_x, max_x, min_y, max_y = raster_bbox
        app_logger.debug(f"raster rgb shape:{np_rgb.shape}, raster rgb bbox {raster_bbox}.")
        x = np_linspace(min_x, max_x, img_width)
        y = np_linspace(min_y, max_y, img_height)
        res_x = (x[-1] - x[0]) / img_width
        res_y = (y[-1] - y[0]) / img_height
        transform = from_origin(x[0] - res_x / 2, y[-1] + res_y / 2, res_x, res_y)
        return np_rgb, transform
    except Exception as e_get_transform_raster:
        app_logger.error(f"arguments raster: {type(raster)}, {raster}.")
        app_logger.error(f"arguments raster_bbox: {type(raster_bbox)}, {raster_bbox}.")
        app_logger.exception(f"e_get_transform_raster:{e_get_transform_raster}.", exc_info=True)
        raise e_get_transform_raster


def reshape_as_image(arr):
    try:
        from numpy import swapaxes

        return swapaxes(swapaxes(arr, 0, 2), 0, 1)
    except Exception as e_reshape_as_image:
        app_logger.error(f"arguments: {type(arr)}, {arr}.")
        app_logger.exception(f"e_reshape_as_image:{e_reshape_as_image}.", exc_info=True)
        raise e_reshape_as_image
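A sketch of how the removed `download_extent` was presumably invoked (the bounding box is made up, the tile URL is the public OpenStreetMap endpoint, and OSM's tile usage policy still applies):

```python
from samgis.io.tms2geotiff import download_extent  # as it existed before this commit

# Made-up bbox around Milan; w/s/e/n are EPSG:4326 degrees, zoom a slippy-map level.
image, transform = download_extent(
    w=9.18, s=45.45, e=9.20, n=45.47,
    zoom=15,
    source="https://tile.openstreetmap.org/{z}/{x}/{y}.png",
)
print(image.shape, transform)  # RGB ndarray plus its rasterio Affine transform
```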
samgis/io/wrappers_helpers.py
DELETED
@@ -1,224 +0,0 @@
"""lambda helper functions"""
import logging
from sys import stdout
from typing import Dict

import loguru
from xyzservices import providers, TileProvider

from samgis import app_logger
from samgis.io.coordinates_pixel_conversion import get_latlng_to_pixel_coordinates
from samgis.utilities.constants import COMPLETE_URL_TILES_MAPBOX, COMPLETE_URL_TILES_NEXTZEN, CUSTOM_RESPONSE_MESSAGES
from samgis.utilities.type_hints import ApiRequestBody, ContentTypes, XYZTerrainProvidersNames, XYZDefaultProvidersNames
from samgis_core.utilities.utilities import base64_decode


def get_response(status: int, start_time: float, request_id: str, response_body: Dict = None) -> str:
    """
    Response composer

    Args:
        status: status response
        start_time: request start time (float)
        request_id: str
        response_body: dict we embed into our response

    Returns:
        json response

    """
    from json import dumps
    from time import time

    app_logger.debug(f"response_body:{response_body}.")
    response_body["duration_run"] = time() - start_time
    response_body["message"] = CUSTOM_RESPONSE_MESSAGES[status]
    response_body["request_id"] = request_id

    response = {
        "statusCode": status,
        "header": {"Content-Type": ContentTypes.APPLICATION_JSON},
        "body": dumps(response_body),
        "isBase64Encoded": False
    }
    app_logger.debug(f"response type:{type(response)} => {response}.")
    return dumps(response)


def get_parsed_bbox_points(request_input: ApiRequestBody) -> Dict:
    """
    Parse the raw input request into bbox, prompt and zoom

    Args:
        request_input: input dict

    Returns:
        dict with bounding box, prompt and zoom
    """
    app_logger.info(f"try to parsing input request {request_input}...")

    bbox = request_input.bbox
    app_logger.debug(f"request bbox: {type(bbox)}, value:{bbox}.")
    ne = bbox.ne
    sw = bbox.sw
    app_logger.debug(f"request ne: {type(ne)}, value:{ne}.")
    app_logger.debug(f"request sw: {type(sw)}, value:{sw}.")
    ne_latlng = [float(ne.lat), float(ne.lng)]
    sw_latlng = [float(sw.lat), float(sw.lng)]
    new_zoom = int(request_input.zoom)
    new_prompt_list = _get_parsed_prompt_list(ne, sw, new_zoom, request_input.prompt)

    app_logger.debug(f"bbox => {bbox}.")
    app_logger.debug(f'request_input-prompt updated => {new_prompt_list}.')

    app_logger.info("unpacking elaborated request...")
    return {
        "bbox": [ne_latlng, sw_latlng],
        "prompt": new_prompt_list,
        "zoom": new_zoom,
        "source": get_url_tile(request_input.source_type)
    }


def _get_parsed_prompt_list(bbox_ne, bbox_sw, zoom, prompt_list):
    new_prompt_list = []
    for prompt in prompt_list:
        app_logger.debug(f"current prompt: {type(prompt)}, value:{prompt}.")
        new_prompt = {"type": prompt.type.value}
        if prompt.type == "point":
            new_prompt_data = _get_new_prompt_data_point(bbox_ne, bbox_sw, prompt, zoom)
            new_prompt["label"] = prompt.label.value
        elif prompt.type == "rectangle":
            new_prompt_data = _get_new_prompt_data_rectangle(bbox_ne, bbox_sw, prompt, zoom)
        else:
            msg = "Valid prompt type: 'point' or 'rectangle', not '{}'. Check ApiRequestBody parsing/validation."
            raise TypeError(msg.format(prompt.type))
        app_logger.debug(f"new_prompt_data: {type(new_prompt_data)}, value:{new_prompt_data}.")
        new_prompt["data"] = new_prompt_data
        new_prompt_list.append(new_prompt)
    return new_prompt_list


def _get_new_prompt_data_point(bbox_ne, bbox_sw, prompt, zoom):
    current_point = get_latlng_to_pixel_coordinates(bbox_ne, bbox_sw, prompt.data, zoom, prompt.type)
    app_logger.debug(f"current prompt: {type(current_point)}, value:{current_point}, label: {prompt.label}.")
    return [current_point['x'], current_point['y']]


def _get_new_prompt_data_rectangle(bbox_ne, bbox_sw, prompt, zoom):
    current_point_ne = get_latlng_to_pixel_coordinates(bbox_ne, bbox_sw, prompt.data.ne, zoom, prompt.type)
    app_logger.debug(
        f"rectangle:: current_point_ne prompt: {type(current_point_ne)}, value:{current_point_ne}.")
    current_point_sw = get_latlng_to_pixel_coordinates(bbox_ne, bbox_sw, prompt.data.sw, zoom, prompt.type)
    app_logger.debug(
        f"rectangle:: current_point_sw prompt: {type(current_point_sw)}, value:{current_point_sw}.")
    # correct order for rectangle prompt
    return [
        current_point_sw["x"],
        current_point_ne["y"],
        current_point_ne["x"],
        current_point_sw["y"]
    ]


def get_parsed_request_body(event: Dict or str) -> ApiRequestBody:
    """
    Validator for the raw input request lambda event

    Args:
        event: input dict

    Returns:
        parsed request input
    """
    from json import dumps, loads
    from logging import getLevelName

    def _get_current_log_level(logger: loguru.logger) -> [str, loguru._logger.Level]:
        levels = logger._core.levels
        current_log_level = logger._core.min_level
        level_filt = [l for l in levels.items() if l[1].no == current_log_level]
        return level_filt[0]

    app_logger.info(f"event:{dumps(event)}...")
    try:
        raw_body = event["body"]
    except Exception as e_constants1:
        app_logger.error(f"e_constants1:{e_constants1}.")
        raw_body = event
    app_logger.debug(f"raw_body, #1: {type(raw_body)}, {raw_body}...")
    if isinstance(raw_body, str):
        body_decoded_str = base64_decode(raw_body)
        app_logger.debug(f"body_decoded_str: {type(body_decoded_str)}, {body_decoded_str}...")
        raw_body = loads(body_decoded_str)
    app_logger.info(f"body, #2: {type(raw_body)}, {raw_body}...")

    parsed_body = ApiRequestBody.model_validate(raw_body)
    log_level = "DEBUG" if parsed_body.debug else "INFO"
    app_logger.remove()
    app_logger.add(stdout, level=log_level)
    try:
        current_log_level_name, _ = _get_current_log_level(app_logger)
        app_logger.warning(f"set log level to {getLevelName(current_log_level_name)}.")
    except Exception as ex:
        print("failing setting parsing bbox, logger is ok? ex:", ex, "#")

    return parsed_body


mapbox_terrain_rgb = TileProvider(
    name=XYZTerrainProvidersNames.MAPBOX_TERRAIN_TILES_NAME,
    url=COMPLETE_URL_TILES_MAPBOX,
    attribution=""
)
nextzen_terrain_rgb = TileProvider(
    name=XYZTerrainProvidersNames.NEXTZEN_TERRAIN_TILES_NAME,
    url=COMPLETE_URL_TILES_NEXTZEN,
    attribution=""
)


def get_url_tile(source_type: str):
    try:
        match source_type.lower():
            case XYZDefaultProvidersNames.DEFAULT_TILES_NAME_SHORT:
                return providers.query_name(XYZDefaultProvidersNames.DEFAULT_TILES_NAME)
            case XYZTerrainProvidersNames.MAPBOX_TERRAIN_TILES_NAME:
                return mapbox_terrain_rgb
            case XYZTerrainProvidersNames.NEXTZEN_TERRAIN_TILES_NAME:
                app_logger.info("nextzen_terrain_rgb:", nextzen_terrain_rgb)
                return nextzen_terrain_rgb
            case _:
                return providers.query_name(source_type)
    except ValueError as ve:
        from pydantic_core import ValidationError

        app_logger.error("ve:", str(ve))
        raise ValidationError(ve)


def check_source_type_is_terrain(source: str | TileProvider):
    return isinstance(source, TileProvider) and source.name in list(XYZTerrainProvidersNames)


def get_source_name(source: str | TileProvider) -> str | bool:
    try:
        match source.lower():
            case XYZDefaultProvidersNames.DEFAULT_TILES_NAME_SHORT:
                source_output = providers.query_name(XYZDefaultProvidersNames.DEFAULT_TILES_NAME)
            case _:
                source_output = providers.query_name(source)
        if isinstance(source_output, str):
            return source_output
        try:
            source_dict = dict(source_output)
            app_logger.info(f"source_dict:{type(source_dict)}, {'name' in source_dict}, source_dict:{source_dict}.")
            return source_dict["name"]
        except KeyError as ke:
            app_logger.error(f"ke:{ke}.")
    except ValueError as ve:
        app_logger.info(f"source name::{source}, ve:{ve}.")
    app_logger.info(f"source name::{source}.")

    return False
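For context (not part of the diff itself): before this commit the request flow chained the deleted helpers roughly as in the sketch below. The payload values are invented for illustration, and the imports point at the module removed here (the equivalent helpers now come from `samgis_web`).

```python
# Illustrative only: every coordinate below is made up.
from samgis.io.wrappers_helpers import get_parsed_bbox_points, get_parsed_request_body

event = {
    "bbox": {"ne": {"lat": 38.03, "lng": 15.36}, "sw": {"lat": 37.45, "lng": 14.63}},
    "prompt": [{"type": "point", "data": {"lat": 37.7, "lng": 15.0}, "label": 1}],
    "zoom": 10,
    "source_type": "OpenStreetMap.Mapnik",
}

parsed_body = get_parsed_request_body(event)        # pydantic validation + log level setup
request_ready = get_parsed_bbox_points(parsed_body)  # lat/lng prompts -> pixel coordinates
print(request_ready["bbox"], request_ready["zoom"])
```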
samgis/prediction_api/__init__.py
DELETED
@@ -1 +0,0 @@
"""functions useful to handle machine learning models"""
samgis/prediction_api/predictors.py
DELETED
@@ -1,92 +0,0 @@
"""functions using machine learning instance model(s)"""
from datetime import datetime
from os import getenv

from samgis import app_logger, MODEL_FOLDER
from samgis.io.geo_helpers import get_vectorized_raster_as_geojson
from samgis.io.raster_helpers import get_raster_terrain_rgb_like, get_rgb_prediction_image, write_raster_png, write_raster_tiff
from samgis.io.tms2geotiff import download_extent
from samgis.io.wrappers_helpers import check_source_type_is_terrain
from samgis.utilities.constants import DEFAULT_URL_TILES, MODEL_NAME, SLOPE_CELLSIZE
from samgis_core.prediction_api.sam_onnx2 import SegmentAnythingONNX2
from samgis_core.prediction_api.sam_onnx_inference import get_raster_inference_with_embedding_from_dict
from samgis_core.utilities.constants import MODEL_ENCODER_NAME, MODEL_DECODER_NAME, DEFAULT_INPUT_SHAPE
from samgis_core.utilities.type_hints import LlistFloat, DictStrInt, ListDict


models_dict = {MODEL_NAME: {"instance": None}}
embedding_dict = {}
msg_write_tmp_on_disk = "found option to write images and geojson output..."


def samexporter_predict(
        bbox: LlistFloat,
        prompt: ListDict,
        zoom: float,
        model_name: str = MODEL_NAME,
        source: str = DEFAULT_URL_TILES,
        source_name: str = None
) -> DictStrInt:
    """
    Return predictions as a geojson from a geo-referenced image using the given input prompt.

    1. if necessary instantiate a segment anything machine learning instance model
    2. download a geo-referenced raster image delimited by the coordinates bounding box (bbox)
    3. get a prediction image from the segment anything instance model using the input prompt
    4. get a geo-referenced geojson from the prediction image

    Args:
        bbox: coordinates bounding box
        prompt: machine learning input prompt
        zoom: Level of detail
        model_name: machine learning model name
        source: xyz tile provider object
        source_name: name of tile provider

    Returns:
        Affine transform
    """
    if models_dict[model_name]["instance"] is None:
        app_logger.info(f"missing instance model {model_name}, instantiating it now!")
        model_instance = SegmentAnythingONNX2(
            encoder_model_path=MODEL_FOLDER / MODEL_ENCODER_NAME,
            decoder_model_path=MODEL_FOLDER / MODEL_DECODER_NAME
        )
        models_dict[model_name]["instance"] = model_instance
    app_logger.debug(f"using a {model_name} instance model...")
    models_instance = models_dict[model_name]["instance"]

    pt0, pt1 = bbox
    app_logger.info(f"tile_source: {source}: downloading geo-referenced raster with bbox {bbox}, zoom {zoom}.")
    img, transform = download_extent(w=pt1[1], s=pt1[0], e=pt0[1], n=pt0[0], zoom=zoom, source=source)
    if check_source_type_is_terrain(source):
        app_logger.info("terrain-rgb like raster: transforms it into a DEM")
        dem = get_raster_terrain_rgb_like(img, source.name)
        # set a slope cell size proportional to the image width
        slope_cellsize = int(img.shape[1] * SLOPE_CELLSIZE / DEFAULT_INPUT_SHAPE[1])
        app_logger.info(f"terrain-rgb like raster: compute slope, curvature using {slope_cellsize} as cell size.")
        img = get_rgb_prediction_image(dem, slope_cellsize)

    folder_write_tmp_on_disk = getenv("WRITE_TMP_ON_DISK", "")
    app_logger.info(f"folder_write_tmp_on_disk:{folder_write_tmp_on_disk}.")
    prefix = f"w{pt1[1]},s{pt1[0]},e{pt0[1]},n{pt0[0]}_"
    if bool(folder_write_tmp_on_disk):
        now = datetime.now().strftime('%Y%m%d_%H%M%S')
        app_logger.info(msg_write_tmp_on_disk + f"with coords {prefix}, shape:{img.shape}, {len(img.shape)}.")
        if img.shape and len(img.shape) == 2:
            write_raster_tiff(img, transform, f"{source_name}_{prefix}_{now}_", f"raw_tiff", folder_write_tmp_on_disk)
        if img.shape and len(img.shape) == 3 and img.shape[2] == 3:
            write_raster_png(img, transform, f"{source_name}_{prefix}_{now}_", f"raw_img", folder_write_tmp_on_disk)

    app_logger.info(
        f"img type {type(img)} with shape/size:{img.size}, transform type: {type(transform)}, transform:{transform}.")
    app_logger.info(f"source_name:{source_name}, source_name type:{type(source_name)}.")
    embedding_key = f"{source_name}_z{zoom}_w{pt1[1]},s{pt1[0]},e{pt0[1]},n{pt0[0]}"
    mask, n_predictions = get_raster_inference_with_embedding_from_dict(
        img, prompt, models_instance, model_name, embedding_key, embedding_dict)
    app_logger.info(f"created {n_predictions} masks, type {type(mask)}, size {mask.size}: preparing geojson conversion")
    app_logger.info(f"mask shape:{mask.shape}.")
    return {
        "n_predictions": n_predictions,
        **get_vectorized_raster_as_geojson(mask, transform)
    }
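Again for reference only, a minimal sketch of how this deleted entry point used to be invoked by the old wrappers. The bbox is `[ne, sw]` and the point prompt is already in pixel coordinates, as produced by `get_parsed_bbox_points()` above; all values are invented and the call needs the ONNX model files in place.

```python
# Sketch, not part of the commit: every value below is made up.
from samgis.prediction_api.predictors import samexporter_predict

body = samexporter_predict(
    bbox=[[38.03, 15.36], [37.45, 14.63]],  # [ne, sw] as [lat, lng]
    prompt=[{"type": "point", "data": [960, 840], "label": 1}],  # pixel x, y
    zoom=10,
    source_name="OpenStreetMap.Mapnik",
)
# the result carries "n_predictions" plus the fields added by
# get_vectorized_raster_as_geojson() (geojson, n_shapes_geojson)
print(body["n_predictions"])
```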
samgis/utilities/__init__.py
DELETED
@@ -1 +0,0 @@
"""various helpers functions"""
samgis/utilities/constants.py
DELETED
@@ -1,44 +0,0 @@
"""Project constants"""
import os


INPUT_CRS_STRING = "EPSG:4326"
OUTPUT_CRS_STRING = "EPSG:3857"
DRIVER_RASTERIO_GTIFF = "GTiff"
ROOT = "/tmp"
CUSTOM_RESPONSE_MESSAGES = {
    200: "ok",
    400: "Bad Request",
    422: "Missing required parameter",
    500: "Internal server error"
}
TILE_SIZE = 256
EARTH_EQUATORIAL_RADIUS = 6378137.0
WKT_3857 = 'PROJCS["WGS 84 / Pseudo-Mercator",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,'
WKT_3857 += 'AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],'
WKT_3857 += 'UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],'
WKT_3857 += 'PROJECTION["Mercator_1SP"],PARAMETER["central_meridian",0],PARAMETER["scale_factor",1],'
WKT_3857 += 'PARAMETER["false_easting",0],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],'
WKT_3857 += 'AXIS["X",EAST],AXIS["Y",NORTH],EXTENSION["PROJ4","+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 '
WKT_3857 += '+x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs"],AUTHORITY["EPSG","3857"]]'
SERVICE_NAME = "sam-gis"
DEFAULT_LOG_LEVEL = 'INFO'
RETRY_DOWNLOAD = 3
TIMEOUT_DOWNLOAD = 60
CALLBACK_INTERVAL_DOWNLOAD = 0.05
BOOL_USE_CACHE = True
N_WAIT = 0
N_MAX_RETRIES = 2
N_CONNECTION = 2
ZOOM_AUTO = "auto"
DEFAULT_URL_TILES = 'https://tile.openstreetmap.org/{z}/{x}/{y}.png'
DOMAIN_URL_TILES_MAPBOX = "api.mapbox.com"
RELATIVE_URL_TILES_MAPBOX = "v/mapbox.terrain-rgb/{zoom}/{x}/{y}{@2x}.pngraw?access_token={TOKEN}"
COMPLETE_URL_TILES_MAPBOX = f"https://{DOMAIN_URL_TILES_MAPBOX}/{RELATIVE_URL_TILES_MAPBOX}"
# https://s3.amazonaws.com/elevation-tiles-prod/terrarium/13/1308/3167.png
DOMAIN_URL_TILES_NEXTZEN = "s3.amazonaws.com"
RELATIVE_URL_TILES_NEXTZEN = "elevation-tiles-prod/terrarium/{z}/{x}/{y}.png"  # "terrarium/{z}/{x}/{y}.png"
COMPLETE_URL_TILES_NEXTZEN = f"https://{DOMAIN_URL_TILES_NEXTZEN}/{RELATIVE_URL_TILES_NEXTZEN}"
CHANNEL_EXAGGERATIONS_LIST = [2.5, 1.1, 2.0]
SLOPE_CELLSIZE = 61
MODEL_NAME = os.getenv("MODEL_NAME", "mobile_sam")
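One non-obvious constant above is `SLOPE_CELLSIZE`: the deleted `samexporter_predict()` rescales it with the downloaded raster width, so the slope/curvature window stays proportional to the image. A quick check of that arithmetic (`DEFAULT_INPUT_SHAPE` comes from `samgis_core`; 684x684 is an assumption made here just for the example):

```python
SLOPE_CELLSIZE = 61
DEFAULT_INPUT_SHAPE = (684, 684)  # assumed value, see samgis_core.utilities.constants

for img_width in (684, 1368, 2048):
    # same formula as in predictors.samexporter_predict()
    slope_cellsize = int(img_width * SLOPE_CELLSIZE / DEFAULT_INPUT_SHAPE[1])
    print(img_width, slope_cellsize)  # 684 -> 61, 1368 -> 122, 2048 -> 182
```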
samgis/utilities/type_hints.py
DELETED
@@ -1,103 +0,0 @@
"""custom type hints"""
from enum import IntEnum, Enum
from typing import TypedDict

from affine import Affine
from numpy import ndarray
from pydantic import BaseModel

from samgis_core.utilities.type_hints import StrEnum


tuple_ndarray_transform = tuple[ndarray, Affine]


class XYZDefaultProvidersNames(StrEnum):
    """Default xyz provider names"""
    DEFAULT_TILES_NAME_SHORT = "openstreetmap"
    DEFAULT_TILES_NAME = "openstreetmap.mapnik"


class XYZTerrainProvidersNames(StrEnum):
    """Custom xyz provider names for digital elevation models"""
    MAPBOX_TERRAIN_TILES_NAME = "mapbox.terrain-rgb"
    NEXTZEN_TERRAIN_TILES_NAME = "nextzen.terrarium"


class LatLngDict(BaseModel):
    """Generic geographic latitude-longitude type"""
    lat: float
    lng: float


class ContentTypes(str, Enum):
    """Segment Anything: validation point prompt type"""
    APPLICATION_JSON = "application/json"
    TEXT_PLAIN = "text/plain"
    TEXT_HTML = "text/html"


class PromptPointType(str, Enum):
    """Segment Anything: validation point prompt type"""
    point = "point"


class PromptRectangleType(str, Enum):
    """Segment Anything: validation rectangle prompt type"""
    rectangle = "rectangle"


class PromptLabel(IntEnum):
    """Valid prompt label type"""
    EXCLUDE = 0
    INCLUDE = 1


class ImagePixelCoordinates(TypedDict):
    """Image pixel coordinates type"""
    x: int
    y: int


class RawBBox(BaseModel):
    """Input lambda bbox request type (not yet parsed)"""
    ne: LatLngDict
    sw: LatLngDict


class RawPromptPoint(BaseModel):
    """Input lambda prompt request of type 'PromptPointType' - point (not yet parsed)"""
    type: PromptPointType
    data: LatLngDict
    label: PromptLabel


class RawPromptRectangle(BaseModel):
    """Input lambda prompt request of type 'PromptRectangleType' - rectangle (not yet parsed)"""
    type: PromptRectangleType
    data: RawBBox

    def get_type_str(self):
        return self.type


class ApiRequestBody(BaseModel):
    """Input lambda request validator type (not yet parsed)"""
    id: str = ""
    bbox: RawBBox
    prompt: list[RawPromptPoint | RawPromptRectangle]
    zoom: int | float
    source_type: str = "OpenStreetMap.Mapnik"
    debug: bool = False


class ApiResponseBodyFailure(BaseModel):
    duration_run: float
    message: str
    request_id: str


class ApiResponseBodySuccess(ApiResponseBodyFailure):
    n_predictions: int
    geojson: str
    n_shapes_geojson: int
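These pydantic models validate the raw request: point and rectangle prompts are discriminated by their `type` field, and missing fields fall back to the declared defaults. A round-trip sketch (payload values invented):

```python
from samgis.utilities.type_hints import ApiRequestBody  # deleted here, now provided by samgis_web

payload = {
    "bbox": {"ne": {"lat": 38.0, "lng": 15.4}, "sw": {"lat": 37.5, "lng": 14.6}},
    "prompt": [
        {"type": "point", "data": {"lat": 37.7, "lng": 15.0}, "label": 1},
        {"type": "rectangle", "data": {"ne": {"lat": 37.9, "lng": 15.2},
                                       "sw": {"lat": 37.6, "lng": 14.8}}},
    ],
    "zoom": 10,
}
body = ApiRequestBody.model_validate(payload)
print(body.source_type, body.debug)  # defaults: "OpenStreetMap.Mapnik", False
```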
scripts/extract-openapi-fastapi.py
CHANGED
@@ -7,7 +7,8 @@ import sys
 import yaml
 from uvicorn.importer import import_from_string

-from
+from app import project_root_folder
+
 parser = argparse.ArgumentParser(prog="extract-openapi-fastapi.py")
 parser.add_argument("app", help='App import string. Eg. "main:app"', default="main:app")
@@ -28,7 +29,7 @@ if __name__ == "__main__":
     version = openapi.get("openapi", "unknown version")

     logging.info(f"writing openapi spec v{version}...")
-    output_dir_path =
+    output_dir_path = project_root_folder / "docs" / "specs"
     with open(output_dir_path / "output.json", "w") as f:
         json.dump(openapi, f)
     with open(output_dir_path / "output.yaml", "w") as f:
scripts/extract-openapi-lambda.py
CHANGED
@@ -1,11 +1,11 @@
 import json

-from
+from app import project_root_folder
+from samgis_web.utilities.type_hints import ApiRequestBody, ApiResponseBodyFailure, ApiResponseBodySuccess

-if __name__ == '__main__':
-    from samgis.utilities.type_hints import ApiRequestBody, ApiResponseBodyFailure, ApiResponseBodySuccess
-
+if __name__ == '__main__':
+    with open(project_root_folder / "docs" / "specs" / "openapi_lambda_wip.json", "w") as output_json:
     json.dump({
         "ApiRequestBody": ApiRequestBody.model_json_schema(),
         "ApiResponseBodyFailure": ApiResponseBodyFailure.model_json_schema(),
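Both scripts boil down to the same idea: import the application and dump the schema that FastAPI/pydantic already know how to generate. A condensed sketch of the FastAPI path (`"app:app"` matches the new `app.py` entry point; the output path is simplified here):

```python
import json

from uvicorn.importer import import_from_string

app = import_from_string("app:app")  # same "module:attribute" mechanism uvicorn uses
openapi = app.openapi()              # FastAPI builds the OpenAPI dict on demand
print("openapi version:", openapi.get("openapi", "unknown version"))
with open("output.json", "w") as f:
    json.dump(openapi, f)
```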
static/src/App.vue
CHANGED
@@ -6,10 +6,10 @@
   :mapName="mapName"
   :mapBounds='[{
     "lat": 46.235421781941776,
-    "lng": 9.
+    "lng": 9.47699401855469
   }, {
     "lat": 46.1351347810282,
-    "lng": 9.
+    "lng": 9.30121276855469
   }]'
   :description=description
 />
static/vite.config.ts
CHANGED
@@ -1,16 +1,27 @@
-import {
-
-import {
+import {fileURLToPath, URL} from 'node:url'
+import {resolve} from 'node:path'
+import {defineConfig, loadEnv} from 'vite'
 import vue from '@vitejs/plugin-vue'

 // https://vitejs.dev/config/
-export default defineConfig({
-
-
-
-
-
-
+export default defineConfig(({mode}) => {
+  const env = loadEnv(mode, process.cwd())
+  const frontendPrefix = env.VITE_INDEX_URL ? env.VITE_INDEX_URL : "/"
+  console.log(`VITE_PREFIX:${env.VITE_INDEX_URL}, frontend_prefix:${frontendPrefix}, mode:${mode} ...`)
+  return {
+    plugins: [vue()],
+    base: frontendPrefix,
+    resolve: {
+      alias: {
+        '@': fileURLToPath(new URL('./src', import.meta.url))
+      }
+    },
+    build: {
+      rollupOptions: {
+        input: {
+          index: resolve(__dirname, "index.html"),
+        },
+      },
+    }
   }
-}
 })
tests/__init__.py
CHANGED
@@ -1,6 +1,7 @@
-from
+from pathlib import Path


+PROJECT_ROOT_FOLDER = Path(globals().get("__file__", "./_")).absolute().parent.parent
 TEST_ROOT_FOLDER = PROJECT_ROOT_FOLDER / "tests"
 TEST_EVENTS_FOLDER = TEST_ROOT_FOLDER / "events"
 LOCAL_URL_TILE = "http://localhost:8000/lambda_handler/{z}/{x}/{y}.png"
tests/io/__init__.py
DELETED
File without changes
tests/io/test_coordinates_pixel_conversion.py
DELETED
@@ -1,27 +0,0 @@
import json

from samgis.io.coordinates_pixel_conversion import get_latlng_to_pixel_coordinates
from samgis.utilities.type_hints import LatLngDict
from tests import TEST_EVENTS_FOLDER


def test_get_latlng_to_pixel_coordinates():
    name_fn = "get_latlng_to_pixel_coordinates"

    with open(TEST_EVENTS_FOLDER / f"{name_fn}.json") as tst_json:
        inputs_outputs = json.load(tst_json)
        for k, input_output in inputs_outputs.items():
            print(f"k:{k}")
            current_input = input_output["input"]
            zoom = current_input["zoom"]
            latlng_origin_ne = LatLngDict.model_validate(current_input["latlng_origin_ne"])
            latlng_origin_sw = LatLngDict.model_validate(current_input["latlng_origin_sw"])
            latlng_current_point = LatLngDict.model_validate(current_input["latlng_current_point"])
            output = get_latlng_to_pixel_coordinates(
                latlng_origin_ne=latlng_origin_ne,
                latlng_origin_sw=latlng_origin_sw,
                latlng_current_point=latlng_current_point,
                zoom=zoom,
                k=k
            )
            assert output == input_output["output"]
tests/io/test_geo_helpers.py
DELETED
@@ -1,104 +0,0 @@
import json
import unittest
import numpy as np
import shapely

from samgis.io.geo_helpers import load_affine_transformation_from_matrix
from tests import TEST_EVENTS_FOLDER


class TestGeoHelpers(unittest.TestCase):
    def test_load_affine_transformation_from_matrix(self):
        name_fn = "samexporter_predict"

        expected_output = {
            'europe': (
                1524458.6551710723, 0.0, 152.87405657035242, 4713262.318571913, -762229.3275855362, -2356860.470370812
            ),
            'north_america': (
                -13855281.495084189, 0.0, 1222.9924525628194, 6732573.451358326, 6927640.747542094, -3368121.214358007
            ),
            'oceania': (
                7269467.138033403, 0.0, 9783.93962050256, -166326.9735485418, -3634733.5690167015, 68487.57734351706
            ),
            'south_america': (
                -7922544.351904369, 0.0, 305.74811314070394, -5432228.234830927, 3961272.1759521845, 2715655.4952457524
            )}

        with open(TEST_EVENTS_FOLDER / f"{name_fn}.json") as tst_json:
            inputs_outputs = json.load(tst_json)
            for k, input_output in inputs_outputs.items():
                print(f"k:{k}.")

                output = load_affine_transformation_from_matrix(input_output["input"]["matrix"])
                assert output.to_shapely() == expected_output[k]

    def test_load_affine_transformation_from_matrix_value_error(self):
        name_fn = "samexporter_predict"
        with open(TEST_EVENTS_FOLDER / f"{name_fn}.json") as tst_json:
            inputs_outputs = json.load(tst_json)
            with self.assertRaises(ValueError):
                try:
                    io_value_error = inputs_outputs["europe"]["input"]["matrix"][:5]
                    load_affine_transformation_from_matrix(io_value_error)
                except ValueError as ve:
                    print(f"ve:{ve}.")
                    self.assertEqual(str(ve), "Expected 6 coefficients, found 5; argument type: <class 'list'>.")
                    raise ve

    def test_load_affine_transformation_from_matrix_exception(self):
        name_fn = "samexporter_predict"
        with open(TEST_EVENTS_FOLDER / f"{name_fn}.json") as tst_json:
            inputs_outputs = json.load(tst_json)
            with self.assertRaises(Exception):
                try:
                    io_exception = inputs_outputs["europe"]["input"]["matrix"]
                    io_exception[0] = "ciao"
                    load_affine_transformation_from_matrix(io_exception)
                except Exception as e:
                    print(f"e:{e}.")
                    self.assertEqual(str(e), "exception:could not convert string to float: 'ciao', "
                                             "check https://github.com/rasterio/affine project for updates")
                    raise e

    def test_get_vectorized_raster_as_geojson_ok(self):
        from rasterio.transform import Affine
        from samgis.io.geo_helpers import get_vectorized_raster_as_geojson

        name_fn = "samexporter_predict"

        with open(TEST_EVENTS_FOLDER / f"{name_fn}.json") as tst_json:
            inputs_outputs = json.load(tst_json)
            for k, input_output in inputs_outputs.items():
                print(f"k:{k}.")
                mask = np.load(TEST_EVENTS_FOLDER / name_fn / k / "mask.npy")

                transform = Affine.from_gdal(*input_output["input"]["matrix"])
                output = get_vectorized_raster_as_geojson(mask=mask, transform=transform)
                assert output["n_shapes_geojson"] == input_output["output"]["n_shapes_geojson"]
                output_geojson = shapely.from_geojson(output["geojson"])
                assert isinstance(output_geojson, shapely.GeometryCollection)
                output_geojson_dict = json.loads(output["geojson"])
                assert len(output_geojson_dict["features"]) > 0

    def test_get_vectorized_raster_as_geojson_fail(self):
        from samgis.io.geo_helpers import get_vectorized_raster_as_geojson

        name_fn = "samexporter_predict"

        with open(TEST_EVENTS_FOLDER / f"{name_fn}.json") as tst_json:
            inputs_outputs = json.load(tst_json)
            for k, input_output in inputs_outputs.items():
                print(f"k:{k}.")
                mask = np.load(TEST_EVENTS_FOLDER / name_fn / k / "mask.npy")

                # Could be also another generic Exception, here we intercept TypeError caused by wrong matrix input on
                # rasterio.Affine.from_gdal() wrapped by get_affine_transform_from_gdal()
                with self.assertRaises(IndexError):
                    try:
                        wrong_matrix = 1.0,
                        get_vectorized_raster_as_geojson(mask=mask, transform=wrong_matrix)
                    except IndexError as te:
                        print(f"te:{te}.")
                        self.assertEqual(str(te), 'tuple index out of range')
                        raise te
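One detail worth keeping from these tests: the matrices in the fixture are GDAL-ordered (c, a, b, f, d, e), so they go through `Affine.from_gdal()` before use. A minimal illustration using the `europe` coefficients from the expected output above:

```python
from affine import Affine

# GDAL coefficient order: (c, a, b, f, d, e)
gdal_matrix = (1524458.6551710723, 152.87405657035242, 0.0,
               4713262.318571913, 0.0, -152.87405657035242)
transform = Affine.from_gdal(*gdal_matrix)
print(transform.to_gdal() == gdal_matrix)  # True: the round-trip preserves all six coefficients
```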
tests/io/test_raster_helpers.py
DELETED
@@ -1,254 +0,0 @@
import unittest
from unittest.mock import patch
import numpy as np

from samgis_core.utilities.utilities import hash_calculate
from samgis.io import raster_helpers


def get_three_channels(size=5, param1=1000, param2=3, param3=-88):
    arr_base = np.arange(size*size).reshape(size, size) / size**2
    channel_0 = arr_base * param1
    channel_1 = arr_base * param2
    channel_2 = arr_base * param3
    return channel_0, channel_1, channel_2


def helper_bell(size=10, param1=0.1, param2=2):
    x = np.linspace(-size, size, num=size**2)
    y = np.linspace(-size, size, num=size**2)
    x, y = np.meshgrid(x, y)
    return np.exp(-param1 * x ** param2 - param1 * y ** param2)


arr_5x5x5 = np.arange(125).reshape((5, 5, 5)) / 25
arr = np.arange(25).resize((5, 5))
channel0, channel1, channel2 = get_three_channels()
z = helper_bell()
slope_z_cellsize3, curvature_z_cellsize3 = raster_helpers.get_slope_curvature(z, slope_cellsize=3)


class Test(unittest.TestCase):

    def test_get_rgb_prediction_image_real(self):
        output = raster_helpers.get_rgb_prediction_image(z, slope_cellsize=61, invert_image=True)
        hash_output = hash_calculate(output)
        assert hash_output == b'QpQ9yxgCLw9cf3klNFKNFXIDHaSkuiZxkbpeQApR8pA='
        output = raster_helpers.get_rgb_prediction_image(z, slope_cellsize=61, invert_image=False)
        hash_output = hash_calculate(output)
        assert hash_output == b'Y+iXO9w/sKzNVOw2rBh2JrVGJUFRqaa8/0F9hpevmLs='

    @patch.object(raster_helpers, "get_slope_curvature")
    @patch.object(raster_helpers, "normalize_array_list")
    @patch.object(raster_helpers, "get_rgb_image")
    def test_get_rgb_prediction_image_mocked(self, get_rgb_image_mocked, normalize_array_list, get_slope_curvature):
        local_arr = np.array(z * 100, dtype=np.uint8)

        get_slope_curvature.return_value = slope_z_cellsize3, curvature_z_cellsize3
        normalize_array_list.side_effect = None
        get_rgb_image_mocked.return_value = np.bitwise_not(local_arr)
        output = raster_helpers.get_rgb_prediction_image(local_arr, slope_cellsize=61, invert_image=True)
        hash_output = hash_calculate(output)
        assert hash_output == b'BPIyVH64RgVunj42EuQAx4/v59Va8ZAjcMnuiGNqTT0='
        get_rgb_image_mocked.return_value = local_arr
        output = raster_helpers.get_rgb_prediction_image(local_arr, slope_cellsize=61, invert_image=False)
        hash_output = hash_calculate(output)
        assert hash_output == b'XX54sdLQQUrhkUHT6ikQZYSloMYDSfh/AGITDq6jnRM='

    @patch.object(raster_helpers, "get_slope_curvature")
    def test_get_rgb_prediction_image_value_error(self, get_slope_curvature):
        msg = "this is a value error"
        get_slope_curvature.side_effect = ValueError(msg)

        with self.assertRaises(ValueError):
            try:
                raster_helpers.get_rgb_prediction_image(arr, slope_cellsize=3)
            except ValueError as ve:
                self.assertEqual(str(ve), msg)
                raise ve

    def test_get_rgb_image(self):
        output = raster_helpers.get_rgb_image(channel0, channel1, channel2, invert_image=True)
        hash_output = hash_calculate(output)
        assert hash_output == b'YVnRWla5Ptfet6reSfM+OEIsGytLkeso6X+CRs34YHk='
        output = raster_helpers.get_rgb_image(channel0, channel1, channel2, invert_image=False)
        hash_output = hash_calculate(output)
        assert hash_output == b'LC/kIZGUZULSrwwSXCeP1My2spTZdW9D7LH+tltwERs='

    def test_get_rgb_image_value_error_1(self):
        with self.assertRaises(ValueError):
            try:
                raster_helpers.get_rgb_image(arr_5x5x5, arr_5x5x5, arr_5x5x5, invert_image=True)
            except ValueError as ve:
                self.assertEqual(f"arr_size, wrong type:{type(arr_5x5x5)} or arr_size:{arr_5x5x5.shape}.", str(ve))
                raise ve

    def test_get_rgb_image_value_error2(self):
        arr_0 = np.arange(25).reshape((5, 5))
        arr_1 = np.arange(4).reshape((2, 2))
        with self.assertRaises(ValueError):
            try:
                raster_helpers.get_rgb_image(arr_0, arr_1, channel2, invert_image=True)
            except ValueError as ve:
                self.assertEqual('could not broadcast input array from shape (2,2) into shape (5,5)', str(ve))
                raise ve

    def test_get_slope_curvature(self):
        slope_output, curvature_output = raster_helpers.get_slope_curvature(z, slope_cellsize=3)
        hash_curvature = hash_calculate(curvature_output)
        hash_slope = hash_calculate(slope_output)
        assert hash_curvature == b'LAL9JFOjJP9D6X4X3fVCpnitx9VPM9drS5YMHwMZ3iE='
        assert hash_slope == b'IYf6x4G0lmR47j6HRS5kUYWdtmimhLz2nak8py75nwc='

    def test_get_slope_curvature_value_error(self):
        from samgis.io import raster_helpers

        with self.assertRaises(ValueError):
            try:
                raster_helpers.get_slope_curvature(np.array(1), slope_cellsize=3)
            except ValueError as ve:
                self.assertEqual('not enough values to unpack (expected 2, got 0)', str(ve))
                raise ve

    def test_calculate_slope(self):
        slope_output = raster_helpers.calculate_slope(z, cell_size=3)
        hash_output = hash_calculate(slope_output)
        assert hash_output == b'IYf6x4G0lmR47j6HRS5kUYWdtmimhLz2nak8py75nwc='

    def test_calculate_slope_value_error(self):
        with self.assertRaises(ValueError):
            try:
                raster_helpers.calculate_slope(np.array(1), cell_size=3)
            except ValueError as ve:
                self.assertEqual('not enough values to unpack (expected 2, got 0)', str(ve))
                raise ve

    def test_normalize_array(self):
        def check_ndarrays_almost_equal(cls, arr1, arr2, places, check_type="float", check_ndiff=1):
            count_abs_diff = 0
            for list00, list01 in zip(arr1.tolist(), arr2.tolist()):
                for el00, el01 in zip(list00, list01):
                    ndiff = abs(el00 - el01)
                    if el00 != el01:
                        count_abs_diff += 1
                    if check_type == "float":
                        cls.assertAlmostEqual(el00, el01, places=places)
                    cls.assertLess(ndiff, check_ndiff)
            print("count_abs_diff:", count_abs_diff)

        normalized_array = raster_helpers.normalize_array(z)
        hash_output = hash_calculate(normalized_array)
        assert hash_output == b'MPkQwiiQa5NxL7LDvCS9V143YUEJT/Qh1aNEKc/Ehvo='

        mult_variable = 3.423
        test_array_input = np.arange(256).reshape((16, 16))
        test_array_output = raster_helpers.normalize_array(test_array_input * mult_variable)
        check_ndarrays_almost_equal(self, test_array_output, test_array_input, places=8)

        test_array_output1 = raster_helpers.normalize_array(test_array_input * mult_variable, high=128, norm_type="int")
        o = np.arange(256).reshape((16, 16)) / 2
        expected_array_output1 = o.astype(int)
        check_ndarrays_almost_equal(
            self, test_array_output1, expected_array_output1, places=2, check_type="int", check_ndiff=2)

    @patch.object(np, "nanmin")
    @patch.object(np, "nanmax")
    def test_normalize_array_floating_point_error_mocked(self, nanmax_mocked, nanmin_mocked):
        nanmax_mocked.return_value = 100
        nanmin_mocked.return_value = 100

        with self.assertRaises(ValueError):
            try:
                raster_helpers.normalize_array(
                    np.arange(25).reshape((5, 5))
                )
            except ValueError as ve:
                self.assertEqual(
                    "normalize_array:::h_arr_max:100,h_min_arr:100,fe:divide by zero encountered in divide.",
                    str(ve)
                )
                raise ve

    @patch.object(np, "nanmin")
    @patch.object(np, "nanmax")
    def test_normalize_array_exception_error_mocked(self, nanmax_mocked, nanmin_mocked):
        nanmax_mocked.return_value = 100
        nanmin_mocked.return_value = np.NaN
        with self.assertRaises(ValueError):
            try:
                raster_helpers.normalize_array(
                    np.arange(25).reshape((5, 5))
                )
            except ValueError as ve:
                self.assertEqual("cannot convert float NaN to integer", str(ve))
                raise ve

    def test_normalize_array_value_error(self):
        with self.assertRaises(ValueError):
            try:
                raster_helpers.normalize_array(
                    np.zeros((5, 5))
                )
            except ValueError as ve:
                self.assertEqual(
                    "normalize_array::empty array '',h_min_arr:0.0,h_arr_max:0.0,h_diff:0.0, " 'dtype:float64.',
                    str(ve)
                )
                raise ve

    def test_normalize_array_list(self):
        normalized_array = raster_helpers.normalize_array_list([channel0, channel1, channel2])
        hash_output = hash_calculate(normalized_array)
        assert hash_output == b'+6IbhIpyb3vPElTgqqPkQdIR0umf4uFP2c7t5IaBVvI='

        test_norm_list_output2 = raster_helpers.normalize_array_list(
            [channel0, channel1, channel2], exaggerations_list=[2.0, 3.0, 5.0])
        hash_variable2 = hash_calculate(test_norm_list_output2)
        assert hash_variable2 == b'yYCYWCKO3i8NYsWk/wgYOzSRRLSLUprEs7mChJkdL+A='

    def test_normalize_array_list_value_error(self):
        with self.assertRaises(ValueError):
            try:
                raster_helpers.normalize_array_list([])
            except ValueError as ve:
                self.assertEqual("input list can't be empty:[].", str(ve))
                raise ve

    def test_check_empty_array(self):
        a = np.zeros((10, 10))
        b = np.ones((10, 10))
        c = np.ones((10, 10)) * 2
        d = np.zeros((10, 10))
        d[1, 1] = np.nan
        e = np.ones((10, 10)) * 3
        e[1, 1] = np.nan

        self.assertTrue(raster_helpers.check_empty_array(a, 999))
        self.assertTrue(raster_helpers.check_empty_array(b, 0))
        self.assertTrue(raster_helpers.check_empty_array(c, 2))
        self.assertTrue(raster_helpers.check_empty_array(d, 0))
        self.assertTrue(raster_helpers.check_empty_array(e, 3))
        self.assertFalse(raster_helpers.check_empty_array(z, 3))

    def test_get_nextzen_terrain_rgb_formula(self):
        output = raster_helpers.get_nextzen_terrain_rgb_formula(channel0, channel1, channel2)
        hash_output = hash_calculate(output)
        assert hash_output == b'3KJ81YKmQRdccRZARbByfwo1iMVLj8xxz9mfsWki/qA='

    def test_get_mapbox__terrain_rgb_formula(self):
        output = raster_helpers.get_mapbox__terrain_rgb_formula(channel0, channel1, channel2)
        hash_output = hash_calculate(output)
        assert hash_output == b'RU7CcoKoR3Fkh5LE+m48DHRVUy/vGq6UgfOFUMXx07M='

    def test_get_raster_terrain_rgb_like(self):
        from samgis.utilities.type_hints import XYZTerrainProvidersNames

        arr_input = raster_helpers.get_rgb_image(channel0, channel1, channel2, invert_image=True)
        output_nextzen = raster_helpers.get_raster_terrain_rgb_like(
            arr_input, XYZTerrainProvidersNames.NEXTZEN_TERRAIN_TILES_NAME)
        hash_nextzen = hash_calculate(output_nextzen)
        assert hash_nextzen == b'+o2OTJliJkkBoqiAIGnhJ4s0xoLQ4MxHOvevLhNxysE='
        output_mapbox = raster_helpers.get_raster_terrain_rgb_like(
            arr_input, XYZTerrainProvidersNames.MAPBOX_TERRAIN_TILES_NAME)
        hash_mapbox = hash_calculate(output_mapbox)
        assert hash_mapbox == b'zWmekyKrpnmHnuDACnveCJl+o4GuhtHJmGlRDVwsce4='
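These tests pin array outputs to base64 digests via `samgis_core`'s `hash_calculate`, a compact regression-test pattern for numeric rasters. A tiny sketch of the same idea (the recorded value below is a placeholder, not a real digest):

```python
import numpy as np
from samgis_core.utilities.utilities import hash_calculate

arr = np.arange(25).reshape((5, 5))
fingerprint = hash_calculate(arr)  # bytes: a base64-encoded digest of the array content
# compare against a value recorded on a known-good run to catch silent regressions
print(fingerprint)
```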
tests/io/test_tms2geotiff.py
DELETED
@@ -1,138 +0,0 @@
import unittest

import numpy as np
from samgis_core.utilities.utilities import hash_calculate

from samgis import app_logger
from samgis.io.tms2geotiff import download_extent
from tests import LOCAL_URL_TILE, TEST_EVENTS_FOLDER


input_bbox = [[39.036252959636606, 15.040283203125002], [38.302869955150044, 13.634033203125002]]


class TestTms2geotiff(unittest.TestCase):
    # def test_download_extent_simple_source(self):
    #     from rasterio import Affine
    #     from xyzservices import TileProvider
    #     from tests.local_tiles_http_server import LocalTilesHttpServer
    #
    #     listen_port = 8000
    #
    #     with LocalTilesHttpServer.http_server("localhost", listen_port, directory=TEST_EVENTS_FOLDER):
    #         pt0, pt1 = input_bbox
    #         zoom = 10
    #
    #         n_lat = pt0[0]
    #         e_lng = pt0[1]
    #         s_lat = pt1[0]
    #         w_lng = pt1[1]
    #
    #         source = TileProvider(name="local_tile_provider", url=LOCAL_URL_TILE, attribution="")
    #         img, matrix = download_extent(w=w_lng, s=s_lat, e=e_lng, n=n_lat, zoom=zoom, source=source)
    #         app_logger.info(f"# DOWNLOAD ENDED, shape: {img.shape} #")
    #         np_img = np.ascontiguousarray(img)
    #         output_hash = hash_calculate(np_img)
    #         assert output_hash == b'UmbkwbPJpRT1XXcLnLUapUDP320w7YhS/AmT3H7u+b4='
    #         assert Affine.to_gdal(matrix) == (
    #             1517657.1966021745, 152.8740565703525, 0.0, 4726942.266183584, 0.0, -152.87405657034955)

    def test_download_extent_source_with_parameter(self):
        from rasterio import Affine
        from xyzservices import TileProvider
        from tests.local_tiles_http_server import LocalTilesHttpServer

        listen_port = 8000

        with LocalTilesHttpServer.http_server("localhost", listen_port, directory=TEST_EVENTS_FOLDER):
            pt0, pt1 = input_bbox
            zoom = 10

            n_lat = pt0[0]
            e_lng = pt0[1]
            s_lat = pt1[0]
            w_lng = pt1[1]

            local_url = "http://localhost:8000/{parameter}/{z}/{x}/{y}.png"
            download_extent_args_no_parameter = {"name": "local_tile_provider", "url": LOCAL_URL_TILE, "attribution": ""}
            download_extent_args = {
                "no_parameter": download_extent_args_no_parameter,
                "with_parameter": {"url": local_url, "parameter": "lambda_handler", **download_extent_args_no_parameter}
            }
            for _args_names, _args in download_extent_args.items():
                app_logger.info(f"args_names:{_args_names}.")
                source = TileProvider(**_args)
                img, matrix = download_extent(w=w_lng, s=s_lat, e=e_lng, n=n_lat, zoom=zoom, source=source)
                app_logger.info(f"# DOWNLOAD ENDED, shape: {img.shape} #")
                np_img = np.ascontiguousarray(img)
                output_hash = hash_calculate(np_img)
                assert output_hash == b'UmbkwbPJpRT1XXcLnLUapUDP320w7YhS/AmT3H7u+b4='
                assert Affine.to_gdal(matrix) == (
                    1517657.1966021745, 152.8740565703525, 0.0, 4726942.266183584, 0.0, -152.87405657034955)

    def test_download_extent_source_with_parameter_key_error(self):
        from xyzservices import TileProvider

        with self.assertRaises(KeyError):
            try:
                pt0, pt1 = input_bbox
                zoom = 10

                n_lat = pt0[0]
                e_lng = pt0[1]
                s_lat = pt1[0]
                w_lng = pt1[1]

                local_url_tile2 = "http://localhost:8000/{parameter}/{z}/{x}/{y}.png"
                source = TileProvider(name="local_tile_provider", url=local_url_tile2, attribution="")
                download_extent(w=w_lng, s=s_lat, e=e_lng, n=n_lat, zoom=zoom, source=source)
            except KeyError as ke:
                assert str(ke) == "'parameter'"
                raise ke

    def test_download_extent_io_error1(self):

        with self.assertRaises(Exception):
            try:
                pt0, pt1 = input_bbox
                zoom = 10

                n_lat = pt0[0]
                e_lng = pt0[1]
                s_lat = pt1[0]
                w_lng = pt1[1]

                download_extent(w=w_lng, s=s_lat, e=e_lng, n=n_lat, zoom=zoom, source=f"http://{LOCAL_URL_TILE}")
                print("exception not raised")
            except ConnectionError as ioe1:
                app_logger.error(f"ioe1:{ioe1}.")
                msg0 = "HTTPConnectionPool(host='localhost', port=8000): Max retries exceeded with url: /lambda_handler"
                msg1 = "Caused by NewConnectionError"
                msg2 = ": Failed to establish a new connection: [Errno 61] Connection refused'))"
                assert msg0 in str(ioe1)
                assert msg1 in str(ioe1)
                assert msg2 in str(ioe1)
                raise ioe1

    def test_download_extent_io_error2(self):
        from requests import HTTPError
        from tests.local_tiles_http_server import LocalTilesHttpServer

        listen_port = 8000
        with LocalTilesHttpServer.http_server("localhost", listen_port, directory=TEST_EVENTS_FOLDER):
            pt0, pt1 = input_bbox
            zoom = 10

            with self.assertRaises(HTTPError):
                try:
                    n_lat = pt0[0]
                    e_lng = pt0[1]
                    s_lat = pt1[0]
                    w_lng = pt1[1]

                    download_extent(w=w_lng, s=s_lat, e=e_lng, n=n_lat, zoom=zoom,
                                    source=LOCAL_URL_TILE + "_not_found_raster!")
                except HTTPError as http_e:
                    app_logger.error(f"ae:{http_e}.")
                    assert "Tile URL resulted in a 404 error. Double-check your tile url:" in str(http_e)
                    raise http_e
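The `download_extent(w, s, e, n, zoom, source)` call exercised above is the core of the tile-stitching tests: it downloads and mosaics xyz tiles for a bounding box and returns the image plus its affine transform. A compact sketch against the local test server, with the coordinates copied from `input_bbox` above (the module is deleted by this commit, so the import is only valid before it):

```python
from xyzservices import TileProvider

from samgis.io.tms2geotiff import download_extent  # deleted by this commit
from tests import LOCAL_URL_TILE, TEST_EVENTS_FOLDER
from tests.local_tiles_http_server import LocalTilesHttpServer

(n_lat, e_lng), (s_lat, w_lng) = [[39.036252959636606, 15.040283203125002],
                                  [38.302869955150044, 13.634033203125002]]
source = TileProvider(name="local_tile_provider", url=LOCAL_URL_TILE, attribution="")
with LocalTilesHttpServer.http_server("localhost", 8000, directory=TEST_EVENTS_FOLDER):
    img, matrix = download_extent(w=w_lng, s=s_lat, e=e_lng, n=n_lat, zoom=10, source=source)
    print(img.shape, matrix)
```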
tests/io/test_wrappers_helpers.py
DELETED
@@ -1,135 +0,0 @@
|
|
1 |
-
import json
|
2 |
-
import time
|
3 |
-
import unittest
|
4 |
-
|
5 |
-
from http import HTTPStatus
|
6 |
-
from unittest.mock import patch
|
7 |
-
|
8 |
-
from samgis.io import wrappers_helpers
|
9 |
-
from samgis.io.wrappers_helpers import get_parsed_bbox_points, get_parsed_request_body, get_response
|
10 |
-
from samgis.utilities.type_hints import ApiRequestBody
|
11 |
-
from tests import TEST_EVENTS_FOLDER
|
12 |
-
|
13 |
-
|
14 |
-
class WrappersHelpersTest(unittest.TestCase):
|
15 |
-
@patch.object(time, "time")
|
16 |
-
def test_get_response(self, time_mocked):
|
17 |
-
time_diff = 108
|
18 |
-
end_run = 1000
|
19 |
-
time_mocked.return_value = end_run
|
20 |
-
start_time = end_run - time_diff
|
21 |
-
aws_request_id = "test_invoke_id"
|
22 |
-
|
23 |
-
with open(TEST_EVENTS_FOLDER / "get_response.json") as tst_json:
|
24 |
-
inputs_outputs = json.load(tst_json)
|
25 |
-
|
26 |
-
response_type = "200"
|
27 |
-
body_response = inputs_outputs[response_type]["input"]
|
28 |
-
output = get_response(HTTPStatus.OK.value, start_time, aws_request_id, body_response)
|
29 |
-
assert json.loads(output) == inputs_outputs[response_type]["output"]
|
30 |
-
|
31 |
-
response_type = "400"
|
32 |
-
response_400 = get_response(HTTPStatus.BAD_REQUEST.value, start_time, aws_request_id, {})
|
33 |
-
assert response_400 == inputs_outputs[response_type]["output"]
|
34 |
-
|
35 |
-
response_type = "422"
|
36 |
-
response_422 = get_response(HTTPStatus.UNPROCESSABLE_ENTITY.value, start_time, aws_request_id, {})
|
37 |
-
assert response_422 == inputs_outputs[response_type]["output"]
|
38 |
-
|
39 |
-
response_type = "500"
|
40 |
-
response_500 = get_response(HTTPStatus.INTERNAL_SERVER_ERROR.value, start_time, aws_request_id, {})
|
41 |
-
assert response_500 == inputs_outputs[response_type]["output"]
|
42 |
-
|
43 |
-
@staticmethod
|
44 |
-
def test_get_parsed_bbox_points():
|
45 |
-
with open(TEST_EVENTS_FOLDER / "get_parsed_bbox_prompts_single_point.json") as tst_json:
|
46 |
-
inputs_outputs = json.load(tst_json)
|
47 |
-
for k, input_output in inputs_outputs.items():
|
48 |
-
print(f"k:{k}.")
|
49 |
-
raw_body = get_parsed_request_body(**input_output["input"])
|
50 |
-
output = get_parsed_bbox_points(raw_body)
|
51 |
-
assert output == input_output["output"]
|
52 |
-
|
53 |
-
@staticmethod
|
54 |
-
def test_get_parsed_bbox_other_inputs():
|
55 |
-
for json_filename in ["single_rectangle", "multi_prompt"]:
|
56 |
-
with open(TEST_EVENTS_FOLDER / f"get_parsed_bbox_prompts_{json_filename}.json") as tst_json:
|
57 |
-
inputs_outputs = json.load(tst_json)
|
58 |
-
parsed_input = ApiRequestBody.model_validate(inputs_outputs["input"])
|
59 |
-
output = get_parsed_bbox_points(parsed_input)
|
60 |
-
assert output == inputs_outputs["output"]
|
61 |
-
|
62 |
-
@staticmethod
|
63 |
-
def test_get_parsed_request_body():
|
64 |
-
from samgis_core.utilities.utilities import base64_encode
|
65 |
-
|
66 |
-
input_event = {
|
67 |
-
"event": {
|
68 |
-
"bbox": {
|
69 |
-
"ne": {"lat": 38.03932961278458, "lng": 15.36808069832851},
|
70 |
-
"sw": {"lat": 37.455509218936974, "lng": 14.632807441554068}
|
71 |
-
},
|
72 |
-
"prompt": [{"type": "point", "data": {"lat": 37.0, "lng": 15.0}, "label": 0}],
|
73 |
-
"zoom": 10, "source_type": "OpenStreetMap.Mapnik", "debug": True
|
74 |
-
}
|
75 |
-
}
|
76 |
-
expected_output_dict = {
|
77 |
-
"bbox": {
|
78 |
-
"ne": {"lat": 38.03932961278458, "lng": 15.36808069832851},
|
79 |
-
"sw": {"lat": 37.455509218936974, "lng": 14.632807441554068}
|
80 |
-
},
|
81 |
-
"prompt": [{"type": "point", "data": {"lat": 37.0, "lng": 15.0}, "label": 0}],
|
82 |
-
"zoom": 10, "source_type": "OpenStreetMap.Mapnik", "debug": True
|
83 |
-
}
|
84 |
-
output = get_parsed_request_body(input_event["event"])
|
85 |
-
assert output == ApiRequestBody.model_validate(input_event["event"])
|
86 |
-
|
87 |
-
input_event_str = json.dumps(input_event["event"])
|
88 |
-
output = get_parsed_request_body(input_event_str)
|
89 |
-
assert output == ApiRequestBody.model_validate(expected_output_dict)
|
90 |
-
|
91 |
-
event = {"body": base64_encode(input_event_str).decode("utf-8")}
|
92 |
-
output = get_parsed_request_body(event)
|
93 |
-
assert output == ApiRequestBody.model_validate(expected_output_dict)
|
94 |
-
|
95 |
-
@patch.object(wrappers_helpers, "providers")
|
96 |
-
def test_get_url_tile(self, providers_mocked):
|
97 |
-
import xyzservices
|
98 |
-
from samgis.io.wrappers_helpers import get_url_tile
|
99 |
-
|
100 |
-
from tests import LOCAL_URL_TILE
|
101 |
-
|
102 |
-
local_tile_provider = xyzservices.TileProvider(name="local_tile_provider", url=LOCAL_URL_TILE, attribution="")
|
103 |
-
expected_output = {'name': 'local_tile_provider', 'url': LOCAL_URL_TILE, 'attribution': ''}
|
104 |
-
providers_mocked.query_name.return_value = local_tile_provider
|
105 |
-
assert get_url_tile("OpenStreetMap") == expected_output
|
106 |
-
|
107 |
-
local_url = 'http://localhost:8000/{parameter}/{z}/{x}/{y}.png'
|
108 |
-
local_tile_provider = xyzservices.TileProvider(
|
109 |
-
name="local_tile_provider_param", url=local_url, attribution="", parameter="lamda_handler"
|
110 |
-
)
|
111 |
-
providers_mocked.query_name.return_value = local_tile_provider
|
112 |
-
assert get_url_tile("OpenStreetMap.HOT") == {
|
113 |
-
"parameter": "lamda_handler", 'name': 'local_tile_provider_param', 'url': local_url, 'attribution': ''
|
114 |
-
}
|
115 |
-
|
116 |
-
@staticmethod
|
117 |
-
def test_get_url_tile_real():
|
118 |
-
from samgis.io.wrappers_helpers import get_url_tile
|
119 |
-
|
120 |
-
assert get_url_tile("OpenStreetMap") == {
|
121 |
-
'url': 'https://tile.openstreetmap.org/{z}/{x}/{y}.png', 'max_zoom': 19,
|
122 |
-
'html_attribution': '© <a href="https://www.openstreetmap.org/copyright">OpenStreetMap</a> contributors',
|
123 |
-
'attribution': '(C) OpenStreetMap contributors',
|
124 |
-
'name': 'OpenStreetMap.Mapnik'}
|
125 |
-
|
126 |
-
html_attribution_hot = '© <a href="https://www.openstreetmap.org/copyright">OpenStreetMap</a> contributors, '
|
127 |
-
html_attribution_hot += 'Tiles style by <a href="https://www.hotosm.org/" target="_blank">Humanitarian '
|
128 |
-
html_attribution_hot += 'OpenStreetMap Team</a> hosted by <a href="https://openstreetmap.fr/" target="_blank">'
|
129 |
-
html_attribution_hot += 'OpenStreetMap France</a>'
|
130 |
-
attribution_hot = '(C) OpenStreetMap contributors, Tiles style by Humanitarian OpenStreetMap Team hosted by '
|
131 |
-
attribution_hot += 'OpenStreetMap France'
|
132 |
-
assert get_url_tile("OpenStreetMap.HOT") == {
|
133 |
-
'url': 'https://{s}.tile.openstreetmap.fr/hot/{z}/{x}/{y}.png', 'max_zoom': 19,
|
134 |
-
'html_attribution': html_attribution_hot, 'attribution': attribution_hot, 'name': 'OpenStreetMap.HOT'
|
135 |
-
}
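
The deleted test above pinned down the three request shapes `get_parsed_request_body` accepted: a plain dict, a JSON string, and a base64-encoded `body`. A minimal sketch of that round-trip using only the standard library; the removed code's `base64_encode` helper is replaced here by `base64.b64encode`, which is an assumption about its behavior:

```python
import base64
import json

# hypothetical payload mirroring the fixture used in the deleted test
payload = {"zoom": 10, "source_type": "OpenStreetMap.Mapnik", "debug": True}

payload_str = json.dumps(payload)  # the JSON-string form of the request
event = {"body": base64.b64encode(payload_str.encode("utf-8")).decode("utf-8")}  # the base64 form

# decoding recovers the original dict, which is what the parser validated
decoded = json.loads(base64.b64decode(event["body"]).decode("utf-8"))
assert decoded == payload
```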
tests/local_tiles_http_server.py
DELETED
@@ -1,46 +0,0 @@

```python
import logging
import time
import unittest


class LocalTilesHttpServer(unittest.TestCase):
    from contextlib import contextmanager

    @staticmethod
    @contextmanager
    def http_server(host: str, port: int, directory: str):
        """Function http_server defined within this test class to avoid pytest error "fixture 'host' not found"."""
        from functools import partial
        from http.server import SimpleHTTPRequestHandler, ThreadingHTTPServer
        from threading import Thread

        server = ThreadingHTTPServer(
            (host, port), partial(SimpleHTTPRequestHandler, directory=directory)
        )
        print("dir:", directory, "#")
        server_thread = Thread(target=server.serve_forever, name="http_server")
        server_thread.start()
        logging.info(f"listen:: host {host}, port {port}.")

        try:
            yield
        finally:
            server.shutdown()
            server_thread.join()


if __name__ == '__main__':
    # from tests import TEST_ROOT_FOLDER
    from pathlib import Path

    PROJECT_ROOT_FOLDER = Path(globals().get("__file__", "./_")).absolute().parent.parent

    TEST_ROOT_FOLDER = PROJECT_ROOT_FOLDER / "tests"
    TEST_EVENTS_FOLDER = TEST_ROOT_FOLDER / "events"

    main_listen_port = 8000
    logging.info(f"http_basedir_serve: {TEST_ROOT_FOLDER}.")
    with LocalTilesHttpServer.http_server("localhost", main_listen_port, directory=str(TEST_ROOT_FOLDER)):
        time.sleep(1000)
        logging.info("""import time; time.sleep(10)""")
    # logging.info("Http server stopped.")
```
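
This helper is not lost in the move: the renamed test module later in this commit imports the same class from `samgis_web.utilities.local_tiles_http_server`. A minimal usage sketch, assuming the relocated class keeps the `http_server` signature shown above and that a tiles folder exists at the placeholder path:

```python
import urllib.request

from samgis_web.utilities.local_tiles_http_server import LocalTilesHttpServer

# serve a folder of pre-downloaded tiles on localhost:8000 for the duration of a test;
# "tests/events" is a placeholder directory, not a path guaranteed by this repository
with LocalTilesHttpServer.http_server("localhost", 8000, directory="tests/events"):
    # any tile request now resolves locally instead of hitting a real TMS provider
    with urllib.request.urlopen("http://localhost:8000/") as resp:
        assert resp.status == 200
```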
tests/prediction_api/__init__.py
DELETED
File without changes
tests/prediction_api/test_predictors.py
DELETED
@@ -1,81 +0,0 @@

```python
import json
import unittest
from unittest.mock import patch

import numpy as np
import shapely
from affine import Affine

from samgis.prediction_api import predictors
from samgis.prediction_api.predictors import samexporter_predict
from tests import TEST_EVENTS_FOLDER


class TestPredictors(unittest.TestCase):
    @patch.object(predictors, "download_extent")
    def test_get_raster_inference(self, download_extent_mocked):
        name_fn = "samexporter_predict"

        with open(TEST_EVENTS_FOLDER / f"{name_fn}.json") as tst_json:
            inputs_outputs = json.load(tst_json)
        for k, input_output in inputs_outputs.items():
            input_payload = input_output["input"]
            prompt = input_payload["prompt"]
            model_name = input_payload["model_name"]
            bbox = input_payload["bbox"]
            zoom = input_payload["zoom"]
            print(f"k:{k}.")
            img = np.load(TEST_EVENTS_FOLDER / f"{name_fn}" / k / "img.npy")
            affine_transform = Affine.from_gdal(*input_payload["matrix"])
            download_extent_mocked.return_value = img, affine_transform
            expected_output = input_output["output"]

            output_dict = samexporter_predict(
                bbox,
                prompt,
                zoom,
                model_name
            )
            len_inference_out = output_dict["n_predictions"]
            geojson = output_dict["geojson"]
            n_shapes_geojson = output_dict["n_shapes_geojson"]

            assert isinstance(geojson, str)
            assert isinstance(n_shapes_geojson, int)
            assert len_inference_out == expected_output["n_predictions"]

            output_geojson = shapely.from_geojson(geojson)
            print("output_geojson::{}.".format(output_geojson))
            assert isinstance(output_geojson, shapely.GeometryCollection)
            assert len(output_geojson.geoms) > 0

    @patch.object(predictors, "get_raster_inference_with_embedding_from_dict")
    @patch.object(predictors, "SegmentAnythingONNX2")
    @patch.object(predictors, "download_extent")
    @patch.object(predictors, "get_vectorized_raster_as_geojson")
    def test_samexporter_predict_mocked(
            self,
            get_vectorized_raster_as_geojson_mocked,
            download_extent_mocked,
            segment_anything_onnx2_mocked,
            get_raster_inference_with_embedding_from_dict_mocked
    ):
        """
        model_instance = SegmentAnythingONNX()
        img, matrix = download_extent(DEFAULT_TMS, pt0[0], pt0[1], pt1[0], pt1[1], zoom)
        transform = get_affine_transform_from_gdal(matrix)
        mask, n_predictions = get_raster_inference(img, prompt, models_instance, model_name)
        get_vectorized_raster_as_geojson(mask, matrix)
        """
        aff = 1, 2, 3, 4, 5, 6
        segment_anything_onnx2_mocked.return_value = "SegmentAnythingONNX2_instance"
        input_downloaded = np.arange(0, 300, 1).reshape((10, 10, 3))
        download_extent_mocked.return_value = input_downloaded, aff
        mask_output = np.zeros((10, 10))
        mask_output[4:4, 6:6] = 255.0
        get_raster_inference_with_embedding_from_dict_mocked.return_value = mask_output, 1
        get_vectorized_raster_as_geojson_mocked.return_value = {"geojson": "{}", "n_shapes_geojson": 2}
        output = samexporter_predict(
            bbox=[[1, 2], [3, 4]], prompt=[{}], zoom=10, model_name="mobile_sam", source_name="localtest"
        )
        assert output == {"n_predictions": 1, "geojson": "{}", "n_shapes_geojson": 2}
```
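
Both removed tests pinned the same output contract for `samexporter_predict`: a dict carrying `n_predictions`, a GeoJSON string under `geojson`, and `n_shapes_geojson`. A small consumer sketch of that contract; the payload below is a hypothetical stand-in, not a real prediction:

```python
import shapely

# hypothetical result following the contract asserted in the deleted tests
output_dict = {
    "n_predictions": 1,
    "geojson": '{"type": "GeometryCollection", "geometries": []}',
    "n_shapes_geojson": 0,
}

# the geojson field is a string; parsing it yields a shapely GeometryCollection
geometry = shapely.from_geojson(output_dict["geojson"])
assert isinstance(geometry, shapely.GeometryCollection)
print(output_dict["n_predictions"], output_dict["n_shapes_geojson"], geometry)
```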
tests/{test_fastapi_app.py → test_app.py}
RENAMED

```diff
@@ -4,19 +4,16 @@
 from unittest.mock import patch
 
 from fastapi.testclient import TestClient
+from samgis_web.utilities.local_tiles_http_server import LocalTilesHttpServer
+from samgis_web.web import web_helpers
 
-
-from samgis.io import wrappers_helpers
-from tests import TEST_EVENTS_FOLDER
-from tests.local_tiles_http_server import LocalTilesHttpServer
-from wrappers import fastapi_wrapper
-from wrappers.fastapi_wrapper import app
+import app
 
 
 infer_samgis = "/infer_samgis"
 response_status_code = "response.status_code:{}."
 response_body_loaded = "response.body_loaded:{}."
-client = TestClient(app)
+client = TestClient(app.app)
 source = {
     'url': 'https://tile.openstreetmap.org/{z}/{x}/{y}.png', 'max_zoom': 19,
     'html_attribution': '© <a href="https://www.openstreetmap.org/copyright">OpenStreetMap</a> contributors',
@@ -60,45 +57,24 @@
         body = response.json()
         assert body == {"msg": "still alive..."}
 
-    def
-
-
-
-
-
-
-
-
-
-
-        response_body = response.json()
-        assert response_body == response_bodies_post_test[json_filename]
-
-    def test_fastapi_handler_post_test_422(self):
-        response = client.post("/post_test", json={})
-        assert response.status_code == 422
-        body = response.json()
-        assert body == {'msg': 'Error - Unprocessable Entity'}
-
-    def test_index(self):
-        import subprocess
-
-        subprocess.run(["pnpm", "build"], cwd=PROJECT_ROOT_FOLDER / "static")
-        subprocess.run(["pnpm", "tailwindcss", "-i", "./src/input.css", "-o", "./dist/output.css"],
-                       cwd=PROJECT_ROOT_FOLDER / "static")
-        response = client.get("/")
-        assert response.status_code == 200
-        html_body = response.read().decode("utf-8")
-        assert "html" in html_body
-        assert "head" in html_body
-        assert "body" in html_body
+    # def test_index(self):
+    #     import subprocess
+    #
+    #     subprocess.run(["pnpm", "build"], cwd=project_root_folder / "static")
+    #     subprocess.run(["pnpm", "tailwindcss", "-i", "./src/input.css", "-o", "./dist/output.css"],
+    #                    cwd=project_root_folder / "static")
+    #     response = client.get("/")
+    #     assert response.status_code == 200
+    #     html_body = response.read().decode("utf-8")
+    #     assert "html" in html_body
+    #     assert "head" in html_body
+    #     assert "body" in html_body
 
     def test_404(self):
         response = client.get("/404")
         assert response.status_code == 404
 
-    def
+    def test_infer_samgis_empty_body_422(self):
         response = client.post(infer_samgis, json={})
         print(response_status_code.format(response.status_code))
         assert response.status_code == 422
@@ -106,22 +82,20 @@
         print(response_body_loaded.format(body_loaded))
         assert body_loaded == {"msg": "Error - Unprocessable Entity"}
 
-    def
+    def test_infer_samgis_source_422(self):
         from copy import deepcopy
         local_event = deepcopy(event)
 
         local_event["source_type"] = "source_fake"
         response = client.post(infer_samgis, json=local_event)
         print(response_status_code.format(response.status_code))
-        assert response.status_code ==
+        assert response.status_code == 422
         body_loaded = response.json()
         print(response_body_loaded.format(body_loaded))
-        assert body_loaded == {
+        assert body_loaded == {"msg": "Error - Unprocessable Entity"}
 
-    @patch.object(
-
-    def test_infer_samgis_500(self, samexporter_predict_mocked, time_mocked):
-        time_mocked.return_value = 0
+    @patch.object(app, "samexporter_predict")
+    def test_infer_samgis_500(self, samexporter_predict_mocked):
         samexporter_predict_mocked.side_effect = ValueError("I raise a value error!")
 
         response = client.post(infer_samgis, json=event)
@@ -131,7 +105,7 @@
         print(response_body_loaded.format(body))
         assert body == {'msg': 'Error - Internal Server Error'}
 
-    @patch.object(
+    @patch.object(web_helpers, "get_url_tile")
     @patch.object(time, "time")
     def test_infer_samgis_real_200(self, time_mocked, get_url_tile_mocked):
         import shapely
@@ -162,7 +136,7 @@
         assert len(output_geojson.geoms) == 3
 
     @patch.object(time, "time")
-    @patch.object(
+    @patch.object(app, "samexporter_predict")
     def test_infer_samgis_mocked_200(self, samexporter_predict_mocked, time_mocked):
         self.maxDiff = None
 
```

(Several deleted lines above are truncated exactly as the diff viewer rendered them.)
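
The substance of this rename is the new patch target: the tests now mock symbols on the top-level `app` module and on `samgis_web.web.web_helpers` instead of the removed `wrappers.fastapi_wrapper` and `samgis.io.wrappers_helpers`. A minimal sketch of the pattern, assuming `app` re-exports `samexporter_predict` as the patched test implies:

```python
from unittest.mock import patch

from fastapi.testclient import TestClient

import app  # the new top-level FastAPI entry point

client = TestClient(app.app)

# `valid_event` stands in for a request body like the `event` fixture in the test;
# a real payload needs bbox, prompt, zoom and source_type fields
valid_event = {}

# patching app.samexporter_predict intercepts the symbol the route actually calls
with patch.object(app, "samexporter_predict") as predict_mocked:
    predict_mocked.side_effect = ValueError("forced failure")
    response = client.post("/infer_samgis", json=valid_event)
    print(response.status_code)  # 500 with a valid body, 422 with this empty placeholder
```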
tests/test_lambda_app.py
DELETED
@@ -1,232 +0,0 @@

```python
import json
import time
import unittest
from unittest.mock import patch

from samgis import IS_AWS_LAMBDA

if IS_AWS_LAMBDA:
    try:
        from awslambdaric.lambda_context import LambdaContext

        from samgis.io import wrappers_helpers
        from wrappers import lambda_wrapper
        from tests.local_tiles_http_server import LocalTilesHttpServer


        class TestLambdaApp(unittest.TestCase):
            @patch.object(time, "time")
            @patch.object(lambda_wrapper, "samexporter_predict")
            @patch.object(lambda_wrapper, "get_parsed_bbox_points")
            @patch.object(lambda_wrapper, "get_parsed_request_body")
            def test_lambda_handler_500(
                    self,
                    get_parsed_request_body_mocked,
                    get_parsed_bbox_points_mocked,
                    samexporter_predict_mocked,
                    time_mocked
            ):
                from wrappers.lambda_wrapper import lambda_handler

                time_mocked.return_value = 0
                get_parsed_request_body_mocked.value = {}
                get_parsed_bbox_points_mocked.return_value = {"bbox": "bbox_object", "prompt": "prompt_object",
                                                              "zoom": 1}
                samexporter_predict_mocked.side_effect = ValueError("I raise a value error!")

                event = {"body": {}, "version": 1.0}
                lambda_context = LambdaContext(
                    invoke_id="test_invoke_id",
                    client_context=None,
                    cognito_identity=None,
                    epoch_deadline_time_in_ms=time.time()
                )
                expected_response_500 = '{"statusCode": 500, "header": {"Content-Type": "application/json"}, '
                expected_response_500 += '"body": "{\\"duration_run\\": 0, \\"message\\": \\"Internal server error\\", '
                expected_response_500 += '\\"request_id\\": \\"test_invoke_id\\"}", "isBase64Encoded": false}'

                assert lambda_handler(event, lambda_context) == expected_response_500

            @patch.object(time, "time")
            @patch.object(lambda_wrapper, "get_parsed_request_body")
            def test_lambda_handler_400(self, get_parsed_request_body_mocked, time_mocked):
                from wrappers.lambda_wrapper import lambda_handler

                time_mocked.return_value = 0
                get_parsed_request_body_mocked.return_value = {}

                event = {"body": {}, "version": 1.0}
                lambda_context = LambdaContext(
                    invoke_id="test_invoke_id",
                    client_context=None,
                    cognito_identity=None,
                    epoch_deadline_time_in_ms=time.time()
                )

                assert lambda_handler(event, lambda_context) == (
                    '{"statusCode": 400, "header": {"Content-Type": "application/json"}, '
                    '"body": "{\\"duration_run\\": 0, \\"message\\": \\"Bad Request\\", '
                    '\\"request_id\\": \\"test_invoke_id\\"}", "isBase64Encoded": false}')

            @patch.object(time, "time")
            def test_lambda_handler_422(self, time_mocked):
                from wrappers.lambda_wrapper import lambda_handler

                time_mocked.return_value = 0
                event = {"body": {}, "version": 1.0}
                lambda_context = LambdaContext(
                    invoke_id="test_invoke_id",
                    client_context=None,
                    cognito_identity=None,
                    epoch_deadline_time_in_ms=time.time()
                )

                response_422 = lambda_handler(event, lambda_context)
                expected_response_422 = '{"statusCode": 422, "header": {"Content-Type": "application/json"}, '
                expected_response_422 += '"body": "{\\"duration_run\\": 0, \\"message\\": \\"Missing required parameter\\", '
                expected_response_422 += '\\"request_id\\": \\"test_invoke_id\\"}", "isBase64Encoded": false}'

                assert response_422 == expected_response_422

            @patch.object(time, "time")
            @patch.object(lambda_wrapper, "samexporter_predict")
            @patch.object(lambda_wrapper, "get_response")
            @patch.object(lambda_wrapper, "get_parsed_bbox_points")
            @patch.object(lambda_wrapper, "get_parsed_request_body")
            def test_lambda_handler_200_mocked(
                    self,
                    get_parsed_request_body_mocked,
                    get_parsed_bbox_points_mocked,
                    get_response_mocked,
                    samexporter_predict_mocked,
                    time_mocked
            ):
                from wrappers.lambda_wrapper import lambda_handler
                from tests import TEST_EVENTS_FOLDER

                time_mocked.return_value = 0
                get_parsed_request_body_mocked.value = {}
                get_parsed_bbox_points_mocked.return_value = {"bbox": "bbox_object", "prompt": "prompt_object", "zoom": 1}

                response_type = "200"
                with open(TEST_EVENTS_FOLDER / "get_response.json") as tst_json_get_response:
                    get_response_io = json.load(tst_json_get_response)

                input_200 = {
                    "bbox": {
                        "ne": {"lat": 38.03932961278458, "lng": 15.36808069832851},
                        "sw": {"lat": 37.455509218936974, "lng": 14.632807441554068}
                    },
                    "prompt": [{
                        "type": "point",
                        "data": {"lat": 37.0, "lng": 15.0},
                        "label": 0
                    }],
                    "zoom": 10,
                    "source_type": "OpenStreetMap.Mapnik",
                    "debug": True
                }

                samexporter_predict_output = get_response_io[response_type]["input"]
                samexporter_predict_mocked.return_value = samexporter_predict_output
                samexporter_predict_mocked.side_effect = None
                get_response_mocked.return_value = get_response_io[response_type]["output"]

                event = {"body": input_200, "version": 1.0}

                lambda_context = LambdaContext(
                    invoke_id="test_invoke_id",
                    client_context=None,
                    cognito_identity=None,
                    epoch_deadline_time_in_ms=time.time()
                )

                response_200 = lambda_handler(event, lambda_context)
                expected_response_200 = get_response_io[response_type]["output"]
                print(f"types: response_200:{type(response_200)}, expected:{type(expected_response_200)}.")
                assert response_200 == expected_response_200

            @patch.object(wrappers_helpers, "get_url_tile")
            def test_lambda_handler_200_real_single_multi_point(self, get_url_tile_mocked):
                import xyzservices
                import shapely

                from wrappers.lambda_wrapper import lambda_handler
                from tests import LOCAL_URL_TILE, TEST_EVENTS_FOLDER

                local_tile_provider = xyzservices.TileProvider(name="local_tile_provider", url=LOCAL_URL_TILE,
                                                               attribution="")
                get_url_tile_mocked.return_value = local_tile_provider
                fn_name = "lambda_handler"
                invoke_id = "test_invoke_id"

                for json_filename in [
                    "single_point",
                    "multi_prompt",
                    "single_rectangle"
                ]:
                    with open(TEST_EVENTS_FOLDER / f"{fn_name}_{json_filename}.json") as tst_json:
                        inputs_outputs = json.load(tst_json)
                    lambda_context = LambdaContext(
                        invoke_id=invoke_id,
                        client_context=None,
                        cognito_identity=None,
                        epoch_deadline_time_in_ms=time.time()
                    )
                    expected_response_dict = inputs_outputs["output"]
                    listen_port = 8000
                    expected_response_body = json.loads(expected_response_dict["body"])

                    with LocalTilesHttpServer.http_server("localhost", listen_port, directory=TEST_EVENTS_FOLDER):
                        input_event = inputs_outputs["input"]
                        input_event_body = json.loads(input_event["body"])
                        input_event["body"] = json.dumps(input_event_body)
                        response = lambda_handler(event=input_event, context=lambda_context)

                    response_dict = json.loads(response)
                    assert response_dict["statusCode"] == 200
                    body_dict = json.loads(response_dict["body"])
                    assert body_dict["n_predictions"] == 1
                    assert body_dict["request_id"] == invoke_id
                    assert body_dict["message"] == "ok"
                    assert body_dict["n_shapes_geojson"] == expected_response_body["n_shapes_geojson"]

                    output_geojson = shapely.from_geojson(body_dict["geojson"])
                    print("output_geojson::", type(output_geojson))
                    assert isinstance(output_geojson, shapely.GeometryCollection)
                    assert len(output_geojson.geoms) == expected_response_body["n_shapes_geojson"]

            def test_debug(self):
                from wrappers.lambda_wrapper import lambda_handler

                input_event = {
                    'bbox': {
                        'ne': {'lat': 46.302592089330524, 'lng': 9.49493408203125},
                        'sw': {'lat': 46.14011755129237, 'lng': 9.143371582031252}},
                    'prompt': [
                        {'id': 166, 'type': 'point', 'data': {'lat': 46.18244521829928, 'lng': 9.418544769287111},
                         'label': 1}
                    ],
                    'zoom': 12, 'source_type': 'OpenStreetMap'
                }
                lambda_context = LambdaContext(
                    invoke_id="test_invoke_id",
                    client_context=None,
                    cognito_identity=None,
                    epoch_deadline_time_in_ms=time.time()
                )
                response = lambda_handler(event=input_event, context=lambda_context)
                print(response)
    except ModuleNotFoundError as mnfe:
        print("missing awslambdaric...")
        raise mnfe


if __name__ == '__main__':
    if IS_AWS_LAMBDA:
        unittest.main()
```
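
The deleted assertions double as documentation of the Lambda response envelope: a JSON string with `statusCode`, `header`, a JSON-encoded `body`, and `isBase64Encoded`. A short sketch unpacking the 500 fixture from the removed test:

```python
import json

# envelope copied from the deleted test's expected 500 response
response = (
    '{"statusCode": 500, "header": {"Content-Type": "application/json"}, '
    '"body": "{\\"duration_run\\": 0, \\"message\\": \\"Internal server error\\", '
    '\\"request_id\\": \\"test_invoke_id\\"}", "isBase64Encoded": false}'
)

envelope = json.loads(response)
body = json.loads(envelope["body"])  # the body itself is JSON-encoded a second time
assert envelope["statusCode"] == 500
assert body["message"] == "Internal server error"
```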
wrappers/__init__.py
DELETED
File without changes
wrappers/fastapi_wrapper.py
DELETED
@@ -1,168 +0,0 @@

```python
import json
import os
import uuid
import pathlib

from fastapi import FastAPI, HTTPException, Request, status
from fastapi.exceptions import RequestValidationError
from fastapi.responses import FileResponse, HTMLResponse, JSONResponse
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
from pydantic import ValidationError

from samgis import PROJECT_ROOT_FOLDER, WORKDIR
from samgis.io.wrappers_helpers import get_parsed_bbox_points, get_source_name
from samgis.utilities.type_hints import ApiRequestBody
from samgis_core.utilities.fastapi_logger import setup_logging
from samgis.prediction_api.predictors import samexporter_predict


app_logger = setup_logging(debug=True)
app_logger.info(f"PROJECT_ROOT_FOLDER:{PROJECT_ROOT_FOLDER}.")
app_logger.info(f"WORKDIR:{WORKDIR}.")
app = FastAPI()


@app.middleware("http")
async def request_middleware(request, call_next):
    request_id = str(uuid.uuid4())
    with app_logger.contextualize(request_id=request_id):
        app_logger.info("Request started")

        try:
            response = await call_next(request)

        except Exception as ex:
            app_logger.error(f"Request failed: {ex}")
            response = JSONResponse(content={"success": False}, status_code=500)

        finally:
            response.headers["X-Request-ID"] = request_id
            app_logger.info("Request ended")

        return response


@app.post("/post_test")
async def post_test(request_input: ApiRequestBody) -> JSONResponse:
    request_body = get_parsed_bbox_points(request_input)
    app_logger.info(f"request_body:{request_body}.")
    return JSONResponse(
        status_code=200,
        content=get_parsed_bbox_points(request_input)
    )


@app.get("/health")
async def health() -> JSONResponse:
    from samgis.__version__ import __version__ as version
    from samgis_core.__version__ import __version__ as version_core

    app_logger.info(f"still alive, version:{version}, version_core:{version_core}.")
    return JSONResponse(status_code=200, content={"msg": "still alive..."})


@app.post("/infer_samgis")
def infer_samgis(request_input: ApiRequestBody) -> JSONResponse:
    app_logger.info("starting inference request...")

    try:
        import time

        time_start_run = time.time()
        body_request = get_parsed_bbox_points(request_input)
        app_logger.info(f"body_request:{body_request}.")
        try:
            source_name = get_source_name(request_input.source_type)
            app_logger.info(f"source_name = {source_name}.")
            output = samexporter_predict(
                bbox=body_request["bbox"], prompt=body_request["prompt"], zoom=body_request["zoom"],
                source=body_request["source"], source_name=source_name
            )
            duration_run = time.time() - time_start_run
            app_logger.info(f"duration_run:{duration_run}.")
            body = {
                "duration_run": duration_run,
                "output": output
            }
            return JSONResponse(status_code=200, content={"body": json.dumps(body)})
        except Exception as inference_exception:
            import subprocess
            home_content = subprocess.run(
                "ls -l /var/task /var/task/* {WRITE_TMP_ON_DISK} {WRITE_TMP_ON_DISK}/* {PROJECT_ROOT_FOLDER} {PROJECT_ROOT_FOLDER}/* {WORKDIR} {WORKDIR}/*", shell=True, universal_newlines=True, stdout=subprocess.PIPE
            )
            app_logger.error(f"/home/user ls -l: {home_content.stdout}.")
            app_logger.error(f"inference error:{inference_exception}.")
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Internal server error on inference")
    except ValidationError as va1:
        app_logger.error(f"validation error: {str(va1)}.")
        raise ValidationError("Unprocessable Entity")


@app.exception_handler(RequestValidationError)
async def request_validation_exception_handler(request: Request, exc: RequestValidationError) -> JSONResponse:
    app_logger.error(f"exception errors: {exc.errors()}.")
    app_logger.error(f"exception body: {exc.body}.")
    headers = request.headers.items()
    app_logger.error(f'request header: {dict(headers)}.')
    params = request.query_params.items()
    app_logger.error(f'request query params: {dict(params)}.')
    return JSONResponse(
        status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
        content={"msg": "Error - Unprocessable Entity"}
    )


@app.exception_handler(HTTPException)
async def http_exception_handler(request: Request, exc: HTTPException) -> JSONResponse:
    app_logger.error(f"exception: {str(exc)}.")
    headers = request.headers.items()
    app_logger.error(f'request header: {dict(headers)}.')
    params = request.query_params.items()
    app_logger.error(f'request query params: {dict(params)}.')
    return JSONResponse(
        status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
        content={"msg": "Error - Internal Server Error"}
    )


write_tmp_on_disk = os.getenv("WRITE_TMP_ON_DISK", "")
app_logger.info(f"write_tmp_on_disk:{write_tmp_on_disk}.")
if bool(write_tmp_on_disk):
    try:
        path_write_tmp_on_disk = pathlib.Path(write_tmp_on_disk)
        try:
            pathlib.Path.unlink(path_write_tmp_on_disk, missing_ok=True)
        except PermissionError or OSError as err:
            app_logger.error(f"{err} while removing old write_tmp_on_disk:{write_tmp_on_disk}.")
            app_logger.error(f"is file?{path_write_tmp_on_disk.is_file()}.")
            app_logger.error(f"is symlink?{path_write_tmp_on_disk.is_symlink()}.")
            app_logger.error(f"is folder?{path_write_tmp_on_disk.is_dir()}.")
        os.makedirs(write_tmp_on_disk, exist_ok=True)
        app.mount("/vis_output", StaticFiles(directory=write_tmp_on_disk), name="vis_output")
    except RuntimeError as rerr:
        app_logger.error(f"{rerr} while loading the folder write_tmp_on_disk:{write_tmp_on_disk}...")
        raise rerr
    templates = Jinja2Templates(directory=PROJECT_ROOT_FOLDER / "static")


    @app.get("/vis_output", response_class=HTMLResponse)
    def list_files(request: Request):

        files = os.listdir(write_tmp_on_disk)
        files_paths = sorted([f"{request.url._url}/{f}" for f in files])
        print(files_paths)
        return templates.TemplateResponse(
            "list_files.html", {"request": request, "files": files_paths}
        )


# important: the index() function and the app.mount MUST be at the end
app.mount("/", StaticFiles(directory=PROJECT_ROOT_FOLDER / "static" / "dist", html=True), name="static")


@app.get("/")
def index() -> FileResponse:
    return FileResponse(path="/app/static/index.html", media_type="text/html")
```
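
The request-ID middleware removed with this wrapper is a reusable pattern: tag each request with a UUID, log start and end under that ID, and echo it back as `X-Request-ID`. A stripped-down sketch on a bare FastAPI app, with standard-library logging standing in for the samgis_core logger:

```python
import logging
import uuid

from fastapi import FastAPI
from fastapi.responses import JSONResponse

app = FastAPI()
logger = logging.getLogger("request_middleware")


@app.middleware("http")
async def request_middleware(request, call_next):
    request_id = str(uuid.uuid4())
    logger.info("request %s started", request_id)
    try:
        response = await call_next(request)
    except Exception as ex:  # a failed handler still returns a tagged 500 response
        logger.error("request %s failed: %s", request_id, ex)
        response = JSONResponse(content={"success": False}, status_code=500)
    response.headers["X-Request-ID"] = request_id
    logger.info("request %s ended", request_id)
    return response
```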
wrappers/lambda_wrapper.py
DELETED
@@ -1,58 +0,0 @@

```python
"""Lambda entry point"""
from http import HTTPStatus
from typing import Dict

from aws_lambda_powertools.utilities.typing import LambdaContext
from pydantic import ValidationError

from samgis import app_logger
from samgis.io.wrappers_helpers import get_parsed_request_body, get_parsed_bbox_points, get_response
from samgis.prediction_api.predictors import samexporter_predict


def lambda_handler(event: Dict, context: LambdaContext) -> str:
    """
    Handle the request for the serverless backend and return the response
    (success or a type of error based on the exception raised).

    Args:
        event: request content
        context: request context

    Returns:
        json response from get_response() function

    """
    from time import time
    app_logger.info(f"start with aws_request_id:{context.aws_request_id}.")
    start_time = time()

    if "version" in event:
        app_logger.info(f"event version: {event['version']}.")

    try:
        app_logger.info("try get_parsed_event...")
        request_input = get_parsed_request_body(event)
        app_logger.info("event parsed: ok")
        body_request = get_parsed_bbox_points(request_input)
        app_logger.info(f"body_request => {type(body_request)}, {body_request}.")

        try:
            body_response = samexporter_predict(
                body_request["bbox"], body_request["prompt"], body_request["zoom"], source=body_request["source"]
            )
            app_logger.info(f"output body_response length:{len(body_response)}.")
            app_logger.debug(f"output body_response:{body_response}.")
            response = get_response(HTTPStatus.OK.value, start_time, context.aws_request_id, body_response)
        except Exception as ex2:
            app_logger.exception(f"exception2:{ex2}.", exc_info=True)
            response = get_response(HTTPStatus.INTERNAL_SERVER_ERROR.value, start_time, context.aws_request_id, {})
    except ValidationError as va1:
        app_logger.exception(f"ValidationError:{va1}.", exc_info=True)
        response = get_response(HTTPStatus.UNPROCESSABLE_ENTITY.value, start_time, context.aws_request_id, {})
    except Exception as ex1:
        app_logger.exception(f"exception1:{ex1}.", exc_info=True)
        response = get_response(HTTPStatus.BAD_REQUEST.value, start_time, context.aws_request_id, {})

    app_logger.debug(f"response_dumped:{response}...")
    return response
```
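
The nested try/except in the removed handler encodes a simple status mapping: 200 on success, 500 when the prediction call fails, 422 on a pydantic `ValidationError` from request parsing, and 400 on any other parsing failure. A compact sketch of that routing with hypothetical `parse` and `predict` callables:

```python
from http import HTTPStatus

from pydantic import ValidationError


def map_status(parse, predict) -> int:
    """Mirror the deleted handler's error routing with stand-in callables."""
    try:
        body_request = parse()
        try:
            predict(body_request)
            return HTTPStatus.OK.value                     # 200: happy path
        except Exception:
            return HTTPStatus.INTERNAL_SERVER_ERROR.value  # 500: prediction failed
    except ValidationError:
        return HTTPStatus.UNPROCESSABLE_ENTITY.value       # 422: invalid input
    except Exception:
        return HTTPStatus.BAD_REQUEST.value                # 400: unparseable request


print(map_status(lambda: {"zoom": 10}, lambda body: body))  # 200
print(map_status(lambda: {}, lambda body: 1 / 0))           # 500
```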