ale commited on
Commit
fdd60c2
1 Parent(s): 54a9099

[test] add test cases, add command for test coverage

Browse files
.coveragerc ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ [run]
2
+ source = src
3
+ omit = ./venv/*,*tests*,*apps.py,*manage.py,*__init__.py,*migrations*,*asgi*,*wsgi*,*admin.py,*urls.py,./tests/*
4
+
5
+ [report]
6
+ omit = ./venv/*,*tests*,*apps.py,*manage.py,*__init__.py,*migrations*,*asgi*,*wsgi*,*admin.py,*urls.py,./tests/*
7
+
8
+ exclude_lines =
9
+ if __name__ == .__main__.:
.gitignore CHANGED
@@ -7,3 +7,4 @@ tmp/
7
  *.onnx
8
  .DS_Store
9
  .pytest_cache
 
 
7
  *.onnx
8
  .DS_Store
9
  .pytest_cache
10
+ .coverage
README.md CHANGED
@@ -53,4 +53,13 @@ curl -X 'POST' \
53
  docker push 686901913580.dkr.ecr.eu-west-1.amazonaws.com/lambda-gdal-runner:latest
54
  docker push 686901913580.dkr.ecr.eu-west-1.amazonaws.com/lambda-fastsam-api:latest
55
  ```
56
- 3. It's possible to publish a new aws lambda version from cmd or from lambda page
 
 
 
 
 
 
 
 
 
 
53
  docker push 686901913580.dkr.ecr.eu-west-1.amazonaws.com/lambda-gdal-runner:latest
54
  docker push 686901913580.dkr.ecr.eu-west-1.amazonaws.com/lambda-fastsam-api:latest
55
  ```
56
+ 3. It's possible to publish a new aws lambda version from cmd or from lambda page
57
+
58
+
59
+ ## Tests
60
+
61
+ Tests are defined in the `tests` folder of this project. Use pip to install the test dependencies, then run the tests.
62
+
63
+ ```bash
64
+ python -m pytest --cov=src --cov-report=term-missing && coverage html
65
+ ```
pytest.ini ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ [pytest]
2
+ env_override_existing_values = 1
3
+ env_files =
4
+ tests/.test.env
5
+ [path]
6
+ source = src
7
+ omit = ./venv/*,*tests*,*apps.py,*manage.py,*__init__.py,*migrations*,*asgi*,*wsgi*,*admin.py,*urls.py,./tests/*
requirements.txt CHANGED
@@ -8,5 +8,6 @@ onnxruntime
8
  opencv-python
9
  pillow
10
  pydantic>=2.0.3
 
11
  rasterio
12
  requests
 
8
  opencv-python
9
  pillow
10
  pydantic>=2.0.3
11
+ python-dotenv
12
  rasterio
13
  requests
requirements_dev.txt CHANGED
@@ -8,5 +8,9 @@ onnxruntime
8
  opencv-python
9
  pillow
10
  pydantic>=2.0.3
 
 
 
 
11
  rasterio
12
  requests
 
8
  opencv-python
9
  pillow
10
  pydantic>=2.0.3
11
+ pytest
12
+ pytest-cov
13
+ pytest-dotenv
14
+ python-dotenv
15
  rasterio
16
  requests
src/io/geo_helpers.py CHANGED
@@ -20,6 +20,7 @@ def load_affine_transformation_from_matrix(matrix_source_coeffs: List):
20
  return center * Affine.translation(-0.5, -0.5)
21
  except Exception as e:
22
  app_logger.error(f"exception:{e}, check https://github.com/rasterio/affine project for updates")
 
23
 
24
 
25
  def get_affine_transform_from_gdal(matrix):
 
20
  return center * Affine.translation(-0.5, -0.5)
21
  except Exception as e:
22
  app_logger.error(f"exception:{e}, check https://github.com/rasterio/affine project for updates")
23
+ raise e
24
 
25
 
26
  def get_affine_transform_from_gdal(matrix):
tests/io/test_geo_helpers.py CHANGED
@@ -1,23 +1,80 @@
1
  import json
 
 
2
  import numpy as np
3
  import shapely
4
 
 
5
  from tests import TEST_EVENTS_FOLDER
6
 
7
 
8
- def test_get_vectorized_raster_as_geojson():
9
- from src.io.geo_helpers import get_vectorized_raster_as_geojson
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
10
 
11
- name_fn = "samexporter_predict"
12
 
13
- with open(TEST_EVENTS_FOLDER / f"{name_fn}.json") as tst_json:
14
- inputs_outputs = json.load(tst_json)
15
- for k, input_output in inputs_outputs.items():
16
- print(f"k:{k}.")
17
- mask = np.load(TEST_EVENTS_FOLDER / name_fn / k / "mask.npy")
18
 
19
- output = get_vectorized_raster_as_geojson(mask=mask, matrix=input_output["input"]["matrix"])
20
- assert output["n_shapes_geojson"] == input_output["output"]["n_shapes_geojson"]
21
- output_geojson = shapely.from_geojson(output["geojson"])
22
- expected_output_geojson = shapely.from_geojson(input_output["output"]["geojson"])
23
- assert shapely.equals_exact(output_geojson, expected_output_geojson, tolerance=0.000006)
 
1
  import json
2
+ import unittest
3
+
4
  import numpy as np
5
  import shapely
6
 
7
+ from src.io.geo_helpers import load_affine_transformation_from_matrix
8
  from tests import TEST_EVENTS_FOLDER
9
 
10
 
11
+ class TestGeoHelpers(unittest.TestCase):
12
+ def test_load_affine_transformation_from_matrix(self):
13
+ name_fn = "samexporter_predict"
14
+
15
+ expected_output = {
16
+ 'europe': (
17
+ 1524458.6551710723, 0.0, 152.87405657035242, 4713262.318571913, -762229.3275855362, -2356860.470370812
18
+ ),
19
+ 'north_america': (
20
+ -13855281.495084189, 0.0, 1222.9924525628194, 6732573.451358326, 6927640.747542094, -3368121.214358007
21
+ ),
22
+ 'oceania': (
23
+ 7269467.138033403, 0.0, 9783.93962050256, -166326.9735485418, -3634733.5690167015, 68487.57734351706
24
+ ),
25
+ 'south_america': (
26
+ -7922544.351904369, 0.0, 305.74811314070394, -5432228.234830927, 3961272.1759521845, 2715655.4952457524
27
+ )}
28
+
29
+ with open(TEST_EVENTS_FOLDER / f"{name_fn}.json") as tst_json:
30
+ inputs_outputs = json.load(tst_json)
31
+ for k, input_output in inputs_outputs.items():
32
+ print(f"k:{k}.")
33
+
34
+ output = load_affine_transformation_from_matrix(input_output["input"]["matrix"])
35
+ assert output.to_shapely() == expected_output[k]
36
+
37
+ def test_load_affine_transformation_from_matrix_value_error(self):
38
+ name_fn = "samexporter_predict"
39
+ with open(TEST_EVENTS_FOLDER / f"{name_fn}.json") as tst_json:
40
+ inputs_outputs = json.load(tst_json)
41
+ with self.assertRaises(ValueError):
42
+ try:
43
+ io_value_error = inputs_outputs["europe"]["input"]["matrix"][:5]
44
+ load_affine_transformation_from_matrix(io_value_error)
45
+ except ValueError as ve:
46
+ print(f"ve:{ve}.")
47
+ self.assertEqual(str(ve), "Expected 6 coefficients, found 5; argument type: <class 'list'>.")
48
+ raise ve
49
+
50
+ def test_load_affine_transformation_from_matrix_exception(self):
51
+ name_fn = "samexporter_predict"
52
+ with open(TEST_EVENTS_FOLDER / f"{name_fn}.json") as tst_json:
53
+ inputs_outputs = json.load(tst_json)
54
+ with self.assertRaises(Exception):
55
+ try:
56
+ io_exception = inputs_outputs["europe"]["input"]["matrix"]
57
+ io_exception[0] = "ciao"
58
+ load_affine_transformation_from_matrix(io_exception)
59
+ except Exception as e:
60
+ print(f"e:{e}.")
61
+ self.assertEqual(str(e), "exception:could not convert string to float: 'ciao', "
62
+ "check https://github.com/rasterio/affine project for updates")
63
+ raise e
64
+
65
+ def test_get_vectorized_raster_as_geojson(self):
66
+ from src.io.geo_helpers import get_vectorized_raster_as_geojson
67
 
68
+ name_fn = "samexporter_predict"
69
 
70
+ with open(TEST_EVENTS_FOLDER / f"{name_fn}.json") as tst_json:
71
+ inputs_outputs = json.load(tst_json)
72
+ for k, input_output in inputs_outputs.items():
73
+ print(f"k:{k}.")
74
+ mask = np.load(TEST_EVENTS_FOLDER / name_fn / k / "mask.npy")
75
 
76
+ output = get_vectorized_raster_as_geojson(mask=mask, matrix=input_output["input"]["matrix"])
77
+ assert output["n_shapes_geojson"] == input_output["output"]["n_shapes_geojson"]
78
+ output_geojson = shapely.from_geojson(output["geojson"])
79
+ expected_output_geojson = shapely.from_geojson(input_output["output"]["geojson"])
80
+ assert shapely.equals_exact(output_geojson, expected_output_geojson, tolerance=0.000006)
tests/utilities/__init__.py ADDED
File without changes
tests/utilities/test_serialize.py ADDED
@@ -0,0 +1,96 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import unittest
2
+
3
+ import numpy as np
4
+
5
+ from src.utilities.serialize import serialize
6
+
7
+ test_dict_list_dict = {
8
+ "type": "FeatureCollection",
9
+ "name": "volcanoes",
10
+ "crs": {"type": "name", "properties": {"name": "urn:ogc:def:crs:OGC:1.3:CRS84"}},
11
+ "features": [
12
+ {"type": "Feature", "properties": {"Volcano_Number": 283010, "Volcano_Name": "Izu-Tobu", "prop_none": None},
13
+ "geometry": {"type": "Point", "coordinates": [139.098, 34.9]}},
14
+ {"type": "Feature",
15
+ "properties": {"Volcano_Number": 283020, "Volcano_Name": "Hakoneyama", "ndarray": np.array([1])},
16
+ "geometry": {"type": "Point", "coordinates": [139.021, 35.233]}}
17
+ ]
18
+ }
19
+
20
+
21
+ class TestSerialize(unittest.TestCase):
22
+ def test_serialize(self):
23
+ from bson import ObjectId
24
+
25
+ # remove keys with values as bson.ObjectId
26
+ d1 = {"_id": ObjectId()}
27
+ self.assertDictEqual(serialize(d1), dict())
28
+
29
+ # test: serialize np.float*, number as key => str
30
+ np_int_4 = np.asarray([87], dtype=np.integer)[0]
31
+ d2 = {"b": np.float32(45.0), 3: 33, 1.56: np_int_4, 3.5: 44.0, "d": "b", "tuple": (1, 2)}
32
+ expected_d2 = {
33
+ 'b': 45.0,
34
+ 3: 33,
35
+ 1.56: 87,
36
+ 3.5: 44.0,
37
+ 'd': 'b',
38
+ "tuple": [1, 2]
39
+ }
40
+ serialized_d2 = serialize(d2)
41
+ self.assertDictEqual(serialized_d2, expected_d2)
42
+
43
+ # nested dict of list of dict, serialize np.array
44
+ d3 = {"e": [{"q": 123}, {"q": 456}], "a": np.arange(1.1, 16.88).reshape(4, 4)}
45
+ expected_d3 = {
46
+ "e": [{"q": 123}, {"q": 456}],
47
+ 'a': [[1.1, 2.1, 3.1, 4.1], [5.1, 6.1, 7.1, 8.1], [9.1, 10.1, 11.1, 12.1], [13.1, 14.1, 15.1, 16.1]]
48
+ }
49
+ self.assertDictEqual(serialize(d3), expected_d3)
50
+
51
+ def test_serialize_dict_exception(self):
52
+ from json import JSONDecodeError
53
+
54
+ e = JSONDecodeError(msg="x", doc="what we are?", pos=111)
55
+ exception = serialize({"k": e})
56
+ self.assertDictEqual(
57
+ exception,
58
+ {'k': {'msg': 'x', 'type': "<class 'json.decoder.JSONDecodeError'>", 'doc': 'what we are?', 'pos': 111,
59
+ 'lineno': 1, 'colno': 112}}
60
+ )
61
+
62
+ def test_serialize_bytes(self):
63
+ self.assertDictEqual(
64
+ serialize({"k": b"x"}),
65
+ {'k': {'value': 'eA==', 'type': 'bytes'}}
66
+ )
67
+
68
+ def test_serialize_dict_list_dict(self):
69
+ serialized_dict_no_none = serialize(test_dict_list_dict, include_none=False)
70
+ self.assertDictEqual(serialized_dict_no_none, {
71
+ 'type': 'FeatureCollection',
72
+ 'name': 'volcanoes',
73
+ 'crs': {'type': 'name', 'properties': {'name': 'urn:ogc:def:crs:OGC:1.3:CRS84'}},
74
+ 'features': [
75
+ {'type': 'Feature', 'properties': {'Volcano_Number': 283010, 'Volcano_Name': 'Izu-Tobu'},
76
+ 'geometry': {'type': 'Point', 'coordinates': [139.098, 34.9]}},
77
+ {'type': 'Feature',
78
+ 'properties': {'Volcano_Number': 283020, 'Volcano_Name': 'Hakoneyama', 'ndarray': [1]},
79
+ 'geometry': {'type': 'Point', 'coordinates': [139.021, 35.233]}}
80
+ ]
81
+ })
82
+
83
+ serialized_dict_with_none = serialize(test_dict_list_dict, include_none=True)
84
+ self.assertDictEqual(serialized_dict_with_none, {
85
+ 'type': 'FeatureCollection',
86
+ 'name': 'volcanoes',
87
+ 'crs': {'type': 'name', 'properties': {'name': 'urn:ogc:def:crs:OGC:1.3:CRS84'}},
88
+ 'features': [
89
+ {'type': 'Feature',
90
+ 'properties': {'Volcano_Number': 283010, 'Volcano_Name': 'Izu-Tobu', 'prop_none': None},
91
+ 'geometry': {'type': 'Point', 'coordinates': [139.098, 34.9]}},
92
+ {'type': 'Feature',
93
+ 'properties': {'Volcano_Number': 283020, 'Volcano_Name': 'Hakoneyama', 'ndarray': [1]},
94
+ 'geometry': {'type': 'Point', 'coordinates': [139.021, 35.233]}}
95
+ ]
96
+ })