youl committed on
Commit
d37b207
1 Parent(s): 212d39f

New application

Browse files
Files changed (6) hide show
  1. api.py +104 -0
  2. app.py +74 -0
  3. credentials.json +4 -0
  4. indices.py +157 -0
  5. processing.py +151 -0
  6. requirements.txt +13 -0
api.py ADDED
@@ -0,0 +1,104 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from sentinelsat import SentinelAPI, read_geojson, geojson_to_wkt
2
+ from datetime import datetime, timedelta,date
3
+ import zipfile
4
+ import rasterio
5
+ from rasterio.plot import show
6
+ from PIL import Image
7
+ import matplotlib.pyplot as plt
8
+ import numpy as np
9
+ import pandas as pd
10
+ import os
11
+ from glob import glob
12
+ from tqdm import tqdm
13
+ #from haversine import haversine, Unit
14
+ #from xml.etree import ElementTree as et
15
+ import xmltodict
16
+ import json
17
+ import warnings
18
+ import shutil
19
+ warnings.filterwarnings('ignore')
20
+
21
##
def map_number(number):
    """Left-pad *number* with zeros to at least two digits and return a string.

    e.g. 7 -> "07", 12 -> "12".  Used to build YYYYMMDD date strings.
    """
    # str.zfill is the stdlib idiom for zero-padding (original rebuilt it by hand).
    return str(number).zfill(2)
24
+
25
##
def download(cordinate):
    """Download recent Sentinel-2 L2A products covering *cordinate*.

    cordinate: (lon, lat) pair.  A 0.02-degree square footprint is built
    around the point, the Copernicus hub is queried for products sensed
    within the last 30 days, and every match is downloaded into the
    current working directory.
    """
    # Template GeoJSON whose polygon is replaced by our study area.
    GEOMAP = read_geojson('data/map.geojson')

    lon, lat = cordinate
    # Corners of the square around the point; the ring is closed by
    # repeating the first corner at the end.
    corners = [
        [lon - 0.01, lat + 0.01],
        [lon - 0.01, lat - 0.01],
        [lon + 0.01, lat - 0.01],
        [lon + 0.01, lat + 0.01],
    ]
    GEOMAP["features"][0]["geometry"]["coordinates"] = [corners + corners[:1]]

    # Sensing window: from 30 days ago (as a "YYYYMMDD" string) to today
    # (as a datetime.date) -- sentinelsat accepts both forms.
    N_DAYS_AGO = 30
    today = datetime.now()
    n_days_ago = today - timedelta(days=N_DAYS_AGO)
    day_format = date(today.year, today.month, today.day)
    n_days_ago_format = (
        str(n_days_ago.year)
        + map_number(n_days_ago.month)
        + map_number(n_days_ago.day)
    )

    # Copernicus Open Access Hub credentials, kept out of the source.
    with open('credentials.json', 'r') as openfile:
        credentials = json.load(openfile)

    api = SentinelAPI(credentials["user"], credentials["pwd"],
                      'https://apihub.copernicus.eu/apihub')

    footprint = geojson_to_wkt(GEOMAP)  # GeoJSON coordinates are (lon, lat)
    products = api.query(footprint,
                         date=(n_days_ago_format, day_format),
                         platformname='Sentinel-2',
                         producttype="S2MSI2A")

    # download all results from the search
    api.download_all(products)
62
+
63
##
def unzip():
    """Extract every .zip archive found in the working directory, in place."""
    for archive in glob('*.zip'):
        with zipfile.ZipFile(archive, 'r') as zf:
            zf.extractall()
69
+
70
+
71
##
def select_best_cloud_coverage_tile():
    """Pick the downloaded tile with the lowest cloud coverage.

    Scans every *.SAFE folder in the working directory and reads its L2A
    metadata (MTD_MSIL2A.xml).

    Returns (folder_name, cloud_coverage_pct, days_since_sensing).
    Raises ValueError (from min) if no *.SAFE folder is present.
    """
    tile_names = {}
    cld_prob = []
    for fold in glob('*.SAFE'):
        metadata_path = fold + "/MTD_MSIL2A.xml"
        # Close the metadata file deterministically (the original leaked the handle).
        with open(metadata_path, "r") as xml_file:
            python_dict = xmltodict.parse(xml_file.read())
        cld = float(python_dict["n1:Level-2A_User_Product"]["n1:Quality_Indicators_Info"]["Cloud_Coverage_Assessment"])
        tile_names[cld] = fold
        cld_prob.append(cld)
    best_cld = min(cld_prob)  # computed once instead of twice
    name = tile_names[best_cld]
    # The 3rd underscore-separated field of a .SAFE name starts with the
    # sensing date as YYYYMMDD.
    acquisition_date = datetime.strptime(name.split('_')[2][:8], "%Y%m%d")
    days_ago = (datetime.now() - acquisition_date).days
    return name, best_cld, days_ago
91
+
92
+
93
##
def delete_tiles():
    """Remove every downloaded archive (*.zip) and extracted tile (*.SAFE)."""
    for archive in glob('*.zip'):
        os.remove(archive)
    for safe_dir in glob('*.SAFE'):
        shutil.rmtree(safe_dir, ignore_errors=True)
101
+
102
+
103
+
104
+
app.py ADDED
@@ -0,0 +1,74 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ from api import *
3
+ from processing import *
4
+ import pandas as pd
5
+ from indices import indices
6
+ import xgboost as xgb
7
+ import pickle
8
+
9
def predict(lat, lon):
    """End-to-end pipeline: download imagery, estimate biomass, carbon and NDVI.

    lat, lon: point of interest in decimal degrees (Gradio number inputs).

    Returns five display strings: cloud coverage, days since sensing,
    biomass (Mg/ha), carbon stock (MgC/ha) and mean NDVI.

    Side effects: downloads and extracts Sentinel-2 archives into the
    working directory, then deletes them at the end.
    """
    # The rest of the pipeline uses (lon, lat) ordering.
    cord = [lon,lat]
    download(cord)
    unzip()
    # Tile with the lowest cloud coverage among everything downloaded.
    name,cld_prob,days_ago = select_best_cloud_coverage_tile()

    bandes_path_10,bandes_path_20,bandes_path_60,tile_path,path_cld_20,path_cld_60 =paths(name)
    # create image dataset: 10 m bands sampled around the point
    images_10 = extract_sub_image(bandes_path_10,tile_path,cord)

    # bandes with 20m resolution (window half-size 1)
    #path_cld_20
    images_20 = extract_sub_image(bandes_path_20,tile_path,cord,20,1)

    # bandes with 60m resolution (single pixel)
    #path_cld_60
    images_60 = extract_sub_image(bandes_path_60,tile_path,cord,60)

    # One feature row; band order must match what the model was trained on.
    feature = images_10.tolist()+images_20.tolist()+images_60.tolist()
    bands = ['B02', 'B03', 'B04', 'B05', 'B06', 'B07', 'B08', 'B8A', 'B11', 'B12','B01','B09']
    X = pd.DataFrame([feature],columns = bands)
    # vegetation index calculation
    X = indices(X)
    # load the model from disk
    # NOTE(review): unpickling executes arbitrary code -- only load trusted files.
    filename = "data/finalized_model.sav"
    loaded_model = pickle.load(open(filename, 'rb'))
    # make prediction
    biomass = loaded_model.predict(X)[0]
    # 0.55 carbon fraction of dry biomass -- presumably a literature default; confirm.
    carbon = 0.55*biomass

    # Mean NDVI around the point, from the 10 m red/NIR bands.
    ndvi_index = ndvi(cord,name)

    # Clean up downloaded archives and extracted tiles.
    delete_tiles()

    return str(cld_prob)+ " % cloud coverage", str(days_ago)+" days ago",str(biomass)+" Mg/ha", str(carbon)+" MgC/ha","NDVI: "+ str(ndvi_index)
46
+
47
# Gradio page chrome: title, description and footer strings.
title = "🌴BEEPAS : Biomass estimation to Evaluate the Environmental Performance of Agroforestry System🌴"
description = "This application estimates the biomass of certain areas using AI and satellite images (S2)."
article = "Created by data354."

# Example (lat, lon) inputs shown under the interface.
#example_list = [["examples/" + example] for example in os.listdir("examples")]
example_list = [[5.379913, -4.050445],[6.54644,-7.86156],[5.346938, -4.027849]]

# One textbox per value returned by predict(), in the same order.
outputs = [
    gr.Textbox(label="Cloud coverage"),
    gr.Textbox(label="Number of days since sensing"),
    gr.Textbox(label="Above ground biomass density(AGBD) Mg/ha"),
    gr.Textbox(label="Carbon stock density MgC/ha "),
    gr.Textbox(label="Mean NDVI"),]


demo = gr.Interface(
    fn=predict,
    inputs=["number", "number"],  # lat, lon
    outputs=outputs, #[ "text", "text","text","text","text"],
    examples=example_list,
    title=title,
    description=description,
    article=article,
)

# share=True additionally exposes a public tunnel URL.
demo.launch(share=True)
credentials.json ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ {
2
+ "user": "youl",
3
+ "pwd": "Abidjan2@"
4
+ }
indices.py ADDED
@@ -0,0 +1,157 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+
3
def indices(X):
    """Append spectral / vegetation index columns to the band DataFrame *X*.

    X must contain the Sentinel-2 band columns 'B01'..'B08', 'B8A', 'B09',
    'B11' and 'B12'; one column per index is appended and the same
    DataFrame is returned (mutated in place).

    NOTE(review): "NDWI2", "TCARI" and "MTCI" are each assigned twice
    below; the later assignment silently wins.  The duplicates are kept
    because removing them would change the DataFrame column order, which
    the downstream pickled model may depend on -- confirm before cleaning up.
    """

    X["MTCI"] = (X["B06"]-X["B05"])/(X["B05"]-X["B04"])
    X["AWEInsh"] = 4.0*(X["B03"]-X["B11"])-0.25*X["B08"]+2.75*X["B12"]
    X["NBSIMS"] = 0.36*(X["B03"]+X["B04"]+X["B08"]) - (((X["B02"]+X["B12"])/X["B03"])+X["B11"])
    X["MuWIR"] = -4.0*((X["B02"]-X["B03"])/(X["B02"]+X["B03"]))+2.0*((X["B03"]-X["B08"])/(X["B03"]+X["B08"]))+2.0*((X["B03"]-X["B12"])/(X["B03"]+X["B12"]))-((X["B03"]-X["B11"])/(X["B03"]+X["B11"]))
    X["VARI700"] = (X["B05"]-1.7*X["B04"]+0.7*X["B02"])/(X["B05"]+1.3*X["B04"]-1.3*X["B02"])

    X["S2WI"] = (X["B05"]-X["B12"])/(X["B05"]+X["B12"])

    X["NBAI"] = ((X["B12"]-X["B11"])/X["B03"])/((X["B12"]+X["B11"])/X["B03"])

    # NOTE(review): overwritten by the second TCARI formula further down.
    X["TCARI"] = 3*((X["B05"]-X["B04"]))-0.2*(X["B05"]-X["B03"])*(X["B05"]/X["B04"])

    X["WI2015"] = 1.7204+171*X["B03"]+3*X["B04"]-70*X["B08"]-45*X["B11"]-71*X["B12"]

    # NOTE(review): the second term is *added to* the reciprocal rather than
    # inside it -- check parenthesis placement against the published BAIM formula.
    X["BAIM"] = 1.0/((0.05-X["B08"])**2.0)+((0.2-X["B12"])**2.0)

    X["NDDI"] = (( (X["B08"]-X["B04"])/(X["B08"]+X["B04"])-((X["B03"]-X["B08"])/(X["B03"]+X["B08"])))/((X["B08"]-X["B04"])/(X["B08"]+X["B04"]))+((X["B03"]-X["B08"])/(X["B03"]+X["B08"])))
    X["BCC"] = X["B02"]/(X["B04"]+X["B03"]+X["B02"])

    X["RCC"] = X["B04"]/(X["B04"]+X["B03"]+X["B02"])

    X["IKAW"] = (X["B08"]-X["B02"])/(X["B08"]+X["B02"])

    X["ARI"] = (1/X["B03"])-(1/X["B05"])

    X["MIRBI"] = 10.0*X["B12"]-9.8*X["B11"]+2.0

    X["NMDI2"] = (X["B08"]-(X["B11"]-X["B12"]))/(X["B08"]+(X["B11"]-X["B12"]))

    X["TTVI"] = 0.5*((865.0-740.0)*(X["B8A"]-X["B06"]-(X["B07"]-X["B06"])*(783.0-740)))

    X["NHFD"] = (X["B05"]-X["B01"])/(X["B05"]+X["B01"])

    X["NDSWIR"] = (X["B11"]-X["B8A"])/(X["B11"]+X["B8A"])

    X["NBRSWIR"] = (X["B12"]-X["B11"]-0.02)/(X["B12"]+X["B11"]+0.1)

    X["NBR"] = (X["B12"]-X["B8A"])/(X["B12"]+X["B8A"])

    X["NBRplus"] = (X["B12"]-X["B8A"]-X["B03"]-X["B02"])/(X["B12"]+X["B8A"]+X["B03"]+X["B02"])

    # NOTE(review): immediately overwritten by the next assignment.
    X["NDWI2"] = (X["B02"]-X["B08"])/(X["B02"]+X["B08"])

    X["NDWI2"] = (X["B01"]-X["B08"])/(X["B01"]+X["B08"])

    # Red-edge / chlorophyll group
    X["S2REP"] = 705 + 35 * ((((X["B07"] + X["B04"])/2) - X["B05"])/(X["B06"] - X["B05"]))

    X["CCCI"] = ((X["B08"] - X["B05"]) / (X["B08"] + X["B05"])) / ((X["B08"] - X["B04"]) / (X["B08"] + X["B04"]))

    X["MCARI"] = ((X["B05"] - X["B04"]) - 2 * (X["B05"] - X["B03"])) * (X["B05"] / X["B04"])

    X["TCARI"] = 3 * ((X["B05"] - X["B04"]) - 0.2 * (X["B05"] - X["B03"]) * (X["B05"] / X["B04"]))

    X["PVI"] = (X["B08"] - 0.3 * X["B04"] - 0.5) / ((1 + 0.3 * 2) ** (1/2.0))

    X["ndvi"] = (X["B08"] - X["B04"]) / (X["B08"] + X["B04"])

    X["evi"] = 2.5 * (X["B08"] - X["B04"]) / (X["B08"] + 6 * X["B04"] - 7.5 * X["B02"] + 1)

    X["savi"] = (X["B08"] - X["B04"]) / (X["B08"] + X["B04"] + 0.5)

    X["mndwi"] = (X["B03"] - X["B08"]) / (X["B03"] + X["B08"])

    X["ARVI"] = (X["B08"] - (2 * X["B04"]) + X["B02"]) / (X["B08"] + (2 * X["B04"]) + X["B02"])

    X["SIPI"] = (X["B08"] - X["B02"]) / (X["B08"] - X["B04"])

    X["RENDVI"] = (X["B06"] - X["B05"]) / (X["B06"] + X["B05"])

    X["MRESR"] = (X["B06"] - X["B01"]) / (X["B05"] - X["B01"])

    # CANOLA
    X["RYI"] = X["B03"] / X["B02"]

    X["NDYI"] = (X["B03"] - X["B02"]) / (X["B03"] + X["B02"])

    X["DYI"] = X["B03"] - X["B02"]

    X["ACI"] = X["B08"] * (X["B04"] + X["B03"])

    # WEED
    X["CVI"] = (X["B08"] / X["B03"]) * (X["B04"] / X["B03"])

    X["AVI"] = (X["B08"] * (1 - X["B04"]) * (X["B08"] - X["B04"]))

    X["SI"] = ((1 - X["B02"]) * (1 - X["B03"]) * (1 - X["B04"]))

    X["BSI"] = ((X["B11"] + X["B04"]) - (X["B08"] + X["B02"])) / ((X["B11"] + X["B04"]) + (X["B08"] + X["B02"]))

    # WINE GRAPES
    # NOTE(review): same formula as the MTCI column computed at the top.
    X["MTCI"] = (X["B06"] - X["B05"])/(X["B05"] - X["B04"])

    X["NPCRI"] = (X["B04"] - X["B02"]) / (X["B04"] + X["B02"])

    # ROOIBOS
    X["BAI"] = 1/((0.1 - X["B04"]) ** 2 + (0.06 - X["B08"]) ** 2)

    # Element-wise over rows because of the fractional power.
    MTVI2 = list(1.5*(1.2 * (i - j) - 2.5 * (k - j))* ((2 * i + 1)**2-(6 * i - 5 * k ** (1/2.0)) - 0.5)**(1/2.0) for i, j, k in zip(X["B08"], X["B03"], X["B04"]))
    X["MTVI2"] = np.array(MTVI2)

    X["NDSI"] = (X["B03"] - X["B11"]) / (X["B03"] + X["B11"])

    # DRYNESS / DROUGHT
    X["NDMI"] = (X["B08"] - X["B11"])/(X["B08"] + X["B11"])

    TNDVI = [(x)**(1/2.0) for x in ((X["B08"] - X["B04"]) / (X["B08"] + X["B04"]) + 0.5)]
    X["TNDVI"] = np.array(TNDVI)

    # GENERAL
    X["TVI"] = (120 * (X["B06"] - X["B03"]) - 200 * (X["B04"] - X["B03"])) / 2
    X["EXG"] = 2 * X["B03"] - X["B04"] - X["B02"]
    X["PSRI"] = (X["B04"] - X["B02"]) / X["B06"]

    return X
processing.py ADDED
@@ -0,0 +1,151 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ from PIL import Image
3
+ import matplotlib.pyplot as plt
4
+ import numpy as np
5
+ import pandas as pd
6
+ from glob import glob
7
+ import os
8
+ import utm
9
+ import rasterio
10
+ from tqdm import tqdm
11
+ #from xml.etree import ElementTree as et
12
+ import xmltodict
13
+
14
##
def cloud_masking(image, cld):
    """Replace cloudy pixels by the band mean.

    image: band values (np array), modified in place and returned.
    cld:   cloud-probability values of the same shape; pixels with a
           probability strictly above 30 are treated as cloud.
    """
    # Mean is taken over the whole (pre-masking) image, clouds included.
    fill_value = image.mean()
    image[cld > 30] = fill_value
    return image
20
+
21
##
def load_file(fp):
    """Open *fp* (a PosixPath object or string filepath) with PIL and
    return its pixel data as a numpy array."""
    return np.array(Image.open(str(fp)))
27
+
28
def paths (name):
    """Resolve the image and metadata paths inside a .SAFE tile folder.

    name: tile folder name (*.SAFE).

    Returns (bandes_path_10, bandes_path_20, bandes_path_60, tile_path,
    path_cld_20, path_cld_60): the first three are lists of .jp2 band
    paths at 10/20/60 m native resolution, tile_path is the INSPIRE
    metadata file, and the last two are cloud-probability rasters.
    Raises IndexError if the expected folder layout is not found.
    """

    # Resolution subfolders inside GRANULE/<granule-id>/IMG_DATA.
    fold_band_10 = glob(name+"/GRANULE/*/IMG_DATA/R10m")[0]
    fold_band_20 = glob(name+"/GRANULE/*/IMG_DATA/R20m")[0]
    fold_band_60 = glob(name+"/GRANULE/*/IMG_DATA/R60m")[0]
    # Derive the "<tile>_<datetime>" filename prefix from any 10 m image.
    path = name+"/GRANULE/*/IMG_DATA/R10m"+"/*.jp2"
    x = glob(path)
    lists = x[0].split("/")[-1].split("_")
    fixe = lists[0]+'_'+lists[1]

    # Bands grouped by the resolution they ship at.
    band_10 = ['B02', 'B03', 'B04','B08']
    band_20 = ['B05', 'B06', 'B07','B8A','B11', 'B12']
    band_60 = ['B01','B09']
    images_name_10m = [fixe+"_"+band+"_10m.jp2" for band in band_10 ]
    images_name_20m = [fixe+"_"+band+"_20m.jp2" for band in band_20 ]
    images_name_60m = [fixe+"_"+band+"_60m.jp2" for band in band_60 ]
    # Full paths, preserving the band order above.
    bandes_path_10 = [os.path.join(fold_band_10,img) for img in images_name_10m]
    bandes_path_20 = [os.path.join(fold_band_20,img) for img in images_name_20m]
    bandes_path_60 = [os.path.join(fold_band_60,img) for img in images_name_60m]
    # Tile-level metadata and cloud-probability masks.
    tile_path = name+"/INSPIRE.xml"
    path_cld_20 = glob(name+"/GRANULE/*/QI_DATA/MSK_CLDPRB_20m.jp2")[0]
    path_cld_60 = glob(name+"/GRANULE/*/QI_DATA/MSK_CLDPRB_60m.jp2")[0]

    return bandes_path_10,bandes_path_20,bandes_path_60,tile_path,path_cld_20,path_cld_60
54
+
55
##
def coords_to_pixels(ref, utm, m=10):
    """Convert UTM coordinates to pixel coordinates.

    ref: (easting, northing) of the tile's reference corner.
    utm: (easting, northing) of the target point.
    m:   ground resolution in metres per pixel.

    Returns (x, y) pixel indices (truncated toward zero).
    NOTE(review): the *utm* parameter shadows the module-level `utm` import.
    """
    easting_offset = utm[0] - ref[0]
    northing_offset = ref[1] - utm[1]  # image y axis grows downward
    return int(easting_offset / m), int(northing_offset / m)
63
+
64
##
def extract_sub_image(bandes_path,tile_path,area,resolution=10, d= 3, cld_path = None):
    """Sample each band around *area* and return one value per band.

    bandes_path: list of .jp2 band paths, all at the same resolution.
    tile_path:   INSPIRE.xml of the tile (source of its reference corner).
    area:        (lon, lat) point of interest.
    resolution:  metres per pixel (10, 20 or 60).
    d:           half-size of the sampling window, in pixels.
    cld_path:    optional cloud-probability raster used to mask cloudy pixels.

    Returns a 1-D np.ndarray: at 60 m one pixel value per band, otherwise
    the mean over the (2d x 2d) window per band.
    """

    # The tile's reference (lat, lon) corner is stored in the INSPIRE abstract.
    xml_file=open(tile_path,"r")
    xml_string=xml_file.read()
    python_dict=xmltodict.parse(xml_string)
    tile_coordonnates = python_dict["gmd:MD_Metadata"]["gmd:identificationInfo"]["gmd:MD_DataIdentification"]["gmd:abstract"]["gco:CharacterString"].split()

    # S2 tile coordonnates
    lat,lon = float(tile_coordonnates[0]),float(tile_coordonnates[1])
    tile_coordonnate = [lat,lon]

    # Project corner and target point to UTM, then convert to pixel indices.
    refx, refy, _, _ = utm.from_latlon(tile_coordonnate[0], tile_coordonnate[1])
    ax,ay,_,_ = utm.from_latlon(area[1],area[0]) # lat,lon

    ref = [refx, refy]
    utm_cord = [ax,ay]
    x,y = coords_to_pixels(ref,utm_cord,resolution)

    images = []
    # sub_image_extraction
    for band_path in tqdm(bandes_path, total=len(bandes_path)):
        # float32 so later arithmetic is not done in unsigned integers
        image = load_file(band_path).astype(np.float32)
        if resolution==60:
            # single pixel at the coarsest resolution
            sub_image = image[y,x]
            images.append(sub_image)

        else:
            # (2d x 2d) window centred on the point
            sub_image = image[y-d:y+d,x-d:x+d]
            images.append(sub_image)

    images = np.array(images)


    # verify if the study area is cloudy
    if cld_path is not None:
        cld_mask = load_file(cld_path).astype(np.float32)
        cld = cld_mask[y-d:y+d,x-d:x+d]
        # cloud removing
        images = cloud_masking(images,cld)

    if resolution==60:
        return images
    else:
        # mean over the spatial window -> one value per band
        return images.mean((1,2))
109
+
110
+
111
def ndvi(area, tile_name):
    """
    Mean NDVI over a 6x6-pixel window around *area*.

    area:      (lon, lat) point of interest.
    tile_name: name of the tile with the lowest cloud coverage (*.SAFE folder).

    Returns the mean of (NIR - red) / (NIR + red) over the window, using
    the 10 m B08 (NIR) and B04 (red) bands.
    """
    # Extract the tile's reference (lat, lon) corner from the INSPIRE abstract.
    tile_path = tile_name+"/INSPIRE.xml"
    xml_file=open(tile_path,"r")
    xml_string=xml_file.read()
    python_dict=xmltodict.parse(xml_string)
    tile_coordonnates = python_dict["gmd:MD_Metadata"]["gmd:identificationInfo"]["gmd:MD_DataIdentification"]["gmd:abstract"]["gco:CharacterString"].split()

    # S2 tile coordonnates
    lat,lon = float(tile_coordonnates[0]),float(tile_coordonnates[1])
    tile_coordonnate = [lat,lon]

    # Project corner and target point to UTM, then to pixel indices (10 m).
    refx, refy, _, _ = utm.from_latlon(tile_coordonnate[0], tile_coordonnate[1])
    ax,ay,_,_ = utm.from_latlon(area[1],area[0]) # lat,lon

    ref = [refx, refy]
    utm_cord = [ax,ay]
    x,y = coords_to_pixels(ref,utm_cord)

    # read the red (B04) and NIR (B08) 10 m rasters
    path_4 = tile_name+"/GRANULE/*/IMG_DATA/R10m/*_B04_10m.jp2"
    path_8 = tile_name+"/GRANULE/*/IMG_DATA/R10m/*_B08_10m.jp2"
    red_object = rasterio.open(glob(path_4)[0])
    nir_object = rasterio.open(glob(path_8)[0])
    red = red_object.read()
    nir = nir_object.read()
    red,nir = red[0],nir[0]
    # extract the window; cast to float so the ratio below is not computed
    # in unsigned integer arithmetic
    sub_red = red[y-3:y+3,x-3:x+3].astype(np.float16)
    sub_nir = nir[y-3:y+3,x-3:x+3].astype(np.float16)

    # NDVI
    ndvi_image = ((sub_nir - sub_red)/(sub_nir+sub_red))
    ndvi_mean_value = ndvi_image.mean()

    return ndvi_mean_value
151
+
requirements.txt ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ DateTime==5.2
2
+ geojson==3.0.1
3
+ matplotlib==3.8.0
4
+ numpy==1.26.0
5
+ pandas==2.1.1
6
+ Pillow==10.0.1
7
+ scipy==1.11.2
8
+ sentinelsat==1.2.1
9
+ tqdm==4.66.1
10
+ utm==0.7.0
11
+ xgboost==2.0.0
12
+ xmltodict==0.13.0
13
+ rasterio
+ gradio