from itertools import product

import numpy as np
import xarray as xr
import netCDF4
import datasets

_CITATION = """\
@ARTICLE{9749916,
  author={Sykas, Dimitrios and Sdraka, Maria and Zografakis, Dimitrios and Papoutsis, Ioannis},
  journal={IEEE Journal of Selected Topics in Applied Earth Observations and Remote Sensing},
  title={A Sentinel-2 multi-year, multi-country benchmark dataset for crop classification and segmentation with deep learning},
  year={2022},
  doi={10.1109/JSTARS.2022.3164771}
}
"""

_DESCRIPTION = """\
Sen4AgriNet is a Sentinel-2 based, multi-country benchmark time-series dataset tailored for
agricultural monitoring applications with machine and deep learning. It is annotated from
farmer declarations collected via the Land Parcel Identification System (LPIS) for harmonizing
country-wide labels. These declarations have only recently been made available as open data,
allowing for the first time the labelling of satellite imagery from ground-truth data. We
propose and standardise a new crop-type taxonomy across Europe that addresses Common
Agricultural Policy (CAP) needs, based on the Food and Agriculture Organization (FAO)
Indicative Crop Classification scheme. Sen4AgriNet is the only multi-country, multi-year
dataset that includes all spectral information. It is constructed to cover the period
2016-2020 for Catalonia and France, and can be extended to include additional countries.
"""

_HOMEPAGE = "https://www.sen4agrinet.space.noa.gr/"

_LICENSE = "MIT License"

_URL = 'https://huggingface.co/datasets/paren8esis/S4A/resolve/main/data'
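# Remote layout of the patch files, as assumed by _split_generators below (illustrative;
# not every index combination exists in the repository):
#   {_URL}/<year>/<tile>/<year>_<tile>_patch_<xx>_<yy>.nc
#   e.g. .../2019/31TBF/2019_31TBF_patch_00_00.nc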

# Sentinel-2 tiles covering Catalonia and France, respectively.
CAT_TILES = ['31TBF', '31TCF', '31TCG', '31TDF', '31TDG']
FR_TILES = ['31TCJ', '31TDK', '31TCL', '31TDM', '31UCP', '31UDR']

# Maximum patch indices [x, y] per year and tile; patches are enumerated from
# (00, 00) up to and including (x, y).
PATCH_IDX = {
    '2019': {
        '31TBF': [29, 29],
        '31TCF': [29, 27],
        '31TCG': [29, 29],
        '31TDF': [15, 9],
        '31TDG': [29, 29],
        '31TCJ': [29, 29],
        '31TDK': [29, 29],
        '31TCL': [29, 29],
        '31TDM': [29, 29],
        '31UCP': [29, 29],
        '31UDR': [29, 29]
    },
    '2020': {
        '31TBF': [29, 29],
        '31TCF': [29, 27],
        '31TCG': [29, 29],
        '31TDF': [15, 9],
        '31TDG': [29, 29]
    }
}
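
# Worked example of the enumeration above (illustrative only): tile 31TDF in 2019 has maximum
# indices [15, 9], so _split_generators below attempts (15 + 1) * (9 + 1) = 160 candidate patch
# files for that tile and simply skips any that are missing from the remote repository.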


class S4A(datasets.GeneratorBasedBuilder):
    """Hugging Face datasets builder for the Sen4AgriNet (S4A) benchmark dataset."""

    VERSION = datasets.Version("0.0.1")

    BUILDER_CONFIGS = [
        datasets.BuilderConfig(name="complete", version=VERSION, description="All Sen4AgriNet data."),
        datasets.BuilderConfig(name="cat_2019", version=VERSION, description="Sen4AgriNet data for Catalonia 2019."),
        datasets.BuilderConfig(name="cat_2020", version=VERSION, description="Sen4AgriNet data for Catalonia 2020."),
        datasets.BuilderConfig(name="fr_2019", version=VERSION, description="Sen4AgriNet data for France 2019."),
    ]

    DEFAULT_CONFIG_NAME = "complete"

    def _info(self):
        # Image bands are time series of patches; the spatial size follows the native
        # Sentinel-2 resolution of each band (10 m -> 366x366, 20 m -> 183x183, 60 m -> 61x61).
        features = datasets.Features(
            {
                "patch_full_name": datasets.Value("string"),
                "patch_year": datasets.Value("string"),
                "patch_name": datasets.Value("string"),
                "patch_country_code": datasets.Value("string"),
                "patch_tile": datasets.Value("string"),
                "B01": datasets.Array3D(shape=(None, 61, 61), dtype="uint16"),
                "B02": datasets.Array3D(shape=(None, 366, 366), dtype="uint16"),
                "B03": datasets.Array3D(shape=(None, 366, 366), dtype="uint16"),
                "B04": datasets.Array3D(shape=(None, 366, 366), dtype="uint16"),
                "B05": datasets.Array3D(shape=(None, 183, 183), dtype="uint16"),
                "B06": datasets.Array3D(shape=(None, 183, 183), dtype="uint16"),
                "B07": datasets.Array3D(shape=(None, 183, 183), dtype="uint16"),
                "B08": datasets.Array3D(shape=(None, 366, 366), dtype="uint16"),
                "B09": datasets.Array3D(shape=(None, 61, 61), dtype="uint16"),
                "B10": datasets.Array3D(shape=(None, 61, 61), dtype="uint16"),
                "B11": datasets.Array3D(shape=(None, 183, 183), dtype="uint16"),
                "B12": datasets.Array3D(shape=(None, 183, 183), dtype="uint16"),
                "B8A": datasets.Array3D(shape=(None, 183, 183), dtype="uint16"),
                # Per-pixel crop-type labels and parcel identifiers at 10 m resolution.
                "labels": datasets.Array2D(shape=(366, 366), dtype="uint32"),
                "parcels": datasets.Array2D(shape=(366, 366), dtype="uint32"),
                # Acquisition times of the Sentinel-2 observations in each patch.
                "timestamp": datasets.Sequence(datasets.Value("timestamp[ns]"))
            }
        )

        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=features,
            homepage=_HOMEPAGE,
            license=_LICENSE,
            citation=_CITATION,
        )

    def _split_generators(self, dl_manager):
        # Pick the (year, tile) combinations matching the requested configuration.
        if self.config.name == "complete":
            year_tiles = list(product(['2019'], FR_TILES)) + list(product(['2019', '2020'], CAT_TILES))
        elif self.config.name == 'cat_2019':
            year_tiles = list(product(['2019'], CAT_TILES))
        elif self.config.name == 'cat_2020':
            year_tiles = list(product(['2020'], CAT_TILES))
        elif self.config.name == 'fr_2019':
            year_tiles = list(product(['2019'], FR_TILES))

        # Download every available patch file for the selected combinations, skipping
        # patch indices that are not present in the remote repository.
        root_paths = []
        for year, tile in year_tiles:
            x, y = PATCH_IDX[year][tile]
            for x_i in range(x + 1):
                for y_i in range(y + 1):
                    try:
                        downloaded_path = dl_manager.download(
                            f'{_URL}/{year}/{tile}/{year}_{tile}_patch_{x_i:02d}_{y_i:02d}.nc'
                        )
                        root_paths.append(downloaded_path)
                    except FileNotFoundError:
                        continue

        return [
            datasets.SplitGenerator(
                name=self.config.name,
                gen_kwargs={
                    "root_paths": root_paths,
                },
            ),
        ]

    def _generate_examples(self, root_paths):
        for file in root_paths:
            netcdf = netCDF4.Dataset(file)

            # Patch-level metadata is stored as global attributes of the netCDF file.
            res = {
                "patch_full_name": netcdf.patch_full_name,
                "patch_year": netcdf.patch_year,
                "patch_name": netcdf.patch_name,
                "patch_country_code": netcdf.patch_country_code,
                "patch_tile": netcdf.patch_tile
            }

            # Each band (and the labels/parcels rasters) is read from its own group in the file.
            time_recorded = False
            for variable in ['B01', 'B02', 'B03', 'B04', 'B05', 'B06', 'B07', 'B08', 'B09',
                             'B10', 'B11', 'B12', 'B8A', 'labels', 'parcels']:
                v = xr.open_dataset(xr.backends.NetCDF4DataStore(netcdf[variable]))

                # Record the acquisition timestamps once, from the first variable read
                # (converted from nanoseconds to seconds since the Unix epoch).
                if not time_recorded:
                    res['timestamp'] = (v.time.values.astype(np.int64) // 10 ** 9).tolist()
                    time_recorded = True

                res[variable] = getattr(v, variable).values

            key = res['patch_full_name']
            yield key, res
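

# Illustrative usage sketch (an assumption, not part of the loader itself): when this file is
# used as a Hugging Face `datasets` loading script, e.g. saved locally as "s4a.py", one
# configuration can be loaded roughly as follows (recent `datasets` versions may also require
# `trust_remote_code=True`):
#
#   from datasets import load_dataset
#   ds = load_dataset("s4a.py", "cat_2019", split="cat_2019")
#   sample = ds[0]
#   print(sample["patch_full_name"], len(sample["timestamp"]))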