|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""This dataset consists of HRV channel imagery from the EUMETSAT SEVIRI RSS service covering the UK from 2020-2021""" |
|
import pandas as pd |
|
import s3fs |
|
import xarray |
|
import datasets |
|
|
|
|
|
_CITATION = """\ |
|
@InProceedings{noaa::goes-mrms, |
|
title = {EUMETSAT SEVIRI RSS UK HRV}, |
|
author={EUMETSAT, with preparation by Open Climate Fix |
|
}, |
|
year={2022} |
|
} |
|
""" |
|
|
|
_DESCRIPTION = """\ |
|
|
|
""" |
|
|
|
_HOMEPAGE = "https://console.cloud.google.com/marketplace/product/bigquery-public-data/eumetsat-seviri-rss-hrv-uk?project=tactile-acrobat-249716" |
|
|
|
_LICENSE = "Cite EUMETSAT as the data source. This data is redistributed with permission from EUMETSAT under the terms of the EUMETSAT Data Policy for SEVIRI data with a latency of >3 hours . This redistributed dataset is released under the CC BY 4.0 open data license & is provided \"AS IS\" without any warranty, express or implied, from Google. Google disclaims all liability for any damages, direct or indirect, resulting from the use of the dataset." |
|
|
|
_URL = "gs://public-datasets-eumetsat-solar-forecasting/satellite/EUMETSAT/SEVIRI_RSS/v3/eumetsat_seviri_hrv_uk.zarr" |
|
|
|
|
|
class GoesMrmsHrrrataset(datasets.GeneratorBasedBuilder):
    """HRV-channel imagery from the EUMETSAT SEVIRI RSS service covering the UK, 2017-2021.

    NOTE(review): the class name and several config/feature names refer to
    GOES / MRMS / HRRR (US datasets) while the docstrings, ``_URL`` and
    ``_generate_examples`` point at EUMETSAT SEVIRI data — this script appears
    to have been adapted from another loader. Confirm which dataset it is
    actually meant to serve; the class name is kept unchanged here so external
    references keep working.
    """

    VERSION = datasets.Version("1.2.0")

    # One BuilderConfig per dataset variant; names are compared literally in
    # _info() below, so they must match these strings exactly.
    BUILDER_CONFIGS = [
        datasets.BuilderConfig(name="metnet", version=VERSION, description="This is a close approximation of the dataset used for MetNet"),
        datasets.BuilderConfig(name="metnet2", version=VERSION, description="This is a close approximation of the dataset used for MetNet-2"),
        datasets.BuilderConfig(name="2017_2021", version=VERSION, description="This is an expanded dataset covering 2017 to 2021"),
        datasets.BuilderConfig(name="2017_2021_metnet_goes", version=VERSION, description="This is an expanded dataset covering 2017 to 2021 at native temporal resolution of the inputs and two targets of satellite and MRMS data for 8 hour forecasts"),
        datasets.BuilderConfig(name="2017_2021_metnet2_goes", version=VERSION, description="This is an expanded dataset covering 2017 to 2021 at native temporal resolution of the inputs and two targets of satellite and MRMS data for 12 hour forecasts"),
        datasets.BuilderConfig(name="metnet_hrrr", version=VERSION, description="This is a close approximation of the dataset used for MetNet with HRRR data as well"),
        datasets.BuilderConfig(name="metnet2_hrrr", version=VERSION, description="This is a close approximation of the dataset used for MetNet-2 with HRRR data as well"),
        datasets.BuilderConfig(name="2017_2021_hrrr", version=VERSION, description="This is an expanded dataset covering 2017 to 2021 with HRRR data as well"),
        datasets.BuilderConfig(name="2017_2021_metnet_hrrr_goes", version=VERSION, description="This is an expanded dataset covering 2017 to 2021 at native temporal resolution of the inputs and two targets of satellite and MRMS data for 8 hour forecasts, with input HRRR data as well"),
        datasets.BuilderConfig(name="2017_2021_metnet2_hrrr_goes", version=VERSION, description="This is an expanded dataset covering 2017 to 2021 at native temporal resolution of the inputs and two targets of satellite and MRMS data for 12 hour forecasts, with input HRRR data as well"),
    ]

    DEFAULT_CONFIG_NAME = "2017_2021"

    def _info(self):
        """Build the DatasetInfo (feature schema + metadata) for the selected config.

        Returns:
            datasets.DatasetInfo with a per-config feature schema plus the
            shared description / homepage / license / citation metadata.
        """
        # Features shared by every configuration.
        features = {
            "timestamp": datasets.Value("time64[ns]"),
            "x_coordinates": datasets.Sequence(datasets.Value("float64")),
            "y_coordinates": datasets.Sequence(datasets.Value("float64")),
        }
        if self.config.name == "metnet":
            # NOTE: a redundant re-definition of "x_coordinates" (identical to
            # the shared entry above) was dropped from this branch.
            features.update(
                {
                    "image": datasets.Array4D(shape=(18, 16, 512, 512), dtype="float32"),
                    "mrms": datasets.Array4D(shape=(18, 1, 512, 512), dtype="int16"),
                    "target": datasets.Array4D(shape=(240, 1, 64, 64), dtype="int16"),
                }
            )
        elif self.config.name == "metnet2":
            # BUG FIX: this branch previously tested for "metnet-2", a name no
            # BuilderConfig declares (the declared name is "metnet2"), so the
            # MetNet-2 schema was unreachable and the generic `else` schema was
            # silently used for that config instead.
            features.update(
                {
                    "image": datasets.Array4D(shape=(18, 16, 2048, 2048), dtype="float32"),
                    "mrms": datasets.Array4D(shape=(18, 1, 2048, 2048), dtype="int16"),
                    "target": datasets.Array4D(shape=(360, 1, 512, 512), dtype="int16"),
                }
            )
        elif self.config.name == "2017_2021_metnet2_goes":
            features.update(
                {
                    "image": datasets.Array4D(shape=(18, 16, 2048, 2048), dtype="float32"),
                    "mrms": datasets.Array4D(shape=(18, 1, 2048, 2048), dtype="int16"),
                    "target_mrms": datasets.Array4D(shape=(360, 1, 512, 512), dtype="int16"),
                    "target_image": datasets.Array4D(shape=(144, 16, 512, 512), dtype="float32"),
                }
            )
        elif self.config.name == "2017_2021_metnet_goes":
            features.update(
                {
                    "image": datasets.Array4D(shape=(18, 16, 512, 512), dtype="float32"),
                    "mrms": datasets.Array4D(shape=(18, 1, 512, 512), dtype="int16"),
                    "target_mrms": datasets.Array4D(shape=(240, 1, 64, 64), dtype="int16"),
                    "target_image": datasets.Array4D(shape=(96, 16, 64, 64), dtype="float32"),
                }
            )
        else:
            # Generic schema for the remaining configs (2017_2021 and the
            # *_hrrr variants that have no more specific branch above).
            features.update(
                {
                    "image": datasets.Array4D(shape=(18, 16, 512, 512), dtype="float32"),
                    "mrms": datasets.Array4D(shape=(45, 1, 512, 512), dtype="int16"),
                    "target": datasets.Array4D(shape=(240, 1, 64, 64), dtype="int16"),
                }
            )
        # Every *_hrrr config additionally carries the HRRR input stack.
        if "hrrr" in self.config.name:
            features.update({"hrrr": datasets.Array4D(shape=(3, 612, 2048, 2048), dtype="float32")})
        features = datasets.Features(features)
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=features,
            homepage=_HOMEPAGE,
            license=_LICENSE,
            citation=_CITATION,
        )

    def _split_generators(self, dl_manager):
        """Define the splits: train covers calendar year 2020, test covers 2021.

        Both splits read from the same zarr store (_URL); the `time_range`
        slice passed via gen_kwargs selects the per-split time window.
        """
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={
                    "filepath": _URL,
                    "time_range": slice("2020-01-01", "2020-12-31"),
                    "split": "train",
                },
            ),
            datasets.SplitGenerator(
                name=datasets.Split.TEST,
                gen_kwargs={
                    "filepath": _URL,
                    "time_range": slice("2021-01-01", "2021-12-31"),
                    "split": "test",
                },
            ),
        ]

    def _generate_examples(self, filepath, time_range, split):
        """Yield (key, example) pairs from the zarr store restricted to `time_range`.

        Args:
            filepath: location of the zarr store (see _URL).
            time_range: slice of ISO date strings selecting the split's window.
            split: split name ("train"/"test"); currently unused in the body.

        NOTE(review): the config names tested below ("uk_video", "uk",
        "uk_osgb") are not declared in BUILDER_CONFIGS, so for every declared
        config this generator falls into the `else` branch and yields nothing.
        The yielded keys ("timestamps", "video") also do not match the feature
        schema declared in _info() ("timestamp", "image"). This looks like
        code carried over from a different loader — the intended per-config
        behavior cannot be reconstructed from this file, so the logic is left
        as-is pending confirmation.
        """
        sat_data = xarray.open_dataset(filepath, engine="zarr", chunks="auto")
        sat_data = sat_data.sel(time=time_range)
        if self.config.name == "uk_video":
            # Emit non-overlapping 3-hour windows: 55 minutes of history plus
            # 2 hours of future around each anchor timestamp.
            last_chunk_time = sat_data.time.values[0] - pd.Timedelta("3 hours")
            for key, timestamp in enumerate(sat_data.time.values):
                if timestamp >= last_chunk_time + pd.Timedelta("3 hours"):
                    start_time = timestamp - pd.Timedelta("55 minutes")
                    end_time = timestamp + pd.Timedelta("2 hours")
                    entry = sat_data.sel(time=slice(start_time, end_time))
                    # Only complete windows (36 timesteps) are emitted.
                    if len(entry.time.values) == 36:
                        last_chunk_time = timestamp
                        yield key, {
                            "timestamps": entry.time.values,
                            "x_coordinates": entry.x.values,
                            "y_coordinates": entry.y.values,
                            "video": entry.values,
                        }
        else:
            for key, timestamp in enumerate(sat_data.time.values):
                if self.config.name == "uk":
                    entry = sat_data.sel(time=timestamp)
                    yield key, {
                        "timestamp": entry.time.values,
                        "x_coordinates": entry.x.values,
                        "y_coordinates": entry.y.values,
                        "image": entry.values,
                    }
                elif self.config.name == "uk_osgb":
                    # Same as "uk" but with OSGB-projected coordinates.
                    entry = sat_data.sel(time=timestamp)
                    yield key, {
                        "timestamp": entry.time.values,
                        "x_coordinates": entry.x_osgb.values,
                        "y_coordinates": entry.y_osgb.values,
                        "image": entry.values,
                    }
|
|