# coding=utf-8
# Copyright 2020 The HuggingFace Datasets Authors and Open Climate Fix.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This dataset consists of HRV channel imagery from the EUMETSAT SEVIRI RSS service covering the UK from 2020-2021"""
import pandas as pd
import s3fs
import xarray

import datasets

# NOTE(review): the BibTeX key "noaa::goes-mrms" looks copy-pasted from a
# NOAA GOES/MRMS dataset script — confirm the intended citation key for this
# EUMETSAT SEVIRI RSS dataset.
_CITATION = """\
@InProceedings{noaa::goes-mrms,
title = {EUMETSAT SEVIRI RSS UK HRV},
author={EUMETSAT, with preparation by Open Climate Fix
},
year={2022}
}
"""

# Long-form description shown on the dataset page (currently empty).
_DESCRIPTION = """\
"""

_HOMEPAGE = "https://console.cloud.google.com/marketplace/product/bigquery-public-data/eumetsat-seviri-rss-hrv-uk?project=tactile-acrobat-249716"

_LICENSE = "Cite EUMETSAT as the data source. This data is redistributed with permission from EUMETSAT under the terms of the EUMETSAT Data Policy for SEVIRI data with a latency of >3 hours . This redistributed dataset is released under the CC BY 4.0 open data license & is provided \"AS IS\" without any warranty, express or implied, from Google. Google disclaims all liability for any damages, direct or indirect, resulting from the use of the dataset."
# Cloud location of the Zarr store holding the HRV imagery.
_URL = "gs://public-datasets-eumetsat-solar-forecasting/satellite/EUMETSAT/SEVIRI_RSS/v3/eumetsat_seviri_hrv_uk.zarr"


# NOTE(review): the class name appears to be a typo for "GoesMrmsHrrrDataset";
# it is kept as-is because renaming would change the public interface.
class GoesMrmsHrrrataset(datasets.GeneratorBasedBuilder):
    """This dataset consists of the HRV channel from the EUMETSAT SEVIRI RSS service covering the UK from 2017 to 2021."""

    VERSION = datasets.Version("1.2.0")

    BUILDER_CONFIGS = [
        datasets.BuilderConfig(name="metnet", version=VERSION, description="This is a close approximation of the dataset used for MetNet"),
        datasets.BuilderConfig(name="metnet2", version=VERSION, description="This is a close approximation of the dataset used for MetNet-2"),
        datasets.BuilderConfig(name="2017_2021", version=VERSION, description="This is an expanded dataset covering 2017 to 2021"),
        datasets.BuilderConfig(name="2017_2021_metnet_goes", version=VERSION, description="This is an expanded dataset covering 2017 to 2021 at native temporal resolution of the inputs and two targets of satellite and MRMS data for 8 hour forecasts"),
        datasets.BuilderConfig(name="2017_2021_metnet2_goes", version=VERSION, description="This is an expanded dataset covering 2017 to 2021 at native temporal resolution of the inputs and two targets of satellite and MRMS data for 12 hour forecasts"),
        datasets.BuilderConfig(name="metnet_hrrr", version=VERSION, description="This is a close approximation of the dataset used for MetNet with HRRR data as well"),
        datasets.BuilderConfig(name="metnet2_hrrr", version=VERSION, description="This is a close approximation of the dataset used for MetNet-2 with HRRR data as well"),
        datasets.BuilderConfig(name="2017_2021_hrrr", version=VERSION, description="This is an expanded dataset covering 2017 to 2021 with HRRR data as well"),
        datasets.BuilderConfig(name="2017_2021_metnet_hrrr_goes", version=VERSION, description="This is an expanded dataset covering 2017 to 2021 at native temporal resolution of the inputs and two targets of satellite and MRMS data for 8 hour forecasts, with input HRRR data as well"),
        datasets.BuilderConfig(name="2017_2021_metnet2_hrrr_goes", version=VERSION, description="This is an expanded dataset covering 2017 to 2021 at native temporal resolution of the inputs and two targets of satellite and MRMS data for 12 hour forecasts, with input HRRR data as well"),
    ]

    DEFAULT_CONFIG_NAME = "2017_2021"  # It's not mandatory to have a default configuration. Just use one if it make sense.

    def _info(self):
        """Build the :class:`datasets.DatasetInfo` for the selected config.

        Feature shapes (number of timesteps, channels, and spatial extent)
        differ per configuration; anything not matched below falls through
        to the default feature set.
        """
        # Features common to every configuration.
        features = {
            "timestamp": datasets.Value("time64[ns]"),
            "x_coordinates": datasets.Sequence(datasets.Value("float64")),
            "y_coordinates": datasets.Sequence(datasets.Value("float64")),
        }
        if self.config.name == "metnet":
            features.update(
                {
                    "image": datasets.Array4D(shape=(18, 16, 512, 512), dtype="float32"),
                    "mrms": datasets.Array4D(shape=(18, 1, 512, 512), dtype="int16"),
                    "target": datasets.Array4D(shape=(240, 1, 64, 64), dtype="int16"),
                }
            )
        elif self.config.name == "metnet2":
            # Fixed: this previously compared against "metnet-2", which is not
            # a declared config name, so the branch was unreachable and the
            # "metnet2" config silently got the default feature shapes.
            features.update(
                {
                    "image": datasets.Array4D(shape=(18, 16, 2048, 2048), dtype="float32"),
                    "mrms": datasets.Array4D(shape=(18, 1, 2048, 2048), dtype="int16"),
                    "target": datasets.Array4D(shape=(360, 1, 512, 512), dtype="int16"),
                }
            )
        elif self.config.name == "2017_2021_metnet2_goes":
            features.update(
                {
                    "image": datasets.Array4D(shape=(18, 16, 2048, 2048), dtype="float32"),
                    "mrms": datasets.Array4D(shape=(18, 1, 2048, 2048), dtype="int16"),
                    "target_mrms": datasets.Array4D(shape=(360, 1, 512, 512), dtype="int16"),
                    "target_image": datasets.Array4D(shape=(144, 16, 512, 512), dtype="float32"),
                }
            )
        elif self.config.name == "2017_2021_metnet_goes":
            features.update(
                {
                    "image": datasets.Array4D(shape=(18, 16, 512, 512), dtype="float32"),
                    "mrms": datasets.Array4D(shape=(18, 1, 512, 512), dtype="int16"),
                    "target_mrms": datasets.Array4D(shape=(240, 1, 64, 64), dtype="int16"),
                    "target_image": datasets.Array4D(shape=(96, 16, 64, 64), dtype="float32"),
                }
            )
        else:
            # Default feature set for all remaining configurations.
            features.update(
                {
                    "image": datasets.Array4D(shape=(18, 16, 512, 512), dtype="float32"),
                    "mrms": datasets.Array4D(shape=(45, 1, 512, 512), dtype="int16"),
                    "target": datasets.Array4D(shape=(240, 1, 64, 64), dtype="int16"),
                }
            )
        if "hrrr" in self.config.name:
            # HRRR is only once per hour and on a 3kmx3km grid, not 1kmish like the others? (MRMS is 1km, GOES is 2km, HRRR is 3km)
            # Paper not clear, but stacks radar and HRRR together, so assuming just makes it 1kmx1km data by interpolation
            # Same with GOES-16 for that matter, the data it has is 2km data, not 1km like in the paper
            features.update({"hrrr": datasets.Array4D(shape=(3, 612, 2048, 2048), dtype="float32")})
        features = datasets.Features(features)
        return datasets.DatasetInfo(
            # This is the description that will appear on the datasets page.
            description=_DESCRIPTION,
            # This defines the different columns of the dataset and their types
            features=features,  # Here we define them above because they are different between the two configurations
            # If there's a common (input, target) tuple from the features, uncomment supervised_keys line below and
            # specify them. They'll be used if as_supervised=True in builder.as_dataset.
            # supervised_keys=("sentence", "label"),
            # Homepage of the dataset for documentation
            homepage=_HOMEPAGE,
            # License for the dataset if available
            license=_LICENSE,
            # Citation for the dataset
            citation=_CITATION,
        )

    def _split_generators(self, dl_manager):
        """Define the train/test splits: 2020 for train, 2021 for test.

        The Zarr store is opened lazily in ``_generate_examples``, so
        nothing is downloaded here.
        """
        # data_dir = dl_manager.download(_URL)
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                # These kwargs will be passed to _generate_examples
                gen_kwargs={
                    "filepath": _URL,
                    "time_range": slice("2020-01-01", "2020-12-31"),
                    "split": "train",
                },
            ),
            datasets.SplitGenerator(
                name=datasets.Split.TEST,
                # These kwargs will be passed to _generate_examples
                gen_kwargs={
                    "filepath": _URL,
                    "time_range": slice("2021-01-01", "2021-12-31"),
                    "split": "test",
                },
            ),
        ]

    # method parameters are unpacked from `gen_kwargs` as given in `_split_generators`
    def _generate_examples(self, filepath, time_range, split):
        """Yield ``(key, example)`` pairs from the Zarr store at *filepath*.

        NOTE(review): the config names checked below ("uk_video", "uk",
        "uk_osgb") do not appear in ``BUILDER_CONFIGS``, so for every
        declared configuration this generator currently yields nothing —
        this looks copy-pasted from another dataset script and needs to be
        reconciled with the configs declared above.
        """
        sat_data = xarray.open_dataset(filepath, engine="zarr", chunks="auto")
        sat_data = sat_data.sel(time=time_range)
        if self.config.name == "uk_video":
            # Start far enough in the past that the first timestamp qualifies.
            last_chunk_time = sat_data.time.values[0] - pd.Timedelta("3 hours")
            for key, timestamp in enumerate(sat_data.time.values):
                # Emit non-overlapping 3-hour chunks of imagery.
                if timestamp >= last_chunk_time + pd.Timedelta("3 hours"):
                    # Get current time and go backwards an hour and forward 2 hours
                    start_time = timestamp - pd.Timedelta("55 minutes")
                    end_time = timestamp + pd.Timedelta("2 hours")
                    entry = sat_data.sel(time=slice(start_time, end_time))
                    # Only want to keep ones that have the correct length
                    if len(entry.time.values) == 36:
                        last_chunk_time = timestamp
                        # NOTE(review): "timestamps"/"video" are not declared
                        # in the features returned by _info — confirm schema.
                        yield key, {
                            "timestamps": entry.time.values,
                            "x_coordinates": entry.x.values,
                            "y_coordinates": entry.y.values,
                            "video": entry.values,
                        }
        else:
            for key, timestamp in enumerate(sat_data.time.values):
                if self.config.name == "uk":
                    entry = sat_data.sel(time=timestamp)
                    yield key, {
                        "timestamp": entry.time.values,
                        "x_coordinates": entry.x.values,
                        "y_coordinates": entry.y.values,
                        "image": entry.values,
                    }
                elif self.config.name == "uk_osgb":
                    # Same imagery, but with OSGB (British National Grid) coordinates.
                    entry = sat_data.sel(time=timestamp)
                    yield key, {
                        "timestamp": entry.time.values,
                        "x_coordinates": entry.x_osgb.values,
                        "y_coordinates": entry.y_osgb.values,
                        "image": entry.values,
                    }