| import os |
| import re |
| import sys |
| import datasets |
| import pandas as pd |
| from huggingface_hub import HfFileSystem |
| from typing import List |
|
|
# Module-level logger and a Hub filesystem handle used to discover the
# available data shards when this script is imported.
logger = datasets.logging.get_logger(name=__name__)
fs = HfFileSystem()


_CITATION = """
"""


_DESCRIPTION = """\
This dataset contains text files describing datetime values.
It was created to practice loading a dataset from the Hugging Face Hub."""


_HOMEPAGE = """\
https://github.com/minhnv4099
"""


# Repository coordinates on the Hugging Face Hub.
_REPO = "datasets/nguyenminh4099/date-data"
_BRANCH = "main"
_REPO_BRANCH = f"{_REPO}@{_BRANCH}"


# Direct-download URLs: one zip archive per shard plus a parquet metadata
# file. URLs are joined with plain string concatenation, never
# os.path.join, which would insert backslashes on Windows.
_REPO_URL = f"https://huggingface.co/{_REPO}/resolve/{_BRANCH}"
_URLS = {
    'zipfile': _REPO_URL + "/data/{}.zip",
    'metadata': _REPO_URL + "/metadata.parquet",
}


# Config names: 'all' plus one entry per "<name>.zip" archive found in the
# repo's data/ directory ([:-4] strips the '.zip' suffix).
# NOTE(review): this performs a network call at import time.
_CONFIGS = ['all']
_CONFIGS.extend(
    os.path.basename(file)[:-4]
    for file in fs.listdir(_REPO_BRANCH + "/data/", detail=False)
    if file.endswith('.zip')
)
|
|
|
|
| |
class DateDataConfig(datasets.BuilderConfig):
    """BuilderConfig for the date-data dataset.

    One config exists per zip shard in the repository, plus 'all'.
    """

    def __init__(
        self,
        name: str,
        **kwargs,
    ):
        """
        Args:
            name: Config name; either 'all' or a single shard name.
            **kwargs: Forwarded to ``datasets.BuilderConfig`` (e.g.
                ``description``, ``data_files``).
        """
        # Forward **kwargs to the base class; the original accepted them
        # but silently discarded every extra option.
        super().__init__(
            name=name,
            version=datasets.Version("1.0.0"),
            **kwargs,
        )
| |
| |
| |
| |
| |
|
|
|
|
| |
class DateData(datasets.GeneratorBasedBuilder):
    """Builder that downloads zipped shards of datetime text files and
    yields one example per file, parsed into its datetime components.
    """

    BUILDER_CONFIGS = [
        DateDataConfig(name=name)
        for name in _CONFIGS
    ]
    DEFAULT_CONFIG_NAME = 'all'

    def _info(self) -> datasets.DatasetInfo:
        """Declare the example schema; every field is stored as a string."""
        features = datasets.Features({
            "id": datasets.Value('string'),
            "dow": datasets.Value('string'),
            "month": datasets.Value('string'),
            "dom": datasets.Value('string'),
            "hour": datasets.Value('string'),
            "min": datasets.Value('string'),
            "second": datasets.Value('string'),
            "timezone": datasets.Value('string'),
            "year": datasets.Value('string'),
            "file_path": datasets.Value('string'),
        })
        # Log (not print) the selected config for debuggability.
        logger.info("Selected config: %s", self.config)
        return datasets.DatasetInfo(
            features=features,
            description=_DESCRIPTION,
            citation=_CITATION,
            homepage=_HOMEPAGE,
        )

    def _split_generators(
        self,
        dl_manager: datasets.DownloadManager,
    ) -> List[datasets.SplitGenerator]:
        """Download and extract the selected shard archives.

        Returns a single TRAIN split whose generator receives the metadata
        URL and a mapping of shard name -> extracted local directory.
        """
        logger.info("Call _split_generators")
        # TODO(review): the [1:5] slice limits 'all' to the first four
        # shards -- looks like a debugging limit; confirm whether it should
        # be _CONFIGS[1:] (every shard, skipping the 'all' sentinel).
        configs = _CONFIGS[1:5] if self.config.name == 'all' else [self.config.name]
        data_files = {
            config: _URLS['zipfile'].format(config)
            for config in configs
        }
        # shard name -> directory containing the extracted .txt files.
        data_dict = dl_manager.download_and_extract(data_files)
        logger.info("Extracted shard directories: %s", data_dict)

        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={
                    "metadata": _URLS['metadata'],
                    "data_dict": data_dict,
                },
            )
        ]

    def _generate_examples(
        self,
        metadata: str,
        data_dict: dict,
    ):
        """Yield ``(index, example)`` pairs, one per downloaded text file.

        Args:
            metadata: URL of the parquet metadata file; presumably it has at
                least 'id' and 'shard' columns -- verify against the repo.
            data_dict: Mapping of shard name -> extracted local directory.
        """
        logger.info("Call _generate_examples")
        infos = datasets.load_dataset(
            "parquet",
            data_files=[metadata],
            split='train',
        )
        metadata_df = infos.to_pandas()
        data_df = pd.DataFrame(
            {
                "shard": list(data_dict.keys()),
                "data_dir": list(data_dict.values()),
            }
        )

        # Right-join on 'shard' so only rows whose shard was actually
        # downloaded are kept; each row gains its shard's local data_dir.
        metadata_df = metadata_df.merge(
            right=data_df,
            how='right',
            on='shard',
            sort=True,
        )

        for i, sample in enumerate(metadata_df.itertuples()):
            file_name = os.path.join(
                sample.data_dir, sample.id + ".txt"
            )
            example = self._read_txt(file_name=file_name)
            example['id'] = sample.id
            example['file_path'] = file_name
            yield i, example

    def _read_txt(
        self,
        file_name: str,
    ) -> dict:
        """Read one datetime text file and parse it into components."""
        # Explicit encoding: the platform default is not guaranteed UTF-8.
        with open(file=file_name, mode='r', encoding='utf-8') as f:
            return self._extract_datetime(f.read())

    def _extract_datetime(
        self,
        datetime_string: str,
    ) -> dict:
        """Split a datetime string into its eight components.

        The string is split on runs of whitespace and/or colons, so a value
        like ``'Mon Jan 01 12:30:45 UTC 2024'`` yields the eight fields
        below.

        Raises:
            IndexError: if the string produces fewer than eight tokens.
        """
        datetime_string = datetime_string.strip("./ ")
        components = re.split(pattern=r'[\s\:]+', string=datetime_string)
        return {
            "dow": components[0],
            "month": components[1],
            "dom": components[2],
            "hour": components[3],
            "min": components[4],
            "second": components[5],
            "timezone": components[6],
            "year": components[7],
        }
if __name__ == "__main__":
    # Smoke-test instantiation when the script is run directly. The
    # original instantiated the builder unconditionally at import time,
    # a module-level side effect the `datasets` loading machinery never
    # needs (it discovers and instantiates the class itself).
    DateData()