File size: 1,083 Bytes
a099612
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
import logging
import os
import re
from collections.abc import Sequence
from datetime import datetime, timezone
from pathlib import Path

import torch
import yaml


def batchify(seq: Sequence, batch_size: int):
    """Yield consecutive slices of *seq*, each at most *batch_size* items long.

    The final slice may be shorter when ``len(seq)`` is not a multiple of
    ``batch_size``. Works on any sliceable sequence (list, str, tuple, ...).
    """
    start = 0
    total = len(seq)
    while start < total:
        yield seq[start : start + batch_size]
        start += batch_size


def get_device():
    """Return the preferred torch device string: "mps", "cuda", or "cpu".

    Prefers Apple-silicon MPS, then CUDA, falling back to CPU.
    """
    if torch.backends.mps.is_available():
        return "mps"  # mac GPU
    if torch.cuda.is_available():
        return "cuda"
    return "cpu"


def init_logger():
    """Configure root logging at INFO level with a timestamped message format.

    Delegates to ``logging.basicConfig`` — a no-op if the root logger already
    has handlers attached.
    """
    fmt = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
    logging.basicConfig(level=logging.INFO, format=fmt)


def get_timestamp():
    """Return the current UTC time formatted as ``YYYY_MM_DD-HH_MM_SS``."""
    now = datetime.now(timezone.utc)
    return now.strftime("%Y_%m_%d-%H_%M_%S")


# Matches names produced by get_timestamp(): YYYY_MM_DD-HH_MM_SS.
TIMESTAMP_PATTERN = re.compile(r"^\d{4}_\d{2}_\d{2}-\d{2}_\d{2}_\d{2}$")


def get_last_timestamp(path: Path):
    """Return the latest timestamp-named entry under *path*, or None.

    Scans the direct children of *path* for names matching
    ``TIMESTAMP_PATTERN``; for this zero-padded format, lexicographic order
    equals chronological order, so the maximum name is the most recent.
    Returns None when *path* does not exist or contains no matching names.
    """
    # Accept str callers too (the original os.path API did).
    path = Path(path)
    if not path.exists():
        return None
    timestamps = [p.name for p in path.iterdir() if TIMESTAMP_PATTERN.match(p.name)]
    # max with default avoids the sort; lexicographic max == latest timestamp.
    return max(timestamps, default=None)