import torch
import torch.nn as nn


def token_id_to_text(ids, tokenizer):
    """Decode a sequence of token ids back into a text string.

    Args:
        ids: Token ids as a Python sequence of ints or a 1-D ``torch.Tensor``.
        tokenizer: Object exposing ``decode(list[int]) -> str``.

    Returns:
        The decoded text as a plain ``str``.

    Note:
        The previous implementation wrapped the decoded strings in
        ``torch.tensor(...)``, which always raised ``TypeError`` because
        tensors cannot contain Python strings.
    """
    # Tensors must be converted to a plain int list before decoding.
    if isinstance(ids, torch.Tensor):
        ids = ids.tolist()
    return tokenizer.decode(ids)


def text_to_token_id(text, tokenizer):
    """Encode a text string into a 1-D tensor of token ids.

    Args:
        text: The input text to tokenize.
        tokenizer: Object exposing ``encode(str) -> list[int]``.

    Returns:
        A 1-D ``torch.Tensor`` containing the token ids for *text*.

    Note:
        The previous implementation iterated ``for word in text``, which
        walks the string character by character; encoding each character
        separately produced ragged nested lists that ``torch.tensor``
        rejects. The whole text is now encoded in a single call.
    """
    return torch.tensor(tokenizer.encode(text))

def split_text(text):
    """Split *text* into a list of whitespace-separated tokens.

    Args:
        text: The input string.

    Returns:
        A list of non-empty tokens; an empty list for empty/whitespace-only
        input.

    Note:
        The previous implementation called ``text.split('')``, which raises
        ``ValueError: empty separator`` on every invocation. Splitting on
        runs of whitespace matches the word-level intent of the sibling
        tokenization helpers.
    """
    return text.split()
