import torch
from torch import nn
from torch.nn import functional as F
import time
import model

# Character-level vocabulary: space, interleaved upper/lower letters,
# punctuation, literal newline/tab, digits, and operator/bracket symbols.
# NOTE: "\\|" spells a literal backslash followed by a pipe. The original
# source used "\|", an invalid escape sequence that only *happened* to
# produce the same two characters and emits a SyntaxWarning on Python 3.12+.
vocab = list(" AaBbCcDdEeFfGgHhIiJjKkLlMmNnOoPpQqRrSsTtUuVvWwXxYyZz,.\n\t?!;:'\"()0123456789+-*/=<>[]{}\\|`~@#$%^&_")
class config:
    """Hyperparameters for the character-level language model.

    The original body used bare annotations (``vocab_size: len(vocab)``),
    which only populate ``__annotations__`` and never bind class
    attributes -- ``config.vocab_size`` raised ``AttributeError`` when the
    model read it. Plain assignments create the attributes as intended.
    """
    vocab_size = len(vocab)  # number of distinct characters the model can emit
    context_window = 16      # maximum sequence length fed to the model

# Inverse lookup of ``vocab``: character -> integer token id.
stoi_map = dict(zip(vocab, range(len(vocab))))
def encode(s):
    """Translate the string ``s`` into a list of integer token ids."""
    # KeyError here means ``s`` contains a character outside the vocabulary.
    return list(map(stoi_map.__getitem__, s))
def decode(l):
    """Map an iterable of integer token ids back to the string they spell."""
    return ''.join(vocab[token] for token in l)


# NOTE(review): this rebinds ``model`` from the imported module to the
# instance, shadowing the module for the rest of the file. Later code
# already relies on ``model`` being the instance, so the binding name is
# kept; a rename (e.g. ``net = model.Model(config)``) would be the cleaner
# long-term fix.
model = model.Model(config)
# Generator expression avoids materializing an intermediate list of sizes.
print("Model parameters:", sum(p.numel() for p in model.parameters()))

def train(model: nn.Module):
    optimizer = torch.optim.AdamW(model.parameters(), lr=1e-3)
    