import torch
import torch.nn as nn
import tiktoken


class GPTModel(nn.Module):
    """Skeleton of a GPT-style model: token and positional embedding tables.

    Expected cfg keys:
        vocab_size (int): number of tokens in the vocabulary.
        emb_dim (int): embedding dimensionality.
        context_length (int): maximum sequence length (number of positions).
    """

    def __init__(self, cfg):
        super().__init__()
        # Token embedding: maps token IDs -> dense vectors. The original used
        # nn.Linear(emb_dim, emb_dim), which cannot look up token IDs at all;
        # an embedding table of shape (vocab_size, emb_dim) is required.
        self.token_emb = nn.Embedding(cfg['vocab_size'], cfg['emb_dim'])
        # Positional embedding: one learned vector per position, shape
        # (context_length, emb_dim). The original nn.Linear(torch.arange(...))
        # raised TypeError at construction (Linear takes two ints, not a tensor).
        # NOTE(review): original key was cfg['va'] — assumed to be a truncated
        # config key; confirm 'vocab_size'/'context_length' against callers.
        self.pos_emb = nn.Embedding(cfg['context_length'], cfg['emb_dim'])

