from Num_sequence import NumSeq
import torch


# --- Runtime and model hyperparameter configuration ---

# Run on the GPU when CUDA is available, otherwise fall back to the CPU.
if torch.cuda.is_available():
    device = torch.device("cuda")
else:
    device = torch.device("cpu")

# Data-pipeline settings.
max_len = 10        # maximum sequence length
batch_size = 128    # samples per batch

# Project-local sequence helper — presumably a number/token vocabulary
# wrapper from Num_sequence; confirm its role against its definition.
num_seq = NumSeq()

# Model dimensions.
embedding_dim = 200  # size of each embedding vector
hidden_size = 128    # recurrent hidden-state size
num_layers = 1       # number of stacked recurrent layers





