import torch

class Configs:
    """Hyper-parameter container for training/model configuration.

    All settings are plain instance attributes assigned in ``__init__``;
    ``DEVICE`` is resolved at construction time (CUDA if available, else CPU).
    """

    def __init__(self):
        # Fixed: original wrote `super().__init__` without parentheses, which
        # merely referenced the bound method and never called it.
        super().__init__()

        # --- data loading ---
        self.SEED = 666
        self.BATCH_SIZE = 32
        self.DROP_LAST = True
        self.SHUFFLE_FLAG = True

        # --- sequence layout ---
        self.SEQ_LEN = 96
        self.EPOCHES = 200        # NOTE(review): misspelling of EPOCHS — kept for caller compatibility
        self.LABEL_LEN = 48
        self.PRED_LEN = 24
        self.PATCHES = 4
        # Integer division instead of int(a / b): exact for non-negative ints
        # and avoids a float round-trip.
        self.PATCH_LEN = self.SEQ_LEN // self.PATCHES
        self.VARS = 1
        self.SIZE = [self.SEQ_LEN, self.LABEL_LEN, self.PRED_LEN]

        # --- optimization / contrastive loss ---
        self.lr = 0.5
        self.TEMPERATURE = 0.6
        self.USE_COSIN_SIMILARITY = True
        self.LAMUBDA = 0.3        # NOTE(review): misspelling of LAMBDA — kept for caller compatibility

        # --- model dimensions ---
        self.INPUT_DIMS = 1
        self.HIDDEN_DIMS = 128
        # self.OUTPUT_DIMS = 1
        self.OUTPUT_DIMS = 32  # trans
        self.ENCODER_LAYERS = 2
        self.ENCODER_HEAD = 8
        self.SHARED_EMBDEDDING = True  # NOTE(review): misspelling of SHARED_EMBEDDING — kept for caller compatibility
        self.D_FF = 256

        self.__device__()

    def __device__(self):
        """Set ``self.DEVICE`` to CUDA when available, CPU otherwise."""
        self.DEVICE = torch.device("cuda" if torch.cuda.is_available() else "cpu")