PATCH_LENGTH = 128  # Patch length (maximum number of patches per sequence)
PATCH_SIZE = 32  # Patch size (number of characters per patch)
PATCH_NUM_LAYERS = 9  # Number of layers in the patch-level encoder
CHAR_NUM_LAYERS = 3  # Number of layers in the character-level decoder
PATCH_SAMPLING_BATCH_SIZE = 0  # Batch size for patch sampling during training; 0 uses the full context
SHARE_WEIGHTS = False  # Whether to share weights between the encoder and decoder
TEMP_DIR = "./tmp"  # Directory for temporary files
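
# --- Illustrative usage (not part of the original file) ---
# A minimal sketch of how hyperparameters like these could be wired into a
# hierarchical patch-level / character-level model, assuming the Hugging Face
# `transformers` library is used; the project's actual model construction may differ.
from transformers import GPT2Config

# Hypothetical configs: the patch-level module attends over up to PATCH_LENGTH
# patches, while the character-level module attends over the PATCH_SIZE
# characters inside a single patch.
patch_config = GPT2Config(n_positions=PATCH_LENGTH, n_layer=PATCH_NUM_LAYERS)
char_config = GPT2Config(n_positions=PATCH_SIZE, n_layer=CHAR_NUM_LAYERS)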