import torch

from transformers import PretrainedConfig


class FlashSTUConfig(PretrainedConfig):
    """Configuration class for FlashSTU models.

    Stores the model hyperparameters (embedding width, number of layers and
    heads, sequence and attention-window lengths, number of spectral filters,
    and feature flags such as ``use_hankel_L``, ``use_flash_fft``, and
    ``use_approx``) and passes any remaining keyword arguments through to
    ``PretrainedConfig``.
    """

    model_type = "FlashSTU"

    def __init__(
        self,
        bsz: int = 4,
        n_embd: int = 2304,
        n_heads: int = 9,
        n_layers: int = 7,
        seq_len: int = 8192,
        window_size: int = 1024,
        vocab_size: int = 200064,
        mlp_scale: int = 12,
        bias: bool = False,
        dropout: float = 0.0,
        num_eigh: int = 24,
        use_hankel_L: bool = False,
        use_flash_fft: bool = True,
        use_approx: bool = True,
        softcap: float = 50.0,
        torch_dtype: torch.dtype = torch.bfloat16,
        **kwargs,
    ):
        super().__init__(**kwargs)
        self.bsz = bsz
        self.n_embd = n_embd
        self.n_heads = n_heads
        self.n_layers = n_layers
        self.seq_len = seq_len
        self.window_size = window_size
        self.vocab_size = vocab_size
        self.mlp_scale = mlp_scale
        self.bias = bias
        self.dropout = dropout
        self.num_eigh = num_eigh
        self.use_hankel_L = use_hankel_L
        self.use_flash_fft = use_flash_fft
        self.use_approx = use_approx
        self.softcap = softcap
        self.torch_dtype = torch_dtype
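

# Minimal usage sketch (illustrative only; the FlashSTU model class that
# consumes this config is defined elsewhere in the repository):
if __name__ == "__main__":
    # Override a couple of defaults and inspect the resulting config.
    config = FlashSTUConfig(n_layers=4, seq_len=4096)
    print(config.n_layers, config.seq_len, config.torch_dtype)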