zpn committed
Commit 2a16d3d
1 Parent(s): 083d764

Delete configuration_hf_nomic_bert.py

Files changed (1)
  1. configuration_hf_nomic_bert.py +0 -56
configuration_hf_nomic_bert.py DELETED
@@ -1,56 +0,0 @@
- from transformers import GPT2Config
-
-
- class NomicBertConfig(GPT2Config):
-     model_type = "nomic_bert"
-
-     def __init__(
-         self,
-         prenorm=False,
-         parallel_block=False,
-         parallel_block_tied_norm=False,
-         rotary_emb_fraction=0.0,
-         fused_dropout_add_ln=False,
-         fused_bias_fc=False,
-         use_flash_attn=False,
-         use_xentropy=False,
-         qkv_proj_bias=True,
-         rotary_emb_base=1000,
-         rotary_emb_scale_base=None,
-         rotary_emb_interleaved=False,
-         mlp_fc1_bias=True,
-         mlp_fc2_bias=True,
-         use_rms_norm=False,
-         causal=False,
-         type_vocab_size=2,
-         dense_seq_output=True,
-         pad_vocab_size_multiple=1,
-         tie_word_embeddings=True,
-         rotary_scaling_factor=1.0,
-         max_trained_positions=2048,
-         **kwargs,
-     ):
-         self.prenorm = prenorm
-         self.parallel_block = parallel_block
-         self.parallel_block_tied_norm = parallel_block_tied_norm
-         self.rotary_emb_fraction = rotary_emb_fraction
-         self.tie_word_embeddings = tie_word_embeddings
-         self.fused_dropout_add_ln = fused_dropout_add_ln
-         self.fused_bias_fc = fused_bias_fc
-         self.use_flash_attn = use_flash_attn
-         self.use_xentropy = use_xentropy
-         self.qkv_proj_bias = qkv_proj_bias
-         self.rotary_emb_base = rotary_emb_base
-         self.rotary_emb_scale_base = rotary_emb_scale_base
-         self.rotary_emb_interleaved = rotary_emb_interleaved
-         self.mlp_fc1_bias = mlp_fc1_bias
-         self.mlp_fc2_bias = mlp_fc2_bias
-         self.use_rms_norm = use_rms_norm
-         self.causal = causal
-         self.type_vocab_size = type_vocab_size
-         self.dense_seq_output = dense_seq_output
-         self.pad_vocab_size_multiple = pad_vocab_size_multiple
-         self.rotary_scaling_factor = rotary_scaling_factor
-         self.max_trained_positions = max_trained_positions
-
-         super().__init__(**kwargs)
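
For context, here is a minimal usage sketch of the configuration class removed above. It is hypothetical, since this commit deletes the file: it assumes the module is still importable under its original name, and the keyword values shown are illustrative rather than recommended settings. Any argument the class does not define itself (such as n_positions) is forwarded to GPT2Config through **kwargs.

# Hypothetical sketch: instantiating the config class removed by this commit.
# Assumes configuration_hf_nomic_bert.py is still on the import path.
from configuration_hf_nomic_bert import NomicBertConfig

config = NomicBertConfig(
    use_flash_attn=True,       # enable the flash-attention code path (illustrative)
    rotary_emb_fraction=1.0,   # apply rotary embeddings to every head dimension
    n_positions=2048,          # standard GPT2Config field, passed through **kwargs
)
print(config.model_type)  # -> "nomic_bert"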