PascalNotin committed
Commit ca31f7c
Parent: 6d890b9

Upload config.json

Files changed (1)
config.json  +46  -0
config.json ADDED
@@ -0,0 +1,46 @@
+{
+  "MSA_end": null,
+  "MSA_filename": null,
+  "MSA_start": null,
+  "MSA_weight_file_name": null,
+  "_name_or_path": "Tranception_Large",
+  "activation_function": "squared_relu",
+  "architectures": [
+    "TranceptionLMHeadModel"
+  ],
+  "attention_mode": "tranception",
+  "attn_pdrop": 0.1,
+  "bos_token_id": 1,
+  "clustal_omega_location": null,
+  "embd_pdrop": 0.1,
+  "eos_token_id": 2,
+  "full_protein_length": null,
+  "initializer_range": 0.02,
+  "layer_norm_epsilon": 1e-05,
+  "local_batch_size": 1,
+  "model_type": "gpt2",
+  "n_ctx": 1024,
+  "n_embd": 1280,
+  "n_head": 20,
+  "n_inner": 5120,
+  "n_layer": 36,
+  "n_positions": 1024,
+  "position_embedding": "grouped_alibi",
+  "reorder_and_upcast_attn": false,
+  "resid_pdrop": 0.1,
+  "retrieval_aggregation_mode": null,
+  "retrieval_inference_weight": 0.6,
+  "scale_attn_by_inverse_layer_idx": false,
+  "scale_attn_weights": true,
+  "scoring_window": "optimal",
+  "summary_activation": null,
+  "summary_first_dropout": 0.1,
+  "summary_proj_to_labels": true,
+  "summary_type": "cls_index",
+  "summary_use_proj": true,
+  "tokenizer": null,
+  "torch_dtype": "float32",
+  "transformers_version": "4.17.0",
+  "use_cache": true,
+  "vocab_size": 25
+}
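
For reference, a minimal sketch of inspecting this config with the transformers library. It assumes the file is hosted in the PascalNotin/Tranception_Large repo on the Hugging Face Hub; the repo id is an assumption inferred from the committer name and "_name_or_path" above.

```python
# Minimal sketch (not part of this commit): loading the uploaded config.json
# through transformers. Assumes the "PascalNotin/Tranception_Large" Hub repo id.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("PascalNotin/Tranception_Large")

# "model_type": "gpt2" means AutoConfig resolves this file to a GPT2Config;
# Tranception-specific keys (attention_mode, position_embedding,
# retrieval_inference_weight, ...) are kept as extra attributes on the object.
print(config.n_layer, config.n_embd, config.n_head)      # 36 1280 20
print(config.attention_mode, config.position_embedding)  # tranception grouped_alibi
```

Note that loading the model weights themselves likely requires the Tranception codebase (https://github.com/OATML-Markslab/Tranception), since the "TranceptionLMHeadModel" class listed under "architectures" is not a built-in transformers class.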