tliu committed on
Commit d5734d6
1 Parent(s): 22c1e8d
config.json ADDED
@@ -0,0 +1,40 @@
+ {
+ "_name_or_path": "/cluster/home/tialiu/project_tianyu/projects/generative-coref/t0_3b",
+ "architectures": [
+ "T5Coref"
+ ],
+ "asp_activation": "relu",
+ "asp_dropout_rate": 0.3,
+ "asp_feature_emb_size": 20,
+ "asp_hidden_dim": 4096,
+ "asp_init_std": 0.02,
+ "d_ff": 5120,
+ "d_kv": 64,
+ "d_model": 2048,
+ "decoder_start_token_id": 0,
+ "dense_act_fn": "gelu_new",
+ "dropout_rate": 0.1,
+ "eos_token_id": 1,
+ "feed_forward_proj": "gated-gelu",
+ "gradient_checkpointing": false,
+ "initializer_factor": 1.0,
+ "is_encoder_decoder": true,
+ "is_gated_act": true,
+ "layer_norm_epsilon": 1e-06,
+ "mention_end_id": 32103,
+ "mention_start_id": 32102,
+ "model_type": "t5",
+ "num_decoder_layers": 24,
+ "num_heads": 32,
+ "num_layers": 24,
+ "output_past": true,
+ "pad_token_id": 0,
+ "pretrained_name_or_path": "/cluster/home/tialiu/tianyu/huggingface_models//T0_3B",
+ "relative_attention_max_distance": 128,
+ "relative_attention_num_buckets": 32,
+ "tie_word_embeddings": false,
+ "torch_dtype": "float32",
+ "transformers_version": "4.23.1",
+ "use_cache": true,
+ "vocab_size": 32104
+ }
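The added config.json describes a T0-3B-sized T5 encoder-decoder (d_model 2048, 24 encoder and 24 decoder layers, 32 heads, gated-GELU feed-forward) extended with coreference-specific fields: the asp_* hyperparameters and the mention_start_id/mention_end_id marker token ids, with a vocab_size of 32104 that covers those markers. The listed T5Coref architecture is a custom class, so AutoModel will not resolve it out of the box, but the config itself can be read with the stock T5Config. A minimal sketch, with the repository path as a placeholder:

```python
from transformers import T5Config

# Minimal sketch: read the committed config.json with the stock T5Config class.
# "path/to/this/repo" is a placeholder for this repository (local clone or Hub id).
config = T5Config.from_pretrained("path/to/this/repo")

print(config.d_model)           # 2048
print(config.num_layers)        # 24 (num_decoder_layers is also 24)
print(config.vocab_size)        # 32104
print(config.mention_start_id)  # 32102 -- custom keys are kept as extra attributes
print(config.mention_end_id)    # 32103
```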
model_Oct21_22-13-52_108000.bin → pytorch_model-00001-of-00002.bin RENAMED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:1d8663bf64401042f906d5a95ca2a9144a678a27cfcc51f6291f55773739fd77
- size 11304119533
+ oid sha256:898dd48f7bdd77ddbf044868b5bc2c095885ea7c6035ea245dae9aca98e49f74
+ size 9449532799
pytorch_model-00002-of-00002.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5d1eab9e1c3faf434a269c2f3be713a937364abd37e87a5e683757ab95fcf79f
+ size 1854508669
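Taken together, this commit replaces the single ~11.3 GB checkpoint model_Oct21_22-13-52_108000.bin with two LFS-tracked shards of ~9.4 GB and ~1.9 GB under the standard pytorch_model-0000N-of-00002.bin naming. A rough loading sketch, assuming the repository also carries the usual pytorch_model.bin.index.json shard index (not part of this diff) and using the stock T5ForConditionalGeneration purely as a stand-in for the custom T5Coref class:

```python
import torch
from transformers import T5ForConditionalGeneration

# Sketch only: T5ForConditionalGeneration stands in for the repo's custom T5Coref class,
# and from_pretrained is assumed to find a pytorch_model.bin.index.json that maps each
# weight to one of the two shards committed here.
model = T5ForConditionalGeneration.from_pretrained(
    "path/to/this/repo",        # placeholder for this repository or a local clone
    torch_dtype=torch.float32,  # matches "torch_dtype": "float32" in config.json
)
```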