Spacyzipa committed on
Commit
4a56b2b
1 Parent(s): 68730a0

Training in progress, epoch 0

Browse files
Files changed (3) hide show
  1. config.json +3 -3
  2. generation_config.json +2 -2
  3. model.safetensors +2 -2
config.json CHANGED
@@ -49,7 +49,7 @@
49
  "LABEL_1": 1
50
  },
51
  "length_penalty": 1.0,
52
- "max_length": 768,
53
  "max_position_embeddings": 1536,
54
  "min_length": 0,
55
  "model_type": "mbart",
@@ -85,9 +85,9 @@
85
  "typical_p": 1.0,
86
  "use_bfloat16": false,
87
  "use_cache": true,
88
- "vocab_size": 57596
89
  },
90
- "decoder_start_token_id": 57595,
91
  "encoder": {
92
  "_name_or_path": "",
93
  "add_cross_attention": false,
 
49
  "LABEL_1": 1
50
  },
51
  "length_penalty": 1.0,
52
+ "max_length": 1500,
53
  "max_position_embeddings": 1536,
54
  "min_length": 0,
55
  "model_type": "mbart",
 
85
  "typical_p": 1.0,
86
  "use_bfloat16": false,
87
  "use_cache": true,
88
+ "vocab_size": 57614
89
  },
90
+ "decoder_start_token_id": 57613,
91
  "encoder": {
92
  "_name_or_path": "",
93
  "add_cross_attention": false,
generation_config.json CHANGED
@@ -1,10 +1,10 @@
1
  {
2
  "_from_model_config": true,
3
  "bos_token_id": 0,
4
- "decoder_start_token_id": 57595,
5
  "eos_token_id": 2,
6
  "forced_eos_token_id": 2,
7
- "max_length": 768,
8
  "pad_token_id": 1,
9
  "transformers_version": "4.35.2"
10
  }
 
1
  {
2
  "_from_model_config": true,
3
  "bos_token_id": 0,
4
+ "decoder_start_token_id": 57613,
5
  "eos_token_id": 2,
6
  "forced_eos_token_id": 2,
7
+ "max_length": 1500,
8
  "pad_token_id": 1,
9
  "transformers_version": "4.35.2"
10
  }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:81095aed59e080c74bc8689013585ab1076dd0334209d5504d9973e79f74ffec
3
- size 809361560
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9341cf0c74306df75ad3c1ed4dfbb57db9db2c862007041df50639af54f9756b
3
+ size 809435288