hema99 committed
Commit 953f8ba
1 parent: 3ce2d56

Update fine-tuned model and tokenizer

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "/content/drive/My Drive/my_model/model.safetensors",
+  "_name_or_path": "facebook/bart-large",
   "activation_dropout": 0.1,
   "activation_function": "gelu",
   "add_bias_logits": false,
@@ -50,7 +50,7 @@
   "task_specific_params": {
     "summarization": {
       "length_penalty": 1.0,
-      "max_length": 256,
+      "max_length": 128,
       "min_length": 12,
       "num_beams": 4
     },
@@ -68,7 +68,7 @@
     }
   },
   "torch_dtype": "float32",
-  "transformers_version": "4.38.2",
+  "transformers_version": "4.40.0",
   "use_cache": true,
   "vocab_size": 50265
 }
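
The config diff retargets `_name_or_path` from a Colab Drive path to the base checkpoint `facebook/bart-large` and halves the summarization default `max_length`. A minimal sketch of reading those defaults back, assuming a hypothetical local clone of this repo at `./checkpoint`:

from transformers import AutoConfig

# "./checkpoint" is a hypothetical local clone of this repo.
config = AutoConfig.from_pretrained("./checkpoint")

# Summarization defaults after this commit: max_length=128, min_length=12, num_beams=4.
summ = config.task_specific_params["summarization"]
print(summ["max_length"], summ["min_length"], summ["num_beams"])  # 128 12 4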
generation_config.json CHANGED
@@ -1,16 +1,12 @@
 {
-  "_from_model_config": true,
   "bos_token_id": 0,
   "decoder_start_token_id": 2,
   "early_stopping": true,
-  "eos_token_id": 4,
+  "eos_token_id": 2,
   "forced_bos_token_id": 0,
-  "forced_eos_token_id": 4,
+  "forced_eos_token_id": 2,
   "no_repeat_ngram_size": 3,
-  "num_beams": 8,
+  "num_beams": 4,
   "pad_token_id": 1,
-  "transformers_version": "4.38.2",
-  "max_length": 256,
-  "min_length": 50,
-  "temperature": 0.7
+  "transformers_version": "4.40.0"
 }
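
The new generation defaults point `eos_token_id` and `forced_eos_token_id` at token id 2, which is BART's `</s>`, and drop the sampling-style overrides (`max_length`, `min_length`, `temperature`) along with the wider beam. A sketch of how these are picked up at generation time, under the same hypothetical `./checkpoint` clone:

from transformers import GenerationConfig

gen = GenerationConfig.from_pretrained("./checkpoint")  # hypothetical local clone
print(gen.eos_token_id, gen.forced_eos_token_id)  # 2 2 (BART's </s>)
print(gen.num_beams, gen.early_stopping)          # 4 True
# model.generate(**inputs) uses these defaults unless overridden per call.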
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:fb784ac0741de07510c6fa7e9e98455f951898a1dcf532b3112d8286ff0b0809
+oid sha256:41a296bfdca3a10acbb16928d52b8e9be642ac9911b3267e5dfc8f4c0defc0ab
 size 1625426996
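
Only the LFS sha256 oid changes; the byte size is identical, as expected for retrained weights of the same architecture. One way to confirm that a downloaded model.safetensors matches the new pointer (a sketch, not part of the repo):

import hashlib

def sha256_of(path, chunk_size=1 << 20):
    # Stream the file so the ~1.6 GB checkpoint is never fully in memory.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        while block := f.read(chunk_size):
            h.update(block)
    return h.hexdigest()

# Expected: 41a296bfdca3a10acbb16928d52b8e9be642ac9911b3267e5dfc8f4c0defc0ab
print(sha256_of("./checkpoint/model.safetensors"))  # hypothetical local clone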
special_tokens_map.json CHANGED
@@ -1,25 +1,7 @@
 {
-  "bos_token": {
-    "content": "<s>",
-    "lstrip": false,
-    "normalized": true,
-    "rstrip": false,
-    "single_word": false
-  },
-  "cls_token": {
-    "content": "<s>",
-    "lstrip": false,
-    "normalized": true,
-    "rstrip": false,
-    "single_word": false
-  },
-  "eos_token": {
-    "content": "</s>",
-    "lstrip": false,
-    "normalized": true,
-    "rstrip": false,
-    "single_word": false
-  },
+  "bos_token": "<s>",
+  "cls_token": "<s>",
+  "eos_token": "</s>",
   "mask_token": {
     "content": "<mask>",
     "lstrip": true,
@@ -27,25 +9,7 @@
     "rstrip": false,
     "single_word": false
   },
-  "pad_token": {
-    "content": "<pad>",
-    "lstrip": false,
-    "normalized": true,
-    "rstrip": false,
-    "single_word": false
-  },
-  "sep_token": {
-    "content": "</s>",
-    "lstrip": false,
-    "normalized": true,
-    "rstrip": false,
-    "single_word": false
-  },
-  "unk_token": {
-    "content": "<unk>",
-    "lstrip": false,
-    "normalized": true,
-    "rstrip": false,
-    "single_word": false
-  }
+  "pad_token": "<pad>",
+  "sep_token": "</s>",
+  "unk_token": "<unk>"
 }
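
The map moves from verbose AddedToken dicts to the plain-string shorthand; `transformers` loads either form, and only `<mask>` keeps per-token flags (`lstrip: true`). A quick check that the special tokens still resolve to BART's standard ids, again assuming the hypothetical `./checkpoint` clone:

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./checkpoint")  # hypothetical local clone
print(tok.special_tokens_map)  # plain strings: {'bos_token': '<s>', 'eos_token': '</s>', ...}
print(tok.bos_token_id, tok.pad_token_id, tok.eos_token_id)  # 0 1 2, matching generation_config.json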
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -48,9 +48,10 @@
   "eos_token": "</s>",
   "errors": "replace",
   "mask_token": "<mask>",
-  "model_max_length": 1024,
+  "model_max_length": 256,
   "pad_token": "<pad>",
   "sep_token": "</s>",
   "tokenizer_class": "BartTokenizer",
+  "trim_offsets": true,
   "unk_token": "<unk>"
 }
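
With `model_max_length` lowered from 1024 to 256, truncation now clips inputs to 256 tokens by default. A sketch of the effect:

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./checkpoint")  # hypothetical local clone
print(tok.model_max_length)  # 256

# With truncation enabled and no explicit max_length, inputs clip at 256 tokens.
ids = tok("word " * 1000, truncation=True)["input_ids"]
print(len(ids))  # 256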
vocab.json CHANGED
The diff for this file is too large to render. See raw diff