HamzaWaseem committed
Commit c495b3c · verified · 1 Parent(s): 50498cc

Training in progress, step 500

model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d9232f7471b7e9df1a6455008f68514ba213672ee4d7e9587526cddaf9b5c0c6
+oid sha256:c514746d0eafcfff5172a6ed6d387466b5a24c074ec3bddcb237d91566c50d69
 size 966995080
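The entry above is a Git LFS pointer, so only the oid (the SHA-256 of the actual weights file) changes when the checkpoint is overwritten; the recorded size is identical. A minimal sketch for checking a locally downloaded model.safetensors against this pointer, using the oid and size from the updated pointer above (the local path is an assumption):

import hashlib
import os

# Values copied from the updated LFS pointer in this commit.
EXPECTED_OID = "c514746d0eafcfff5172a6ed6d387466b5a24c074ec3bddcb237d91566c50d69"
EXPECTED_SIZE = 966995080

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    # Stream in 1 MiB chunks so the ~1 GB checkpoint is never fully in memory.
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        while chunk := f.read(chunk_size):
            digest.update(chunk)
    return digest.hexdigest()

path = "model.safetensors"  # assumed local path, e.g. after `git lfs pull`
assert os.path.getsize(path) == EXPECTED_SIZE, "size does not match the LFS pointer"
assert sha256_of(path) == EXPECTED_OID, "sha256 does not match the LFS pointer"
print("local checkpoint matches the LFS pointer")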
runs/Dec25_14-41-48_38ef2901aed8/events.out.tfevents.1735138022.38ef2901aed8.2556.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e8d3c9aef28a6f3475a6859648708c7a756e29072b7786dfb145ffba9ce56121
+size 7616
special_tokens_map.json CHANGED
@@ -115,20 +115,8 @@
     "rstrip": false,
     "single_word": false
   },
-  "eos_token": {
-    "content": "<|endoftext|>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "pad_token": {
-    "content": "<|endoftext|>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
+  "eos_token": "</s>",
+  "pad_token": "<pad>",
   "unk_token": {
     "content": "<|endoftext|>",
     "lstrip": false,
tokenizer_config.json CHANGED
@@ -12978,11 +12978,11 @@
   ],
   "bos_token": "<|endoftext|>",
   "clean_up_tokenization_spaces": true,
-  "eos_token": "<|endoftext|>",
+  "eos_token": "</s>",
   "errors": "replace",
   "extra_special_tokens": {},
   "model_max_length": 1024,
-  "pad_token": "<|endoftext|>",
+  "pad_token": "<pad>",
   "processor_class": "WhisperProcessor",
   "return_attention_mask": false,
   "tokenizer_class": "WhisperTokenizer",
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:835929359fd0cc4b46b501cdd64eeb7b0a2cc738ce85763ae5cb9943d1835903
+oid sha256:02247fd117c3280fada3d5a8fe55f8130a6aed04385fd82378af74358a27c936
 size 5496