aminramezani345 committed
Commit f35b640 · 1 Parent(s): bfe9e6c

Training in progress, epoch 1

config.json CHANGED
@@ -20,6 +20,6 @@
   "sinusoidal_pos_embds": false,
   "tie_weights_": true,
   "torch_dtype": "float32",
-  "transformers_version": "4.29.2",
+  "transformers_version": "4.30.2",
   "vocab_size": 30522
 }
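The only substantive change in config.json is the recorded transformers_version: it is a metadata field that save_pretrained stamps with the library version used to write the checkpoint, not a functional setting. A minimal sketch of inspecting it, where "path/to/checkpoint" is a placeholder for a local clone of this repository:

# Minimal sketch: read the saved config with transformers' public API.
# "path/to/checkpoint" is a placeholder, not a path from this commit.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("path/to/checkpoint")
print(config.transformers_version)  # "4.30.2" after this commit
print(config.vocab_size)            # 30522, matching the diff above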
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:84495d87acd8379381b30a961e1ba2b635a96dbefe1c7f084f79333ea42326d8
+oid sha256:bd1e2dc3e4354ac7a180535daa565f65c84cfa97c74faccb4688e26be32c859b
 size 267855533
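pytorch_model.bin is tracked with Git LFS, so the diff only rewrites the pointer file: the sha256 oid is the digest of the real weight file, and the size stays at 267855533 bytes. A hedged sketch for verifying a downloaded copy against the new oid:

# Minimal sketch: hash a local pytorch_model.bin and compare it with the
# Git LFS oid from the pointer above. The file path is a placeholder.
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "bd1e2dc3e4354ac7a180535daa565f65c84cfa97c74faccb4688e26be32c859b"
print(sha256_of("pytorch_model.bin") == expected)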
runs/Jun22_16-02-07_652223c68cc1/events.out.tfevents.1687449891.652223c68cc1.9355.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f680c6281eaa3d44007a91ba89f546427786f487bbf29851271378a57ee78c26
+size 3960
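The added file is a TensorBoard event log written during this training run, again stored as an LFS pointer. One way to read it once the run directory has been fetched, assuming the tensorboard package is installed; the logged tag names are not visible from the pointer and are only guessed in the comment:

# Minimal sketch: open the event file's run directory with TensorBoard's
# EventAccumulator and list whatever tags were actually logged.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/Jun22_16-02-07_652223c68cc1")
acc.Reload()
print(acc.Tags())  # e.g. scalars such as a training loss, if the Trainer logged them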
tokenizer.json CHANGED
@@ -1,11 +1,6 @@
 {
   "version": "1.0",
-  "truncation": {
-    "direction": "Right",
-    "max_length": 512,
-    "strategy": "LongestFirst",
-    "stride": 0
-  },
+  "truncation": null,
   "padding": null,
   "added_tokens": [
     {
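In tokenizer.json the baked-in truncation block (right-side truncation to 512 tokens, LongestFirst strategy, stride 0) is replaced by "truncation": null, so the saved tokenizer no longer truncates by default. A minimal sketch of requesting the old behaviour per call; the checkpoint path is a placeholder:

# Minimal sketch: with "truncation": null in tokenizer.json, truncation is
# opt-in at encode time. "path/to/checkpoint" is a placeholder.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("path/to/checkpoint")
enc = tokenizer("some very long text " * 1000, truncation=True, max_length=512)
print(len(enc["input_ids"]))  # at most 512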
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:3dc9f7552c52c5f2b3333db3605b9a4c4868c0aad9fbc2dd5247f261df484d10
+oid sha256:3912b90a20d964f82584ecc7591f70ceea8b9f0d0b7a395d68f30a27bbcaeecc
 size 3963
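training_args.bin is the pickled TrainingArguments object that the Trainer saves next to the checkpoint; only its LFS oid changes here, with the size unchanged at 3963 bytes. A sketch for inspecting it locally, assuming a recent PyTorch where torch.load accepts weights_only, and with transformers importable so the pickle can resolve the class:

# Minimal sketch: unpickle the saved TrainingArguments. weights_only=False is
# an assumption about newer PyTorch versions, which default to weights-only loads.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(args.num_train_epochs, args.learning_rate)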