Dorjzodovsuren committed
Commit
581ebdb
1 Parent(s): 9b484ce

Saving weights and logs of step 2500

config.json CHANGED
@@ -39,7 +39,7 @@
       "max_length": 50
     }
   },
-  "transformers_version": "4.36.0",
+  "transformers_version": "4.37.0.dev0",
   "use_cache": true,
   "vocab_size": 50257
 }
events.out.tfevents.1704209754.c7a3583dbe1e.82.0.v2 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ebf531d7335e08f611d633f7497ea04d1b2497b7fa5372166f7afc44f133b82f
+size 367952
flax_model.msgpack CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f61100cf12b86deaebbcbff11518c37825917f9b02980c847071ca255c1ed3f0
+oid sha256:053380e2a16a622e10b6dcc53146ebd3f60b2840af9934f63044a4bb4e5e5eb5
 size 327652826
generation_config.json CHANGED
@@ -2,5 +2,5 @@
   "_from_model_config": true,
   "bos_token_id": 50256,
   "eos_token_id": 50256,
-  "transformers_version": "4.36.0"
+  "transformers_version": "4.37.0.dev0"
 }
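
Since this commit updates the Flax weights (flax_model.msgpack) and the config now expects transformers 4.37.0.dev0, below is a minimal sketch of loading the checkpoint pinned to this commit. The repository id is a placeholder assumption (the repo name is not shown on this page); only the commit hash 581ebdb and the config values come from the diff above, and the snippet assumes the repo also contains the usual tokenizer files.

# Minimal sketch: load the Flax checkpoint saved at this commit (step 2500).
# Assumptions: repo_id is a placeholder; tokenizer files exist in the repo;
# transformers >= 4.37 is installed to match the updated transformers_version.
from transformers import AutoTokenizer, FlaxAutoModelForCausalLM

repo_id = "Dorjzodovsuren/<model-name>"  # placeholder, not part of this diff
revision = "581ebdb"                     # commit hash shown on this page

tokenizer = AutoTokenizer.from_pretrained(repo_id, revision=revision)
model = FlaxAutoModelForCausalLM.from_pretrained(repo_id, revision=revision)

inputs = tokenizer("Hello", return_tensors="np")
# max_length=50 mirrors the value kept in config.json above
outputs = model.generate(**inputs, max_length=50)
print(tokenizer.decode(outputs.sequences[0], skip_special_tokens=True))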