Thytu committed on
Commit f6bc454
Parent: 4a68d70

Upload PhiForCausalLM

config.json CHANGED
@@ -28,7 +28,7 @@
  "rope_theta": 10000.0,
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
- "transformers_version": "4.39.0.dev0",
+ "transformers_version": "4.40.0.dev0",
  "use_cache": true,
  "vocab_size": 51200
  }
generation_config.json CHANGED
@@ -1,4 +1,4 @@
  {
  "_from_model_config": true,
- "transformers_version": "4.39.0.dev0"
+ "transformers_version": "4.40.0.dev0"
  }
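
For reference, the `transformers_version` field in both JSON files only records which transformers build serialized the checkpoint; it does not gate loading. A minimal sketch of reading the bumped field back after this commit, assuming a hypothetical repo id (the actual repository name is not shown in this diff):

```python
# Hedged sketch, not part of the commit: read back the bumped version field.
# "Thytu/phi-model" is a placeholder repo id; substitute the real one.
import torch
from transformers import AutoConfig, AutoModelForCausalLM

repo_id = "Thytu/phi-model"  # hypothetical name, for illustration only

config = AutoConfig.from_pretrained(repo_id)
print(config.transformers_version)  # "4.40.0.dev0" after this commit

# Loading is unaffected by the version bump; dtype matches config.json.
model = AutoModelForCausalLM.from_pretrained(repo_id, torch_dtype=torch.bfloat16)
```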
model-00001-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:5fc7d7c138dcbd18231bfcd91a2d619c474ad0059d4c34d520f8d6723fcb2b41
+ oid sha256:03ff3cacbc6bcb8c5a842176167678b04a0f940109a0bc7054526f89e8d547c1
  size 4995584848
model-00002-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:096c278c5855d84b17e8f706e4f7ec60062556a3b5b501f855b12e88ff3ddcfa
+ oid sha256:26ff8a9ef9e32489e59df02ca18c98c140ca96656635c347d48c0c449504aaa1
  size 563833008
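
The `.safetensors` entries above are Git LFS pointer files, not the weights themselves: a spec version line, the sha256 oid of the actual blob, and its byte size. A minimal verification sketch, not part of the commit, for checking that locally downloaded shards match the new (+) pointer lines; file paths are assumed relative to the current directory:

```python
# Hedged sketch: verify downloaded shards against the LFS pointers in this diff.
import hashlib
import os

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Hash the file in 1 MiB chunks so multi-GB shards never sit fully in memory."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

# (oid, size) pairs copied from the updated pointer files in this diff
expected = {
    "model-00001-of-00002.safetensors":
        ("03ff3cacbc6bcb8c5a842176167678b04a0f940109a0bc7054526f89e8d547c1", 4995584848),
    "model-00002-of-00002.safetensors":
        ("26ff8a9ef9e32489e59df02ca18c98c140ca96656635c347d48c0c449504aaa1", 563833008),
}
for name, (oid, size) in expected.items():
    assert os.path.getsize(name) == size, f"{name}: size mismatch"
    assert sha256_of(name) == oid, f"{name}: sha256 does not match its LFS pointer"
```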