Thytu committed on
Commit 54e82d8
1 Parent(s): 7f65600

Upload PhiForCausalLM

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "abacaj/phi-2-super",
+  "_name_or_path": "outputs/SMIT-Training-outputs/checkpoint-4000/",
   "architectures": [
     "PhiForCausalLM"
   ],
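
The only change in config.json is `_name_or_path`, which now records the local fine-tuning checkpoint (outputs/SMIT-Training-outputs/checkpoint-4000/) instead of the abacaj/phi-2-super base; the architecture stays PhiForCausalLM. As a minimal sketch (not part of this commit), the uploaded checkpoint could be loaded with transformers roughly as below; the repo id is a placeholder, not a name taken from this page.

```python
# Hypothetical usage sketch: load the uploaded PhiForCausalLM weights.
# The repo id below is an assumption; replace it with the actual repository.
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "Thytu/<repo-name>"  # placeholder, adjust to the real repo id
tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(repo_id, torch_dtype="auto")
```
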
model-00001-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:e2f12bc509196aec2b9410a21d74a8f7b652956cb5e7dd79f2eb2cf7d1eb20fa
+oid sha256:63f903608b337aaa7fdfe45d27f4683f5cc267198f3c933ba4b4dc2ec8a8a8c0
 size 4995584848
model-00002-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:cdbb6945b8308217cdb9a6a4e221b81d246545afe45f4075b27f97357c2544b9
+oid sha256:2e4637b89c287b9d004d57800f927feda79d38d3fd916336af08d299cbd249cb
 size 563833008
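
Both safetensors shards are tracked as Git LFS pointer files, so the commit only swaps the sha256 oid of each shard; the byte sizes (4995584848 and 563833008) are unchanged. A small hedged sketch, assuming a shard has already been downloaded locally: recompute its sha256 and compare it against the oid recorded in the pointer above (the file path and helper name are illustrative).

```python
# Hypothetical check, not part of the commit: verify a downloaded shard
# against the sha256 oid stored in its Git LFS pointer file.
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream the file in chunks and return its hex sha256 digest."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "63f903608b337aaa7fdfe45d27f4683f5cc267198f3c933ba4b4dc2ec8a8a8c0"
actual = sha256_of("model-00001-of-00002.safetensors")
assert actual == expected, f"checksum mismatch: {actual}"
```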