Upload PhiForCausalLM (#3)
config.json CHANGED
@@ -28,7 +28,7 @@
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "float32",
-  "transformers_version": "4.37.0",
+  "transformers_version": "4.37.1",
   "use_cache": true,
   "vocab_size": 51200
 }
generation_config.json CHANGED
@@ -1,4 +1,4 @@
 {
   "_from_model_config": true,
-  "transformers_version": "4.37.0"
+  "transformers_version": "4.37.1"
 }
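Both JSON changes are the same one-line bump: `transformers_version` is the library-version stamp that `save_pretrained` writes into every exported config, here moving from 4.37.0 to 4.37.1; no model hyperparameters change. A minimal sketch for reading the stamp from a local checkout (the directory name `phi-model` is a placeholder, since the diff does not show the repo's actual id):

```python
# Minimal sketch: read the stamped transformers_version from a local
# checkout of this repo. "phi-model" is a placeholder path, not the
# repo's real name.
import json

for name in ("config.json", "generation_config.json"):
    with open(f"phi-model/{name}") as f:
        meta = json.load(f)
    # After this commit, both files report "4.37.1".
    print(name, meta.get("transformers_version"))
```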
model-00001-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:04407bc1d400acb6e608baa722b4d78e1aa5ad23afc8907d8e35e79cafd3a4e3
+oid sha256:d4f3221f0dc1559cd2e0e726100f921fa0cc0b8e7db4570186f8e061835bcaaa
 size 4984916152
model-00002-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ed86628229e5cc4752f871922dc7e4785beec8a799d99d3fdebaf82587caf296
+oid sha256:01f4b7d840703242c654a96cad38530443b49af0b67725fe6492ff53de7344f8
 size 688204064
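The .safetensors entries are Git LFS pointer files: `oid` is the SHA-256 of the actual weight shard and `size` its byte length. Both sizes are unchanged while both oids change, so the shards were re-uploaded with identical lengths but different contents. A minimal sketch, assuming the two shards have been downloaded into the working directory, that re-hashes them against the new pointers (file names and oids are taken from the diff above; everything else is illustrative):

```python
# Minimal sketch: recompute each shard's SHA-256 and compare it to the
# oid in its Git LFS pointer. Expected digests are the post-commit oids
# from the diff above.
import hashlib

EXPECTED = {
    "model-00001-of-00002.safetensors":
        "d4f3221f0dc1559cd2e0e726100f921fa0cc0b8e7db4570186f8e061835bcaaa",
    "model-00002-of-00002.safetensors":
        "01f4b7d840703242c654a96cad38530443b49af0b67725fe6492ff53de7344f8",
}

for name, want in EXPECTED.items():
    h = hashlib.sha256()
    with open(name, "rb") as f:
        # Hash in 1 MiB chunks so a multi-GB shard never sits in memory whole.
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    print(name, "OK" if h.hexdigest() == want else "MISMATCH")
```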