pathos00011 committed
Commit 5843004
Parent: 950d1ab

Upload Phi3ForCausalLM

config.json CHANGED
@@ -1,12 +1,12 @@
 {
-  "_name_or_path": "Phi-3-mini-4k-instruct",
+  "_name_or_path": "microsoft/Phi-3-mini-4k-instruct",
   "architectures": [
     "Phi3ForCausalLM"
   ],
   "attention_dropout": 0.0,
   "auto_map": {
-    "AutoConfig": "configuration_phi3.Phi3Config",
-    "AutoModelForCausalLM": "modeling_phi3.Phi3ForCausalLM"
+    "AutoConfig": "microsoft/Phi-3-mini-4k-instruct--configuration_phi3.Phi3Config",
+    "AutoModelForCausalLM": "microsoft/Phi-3-mini-4k-instruct--modeling_phi3.Phi3ForCausalLM"
   },
   "bos_token_id": 1,
   "embd_pdrop": 0.0,
@@ -29,7 +29,7 @@
   "sliding_window": 2047,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.39.3",
-  "use_cache": true,
+  "transformers_version": "4.40.1",
+  "use_cache": false,
   "vocab_size": 32064
 }
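The substantive change in config.json is the `auto_map` rewrite: both entries now use the Hub's `repo--module.Class` syntax, so `transformers` resolves `configuration_phi3.py` and `modeling_phi3.py` from the upstream microsoft/Phi-3-mini-4k-instruct repository instead of expecting those files inside this repo. Loading therefore goes through the remote-code path. A minimal loading sketch, assuming network access to the Hub; the repo id below is a hypothetical placeholder, since the diff never names the repository this commit was pushed to:

```python
import torch
from transformers import AutoModelForCausalLM

# Hypothetical repo id: substitute the repository this commit belongs to.
repo_id = "pathos00011/phi3-upload"

# auto_map entries of the form
#   "microsoft/Phi-3-mini-4k-instruct--modeling_phi3.Phi3ForCausalLM"
# tell transformers to fetch the modeling code from that upstream repo,
# which is why trust_remote_code=True is required.
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    trust_remote_code=True,
    torch_dtype=torch.bfloat16,  # matches "torch_dtype" in config.json
)

# "use_cache" was flipped to false in this commit (typical for checkpoints
# saved during training, e.g. with gradient checkpointing). It only records
# the state at save time, so KV caching can be re-enabled for generation:
model.config.use_cache = True
```

The `transformers_version` bump to 4.40.1 simply records the library version that serialized the new checkpoint.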
model-00001-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:daa4ef2edfb1cb4f625eda4f0e547b782d4ec54a169194997a8ccd651bb7217b
+oid sha256:8a8fb59e7ba71d67ddd29ab89a1d09fb430848e22ef31f50d6ba97f810be7d2b
 size 4986667768
model-00002-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ad104560e5d65e876e66a97eb9c738a6854a2126f4ae54954a337f647962d7ca
+oid sha256:b4ed139709c325a331f75a4f81b956e5fc17e41f49eab19fecca268443a34cdc
 size 2705880488
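Both weight shards follow the same pattern: each `.safetensors` file is stored as a Git LFS pointer, and only the `oid sha256:` line changes while the sizes stay byte-identical (4986667768 and 2705880488), i.e. new weight values in an unchanged tensor layout. A small verification sketch, assuming the shards have been downloaded locally; the expected digest below is copied from the first shard's new pointer:

```python
import hashlib

def file_sha256(path: str, chunk_size: int = 1 << 20) -> str:
    """Hash a file in 1 MiB chunks so a ~5 GB shard never sits fully in memory."""
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

# Expected digest from the new LFS pointer of model-00001-of-00002.safetensors.
expected = "8a8fb59e7ba71d67ddd29ab89a1d09fb430848e22ef31f50d6ba97f810be7d2b"
actual = file_sha256("model-00001-of-00002.safetensors")
assert actual == expected, f"shard is corrupted or stale: got {actual}"
```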