hugosousa committed
Commit 65dca04
1 Parent(s): 8d7428b

Upload model.

Files changed (4)
  1. adapter_0.pt +3 -0
  2. config.json +1 -0
  3. hf_model_0001_0.pt +3 -0
  4. hf_model_0002_0.pt +3 -0
adapter_0.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ac3e223fc7db5863e01df5bbe7f0a48e8829f8de98329df8f4f343681dc75355
+size 436943418
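These three lines are a Git LFS pointer, not the weights themselves: the repo records only the blob's sha256 and byte size (~437 MB here), while the actual file lives in LFS storage. A minimal sketch for checking a downloaded adapter_0.pt against the pointer above (`verify_lfs_pointer` is a hypothetical helper, not part of this repo):

```python
import hashlib

def verify_lfs_pointer(path: str, expected_oid: str, expected_size: int) -> bool:
    """Compare a local file against the oid/size recorded in a Git LFS pointer."""
    digest = hashlib.sha256()
    size = 0
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # read in 1 MiB chunks
            digest.update(chunk)
            size += len(chunk)
    return digest.hexdigest() == expected_oid and size == expected_size

# oid and size copied from the adapter_0.pt pointer above.
print(verify_lfs_pointer(
    "adapter_0.pt",
    "ac3e223fc7db5863e01df5bbe7f0a48e8829f8de98329df8f4f343681dc75355",
    436943418,
))
```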
config.json ADDED
@@ -0,0 +1 @@
+{"_name_or_path": "Phi-3-mini-4k-instruct", "architectures": ["Phi3ForCausalLM"], "attention_dropout": 0.0, "auto_map": {"AutoConfig": "configuration_phi3.Phi3Config", "AutoModelForCausalLM": "modeling_phi3.Phi3ForCausalLM"}, "bos_token_id": 1, "embd_pdrop": 0.0, "eos_token_id": 32000, "hidden_act": "silu", "hidden_size": 3072, "initializer_range": 0.02, "intermediate_size": 8192, "max_position_embeddings": 4096, "model_type": "phi3", "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 32, "original_max_position_embeddings": 4096, "pad_token_id": 32000, "resid_pdrop": 0.0, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "sliding_window": 2047, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "transformers_version": "4.39.3", "use_cache": true, "vocab_size": 32064}
hf_model_0001_0.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b3d172e5283d6ec501bff57882c0e11dd210558edad0dfa1d8d1caf905fb95d2
+size 4972518334
hf_model_0002_0.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e9e6f637e62f5428636d8c23672c7bf8ee70a0a542d749fa1f6295f8bc93040a
+size 2669707717
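Together the two hf_model_000N_0.pt shards come to roughly 7.6 GB, which is consistent with a ~3.8B-parameter model stored in bfloat16 (2 bytes per parameter). Assuming each shard is a plain PyTorch state dict mapping parameter names to tensors (an assumption, not something the commit confirms), a hedged sketch for merging them after download:

```python
import torch

# Assumption: each hf_model_*.pt shard is a plain state dict
# (parameter name -> tensor); if so, the shards can be merged directly.
shards = ["hf_model_0001_0.pt", "hf_model_0002_0.pt"]

state_dict = {}
for path in shards:
    state_dict.update(torch.load(path, map_location="cpu"))

total = sum(t.numel() for t in state_dict.values())
print(f"{len(state_dict)} tensors, {total:,} parameters")
```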