Upload LlamaForCausalLM

#2 opened by Defetya
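For context, a bump like this in the serialized `transformers_version` fields is what you get when a checkpoint is re-saved and pushed with a newer transformers install. A minimal sketch of how an upload like this is typically produced (the local path and repo id below are hypothetical placeholders, not taken from this PR):

```python
# Minimal upload sketch; the local path and repo id are hypothetical placeholders.
from transformers import AutoTokenizer, LlamaForCausalLM

model = LlamaForCausalLM.from_pretrained("path/to/local/checkpoint")
tokenizer = AutoTokenizer.from_pretrained("path/to/local/checkpoint")

# push_to_hub re-serializes config.json, generation_config.json and the
# sharded weights; serialization stamps the currently installed transformers
# version into the "transformers_version" field seen in the diffs below.
model.push_to_hub("your-username/your-repo")
tokenizer.push_to_hub("your-username/your-repo")
```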
config.json CHANGED
@@ -21,7 +21,7 @@
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
- "transformers_version": "4.34.0",
+ "transformers_version": "4.34.1",
   "use_cache": true,
   "vocab_size": 32000
  }
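A quick way to sanity-check the updated config once this is merged; a sketch, with a placeholder repo id:

```python
# Verification sketch; the repo id is a hypothetical placeholder.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("your-username/your-repo")
print(config.transformers_version)  # expected "4.34.1" after this change
print(config.torch_dtype)           # bfloat16, per the unchanged context lines
print(config.vocab_size)            # 32000
```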
generation_config.json CHANGED
@@ -6,5 +6,5 @@
   "pad_token_id": 0,
   "temperature": 0.6,
   "top_p": 0.9,
- "transformers_version": "4.34.0"
+ "transformers_version": "4.34.1"
  }
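Only the version stamp changes here; the sampling defaults (temperature 0.6, top_p 0.9, pad_token_id 0) are untouched. They can be inspected directly; a sketch with a placeholder repo id:

```python
# Sketch; the repo id is a hypothetical placeholder.
from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained("your-username/your-repo")
print(gen_config.temperature)   # 0.6
print(gen_config.top_p)         # 0.9
print(gen_config.pad_token_id)  # 0
```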
pytorch_model-00002-of-00002.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:1016ed88fbceb73452d36991ee0ec53768c95c495a11ab6184f885fb76245138
+ oid sha256:1ef12f8d6406778a6233914e46c441742ffde12c4922caf032a0e3be98dadb84
  size 3500311262
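The diff above only shows the Git LFS pointer: the shard's sha256 changed while its byte size stayed the same, so the weights themselves were replaced. A sketch for verifying a locally downloaded shard against the new pointer (where the file lives locally is an assumption; the filename and hash come from this diff):

```python
# Verify a downloaded shard against the new LFS pointer's sha256.
import hashlib

def sha256_of(path, chunk_size=1 << 20):
    """Stream the file so a multi-GB shard never has to fit in memory."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

digest = sha256_of("pytorch_model-00002-of-00002.bin")
assert digest == "1ef12f8d6406778a6233914e46c441742ffde12c4922caf032a0e3be98dadb84"
```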