kingabzpro committed
Commit 78be0e6
Parent: dfcc316

Upload LlamaForCausalLM

Files changed (2)
  1. config.json +1 -1
  2. generation_config.json +1 -1
config.json CHANGED
@@ -31,7 +31,7 @@
   "rope_theta": 500000.0,
   "tie_word_embeddings": true,
   "torch_dtype": "float16",
-  "transformers_version": "4.45.1",
+  "transformers_version": "4.44.2",
   "use_cache": true,
   "vocab_size": 128258
 }
generation_config.json CHANGED
@@ -5,5 +5,5 @@
   "pad_token_id": 128257,
   "temperature": 0.6,
   "top_p": 0.9,
-  "transformers_version": "4.45.1"
+  "transformers_version": "4.44.2"
 }
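
The only substantive change in both files is the recorded transformers_version, which moves from 4.45.1 to 4.44.2. To check which version a checkpoint's configs record, a minimal sketch using huggingface_hub; the repo id below is hypothetical, since the commit page does not name the repository:

import json

from huggingface_hub import hf_hub_download

# Hypothetical repo id -- this commit page does not name the repository.
REPO_ID = "kingabzpro/example-llama-model"

# Fetch the two files touched by this commit and print the recorded version.
for filename in ("config.json", "generation_config.json"):
    path = hf_hub_download(repo_id=REPO_ID, filename=filename)
    with open(path) as f:
        print(filename, "->", json.load(f).get("transformers_version"))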