Text Generation · Russian · conversational
IlyaGusev committed
Commit df8295a
Parent: 8dfcef0
Files changed (3)
  1. adapter_config.json +2 -2
  2. adapter_model.bin +2 -2
  3. generation_config.json +13 -11
adapter_config.json CHANGED
@@ -8,10 +8,10 @@
   "layers_pattern": null,
   "layers_to_transform": null,
   "lora_alpha": 16,
-  "lora_dropout": 0.05,
+  "lora_dropout": 0.1,
   "modules_to_save": null,
   "peft_type": "LORA",
-  "r": 16,
+  "r": 8,
   "revision": null,
   "target_modules": [
     "c_attn"
adapter_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:16998638d672e7e2b553df498b8c7fb0a053a690f4fb277fcfa01b6090c7f22b
- size 52457121
+ oid sha256:61cd953050f3f1d9a14e710de623ec80657e43cb730fd11c20a3f0eaa43875de
+ size 26242657
generation_config.json CHANGED
@@ -1,13 +1,15 @@
  {
- "pad_token_id": 0,
- "bos_token_id": 2,
- "eos_token_id": 3,
- "temperature": 0.2,
- "top_p": 0.9,
- "top_k": 30,
- "do_sample": true,
- "max_new_tokens": 1536,
- "num_beams": 1,
- "repetition_penalty": 1.15,
- "no_repeat_ngram_size": 15
+ "_from_model_config": true,
+ "bos_token_id": 2,
+ "eos_token_id": 3,
+ "pad_token_id": 0,
+ "transformers_version": "4.27.1",
+ "temperature": 0.2,
+ "top_p": 0.9,
+ "top_k": 30,
+ "do_sample": true,
+ "max_new_tokens": 1536,
+ "num_beams": 1,
+ "repetition_penalty": 1.15,
+ "no_repeat_ngram_size": 15
  }
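
The regenerated generation_config.json adds the _from_model_config and transformers_version bookkeeping fields that transformers writes when saving a GenerationConfig; the sampling settings themselves are unchanged. A minimal sketch of applying them with transformers 4.27+, where the model id and prompt are placeholders and only the keyword values come from the file above:

# Minimal sketch, assuming transformers >= 4.27 (which provides GenerationConfig).
# The repo id and prompt are placeholders; this commit does not name the base model.
from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig

repo = "path-to-model"  # placeholder
tokenizer = AutoTokenizer.from_pretrained(repo)
model = AutoModelForCausalLM.from_pretrained(repo)

# Values below mirror generation_config.json from this commit.
gen_config = GenerationConfig(
    pad_token_id=0,
    bos_token_id=2,
    eos_token_id=3,
    do_sample=True,
    temperature=0.2,
    top_p=0.9,
    top_k=30,
    max_new_tokens=1536,
    num_beams=1,
    repetition_penalty=1.15,
    no_repeat_ngram_size=15,
)

inputs = tokenizer("Привет! Как дела?", return_tensors="pt")  # placeholder prompt
output_ids = model.generate(**inputs, generation_config=gen_config)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
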