Upload README.md
README.md CHANGED
@@ -46,10 +46,15 @@ Multiple GPTQ parameter permutations are provided; see Provided Files below for
 <!-- repositories-available end -->
 
 <!-- prompt-template start -->
-## Prompt template:
+## Prompt template: Alpaca
 
 ```
-
+Below is an instruction that describes a task. Write a response that appropriately completes the request.
+
+### Instruction:
+{prompt}
+
+### Response:
 
 ```
 
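The hunk above sets the card's prompt format to the standard Alpaca template. As a quick illustration of how that template is meant to be filled in, here is a minimal Python sketch; the helper name `build_alpaca_prompt` is ours for illustration, not part of the README:

```python
# Minimal sketch of filling in the Alpaca template shown in the diff above.
# The helper name is illustrative; the README itself just inlines an f-string.
def build_alpaca_prompt(prompt: str) -> str:
    return (
        "Below is an instruction that describes a task. "
        "Write a response that appropriately completes the request.\n\n"
        "### Instruction:\n"
        f"{prompt}\n\n"
        "### Response:\n"
    )

print(build_alpaca_prompt("Tell me about AI"))
```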
@@ -164,7 +169,12 @@ model = AutoModelForCausalLM.from_pretrained(model_name_or_path,
 tokenizer = AutoTokenizer.from_pretrained(model_name_or_path, use_fast=True)
 
 prompt = "Tell me about AI"
-prompt_template=f'''
+prompt_template=f'''Below is an instruction that describes a task. Write a response that appropriately completes the request.
+
+### Instruction:
+{prompt}
+
+### Response:
 
 '''
 
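The second hunk shows only the fragment of the README's Python example that changed. For context, here is a hedged sketch of how the completed `prompt_template` would typically be used end to end; the repository id, loading keyword arguments, and generation settings below are placeholders and assumptions, not values taken from this diff:

```python
# Hedged sketch: end-to-end use of the prompt_template from the diff above.
# The repo id and generation parameters are illustrative assumptions,
# not values shown in this diff.
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name_or_path = "org/model-GPTQ"  # placeholder; the real repo id is not shown in this hunk

model = AutoModelForCausalLM.from_pretrained(model_name_or_path, device_map="auto")
tokenizer = AutoTokenizer.from_pretrained(model_name_or_path, use_fast=True)

prompt = "Tell me about AI"
prompt_template = f'''Below is an instruction that describes a task. Write a response that appropriately completes the request.

### Instruction:
{prompt}

### Response:
'''

input_ids = tokenizer(prompt_template, return_tensors="pt").input_ids.to(model.device)
output = model.generate(input_ids, max_new_tokens=512, do_sample=True, temperature=0.7)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```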