Commit 90e1f34
Parent(s): 6e5cb60

Fix missing import and closing parenthesis (#1)

- Fix missing closing parenthesis (b6bdcd4dda0efd7160b84a74c3775bbc16e94be3)
- Fix add missing import (f7310b31a09ad92036a7dff9cc9fbfe9d56ddf75)

Co-authored-by: Daniel Vila <dvilasuero@users.noreply.huggingface.co>
README.md CHANGED

@@ -148,7 +148,7 @@ Use the code below to get started with LINCE-ZERO!
 
 ```py
 import torch
-from transformers import AutoModelForCausalLM, AutoTokenizer, AutoTokenizer
+from transformers import AutoModelForCausalLM, AutoTokenizer, AutoTokenizer, GenerationConfig
 
 model_id = "clibrain/lince-zero"
 
@@ -187,7 +187,7 @@ def generate(
 ):
 
     prompt = create_instruction(instruction, input, context)
-    print(prompt.replace("### Respuesta:\n", "")
+    print(prompt.replace("### Respuesta:\n", ""))
     inputs = tokenizer(prompt, return_tensors="pt")
     input_ids = inputs["input_ids"].to("cuda")
     attention_mask = inputs["attention_mask"].to("cuda")
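For context, here is a minimal sketch of how the corrected README snippet fits together after this commit. Only the import line and the balanced `print(...)` call come from the diff above; the `create_instruction` helper body, the simplified `generate()` signature, and the generation arguments are illustrative assumptions, since those parts of the README lie outside the changed hunks.

```py
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig

model_id = "clibrain/lince-zero"

# Model and tokenizer loading; float16 weights and a CUDA device are assumed here.
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.float16).to("cuda")


def create_instruction(instruction, input=None, context=None):
    # Hypothetical stand-in for the README's prompt builder, which is not
    # shown in the changed hunks; the real template may differ.
    prompt = f"### Instrucción:\n{instruction}\n"
    if input:
        prompt += f"### Entrada:\n{input}\n"
    if context:
        prompt += f"### Contexto:\n{context}\n"
    return prompt + "### Respuesta:\n"


def generate(instruction, input=None, context=None):
    # Simplified signature; the README's generate() takes more parameters.
    prompt = create_instruction(instruction, input, context)
    print(prompt.replace("### Respuesta:\n", ""))  # balanced parentheses, per the fix
    inputs = tokenizer(prompt, return_tensors="pt")
    input_ids = inputs["input_ids"].to("cuda")
    attention_mask = inputs["attention_mask"].to("cuda")
    # GenerationConfig is the class the added import brings in; these
    # arguments are illustrative, not the README's exact values.
    generation_config = GenerationConfig(max_new_tokens=128)
    outputs = model.generate(
        input_ids=input_ids,
        attention_mask=attention_mask,
        generation_config=generation_config,
    )
    return tokenizer.decode(outputs[0], skip_special_tokens=True)


print(generate("Dame una lista de lugares a visitar en España."))
```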