Update README.md
Browse files
README.md
CHANGED
```diff
@@ -35,7 +35,7 @@ model = AutoModelForCausalLM.from_pretrained(
 )

 if torch.cuda.is_available():
-    [removed line — original content lost in page extraction]
+    model = model.to("cuda")

 prompt="""### Instruction:
 光の三原色は?
@@ -44,12 +44,12 @@ prompt="""### Instruction:

 input_ids = tokenizer(prompt, return_tensors="pt").to(model.device)
 outputs = model.generate(
-    [six removed lines — original content lost in page extraction]
+    **input_ids,
+    max_new_tokens=512,
+    do_sample=True,
+    top_p=0.95,
+    temperature=0.1,
+    repetition_penalty=1.0,
 )
 print(tokenizer.decode(outputs[0]))
 ```
```

NOTE(review): this is a reconstruction of a scraped GitHub split-diff view. The added (`+`) lines and the context lines are taken verbatim from the page; the removed (`-`) lines rendered empty in the scrape, so their original content is unknown and marked as lost rather than invented.