update(README): fix code snippet
Browse files
README.md
CHANGED
@@ -35,14 +35,14 @@ This format is also available through the tokenizer's `apply_chat_template` meth
|
|
35 |
```python
|
36 |
from transformers import AutoModelForCausalLM, AutoTokenizer
|
37 |
|
38 |
-
tokenizer = AutoTokenizer.from_pretrained('stabilityai/stablelm-2-chat')
|
39 |
model = AutoModelForCausalLM.from_pretrained(
|
40 |
-
'stabilityai/stablelm-2-chat',
|
41 |
device_map="auto",
|
42 |
trust_remote_code=True,
|
43 |
)
|
44 |
|
45 |
-
prompt = [{'role': 'user', 'content': 'How to
|
46 |
inputs = tokenizer.apply_chat_template(
|
47 |
prompt,
|
48 |
add_generation_prompt=True,
|
@@ -55,14 +55,14 @@ tokens = model.generate(
|
|
55 |
temperature=0.7,
|
56 |
do_sample=True
|
57 |
)
|
58 |
-
output = tokenizer.decode(tokens[:, inputs.
|
59 |
|
60 |
print(output)
|
61 |
```
|
62 |
|
63 |
StableLM 2 12B Chat also supports function calling; here is an example of how you can use it:
|
64 |
```python
|
65 |
-
|
66 |
You are a helpful assistant with access to the following functions. You must use them if required -\n
|
67 |
[
|
68 |
{
|
@@ -103,7 +103,7 @@ tokens = model.generate(
|
|
103 |
temperature=0.5,
|
104 |
do_sample=True
|
105 |
)
|
106 |
-
output = tokenizer.decode(tokens[:, inputs.
|
107 |
|
108 |
print(output)
|
109 |
"""
|
|
|
35 |
```python
|
36 |
from transformers import AutoModelForCausalLM, AutoTokenizer
|
37 |
|
38 |
+
tokenizer = AutoTokenizer.from_pretrained('stabilityai/stablelm-2-12b-chat')
|
39 |
model = AutoModelForCausalLM.from_pretrained(
|
40 |
+
'stabilityai/stablelm-2-12b-chat',
|
41 |
device_map="auto",
|
42 |
trust_remote_code=True,
|
43 |
)
|
44 |
|
45 |
+
prompt = [{'role': 'user', 'content': 'How to combine multiple rows of data into one row of data in Excel?'}]
|
46 |
inputs = tokenizer.apply_chat_template(
|
47 |
prompt,
|
48 |
add_generation_prompt=True,
|
|
|
55 |
temperature=0.7,
|
56 |
do_sample=True
|
57 |
)
|
58 |
+
output = tokenizer.decode(tokens[:, inputs.shape[-1]:][0], skip_special_tokens=False)
|
59 |
|
60 |
print(output)
|
61 |
```
|
62 |
|
63 |
StableLM 2 12B Chat also supports function calling; here is an example of how you can use it:
|
64 |
```python
|
65 |
+
system_prompt = """\
|
66 |
You are a helpful assistant with access to the following functions. You must use them if required -\n
|
67 |
[
|
68 |
{
|
|
|
103 |
temperature=0.5,
|
104 |
do_sample=True
|
105 |
)
|
106 |
+
output = tokenizer.decode(tokens[:, inputs.shape[-1]:][0], skip_special_tokens=True)
|
107 |
|
108 |
print(output)
|
109 |
"""
|