test another model
- language.py +11 -2
- requirements.txt +2 -1
language.py
CHANGED
@@ -1,9 +1,18 @@
 from transformers import pipeline
+import torch
 
 input = "i have question and answere.\
 the question is : How many cars here ?\
 the response is : 2\
 with this information, can you crate an answere phrase in french"
 
-
-
+generate_text = pipeline(model="databricks/dolly-v2-12b", torch_dtype=torch.bfloat16, trust_remote_code=True, device_map="auto")
+res = generate_text(input)
+print(res[0]["generated_text"])
+
+
+
+#i have question and answere.
+#the question is : How many cars here ?
+#the response is : 2
+#with this information, can you crate an answere phrase
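The added code is the standard transformers text-generation pipeline pattern. Below is a minimal sketch of the same call, assuming a smaller checkpoint (databricks/dolly-v2-3b, used here only so it can be tried without downloading the 12B weights) and with the prompt rebuilt using explicit newlines, since backslash line continuations inside a string literal drop the newline and run the lines together:

from transformers import pipeline
import torch

# Build the prompt with explicit newlines instead of backslash continuations,
# so each line stays separated when the model sees it.
prompt = (
    "i have a question and an answer.\n"
    "the question is: How many cars here?\n"
    "the response is: 2\n"
    "with this information, can you create an answer phrase in french"
)

# Same pipeline call as the commit, pointed at a smaller Dolly checkpoint
# (an assumption for quick local testing, not what the commit actually uses).
# device_map="auto" additionally requires the accelerate package.
generate_text = pipeline(
    model="databricks/dolly-v2-3b",
    torch_dtype=torch.bfloat16,
    trust_remote_code=True,
    device_map="auto",
)

res = generate_text(prompt)
print(res[0]["generated_text"])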
requirements.txt
CHANGED
@@ -1 +1,2 @@
-transformers[torch]
+transformers[torch]
+torch
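requirements.txt now lists torch explicitly; transformers[torch] already pulls torch in as an extra, so the added line is redundant but harmless. A quick sanity check, assuming an environment installed from this file, that the pieces language.py relies on are importable:

import torch
import transformers

# language.py passes torch.bfloat16 as the pipeline dtype, so torch must import cleanly.
print("transformers", transformers.__version__)
print("torch", torch.__version__, "bfloat16:", torch.bfloat16)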