Spaces:
Runtime error
huolongguo10 committed · Commit 5df8faa
1 Parent(s): e69cda8
Update app.py
app.py CHANGED
@@ -3,10 +3,10 @@ from transformers import AutoModelForCausalLM, AutoTokenizer
 
 # torch.set_default_device("cuda")
 
-model = AutoModelForCausalLM.from_pretrained("microsoft/phi-2", torch_dtype="
+model = AutoModelForCausalLM.from_pretrained("microsoft/phi-2", torch_dtype=torch.float32, device_map="cpu", trust_remote_code=True)
 tokenizer = AutoTokenizer.from_pretrained("microsoft/phi-2", trust_remote_code=True)
 def greet(text):
-    inputs = tokenizer(text, return_tensors="pt", return_attention_mask=False)
+    inputs = tokenizer('Instruct: '+text, return_tensors="pt"+'\nOutput: ', return_attention_mask=False)
 
     outputs = model.generate(**inputs, max_length=200)
     _text = tokenizer.batch_decode(outputs)[0]
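Note that the new `inputs` line concatenates '\nOutput: ' onto the return_tensors argument instead of onto the prompt, so the tokenizer is asked for return_tensors="pt\nOutput: " rather than "pt"; that would fail at the first request and is a plausible cause of the Space's runtime error. A minimal corrected sketch of the same app.py logic follows (assuming import torch near the top of the file and that greet returns the decoded text; neither is visible in this hunk):

# Hypothetical corrected version (not part of this commit): build the full
# Phi-2 prompt first, then pass return_tensors="pt" as its own keyword argument.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model = AutoModelForCausalLM.from_pretrained(
    "microsoft/phi-2", torch_dtype=torch.float32, device_map="cpu", trust_remote_code=True
)
tokenizer = AutoTokenizer.from_pretrained("microsoft/phi-2", trust_remote_code=True)

def greet(text):
    # Prompt format used by the Phi-2 model card: "Instruct: ...\nOutput:"
    prompt = "Instruct: " + text + "\nOutput: "
    inputs = tokenizer(prompt, return_tensors="pt", return_attention_mask=False)
    outputs = model.generate(**inputs, max_length=200)
    _text = tokenizer.batch_decode(outputs)[0]
    return _text  # assumed; the hunk ends before greet's return statement

Loading with torch_dtype=torch.float32 and device_map="cpu" keeps the model runnable on CPU-only Spaces hardware, at the cost of holding all of Phi-2's ~2.7B parameters in RAM at 4 bytes each.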