File size: 670 Bytes
e69cda8
 
517a7f0
e69cda8
 
5df8faa
e69cda8
 
eac4a17
e69cda8
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
import gradio as gr
# Left disabled: uncomment to run on GPU instead of the CPU config below.
# torch.set_default_device("cuda")

# Load microsoft/phi-2 on CPU in float32. trust_remote_code is required because
# phi-2 ships custom model code; NOTE(review): this executes code from the hub —
# acceptable for a demo, but worth confirming for production use.
model = AutoModelForCausalLM.from_pretrained("microsoft/phi-2", torch_dtype=torch.float32, device_map="cpu", trust_remote_code=True)
# Matching tokenizer for the same checkpoint.
tokenizer = AutoTokenizer.from_pretrained("microsoft/phi-2", trust_remote_code=True)
def greet(text):
    """Generate a phi-2 completion for the given instruction text.

    Parameters
    ----------
    text : str
        The user instruction, wrapped in phi-2's "Instruct:/Output:" prompt
        format before generation.

    Returns
    -------
    str
        The decoded model output (prompt plus completion, special tokens
        removed).
    """
    prompt = 'Instruct: ' + text + '\nOutput: '
    # Keep the attention mask (the old return_attention_mask=False dropped it,
    # which triggers HF warnings and can mishandle padded inputs).
    inputs = tokenizer(prompt, return_tensors="pt")
    # max_new_tokens bounds only the generated continuation; the previous
    # max_length=200 counted the prompt too, so long prompts could leave
    # little or no room for an answer. pad_token_id silences the
    # "no pad token" warning for this tokenizer.
    outputs = model.generate(
        **inputs,
        max_new_tokens=200,
        pad_token_id=tokenizer.eos_token_id,
    )
    # skip_special_tokens drops markers such as <|endoftext|> from the reply.
    _text = tokenizer.batch_decode(outputs, skip_special_tokens=True)[0]
    return _text
# Minimal Gradio UI: a single text input and text output wired to greet().
demo = gr.Interface(fn=greet, inputs="text", outputs="text")
# Start the local Gradio web server (blocks until the server is stopped).
demo.launch()