# FastAPI service exposing a text-generation endpoint backed by EleutherAI/llemma_7b.
from transformers import AutoTokenizer, LlamaForCausalLM
from fastapi import FastAPI
from pydantic import BaseModel

# Hugging Face hub identifier of the checkpoint to serve.
model_path = 'EleutherAI/llemma_7b'

# Load the model and tokenizer once at import time so every request reuses
# the same weights.
# NOTE(review): loading a 7B-parameter model here blocks startup and needs
# substantial RAM/VRAM — confirm this fits the deployment target.
model = LlamaForCausalLM.from_pretrained(model_path)
tokenizer = AutoTokenizer.from_pretrained(model_path)
class validation(BaseModel):
    """Request body for the generation endpoint.

    NOTE(review): the name violates PascalCase (should be e.g. ``Prompt``),
    but it is kept because the route handler references it.
    """

    # Free-form prompt text, passed verbatim to the tokenizer.
    prompt: str
app = FastAPI()


# BUG FIX: the handler below had no route decorator, so it was never
# registered with the app and the endpoint was unreachable. It is
# registered as POST since the request carries a JSON body; the path
# "/" is an assumption — adjust if callers expect a different one.
@app.post("/")
async def stream(item: validation):
    """Generate a completion for ``item.prompt`` and return the decoded text.

    The prompt is tokenized, ``model.generate`` produces at most 30 tokens
    total (prompt tokens included, per ``max_length=30``), and the first
    decoded sequence is returned with special tokens stripped.

    NOTE(review): ``model.generate`` is synchronous and compute-bound;
    calling it inside an ``async def`` handler blocks the event loop for
    the whole generation. Consider a plain ``def`` handler (FastAPI runs
    those in a thread pool) or ``run_in_executor`` — confirm before change.
    """
    inputs = tokenizer(item.prompt, return_tensors="pt")
    generate_ids = model.generate(inputs.input_ids, max_length=30)
    text = tokenizer.batch_decode(
        generate_ids,
        skip_special_tokens=True,
        clean_up_tokenization_spaces=False,
    )[0]
    return text