FricFast / main.py
import os
from typing import Union

from fastapi import FastAPI
from huggingface_hub import InferenceClient

app = FastAPI()

# The original file referenced an undefined `api_key`; an HF_TOKEN environment
# variable is assumed here for the Hugging Face Inference API token.
api_key = os.getenv("HF_TOKEN")


@app.get("/")
async def root():
    return {"message": "Hello World"}


@app.get("/chat")
async def chat(prompt: Union[str, None] = None, max_token: Union[str, None] = None):
    # Query the gpt2 model on the Hugging Face Inference API.
    client = InferenceClient(model="gpt2", token=api_key)
    input_text = prompt or "Once upon a time"
    # Generate text, defaulting to 140 new tokens when max_token is not given.
    response = client.text_generation(input_text, max_new_tokens=int(max_token or "140"))
    # Log and return the generated text.
    print(response)
    return {"message": response}