Spaces:
Sleeping
Sleeping
Api exposed
Browse files
app.py
CHANGED
|
@@ -1,24 +1,21 @@
|
|
|
|
|
|
|
|
| 1 |
from transformers import pipeline
|
| 2 |
-
import
|
| 3 |
|
| 4 |
-
# Load
|
| 5 |
qa_pipeline = pipeline("text2text-generation", model="google/flan-t5-small")
|
| 6 |
-
|
| 7 |
-
# Load your resume content
|
| 8 |
with open("resume.txt", "r", encoding="utf-8") as f:
|
| 9 |
resume_text = f.read()
|
| 10 |
|
| 11 |
-
#
|
| 12 |
-
|
| 13 |
-
|
| 14 |
-
|
| 15 |
-
|
| 16 |
|
| 17 |
-
|
| 18 |
-
|
| 19 |
-
|
| 20 |
-
|
| 21 |
-
|
| 22 |
-
title="Rence's Resume Q&A Bot",
|
| 23 |
-
description="Ask questions based on Rence Abishek's resume. The bot will answer using the resume content."
|
| 24 |
-
).launch(share=True)
|
|
|
|
| 1 |
+
from fastapi import FastAPI, Request
|
| 2 |
+
from pydantic import BaseModel
|
| 3 |
from transformers import pipeline
|
| 4 |
+
import uvicorn
|
| 5 |
|
| 6 |
+
# Load model and resume
|
| 7 |
qa_pipeline = pipeline("text2text-generation", model="google/flan-t5-small")
|
|
|
|
|
|
|
| 8 |
with open("resume.txt", "r", encoding="utf-8") as f:
|
| 9 |
resume_text = f.read()
|
| 10 |
|
# FastAPI setup
app = FastAPI()


class Question(BaseModel):
    """Request body for /predict: a single free-text question about the resume."""

    # The user's question, validated/deserialized by pydantic from the JSON body.
    query: str
@app.post("/predict")
async def predict(question: Question):
    """Answer *question* using the resume text and the seq2seq pipeline.

    Builds a prompt that embeds the full resume plus the user's query,
    runs the text2text pipeline, and returns the stripped generation as
    ``{"answer": ...}``.
    """
    # Assemble the prompt: resume context first, then the question.
    # (Byte-identical to the original f-string output.)
    full_prompt = (
        "Resume:\n" + resume_text + "\n\n"
        "Question: " + question.query + "\nAnswer:"
    )

    # The pipeline returns a list of dicts; take the first generation.
    generations = qa_pipeline(full_prompt, max_length=100)
    answer_text = generations[0]["generated_text"]

    return {"answer": answer_text.strip()}