from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
from typing import List
import requests
import os
import google.generativeai as genai
from dotenv import load_dotenv

load_dotenv()

app = FastAPI()


# Define input schema
class QAInput(BaseModel):
    questions: List[str]
    answers: List[str]


# Configure Gemini with the API key from the environment
GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
genai.configure(api_key=GEMINI_API_KEY)


def mock_gemini_response(prompt: str) -> str:
    # Despite the name, this calls the Gemini API and returns the generated
    # text, falling back to an error string if the call fails.
    try:
        model = genai.GenerativeModel("gemini-2.0-flash")
        response = model.generate_content(prompt)
        return response.text.strip()
    except Exception as e:
        return f"Error calling Gemini: {str(e)}"

# Endpoint to recommend a course
@app.post("/recommend")  # route path is an assumption
def recommend_course(data: QAInput):
    # Step 1: Fetch course list from LMS
    url = "https://lmslearn.frappe.cloud/api/resource/LMS Course"
    headers = {
        "Authorization": "token ecef74adb0ffd76:122897a76b48867",
        "Accept": "application/json"
    }
    try:
        response = requests.get(url, headers=headers)
        response.raise_for_status()
    except requests.RequestException as e:
        raise HTTPException(status_code=500, detail=f"Failed to fetch course list: {str(e)}")

    courses = response.json().get("data", [])

    # Step 2: Build prompt for Gemini from the user's Q&A and the course titles
    user_input = "\n".join(f"Q: {q}\nA: {a}" for q, a in zip(data.questions, data.answers))
    course_list = "\n".join(f"- {course['course_name']}" for course in courses if 'course_name' in course)

    prompt = f"""
You are an intelligent course recommender.
Based on the following Q&A from a user:
{user_input}
Here is a list of available courses:
{course_list}
Recommend the most suitable course for the user.
Instructions:
- Do not return the user's questions or answers.
- Return only the title of the most suitable course.
- Do not modify the course titles from the available course list.
"""

    # Step 3: Ask Gemini for a recommendation and return it
    gemini_response = mock_gemini_response(prompt)
    return {"recommendation": gemini_response}
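
# Local run sketch: a minimal way to serve the API so the /recommend route can
# be exercised; assumes uvicorn is installed and that GEMINI_API_KEY and the
# LMS credentials above are valid. Example request once running:
#   curl -X POST http://127.0.0.1:8000/recommend \
#        -H "Content-Type: application/json" \
#        -d '{"questions": ["What do you want to learn?"], "answers": ["Python basics"]}'
if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8000)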