# Source: farmligua_AI/app/main.py
# Uploaded via huggingface_hub by drrobot9 (rev 869543a, verified)
# farmlingua_backend/app/main.py
import os
import sys

# Make the project root importable when this module is executed directly
# (e.g. `python app/main.py`) instead of as an installed package.
# NOTE: this must run before the `app.*` imports below.
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
if BASE_DIR not in sys.path:
    sys.path.insert(0, BASE_DIR)

import logging

from fastapi import Body, FastAPI
from fastapi.middleware.cors import CORSMiddleware
import uvicorn

from app.agents.crew_pipeline import run_pipeline
from app.tasks.rag_updater import schedule_updates
from app.utils import config
# Root logger: timestamped, level-tagged messages on stderr.
logging.basicConfig(
    format="%(asctime)s [%(levelname)s] %(message)s",
    level=logging.INFO,
)

app = FastAPI(
    title="FarmLingua Backend",
    description="Backend service for FarmLingua with RAG updates and CrewAI pipeline",
    version="1.0.0",
)

# CORS: use the configured allow-list when present, otherwise permit
# every origin (development-friendly default).
_allowed_origins = getattr(config, "ALLOWED_ORIGINS", ["*"])
app.add_middleware(
    CORSMiddleware,
    allow_origins=_allowed_origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
@app.on_event("startup")
def startup_event():
"""Start scheduled RAG updates when the app launches."""
logging.info(" Starting FarmLingua backend...")
schedule_updates()
@app.get("/")
def home():
return {
"status": "FarmLingua backend running",
"version": "1.0.0",
"vectorstore_path": config.VECTORSTORE_PATH
}
@app.post("/ask")
def ask_farmbot(query: str = Body(..., embed=True)):
"""
Ask the FarmLingua AI a question.
Uses the crew_pipeline to process and return a detailed, farmer-friendly answer.
"""
logging.info(f"Received query: {query}")
answer = run_pipeline(query)
return {"query": query, "answer": answer}
if __name__ == "__main__":
uvicorn.run(
"app.main:app",
host="0.0.0.0",
port=getattr(config, "PORT", 7860),
reload=bool(getattr(config, "DEBUG", False))
)