# app.py
# Main Flask application for MyPharma AI
import os
import random

from dotenv import load_dotenv
from flask import Flask, render_template, request, jsonify, session
import google.generativeai as genai

# LangChain components for the knowledge base, plus project helpers
from langchain_google_genai import GoogleGenerativeAIEmbeddings
from langchain_community.vectorstores import FAISS
from utils.helpers import create_vector_store, get_greeting, load_quotes
from agents.router_agent import RouterAgent

# --- Initial Setup ---
load_dotenv()
# Build the knowledge library on first startup if it doesn't already exist
create_vector_store()
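
# NOTE: the real create_vector_store() lives in utils/helpers.py and is not part
# of this file. The sketch below is an assumption, not the actual implementation:
# it illustrates what that helper is expected to do, based on how app.py uses its
# output: read PDFs, split them into chunks, embed them with the same
# "models/embedding-001" embeddings, and save a FAISS index to /tmp/faiss_index.
def _example_create_vector_store(data_dir='data', index_path='/tmp/faiss_index'):
    """Hypothetical reference sketch of utils.helpers.create_vector_store()."""
    from langchain_community.document_loaders import PyPDFLoader
    from langchain_text_splitters import RecursiveCharacterTextSplitter
    if os.path.exists(index_path) or not os.path.isdir(data_dir):
        return  # Index already built, or no source documents available.
    docs = []
    for name in os.listdir(data_dir):
        if name.lower().endswith('.pdf'):
            # PyPDFLoader records 'source' and 0-indexed 'page' metadata,
            # which process_query() below uses to build citations.
            docs.extend(PyPDFLoader(os.path.join(data_dir, name)).load())
    if not docs:
        return
    chunks = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100).split_documents(docs)
    embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
    FAISS.from_documents(chunks, embeddings).save_local(index_path)
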
# --- App Configuration ---
app = Flask(__name__)
app.config['SECRET_KEY'] = os.getenv('FLASK_SECRET_KEY', 'a-very-secret-key-for-dev')

# --- Gemini API & Knowledge Base Configuration ---
model = None
vector_store = None
try:
    GEMINI_API_KEY = os.getenv('GOOGLE_API_KEY')
    if GEMINI_API_KEY:
        genai.configure(api_key=GEMINI_API_KEY)
        model = genai.GenerativeModel('gemini-1.5-flash')
        index_path = '/tmp/faiss_index'
        if os.path.exists(index_path):
            embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
            vector_store = FAISS.load_local(index_path, embeddings, allow_dangerous_deserialization=True)
            print("✅ Gemini Model and Knowledge Base loaded successfully!")
        else:
            print("✅ Gemini Model loaded. No knowledge base found to load.")
    else:
        print("⚠️ No Gemini API key found.")
except Exception as e:
    print(f"❌ Error during initialization: {e}")
# --- Main AI Application Class ---
class MyPharmaAI:
    """Orchestrator that combines the knowledge base with the multi-agent router."""
    def __init__(self, gemini_model, vector_store_db):
        self.router = RouterAgent(gemini_model)
        self.quotes = load_quotes()
        self.vector_store = vector_store_db

    def process_query(self, query, viva_state, chat_history):
        """Answer a query by retrieving knowledge-base context and routing it to an agent."""
        # 1. Search the permanent knowledge base for relevant, source-annotated context.
        file_context = ""
        if self.vector_store:
            relevant_docs = self.vector_store.similarity_search(query, k=4)  # Top 4 chunks
            context_with_sources = []
            for doc in relevant_docs:
                # Reduce the source path to just the filename
                source_filename = os.path.basename(doc.metadata.get('source', 'Unknown Source'))
                # Page numbers from PyPDF are 0-indexed, so add 1 for readability
                page_number = doc.metadata.get('page', -1) + 1
                context_with_sources.append(
                    f"[Source: {source_filename}, Page: {page_number}]\n{doc.page_content}"
                )
            file_context = "\n\n".join(context_with_sources)
        # 2. Pass the retrieved context to the multi-agent router system.
        return self.router.route_query(query, file_context, viva_state, chat_history)
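
# NOTE: RouterAgent is defined in agents/router_agent.py and is not shown in this
# file. The minimal sketch below is an assumption about the interface that
# MyPharmaAI and the /chat endpoint rely on: route_query(query, file_context,
# viva_state, chat_history) returning a dict with at least 'message',
# 'agent_used', and optionally 'status' and 'viva_state'.
class _ExampleRouterAgent:
    """Hypothetical stand-in illustrating the contract expected of RouterAgent."""
    def __init__(self, gemini_model):
        self.model = gemini_model

    def route_query(self, query, file_context="", viva_state=None, chat_history=None):
        if self.model is None:
            return {'status': 'error', 'message': 'AI model is not configured.', 'agent_used': 'error'}
        prompt = (
            "You are MyPharma AI, a pharmacy tutor for students.\n"
            f"Context from the knowledge base:\n{file_context}\n\n"
            f"Question: {query}"
        )
        try:
            response = self.model.generate_content(prompt)
            return {'status': 'ok', 'message': response.text, 'agent_used': 'academic'}
        except Exception as e:
            return {'status': 'error', 'message': str(e), 'agent_used': 'error'}
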
pharma_ai = MyPharmaAI(model, vector_store)

# --- Flask Routes ---
@app.route('/')
def index():
    """Renders the main chat interface."""
    daily_quote = random.choice(pharma_ai.quotes) if pharma_ai.quotes else "विद्या धनं सर्व धन प्रधानम्"
    return render_template('index.html', greeting=get_greeting(), daily_quote=daily_quote)
@app.route('/chat', methods=['POST'])
def chat():
    """Handles the main chat logic, including session state for history and the Viva Agent."""
    try:
        data = request.get_json(silent=True) or {}
        query = data.get('query', '').strip()
        if not query:
            return jsonify({'success': False, 'error': 'Empty query'}), 400
        chat_history = session.get('chat_history', [])
        viva_state = session.get('viva_state', None)
        # Get the result dictionary from the agent system
        agent_result = pharma_ai.process_query(query, viva_state, chat_history)
        # Build the final JSON response in the shape the front-end JavaScript expects
        if "error" in agent_result.get('status', ''):
            final_response = {
                'success': False,
                'error': agent_result.get('message', 'An unknown error occurred.'),
                'agent_used': agent_result.get('agent_used', 'error')
            }
        else:
            final_response = {
                'success': True,
                'message': agent_result.get('message', 'Sorry, I could not generate a response.'),
                'agent_used': agent_result.get('agent_used', 'academic')
            }
        # Update the conversation history only if the call was successful
        if final_response.get('success'):
            chat_history.append({'role': 'user', 'parts': [query]})
            chat_history.append({'role': 'model', 'parts': [final_response.get('message', '')]})
            session['chat_history'] = chat_history[-10:]  # Keep only the most recent turns
        # Persist any updated Viva state returned by the agent
        if 'viva_state' in agent_result:
            session['viva_state'] = agent_result.get('viva_state')
        return jsonify(final_response)
    except Exception as e:
        print(f"Critical Error in /chat endpoint: {e}")
        return jsonify({'success': False, 'error': f'A critical server error occurred: {e}', 'agent_used': 'error'}), 500
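
# For reference, a minimal client-side sketch of how the /chat endpoint above is
# meant to be called. The helper name and base URL are illustrative assumptions;
# in the running app the front-end JavaScript performs the equivalent request.
def _example_chat_request(query, base_url='http://127.0.0.1:7860'):
    """Hypothetical helper: POST a question to /chat and return the parsed JSON."""
    import requests  # Not a dependency of the app itself; used only for this example.
    resp = requests.post(f'{base_url}/chat', json={'query': query}, timeout=60)
    resp.raise_for_status()
    # Expected shape: {'success': True, 'message': '...', 'agent_used': 'academic'}
    return resp.json()
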
# --- Main Execution ---
if __name__ == '__main__':
    # Local development alternative: app.run(host='127.0.0.1', port=5000, debug=True)
    port = int(os.environ.get('PORT', 7860))
    app.run(host='0.0.0.0', port=port)