import os
import sys
import uuid # For generating batch IDs
import json # For parsing and for data_source_info in document upload

# Add the project root to the Python path
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))

from flask import Flask, jsonify, request
from werkzeug.utils import secure_filename # For secure file uploads
from sqlalchemy import func # For max position query in document upload

# Import db from extensions
from extensions import db

# Import refactored services and models
from services.dataset_service_refactored import DatasetService
from models.dataset import Dataset, Document, DocumentSegment, DatasetProcessRule # Ensure all necessary models are imported

def create_app():
    """Application factory: build and configure the Flask app.

    Loads config from ``config.py``, ensures the upload folder exists,
    binds the SQLAlchemy extension, creates missing tables, and registers
    all API routes.

    Returns:
        flask.Flask: the fully configured application instance.
    """
    app = Flask(__name__)
    app.config.from_pyfile("config.py")

    # exist_ok avoids the check-then-create race when several workers
    # (e.g. gunicorn processes) boot simultaneously.
    os.makedirs(app.config["UPLOAD_FOLDER"], exist_ok=True)

    # Initialize extensions
    db.init_app(app)

    # Create database tables if they don't exist
    with app.app_context():
        db.create_all()

    # --- API Endpoints ---
    @app.route("/health", methods=["GET"])
    def health_check():
        """Liveness probe; always returns 200 while the process is up."""
        return jsonify({"status": "healthy"}), 200

    @app.route("/datasets", methods=["GET"])
    def list_datasets_route():
        """List datasets with pagination and an optional search filter."""
        page = request.args.get("page", 1, type=int)
        per_page = request.args.get("per_page", 10, type=int)
        search = request.args.get("search", None, type=str)
        datasets_list, total = DatasetService.get_datasets(page=page, per_page=per_page, search=search)
        return jsonify({"datasets": [d.to_dict() for d in datasets_list], "total": total}), 200

    @app.route("/datasets", methods=["POST"])
    def create_dataset_route():
        """Create an empty dataset from a JSON payload; 'name' is required.

        Returns 201 with the created dataset, 400 on a missing/blank name
        or non-JSON body, 500 on service failure.
        """
        # silent=True yields None for a non-JSON body instead of raising,
        # so the request falls through to our uniform 400 below.
        data = request.get_json(silent=True)
        # Reject a blank name too — the previous `"name" not in data` check
        # let an empty string through to the service layer.
        if not data or not data.get("name"):
            return jsonify({"error": "Name is required"}), 400
        try:
            new_dataset = DatasetService.create_empty_dataset(
                name=data.get("name"),
                description=data.get("description"),
                indexing_technique=data.get("indexing_technique"),
                provider=data.get("provider", "vendor"),
                embedding_model_provider=data.get("embedding_model_provider"),
                embedding_model_name=data.get("embedding_model_name"),
                retrieval_model=data.get("retrieval_model")  # dict or JSON string
            )
            return jsonify({"message": "Dataset created successfully", "dataset": new_dataset.to_dict()}), 201
        except Exception as e:
            app.logger.error(f"Error creating dataset: {e}")
            return jsonify({"error": str(e)}), 500

    @app.route("/datasets/<string:dataset_id>", methods=["GET"])
    def get_dataset_route(dataset_id):
        """Fetch a single dataset by id; 404 when it does not exist."""
        dataset = DatasetService.get_dataset(dataset_id)
        if dataset:
            return jsonify(dataset.to_dict()), 200
        return jsonify({"error": "Dataset not found"}), 404

    @app.route("/datasets/<string:dataset_id>/documents", methods=["POST"])
    def upload_document_route(dataset_id):
        """Save an uploaded file to disk and register it as a Document.

        Returns 201 with the new document id, 400 on a missing/empty/unsafe
        filename, 404 for an unknown dataset, 500 on a database failure
        (the saved file is removed again in that case).
        """
        if "file" not in request.files:
            return jsonify({"error": "No file part"}), 400
        file = request.files["file"]
        if file.filename == "":
            return jsonify({"error": "No selected file"}), 400

        dataset = DatasetService.get_dataset(dataset_id)
        if not dataset:
            return jsonify({"error": "Dataset not found"}), 404

        filename = secure_filename(file.filename)
        # secure_filename strips unsafe characters and can return "" (e.g.
        # a name consisting only of path separators); saving would then
        # target the upload folder itself, so reject it explicitly.
        if not filename:
            return jsonify({"error": "Invalid filename"}), 400

        file_path = os.path.join(app.config["UPLOAD_FOLDER"], filename)
        # NOTE(review): re-uploading the same filename overwrites the prior
        # file on disk; a unique per-document prefix may be wanted — confirm
        # against whatever consumes file_id before changing the layout.
        file.save(file_path)

        try:
            # Documents are ordered within a dataset; append after the
            # current maximum position (None when the dataset is empty).
            current_max_position = db.session.query(func.max(Document.position)).filter_by(dataset_id=dataset_id).scalar()
            next_position = (current_max_position or 0) + 1

            doc = Document(
                dataset_id=dataset_id,
                name=filename,
                data_source_type="upload_file",
                data_source_info=json.dumps({"filename": filename, "path": file_path}),
                batch=str(uuid.uuid4()),  # identifies this upload batch
                created_from="api",
                position=next_position,
                indexing_status="waiting",
                file_id=filename
            )
            db.session.add(doc)
            db.session.commit()
            # TODO: Trigger document processing task here (parsing, chunking, embedding)
            return jsonify({"message": "Document uploaded successfully", "document_id": doc.id, "filename": filename}), 201
        except Exception as e:
            # Roll back so the session stays usable for later requests, and
            # remove the orphaned file so disk state matches the database.
            db.session.rollback()
            app.logger.error(f"Error creating document entry: {e}")
            if os.path.exists(file_path):
                os.remove(file_path)
            return jsonify({"error": str(e)}), 500

    @app.route("/datasets/<string:dataset_id>/query", methods=["GET"])
    def query_dataset_route(dataset_id):
        """Run a RAG query against the dataset.

        Query params: ``q`` (required text), ``top_k`` (int, default 2).
        """
        query_text = request.args.get("q")
        top_k_str = request.args.get("top_k", "2")  # default top_k to 2

        if not query_text:
            return jsonify({"error": "Query parameter 'q' is required"}), 400

        try:
            top_k = int(top_k_str)
        except ValueError:
            return jsonify({"error": "Parameter 'top_k' must be an integer"}), 400

        try:
            results = DatasetService.perform_rag_query(dataset_id, query_text, top_k=top_k)
            return jsonify({"results": results}), 200
        except Exception as e:
            # TODO: catch werkzeug.exceptions.NotFound separately and map it
            # to a 404 once DatasetService raises it for unknown datasets.
            app.logger.error(f"Error querying dataset {dataset_id}: {e}")
            return jsonify({"error": str(e)}), 500

    return app

if __name__ == "__main__":
    # Build the app via the factory and serve it for local development.
    application = create_app()
    application.run(host="0.0.0.0", port=5001, debug=True)

