from flask import Flask, jsonify, request
from flask_cors import CORS
from collections import deque, defaultdict
from multiprocessing import Process
import pandas as pd

# Import task handlers from tasks.py
from tasks import NODE_TYPE_HANDLERS, execute_node_task

# --- App Setup ---
app = Flask(__name__)
CORS(app)

# --- Core Logic ---

def topological_sort(nodes, edges):
    """Return node ids in dependency order (Kahn's algorithm), or None on a cycle.

    Edges whose source/target are not in `nodes` are ignored. Ordering is
    deterministic because dicts preserve insertion order.
    """
    successors = {n['id']: [] for n in nodes}
    incoming = {n['id']: 0 for n in nodes}

    # Register only edges whose both endpoints are known nodes.
    for e in edges:
        src, dst = e['source'], e['target']
        if src in successors and dst in incoming:
            successors[src].append(dst)
            incoming[dst] += 1

    # Seed the frontier with every node that has no unmet dependencies.
    ready = deque(nid for nid, deg in incoming.items() if deg == 0)
    ordering = []
    while ready:
        current = ready.popleft()
        ordering.append(current)
        for nxt in successors.get(current, []):
            incoming[nxt] -= 1
            if incoming[nxt] == 0:
                ready.append(nxt)

    # If some node never reached in-degree 0, the graph has a cycle.
    return ordering if len(ordering) == len(nodes) else None

def run_workflow_in_background(nodes, edges, execution_order):
    """Execute workflow nodes sequentially in a child process.

    Walks `execution_order`, feeding each node the collected outputs of its
    parent nodes and recording its own output for downstream nodes.
    """
    print("--- [Background Process] Workflow Execution Started ---")

    node_lookup = {n['id']: n for n in nodes}

    # Invert the edge list into a child -> [parents] mapping.
    parents_of = defaultdict(list)
    for e in edges:
        parents_of[e['target']].append(e['source'])

    # Outputs of nodes executed so far, keyed by node id.
    results = {}

    for nid in execution_order:
        current = node_lookup.get(nid)
        if current is None:
            print(f"⚠️ [Background Process] Warning: Node {nid} not found.")
            continue

        # A parent absent from `results` contributes None as its output.
        inputs = [results.get(parent) for parent in parents_of[nid]]
        results[nid] = execute_node_task(current, inputs)

    print("--- [Background Process] Workflow Execution Finished ---")


# --- API Endpoints ---

@app.route('/api/v1/workflow/execute', methods=['POST'])
def execute_workflow():
    """
    Receives a workflow, parses it, and starts a background process for execution.

    Expects a JSON object body with 'nodes' and 'edges' lists. Returns 400 for
    a missing/invalid body, an empty node list, or a cyclic graph; 200 with the
    computed execution order otherwise.
    """
    # Bug fix: request.json errors out (or yields None) when the body is
    # missing or not JSON, which previously crashed on data.get with a 500.
    # get_json(silent=True) returns None instead, so we can answer 400 cleanly.
    data = request.get_json(silent=True)
    if not isinstance(data, dict):
        return jsonify({"status": "error", "message": "Invalid or missing JSON body."}), 400

    nodes = data.get('nodes', [])
    edges = data.get('edges', [])

    if not nodes:
        return jsonify({"status": "error", "message": "No nodes provided."}), 400

    execution_order = topological_sort(nodes, edges)
    if execution_order is None:
        return jsonify({"status": "error", "message": "Workflow contains a cycle."}), 400

    # Start a new process to run the workflow in the background.
    # args must be picklable for multiprocessing; raw JSON lists/dicts are.
    process = Process(target=run_workflow_in_background, args=(nodes, edges, execution_order))
    process.start()

    print(f"✅ Started background process {process.pid} for workflow execution.")

    return jsonify({
        "status": "success",
        "message": "Workflow execution started in the background.",
        "execution_order": execution_order
    })

if __name__ == '__main__':
    # Development server only: debug=True enables the interactive debugger and
    # reloader and must not be used in production. Port 5001 avoids clashing
    # with the Flask default (5000).
    app.run(debug=True, port=5001)