"""
Simple Quantum Asteroid Detector
This program uses quantum computing to identify asteroids in images
Simplified version for high school students
"""

import os
import numpy as np
import matplotlib.pyplot as plt
from PIL import Image
from qiskit import QuantumCircuit
from qiskit_aer import Aer
from qiskit.primitives import Sampler
import pickle
import glob
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score

# Part 1: Image Processing Functions

def load_image(image_path, target_size=(8, 8)):
    """Load an image file and turn it into a normalized 1-D feature vector.

    The image is converted to grayscale, shrunk to ``target_size`` so it is
    small enough for quantum processing, scaled into [0, 1], and flattened.

    Args:
        image_path: path to the image file.
        target_size: (width, height) to resize to before flattening.

    Returns:
        1-D numpy array of length target_size[0] * target_size[1].
    """
    grayscale = Image.open(image_path).convert('L')
    shrunk = grayscale.resize(target_size)

    # Pixel values are 0-255; divide so every feature lies in [0, 1],
    # then flatten rows into a single vector.
    pixels = np.asarray(shrunk, dtype=float) / 255.0
    return pixels.reshape(-1)

def reduce_dimension(features, n_qubits=2):
    """Reduce a feature vector to exactly 2**n_qubits values for quantum encoding.

    Short vectors are zero-padded; longer ones are compressed by averaging
    contiguous blocks that together cover the whole input.

    Args:
        features: 1-D array-like of feature values.
        n_qubits: number of qubits; output length is 2**n_qubits.

    Returns:
        np.ndarray of exactly 2**n_qubits values.
    """
    target_dim = 2**n_qubits
    features = np.asarray(features)

    if len(features) <= target_dim:
        # Already few enough features: pad with zeros up to the target length.
        return np.pad(features, (0, target_dim - len(features)))

    # Split into exactly target_dim contiguous blocks (sizes differ by at most
    # one when the length is not evenly divisible) and average each block.
    # Unlike a fixed block size with truncation, this uses every input value
    # instead of silently dropping the trailing remainder.
    blocks = np.array_split(features, target_dim)
    return np.array([block.mean() for block in blocks])

def prepare_dataset(data_dir="dataset", n_qubits=2):
    """Load asteroid/no-asteroid images and build a labeled feature dataset.

    Args:
        data_dir: directory containing ``with_asteroid`` and
            ``without_asteroid`` subfolders of PNG images.
        n_qubits: qubit count; each sample is reduced to 2**n_qubits features.

    Returns:
        (X, y): feature matrix and 0/1 label vector as numpy arrays
        (1 = asteroid present, 0 = absent).
    """
    max_samples = 50  # cap per class to keep the demonstration fast

    # One (subfolder, label, progress message) entry per class; replaces the
    # previous two copy-pasted processing loops.
    classes = [
        ("with_asteroid", 1, "Processing images with asteroids..."),
        ("without_asteroid", 0, "Processing images without asteroids..."),
    ]

    X = []  # Features
    y = []  # Labels

    for subdir, label, message in classes:
        print(message)
        paths = glob.glob(os.path.join(data_dir, subdir, "*.png"))[:max_samples]
        for path in paths:
            features = load_image(path)
            X.append(reduce_dimension(features, n_qubits))
            y.append(label)

    return np.array(X), np.array(y)

# Part 2: Quantum Computing Functions

def create_quantum_circuit(features, n_qubits=2):
    """Build a feature-encoding quantum circuit for one sample.

    The circuit puts every qubit into superposition, angle-encodes the
    (unit-normalized) feature values as rotations, entangles all qubit pairs
    with CNOTs, applies a second rotation layer, and measures all qubits.

    Args:
        features: 1-D array of feature values; only the first 2**n_qubits
            entries are used.
        n_qubits: number of qubits in the circuit.

    Returns:
        A measured qiskit QuantumCircuit.
    """
    # Keep at most 2**n_qubits features and rescale to unit norm so the
    # rotation angles stay well-behaved.
    usable = min(2**n_qubits, len(features))
    features = features[:usable]
    magnitude = np.sqrt(np.sum(features**2))
    if magnitude > 0:
        features = features / magnitude

    circuit = QuantumCircuit(n_qubits)

    # Layer 1: uniform superposition across all qubits.
    for qubit in range(n_qubits):
        circuit.h(qubit)

    # Layer 2: angle-encode features — a Y rotation per qubit, plus a Z
    # rotation driven by the next feature when one exists.
    for qubit in range(min(n_qubits, len(features))):
        circuit.ry(features[qubit] * np.pi, qubit)
        if qubit + 1 < len(features):
            circuit.rz(features[qubit + 1] * np.pi, qubit)

    # Layer 3: entangle every ordered qubit pair (all-to-all CNOTs).
    for control in range(n_qubits):
        for target in range(control + 1, n_qubits):
            circuit.cx(control, target)

    # Layer 4: extra X rotations at half angle to deepen the circuit.
    for qubit in range(min(n_qubits, len(features))):
        circuit.rx(features[qubit] * np.pi / 2, qubit)

    # Measure all qubits (useful for drawing/inspection).
    circuit.measure_all()

    return circuit

def quantum_similarity(features1, features2, n_qubits=2):
    """Estimate the similarity of two feature vectors with a quantum swap test.

    Each vector is angle-encoded on its own n-qubit register, a controlled-SWAP
    swap test is run against an ancilla qubit, and the ancilla's probability of
    measuring |0> is converted into a similarity score.

    Args:
        features1, features2: 1-D feature arrays; only the first 2**n_qubits
            entries of each are used.
        n_qubits: register width per state.

    Returns:
        float in [0, 1]; 0 if either vector has zero magnitude.
    """
    # Use at most 2**n_qubits features from each vector.
    feature_dim = min(2**n_qubits, len(features1), len(features2))
    f1 = features1[:feature_dim]
    f2 = features2[:feature_dim]

    # Normalize; a zero-magnitude vector cannot be encoded, so report 0.
    norm1 = np.sqrt(np.sum(f1**2))
    norm2 = np.sqrt(np.sum(f2**2))
    if norm1 == 0 or norm2 == 0:
        return 0
    f1_normalized = f1 / norm1
    f2_normalized = f2 / norm2

    # Swap-test circuit: n qubits per state plus one ancilla, one classical bit.
    # (The two stand-alone circuits the previous version built here were dead
    # code, as were its redundant local imports — both removed.)
    similarity_circuit = QuantumCircuit(2 * n_qubits + 1, 1)
    ancilla = 2 * n_qubits

    # Put the ancilla into superposition.
    similarity_circuit.h(ancilla)

    # Encode the first state on qubits [0, n) and the second on [n, 2n).
    for i in range(n_qubits):
        if i < len(f1_normalized):
            similarity_circuit.ry(f1_normalized[i] * np.pi, i)
        if i < len(f2_normalized):
            similarity_circuit.ry(f2_normalized[i] * np.pi, i + n_qubits)

    # Controlled-SWAP between corresponding qubits — the heart of the swap test.
    for i in range(n_qubits):
        similarity_circuit.cswap(ancilla, i, i + n_qubits)

    # Interfere and measure the ancilla.
    similarity_circuit.h(ancilla)
    similarity_circuit.measure(ancilla, 0)

    # Execute with the Sampler primitive (imported at module level).
    sampler = Sampler()
    job = sampler.run(similarity_circuit, shots=1024)
    counts = job.result().quasi_dists[0]

    # P(ancilla = 0) = (1 + |<psi|phi>|^2) / 2, so the squared inner product
    # (our similarity) is 2*P(0) - 1.
    prob_zero = counts.get(0, 0)
    similarity = 2 * prob_zero - 1

    # Clamp against shot noise so callers always see a value in [0, 1].
    return max(0, min(1, similarity))

# Part 3: Quantum Classifier

class QuantumKNNClassifier:
    """K-nearest-neighbors classifier whose distance metric is the quantum
    swap-test similarity computed by ``quantum_similarity``."""

    def __init__(self, n_qubits=2, k=3):
        # Circuit width and neighbor count; training data is set by fit().
        self.n_qubits = n_qubits
        self.k = k
        self.X_train = None
        self.y_train = None

    def fit(self, X_train, y_train):
        """Memorize the training set (KNN has no real training phase)."""
        self.X_train = X_train
        self.y_train = y_train
        print(f"Model trained using {len(X_train)} samples")

    def predict(self, X_test):
        """Return a 0/1 label for each row of X_test by majority vote among
        the k most quantum-similar training samples."""
        labels = []
        for sample in X_test:
            # Similarity of this sample to every stored training vector.
            scores = [
                quantum_similarity(sample, stored, self.n_qubits)
                for stored in self.X_train
            ]

            # Indices of the k highest-similarity neighbors, then their labels.
            neighbor_indices = np.argsort(scores)[-self.k:]
            votes = [self.y_train[i] for i in neighbor_indices]

            # Majority vote decides the predicted label.
            labels.append(np.bincount(votes).argmax())

        return np.array(labels)

# Part 4: Evaluation and Visualization

def evaluate_model(model, X_test, y_test):
    """Score the classifier on held-out data.

    Prints the accuracy and a 2x2 confusion matrix, saves a heat-map of the
    matrix to ``confusion_matrix.png``, and returns the accuracy.

    Args:
        model: fitted classifier exposing ``predict``.
        X_test: test feature matrix.
        y_test: true 0/1 labels.

    Returns:
        float accuracy on the test set.
    """
    print("Evaluating model...")

    y_pred = model.predict(X_test)

    accuracy = accuracy_score(y_test, y_pred)
    print(f"Test accuracy: {accuracy:.4f}")

    # Build the 2x2 confusion matrix: rows = true label, cols = predicted.
    confusion_matrix = np.zeros((2, 2), dtype=int)
    for true_label, pred_label in zip(y_test, y_pred):
        confusion_matrix[true_label][pred_label] += 1

    print("\nConfusion Matrix:")
    print(f"True\\Predicted  No Asteroid  Asteroid")
    print(f"No Asteroid     {confusion_matrix[0][0]}           {confusion_matrix[0][1]}")
    print(f"Asteroid        {confusion_matrix[1][0]}           {confusion_matrix[1][1]}")

    # Heat-map visualization of the matrix.
    plt.figure(figsize=(8, 6))
    plt.imshow(confusion_matrix, interpolation='nearest', cmap=plt.cm.Blues)
    plt.title('Confusion Matrix')
    plt.colorbar()
    class_names = ["No Asteroid", "Asteroid"]
    plt.xticks([0, 1], class_names)
    plt.yticks([0, 1], class_names)

    # Annotate each cell; white text on dark cells keeps it readable.
    threshold = confusion_matrix.max() / 2
    for row in range(confusion_matrix.shape[0]):
        for col in range(confusion_matrix.shape[1]):
            plt.text(col, row, format(confusion_matrix[row, col], 'd'),
                     horizontalalignment="center",
                     color="white" if confusion_matrix[row, col] > threshold else "black")

    plt.tight_layout()
    plt.ylabel('True Label')
    plt.xlabel('Predicted Label')
    plt.savefig("confusion_matrix.png")

    return accuracy

def visualize_results(X_test, y_test, y_pred, n_samples=5):
    """Plot up to n_samples correctly and incorrectly classified samples.

    Top row shows correct predictions, bottom row misclassifications.  Saves
    the figure to ``prediction_results.png`` and displays it.

    Args:
        X_test: test feature matrix.
        y_test: true 0/1 labels.
        y_pred: predicted 0/1 labels.
        n_samples: maximum number of samples per row.
    """
    def _as_image(feature_vector):
        # Reshape a flat vector into a square image when its length is a
        # perfect square; otherwise fall back to a 2x2 view of the first four
        # features. (Previously duplicated in both loops.)
        size = int(np.sqrt(len(feature_vector)))
        if size**2 == len(feature_vector):
            return feature_vector.reshape(size, size)
        return feature_vector[:4].reshape(2, 2)

    def _show(ax, idx):
        # Draw one sample with its true/predicted labels in the title.
        ax.imshow(_as_image(X_test[idx]), cmap='gray')
        ax.set_title(f"True: {'Yes' if y_test[idx]==1 else 'No'}, Pred: {'Yes' if y_pred[idx]==1 else 'No'}")
        ax.axis('off')

    fig, axes = plt.subplots(2, n_samples, figsize=(15, 6))

    correct_indices = np.where(y_test == y_pred)[0]
    incorrect_indices = np.where(y_test != y_pred)[0]

    # Row 0: correct predictions; row 1: incorrect ones.  The loop bounds
    # already guard against short index lists, so no inner bounds check is
    # needed (the original had a redundant one).
    for i in range(min(n_samples, len(correct_indices))):
        _show(axes[0, i], correct_indices[i])
    for i in range(min(n_samples, len(incorrect_indices))):
        _show(axes[1, i], incorrect_indices[i])

    plt.tight_layout()
    plt.savefig("prediction_results.png")
    plt.show()

def save_model(model, filename="quantum_asteroid_model.pkl"):
    """Pickle the trained model to ``filename``.

    Args:
        model: any picklable object (e.g. a fitted QuantumKNNClassifier).
        filename: destination path for the pickle file.
    """
    with open(filename, 'wb') as f:
        # Use protocol 4 for better compatibility
        pickle.dump(model, f, protocol=4)
    # Bug fix: the message previously printed a literal "(unknown)" instead
    # of the actual destination path.
    print(f"Model saved to {filename}")

def load_model(filename="quantum_asteroid_model.pkl"):
    """Unpickle and return a previously saved model.

    WARNING: ``pickle.load`` can execute arbitrary code embedded in the file —
    only load model files you created yourself.

    Args:
        filename: path to a pickle file written by ``save_model``.

    Returns:
        The deserialized model object.
    """
    with open(filename, 'rb') as f:
        model = pickle.load(f)
    # Bug fix: the message previously printed a literal "(unknown)" instead
    # of the actual source path.
    print(f"Model loaded from {filename}")
    return model

# Part 5: Main Function

def main():
    """Run the full pipeline: data prep, training, evaluation, and saving.

    Returns:
        The fitted QuantumKNNClassifier.
    """
    n_qubits = 2  # two qubits keeps the feature space small and easy to follow

    # Load images and reduce them to 2**n_qubits features each.
    print("Preparing dataset...")
    X, y = prepare_dataset(n_qubits=n_qubits)

    # Hold out 30% for testing; fixed seed makes runs reproducible.
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=42)
    print(f"Training set size: {len(X_train)}")
    print(f"Test set size: {len(X_test)}")

    # KNN "training" just stores the data.
    print("Training quantum classifier...")
    classifier = QuantumKNNClassifier(n_qubits=n_qubits, k=3)
    classifier.fit(X_train, y_train)

    # Draw the feature-encoding circuit for the first training sample.
    if len(X_train) > 0:
        print("Visualizing quantum circuit for a sample...")
        circuit = create_quantum_circuit(X_train[0], n_qubits)
        circuit.draw(output='mpl', filename='quantum_circuit.png')
        print("Quantum circuit saved as 'quantum_circuit.png'")

    accuracy = evaluate_model(classifier, X_test, y_test)

    # NOTE(review): predict() also runs inside evaluate_model, so the costly
    # quantum similarity computations happen twice for the test set.
    y_pred = classifier.predict(X_test)
    visualize_results(X_test, y_test, y_pred)

    save_model(classifier)

    return classifier

# Run the full detection pipeline only when executed as a script (not on import).
if __name__ == "__main__":
    print("Starting quantum asteroid detection...")
    main()
    print("Quantum asteroid detection complete!")
