# Finalize and display the figure built above this chunk: tight_layout
# repacks axes/labels so nothing overlaps before the window is shown.
plt.tight_layout()
plt.show()

# Task 2: Manually implement KNN for classification
def euclidean_distance(x1, x2):
    """Return the Euclidean (L2) distance between points *x1* and *x2*.

    Equivalent to sqrt(sum((x1 - x2) ** 2)); delegates the square/sum/sqrt
    pipeline to NumPy's norm, which computes exactly that for any array.
    """
    return np.linalg.norm(x1 - x2)

def knn(X_train, y_train, X_test, k=3):
    """Classify each row of X_test by majority vote of its k nearest
    training points under Euclidean distance.

    Parameters
    ----------
    X_train : array-like of shape (n_train, n_features)
        Training feature vectors.
    y_train : sequence of length n_train
        Labels for the training points (must be hashable values).
    X_test : array-like of shape (n_test, n_features)
        Points to classify.
    k : int, default 3
        Number of neighbours that vote; values larger than n_train are
        silently truncated by the slice below.

    Returns
    -------
    numpy.ndarray of shape (n_test,)
        Predicted label for each test point.
    """
    X_train = np.asarray(X_train)
    predictions = []
    for test_point in X_test:
        # Vectorized distance to every training point in one NumPy
        # expression instead of a Python-level loop over rows — same
        # values as sqrt(sum((a-b)**2)) per pair, computed at C speed.
        distances = np.sqrt(((X_train - test_point) ** 2).sum(axis=1))
        k_indices = np.argsort(distances)[:k]
        k_nearest_labels = [y_train[i] for i in k_indices]
        # Counter preserves insertion order, so among tied counts the
        # label seen first in distance order wins — same tie-breaking
        # as the original implementation.
        most_common = Counter(k_nearest_labels).most_common(1)
        predictions.append(most_common[0][0])
    return np.array(predictions)

# Split the dataset into training and testing sets
# NOTE(review): this is a sequential 80/20 split with no shuffling — if
# filtered_data is ordered (e.g. grouped by class), the test set will not
# be representative; confirm the data was shuffled upstream.
train_size = int(0.8 * len(filtered_data))
X_train, X_test = X[:train_size], X[train_size:]
y_train, y_test = y[:train_size], y[train_size:]

# Apply KNN classification
k = 3  # number of neighbours that vote on each prediction
y_pred = knn(X_train, y_train, X_test, k)

# Evaluate the model accuracy
# Fraction of test points whose predicted label equals the true label.
accuracy = np.mean(y_pred == y_test)
print(f"Accuracy of KNN (k={k}): {accuracy * 100:.2f}%")

# Visualize KNN classification results with PCA
# NOTE(review): train and test are projected by two *independent* PCA
# fits, so the two scatters live in different coordinate systems (the
# principal axes may be rotated/flipped between them). If the `pca`
# helper can return/apply a fitted projection, fit on X_train only and
# transform X_test with the same components — confirm its interface.
X_train_pca = pca(X_train, n_components=2)
X_test_pca = pca(X_test, n_components=2)

plt.figure(figsize=(8, 6))
# Training points: filled circles coloured by true label.
plt.scatter(X_train_pca[:, 0], X_train_pca[:, 1], c=y_train, cmap='viridis', edgecolor='k', label='Training data')
# Test points: 'x' markers coloured by *predicted* label (separate cmap
# so predictions are visually distinct from the training classes).
plt.scatter(X_test_pca[:, 0], X_test_pca[:, 1], c=y_pred, cmap='coolwarm', edgecolor='k', marker='x', label='Test predictions')
plt.title('KNN Classification Results (PCA-reduced Data)')
plt.xlabel('Principal Component 1')
plt.ylabel('Principal Component 2')
plt.legend()
plt.show()