import numpy as np
import matplotlib.pyplot as plt
import platform

# Pick a font able to render the CJK labels used in the plots, keyed by
# operating system ('Darwin' is macOS); anything else falls back to a
# broadly available Unicode font.
_OS_FONTS = {'Windows': 'SimHei', 'Darwin': 'STHeiti'}
plt.rcParams['font.sans-serif'] = [_OS_FONTS.get(platform.system(), 'Arial Unicode MS')]

# Render the minus sign as ASCII '-'; the Unicode minus is missing from
# many CJK fonts and would show as an empty box.
plt.rcParams['axes.unicode_minus'] = False

def generate_data(n_per_class=50, seed=42):
    """Generate a linearly separable 2-D binary classification dataset.

    Draws two Gaussian clusters of equal size, centered at (-2, -2) for
    class 0 and (2, 2) for class 1.

    Parameters
    ----------
    n_per_class : int, optional
        Number of samples per class (default 50, matching the original demo).
    seed : int or None, optional
        Seed for NumPy's global RNG; pass ``None`` for non-deterministic data.

    Returns
    -------
    X : ndarray of shape (2 * n_per_class, 2)
        Feature matrix; class-0 rows first, then class-1 rows.
    y : ndarray of shape (2 * n_per_class,)
        Labels: 0.0 for the first cluster, 1.0 for the second.
    """
    np.random.seed(seed)
    X_class1 = np.random.randn(n_per_class, 2) + np.array([-2, -2])
    X_class2 = np.random.randn(n_per_class, 2) + np.array([2, 2])
    X = np.vstack((X_class1, X_class2))
    y = np.hstack((np.zeros(n_per_class), np.ones(n_per_class)))
    return X, y

def sigmoid(z):
    """Numerically stable logistic function 1 / (1 + e^(-z)).

    The input is clipped to [-500, 500] before exponentiation: the original
    form overflowed ``np.exp`` (raising a RuntimeWarning) for large-magnitude
    negative z, since exp(x) overflows float64 around x = 709. At double
    precision the clipped result is indistinguishable from the true value
    (sigmoid(±500) already rounds to exactly 1.0 / ~7e-218).

    Works on scalars and ndarrays alike.
    """
    return 1 / (1 + np.exp(-np.clip(z, -500, 500)))

def compute_cost(X, y, theta):
    """Return the mean log-loss (binary cross-entropy) of theta on (X, y).

    X is the design matrix (intercept column included), y holds 0/1 labels,
    theta the current weight vector. A small epsilon keeps log() away from
    zero when predictions saturate at 0 or 1.
    """
    m = len(y)
    probs = sigmoid(X.dot(theta))
    eps = 1e-5  # guards against log(0)
    per_sample = y * np.log(probs + eps) + (1 - y) * np.log(1 - probs + eps)
    return -(1 / m) * np.sum(per_sample)

def gradient_descent(X, y, theta, learning_rate, iterations, ax1, ax2):
    """Fit logistic-regression weights by batch gradient descent, animated.

    Every 10th iteration (and the final one) redraws the decision boundary
    on *ax1* and the running loss curve on *ax2*, pausing briefly so the
    update is visible as an animation.

    Parameters: X is the design matrix with an intercept column, y the 0/1
    labels, theta the starting weights (updated in place), followed by the
    step size, iteration count, and the two matplotlib axes.

    Returns the fitted weight vector.
    """
    m = len(y)
    cost_history = []

    for step in range(iterations):
        # One batch gradient step: theta <- theta - lr * X^T(h - y) / m
        predictions = sigmoid(X.dot(theta))
        theta -= learning_rate * ((1 / m) * X.T.dot(predictions - y))

        # Loss is recorded after the update, for the convergence plot
        cost = compute_cost(X, y, theta)
        cost_history.append(cost)

        # Only refresh the figure every 10 steps (and on the last step)
        if step % 10 != 0 and step != iterations - 1:
            continue

        # Left panel: current decision boundary (intercept column dropped
        # since plotting works in raw feature space)
        ax1.cla()
        plot_decision_boundary(X[:, 1:], y, theta, ax1)
        ax1.set_title(f"逻辑回归 - 第 {step+1} 次迭代\n损失: {cost:.4f}")
        ax1.text(0.05, 0.95, f"第 {step+1} 次迭代: 正在调整权重以更好地分隔类别。",
                 transform=ax1.transAxes, fontsize=10, color="blue", verticalalignment='top')

        # Right panel: loss-convergence curve so far
        ax2.cla()
        ax2.plot(cost_history, color='purple')
        ax2.set_title("损失函数收敛 (对数损失)")
        ax2.set_xlabel("迭代次数")
        ax2.set_ylabel("损失")
        ax2.text(0.05, 0.9, "较低的损失表示模型性能更好。", transform=ax2.transAxes, fontsize=10, color="purple", verticalalignment='top')

        plt.pause(0.3)  # brief pause creates the animation effect

    plt.show()
    return theta

def plot_decision_boundary(X, y, theta, ax):
    """Draw the data, the model's probability surface, and the p=0.5 boundary.

    X is the raw (no-intercept) 2-D feature matrix, y the 0/1 labels, theta
    the 3-element weight vector (intercept first), and ax the target axes.
    """
    neg = (y == 0)
    pos = (y == 1)
    ax.scatter(X[neg][:, 0], X[neg][:, 1], color='blue', label='类别 0')
    ax.scatter(X[pos][:, 0], X[pos][:, 1], color='red', label='类别 1')

    # Evaluate the model on a grid that covers the data with a 1-unit margin
    x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
    y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.1),
                         np.arange(y_min, y_max, 0.1))
    grid = np.column_stack((np.ones(xx.size), xx.ravel(), yy.ravel()))
    Z = sigmoid(grid.dot(theta)).reshape(xx.shape)
    ax.contourf(xx, yy, Z, alpha=0.3, cmap="coolwarm")

    # The decision boundary is the probability-0.5 level set
    ax.contour(xx, yy, Z, levels=[0.5], linewidths=1, colors='black')
    ax.set_xlabel("特征 1")
    ax.set_ylabel("特征 2")
    ax.legend()

    # Region labels, anchored relative to the grid corners
    ax.text(x_min + 0.8, y_min + 1, "区域: 较高概率为类别 0", color="blue", fontsize=9)
    ax.text(x_max - 2.8, y_max - 2, "区域: 较高概率为类别 1", color="red", fontsize=9)

def predict_and_plot_new_point(theta, ax):
    """Prompt the user for a 2-D point, classify it with *theta*, plot it.

    Non-numeric input is reported on stdout and the function returns
    without touching the plot.
    """
    try:
        x1 = float(input("请输入特征 1 值: "))
        x2 = float(input("请输入特征 2 值: "))
    except ValueError:
        print("输入无效，请输入有效的数字。")
        return

    # Prepend the intercept term before scoring
    probability = sigmoid(np.array([1, x1, x2]).dot(theta))
    predicted_class = 1 if probability >= 0.5 else 0
    color = 'red' if predicted_class == 1 else 'blue'

    # Mark the new point in the predicted class's color
    ax.scatter(x1, x2, color=color, edgecolor='black', s=100, marker='o')
    ax.text(x1 + 0.1, x2, f"预测类别: {predicted_class} (概率: {probability:.2f})", fontsize=9, color=color)
    plt.draw()
    print(f"特征 1: {x1}, 特征 2: {x2}, 预测类别: {predicted_class}, 概率: {probability:.2f}")

def main():
    """Run the demo: generate data, fit logistic regression with an animated
    visualization, then let the user classify new points interactively.

    The interactive loop runs until the user presses Ctrl-C or stdin is
    closed; the original bare ``while True`` had no exit path and dumped a
    traceback on either event.
    """
    # Generate data and initialize parameters
    X, y = generate_data()
    X = np.c_[np.ones(X.shape[0]), X]  # prepend intercept column
    theta = np.zeros(X.shape[1])       # weights start at zero

    learning_rate = 0.1
    iterations = 200

    # One figure: decision boundary on the left, loss curve on the right
    fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(14, 6))

    # Run gradient descent and visualize the fit
    theta = gradient_descent(X, y, theta, learning_rate, iterations, ax1, ax2)

    # Interactive loop: read points from stdin and plot each prediction.
    # Exit cleanly on Ctrl-C or end-of-input instead of crashing.
    try:
        while True:
            plt.pause(0.1)  # keep the GUI responsive between prompts
            predict_and_plot_new_point(theta, ax1)
    except (KeyboardInterrupt, EOFError):
        print("\n已退出交互模式。")

if __name__ == "__main__":
    main()