import numpy as np

class LogisticRegression:
    """Binary logistic regression trained with full-batch gradient descent.

    Minimizes mean log-loss over the training set. ``fit`` learns the
    weight vector and bias; ``predict_proba`` returns P(y=1|x) and
    ``predict`` thresholds it into hard 0/1 labels.
    """

    def __init__(self, learning_rate=0.01, epochs=1000):
        self.lr = learning_rate     # gradient-descent step size
        self.epochs = epochs        # number of full passes over the data
        self.weights = None         # weight vector, shape (n_features,); set by fit()
        self.bias = None            # scalar intercept; set by fit()

    def _sigmoid(self, z):
        """Sigmoid activation, clipped to avoid np.exp overflow.

        exp(500) is still representable in float64 and sigmoid saturates
        to ~0 / ~1 long before |z| = 500, so the clip never changes a
        meaningful output — it only suppresses overflow warnings / infs
        that the unclipped form produces for large |z|.
        """
        z = np.clip(z, -500.0, 500.0)
        return 1.0 / (1.0 + np.exp(-z))

    def fit(self, X, y):
        """Fit the model with batch gradient descent.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            Training features.
        y : array-like of shape (n_samples,)
            Binary labels (0 or 1).
        """
        # Coerce to float arrays so integer labels / lists work uniformly.
        X = np.asarray(X, dtype=float)
        y = np.asarray(y, dtype=float)
        n_samples, n_features = X.shape

        # Zero initialization: log-loss is convex, so the starting point
        # affects only convergence speed, not the optimum reached.
        self.weights = np.zeros(n_features)
        self.bias = 0.0

        for _ in range(self.epochs):
            # Current probability estimates for the whole batch.
            y_pred = self._sigmoid(np.dot(X, self.weights) + self.bias)

            # Gradient of the mean log-loss w.r.t. weights and bias.
            error = y_pred - y
            dw = np.dot(X.T, error) / n_samples
            db = error.mean()

            self.weights -= self.lr * dw
            self.bias -= self.lr * db

    def predict_proba(self, X):
        """Return P(y=1 | x) for each row of X as a 1-D ndarray."""
        X = np.asarray(X, dtype=float)
        return self._sigmoid(np.dot(X, self.weights) + self.bias)

    def predict(self, X, threshold=0.5):
        """Return hard labels: 1 where P(y=1|x) >= threshold, else 0.

        Returns a plain Python list of ints (same as the original API).
        """
        return (self.predict_proba(X) >= threshold).astype(int).tolist()

# ============= Usage example =============
def _demo():
    """Train on a tiny linearly separable 2-feature dataset and classify two new points."""
    # Training set: (feature1, feature2) -> label
    features = np.array([
        [2.5, 3.0], [1.5, 2.5], [3.5, 4.0], [3.0, 3.5],
        [0.5, 1.0], [1.0, 0.5], [0.5, 0.5], [1.0, 1.5],
    ])
    labels = np.array([1, 1, 1, 1, 0, 0, 0, 0])  # 1 = class A, 0 = class B

    clf = LogisticRegression(learning_rate=0.1, epochs=1000)
    clf.fit(features, labels)

    unseen = np.array([
        [2.0, 2.5],  # should be classified as class A
        [0.8, 1.2],  # should be classified as class B
    ])
    print("预测结果:", clf.predict(unseen))  # expected output: [1, 0]

if __name__ == "__main__":
    _demo()