import numpy as np

def logistic(X, y):
    '''
    LR Logistic Regression.

    Trains a logistic regression classifier with batch gradient descent on
    z-score-normalized features, with a bias term prepended.

    INPUT:  X: training sample features, P-by-N matrix.
            y: training sample labels, 1-by-N row vector with values in {-1, +1}.

    OUTPUT: w: learned parameters, (P+1)-by-1 column vector
               (w[0] is the bias; remaining weights apply to the
               z-score-normalized features).
    '''
    P, N = X.shape
    # Zero init is deterministic and safe here: the logistic loss is convex,
    # so there is no symmetry that random initialization would need to break.
    w = np.zeros((P + 1, 1))
    learning_rate = 0.001
    max_iter = 20000
    tol = 1e-6
    eps = 1e-12  # guards against division by zero for zero-variance features

    # Z-score normalize each feature across the N samples.
    X_mean = np.mean(X, axis=1, keepdims=True)
    X_std = np.std(X, axis=1, keepdims=True)
    X_normalized = (X - X_mean) / (X_std + eps)

    # Prepend a constant-1 row so w[0] acts as the bias term.
    X1 = np.vstack((np.ones((1, N)), X_normalized))

    # Map labels {-1, +1} -> {0, 1} for the cross-entropy gradient.
    y01 = (y + 1) / 2

    for _ in range(max_iter):
        scores = np.dot(w.T, X1)
        # Clip scores before exp to avoid overflow for large |score|.
        prob = 1.0 / (1.0 + np.exp(-np.clip(scores, -500, 500)))
        # Gradient of the mean cross-entropy loss w.r.t. w.
        grad = np.dot(X1, (prob - y01).T) / N
        w_new = w - learning_rate * grad
        if np.linalg.norm(w_new - w) < tol:
            # Keep the final (converged) update rather than discarding it.
            w = w_new
            break
        w = w_new
    return w