import numpy as np


def sigmoid(z):
    return 1 / (1 + np.exp(-z))


def logistic_regression(X, y, learning_rate=0.01, num_iterations=1000):
    """Fit a binary logistic-regression model by full-batch gradient descent.

    Parameters
    ----------
    X : array-like of shape (n_samples, n_features)
        Feature matrix; a bias column of ones is prepended internally.
    y : array-like of shape (n_samples,)
        Binary targets in {0, 1}.
    learning_rate : float, optional
        Gradient-descent step size.
    num_iterations : int, optional
        Number of full-batch update steps.

    Returns
    -------
    numpy.ndarray of shape (n_features + 1,)
        Fitted parameters; element 0 is the intercept.
    """
    # Accept plain Python sequences as well as ndarrays (the original
    # crashed on lists: `X.shape` / `y.size` are ndarray attributes).
    X = np.asarray(X, dtype=float)
    y = np.asarray(y, dtype=float)

    # Design matrix with an explicit intercept column.
    X_b = np.c_[np.ones((X.shape[0], 1)), X]
    theta = np.zeros(X_b.shape[1])

    for _ in range(num_iterations):
        z = X_b @ theta
        # Sigmoid, with z clipped before exp so large |z| cannot overflow;
        # the sigmoid is numerically saturated beyond +/-500 anyway.
        y_pred = 1.0 / (1.0 + np.exp(-np.clip(z, -500.0, 500.0)))
        # Gradient of the mean binary cross-entropy loss w.r.t. theta.
        gradient = X_b.T @ (y_pred - y) / y.size
        theta -= learning_rate * gradient

    return theta


def predict(X, theta):
    """Return hard 0/1 class labels for the rows of ``X``.

    A bias column of ones is prepended to ``X`` to match the design
    matrix used during training, then each row's logistic probability
    is thresholded at 0.5.

    Parameters
    ----------
    X : numpy.ndarray of shape (n_samples, n_features)
        Feature matrix.
    theta : numpy.ndarray of shape (n_features + 1,)
        Fitted parameters; element 0 is the intercept.

    Returns
    -------
    numpy.ndarray of shape (n_samples,)
        Predicted labels, each 0 or 1.
    """
    design = np.c_[np.ones((X.shape[0], 1)), X]
    scores = design @ theta
    # Logistic link (sigmoid) inlined; probability >= 0.5 maps to class 1.
    probabilities = 1 / (1 + np.exp(-scores))
    return (probabilities >= 0.5).astype(int)