import torch
import numpy as np
import matplotlib.pyplot as plt
import torch.nn as nn
import torch.optim
from sklearn.linear_model import LogisticRegression, SGDClassifier

# Reproducibility: fix both RNGs — torch drives the Gaussian sampling below,
# numpy drives the shuffle of the combined dataset.
torch.manual_seed(10)

n = 10000          # samples per class
mean_value = 2.5   # magnitude of each class's per-feature mean
bias = 1.3         # shared offset added to both class centers

# Class 0: 2-D Gaussian centered at (mean_value + bias, mean_value + bias), unit std.
n_data = torch.ones(n, 2)
x0 = torch.normal(mean_value * n_data, 1.0) + bias
y0 = torch.zeros(n)

# Class 1: 2-D Gaussian centered at (-mean_value + bias, -mean_value + bias), unit std.
x1 = torch.normal(-mean_value * n_data, 1.0) + bias
y1 = torch.ones(n)

# Stack both classes, then shuffle so the sequential train/val split is class-balanced.
train_x_0 = torch.cat((x0, x1), 0)
train_y_0 = torch.cat((y0, y1), 0)
np.random.seed(10)
# Idiomatic: an int argument permutes arange(n); identical result to permuting range(n).
ind = np.random.permutation(train_y_0.shape[0])
train_x, train_y = train_x_0[ind], train_y_0[ind]
# Vectorized class fraction (was a Python-level sum over the tensor); should be ~0.5.
print(train_x, train_y, (train_y == 1).float().mean())
n = train_y.shape[0]
ntrn = int(n * 0.8)  # 80/20 train/validation split
x_trn, y_trn, x_val, y_val = train_x[:ntrn], train_y[:ntrn], train_x[ntrn:], train_y[ntrn:]
# The shuffle above should leave the validation set roughly balanced too.
print((y_val == 1).float().mean())

# Fit an L2-regularized logistic regression on the training split.
# Other valid solvers: {'sag', 'newton-cg', 'liblinear', 'lbfgs', 'newton-cholesky', 'saga'}
lr = LogisticRegression(penalty='l2', tol=1e-9, C=1, solver="lbfgs", fit_intercept=True, max_iter=100)
lr.fit(x_trn, y_trn)

# Validation accuracy. Vectorized mean over the boolean match array replaces the
# Python-level builtin sum() loop; the resulting np.float64 value is identical.
pred_val = lr.predict(x_val)
print((pred_val == y_val.numpy()).mean())

# Training accuracy, as a quick over/under-fitting check against the validation score.
pred = lr.predict(x_trn)
print((pred == y_trn.numpy()).mean())
# Learned decision boundary: coef_ is the per-feature weight vector, intercept_ the bias.
print(lr.coef_, lr.intercept_)
