import joblib

import pandas as pd
from matplotlib import pyplot as plt
from sklearn.metrics import roc_auc_score, roc_curve, auc
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import LabelEncoder
from xgboost import XGBRegressor, XGBClassifier
from lightgbm import LGBMClassifier

# Load the raw training data and discretize the continuous features into
# ordinal bins so the boosted-tree model sees coarse-grained categories.
# NOTE: pd.cut uses fixed, right-inclusive bin edges; values falling outside
# the outermost edges become NaN (assumed not to occur in this dataset —
# TODO confirm against the raw column ranges).
data = pd.read_csv('../../data/raw/train.csv')
data['Age'] = pd.cut(data['Age'], bins=[17, 25, 35, 45, 60], labels=[0, 1, 2, 3])
data['DistanceFromHome'] = pd.cut(data['DistanceFromHome'], bins=[0, 3, 5, 10, 15, 20, 30], labels=[0, 1, 2, 3, 4, 5])
data['MonthlyIncome'] = pd.cut(data['MonthlyIncome'], bins=[1000, 2000, 5000, 8000, 10000, 15000, 20000],
                               labels=[0, 1, 2, 3, 4, 5])
# qcut: equal-frequency tercile split rather than fixed edges.
data['PercentSalaryHike'] = pd.qcut(data['PercentSalaryHike'], 3, labels=[0, 1, 2])
data['TotalWorkingYears'] = pd.cut(data['TotalWorkingYears'], bins=[-1, 1, 2, 5, 10, 20, 30, 40],
                                   labels=[0, 1, 2, 3, 4, 5, 6])
data['YearsAtCompany'] = pd.cut(data['YearsAtCompany'], bins=[-1, 1, 2, 5, 10, 20, 30, 40],
                                labels=[0, 1, 2, 3, 4, 5, 6])
data['YearsInCurrentRole'] = pd.cut(data['YearsInCurrentRole'], bins=[-1, 1, 2, 5, 10, 18], labels=[0, 1, 2, 3, 4])
# FIX: labels were [0, 1, 2, 3, 5] — skipping 4 was a typo. The sibling
# feature 'YearsInCurrentRole' uses the same edges with consecutive labels,
# and every other binned column labels its bins 0..k-1 consecutively.
data['YearsSinceLastPromotion'] = pd.cut(data['YearsSinceLastPromotion'], bins=[-1, 1, 2, 5, 10, 18],
                                         labels=[0, 1, 2, 3, 4])
data['YearsWithCurrManager'] = pd.cut(data['YearsWithCurrManager'], bins=[-1, 1, 3, 5, 8, 12, 17],
                                      labels=[0, 1, 2, 3, 4, 5])
# Only raw string columns (dtype 'object') are picked up here; the binned
# columns above are 'category' dtype and therefore left alone. The target
# column 'Attrition' is among the object columns and gets encoded too.
categorical_cols = data.select_dtypes(include=['object']).columns

# Re-fitting one LabelEncoder per column is fine: fit_transform resets its
# learned classes each call.
le = LabelEncoder()
for i in categorical_cols:
    data[i] = le.fit_transform(data[i])

# Hand-picked feature subset used for training. All columns were either
# label-encoded or binned into ordinal categories above.
feature_columns = [
    'OverTime',
    'TotalWorkingYears',
    'StockOptionLevel',
    'Age',
    'YearsAtCompany',
    'MonthlyIncome',
    'JobLevel',
    'JobRole',
    'MaritalStatus',
    'YearsWithCurrManager',
    'YearsInCurrentRole',
    'JobInvolvement',
    'JobSatisfaction',
    'EnvironmentSatisfaction',
    'BusinessTravel',
    'EducationField',
    'Department',
    'NumCompaniesWorked',
    'WorkLifeBalance',
    'DistanceFromHome',
]
x = data[feature_columns]
# Binary classification target (label-encoded earlier).
y = data['Attrition']

# 80/20 hold-out split; fixed seed so runs are reproducible.
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=57)

# LightGBM binary classifier built from depth-1 trees (decision stumps)
# boosted over 500 rounds — heavy regularization via tree shallowness,
# with AUC as the training metric.
es = LGBMClassifier(
    learning_rate=0.1,
    random_state=57,
    n_estimators=500,
    max_depth=1,
    min_child_weight=1,
    objective='binary',
    metric='auc',
)
es.fit(x_train, y_train)

# ROC-AUC needs ranking scores, not hard labels, so use the positive-class
# probability column.
y_pre = es.predict_proba(x_test)[:, 1]
roc_auc = roc_auc_score(y_test, y_pre)
print(f"AUC Score: {roc_auc:.4f}")

# # Optional: plot the ROC curve for the hold-out set.
# fpr, tpr, _ = roc_curve(y_test, y_pre)
# roc_auc_value = auc(fpr, tpr)
#
# plt.figure(figsize=(8, 6))
# plt.plot(fpr, tpr, color='blue', lw=2, label=f'ROC curve (area = {roc_auc_value:.2f})')
# plt.plot([0, 1], [0, 1], color='gray', linestyle='--', lw=2, label='Random guess')
# plt.xlim([0.0, 1.0])
# plt.ylim([0.0, 1.05])
# plt.xlabel('False Positive Rate')
# plt.ylabel('True Positive Rate')
# plt.title('ROC Curve')
# plt.legend(loc="lower right")
# plt.show()

# Optional: persist the trained model for later inference.
# joblib.dump(es, './model06_cut.pkl')
