from lightgbm.sklearn import LGBMRegressor
from sklearn.model_selection import cross_val_score
from sklearn.metrics import mean_absolute_error,  make_scorer
import pandas as pd
import numpy as np
import nni


def main(args):
    """Run one NNI trial: train an LGBMRegressor with the given
    hyperparameters and report the 5-fold cross-validated MAE
    (computed on the original price scale) as the final result.

    Parameters
    ----------
    args : dict
        Keyword hyperparameters passed straight to ``LGBMRegressor``
        (the merged NNI trial params plus fixed settings).
    """
    # Load these columns as pandas 'category' dtype so LightGBM treats
    # them as categorical features.
    categorical_dtypes = {
        'model': 'category',
        'brand': 'category',
        'bodyType': 'category',
        'fuelType': 'category',
        'gearbox': 'category',
        'notRepairedDamage': 'category',
        'regionCode': 'category',
    }

    df = pd.read_csv('../user_data/df_s.csv', sep=' ', dtype=categorical_dtypes)

    # Select the training rows once instead of re-filtering for X and y.
    train_rows = df[df.train == 1]
    # NOTE(review): the constant 'train' indicator column is left in the
    # feature set — harmless for tree models, but confirm it is intended.
    train_X = train_rows.drop(['price', 'SaleID', 'regionCode'], axis=1)
    # The model is fit on log1p(price); the scorer converts predictions
    # back with expm1 so the reported loss is in original price units.
    train_y_ln = np.log1p(train_rows['price'])

    def maee(y_true, y_pred):
        """MAE on the original price scale; also reports each CV fold's
        score to NNI as an intermediate result."""
        loss = mean_absolute_error(np.expm1(y_true), np.expm1(y_pred))
        nni.report_intermediate_result(loss)
        return loss

    model = LGBMRegressor(**args)
    mae = cross_val_score(model,
                          X=train_X,
                          y=train_y_ln,
                          verbose=0,
                          cv=5,
                          scoring=make_scorer(maee))
    # Mean MAE across the 5 folds is the trial's objective value.
    nni.report_final_result(np.mean(mae))


if __name__ == '__main__':
    # Fetch the tuner-proposed hyperparameters for this trial, then
    # overlay the settings that are fixed across every trial.
    params = nni.get_next_parameter()
    fixed_params = {
        'n_estimators': 15000,
        'min_data_in_leaf': 20,
        'metric': 'mae',
        'boosting_type': 'gbdt',
        'lambda_l2': 2,
        'objective': 'regression_l1',
    }
    params.update(fixed_params)
    main(params)
