import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn.tree import DecisionTreeClassifier
# Load the O2O coupon offline training / test sets.
data_off = pd.read_csv("./项目准备/O2O优惠券使用预测/offline_train.csv")
off_test = pd.read_csv("./项目准备/O2O优惠券使用预测/offline_test.csv")
# NOTE(review): this is an alias, not a copy — mutating off_test also changes off_test1.
off_test1 = off_test
print(off_test.head())

print(data_off.shape)
print(data_off.info())
print(data_off.describe())

# Max / min of the consumption date and of the coupon-received date.
print(data_off['Date'].max(),data_off['Date'].min())
print(data_off['Date_received'].max(),data_off['Date_received'].min())

# Missing-value counts (notebook-style expression; result is discarded when run as a script).
data_off.isnull().sum()

# When a record has no coupon, Coupon_id, Discount_rate and Date_received
# should all be missing together — check the three null masks coincide
# (notebook-style expression; result is discarded when run as a script).
nan1 = data_off["Discount_rate"].isnull()
nan2 = data_off['Date_received'].isnull()
nan3 = data_off['Coupon_id'].isnull()
np.all(nan1==nan2),np.all(nan1==nan3)

# Drop duplicate rows in place.
data_off.drop_duplicates(inplace=True) 
data_off.info()

# Convert the yyyymmdd float dates to datetime64 (missing values become NaT).
data_off['Date'] = pd.to_datetime(data_off['Date'],format='%Y%m%d')
data_off['Date_received'] = pd.to_datetime(data_off['Date_received'],format='%Y%m%d')
off_test['Date_received'] = pd.to_datetime(off_test['Date_received'],format='%Y%m%d')
data_off.info()

# Days from receiving the coupon to consuming.
# NOTE(review): when either date is NaT the element is NaT — presumably its
# .days yields NaN here; confirm on the installed pandas version.
date_interval = data_off['Date']-data_off['Date_received']
data_off['date_interval'] = [d.days for d in date_interval]
# Day of week the coupon was received (1=Monday ... 7=Sunday).
data_off['receive_week']=[d.weekday()+1 for d in data_off['Date_received']]
off_test['receive_week']=[d.weekday()+1 for d in off_test['Date_received']]

# Whether the coupon was received on a weekend (receive_week 6 or 7).
data_off['receive_isWeekend']=data_off['receive_week'].apply(lambda x:1 if x>5 else 0)
off_test['receive_isWeekend']=off_test['receive_week'].apply(lambda x:1 if x>5 else 0)

# 折扣率
def deal_rate(x):
    """Normalise a raw Discount_rate value to a discount fraction.

    Input forms:
      * NaN / None          -> NaN (record has no coupon)
      * "A:B" (full-cut)    -> (A - B) / A, rounded to 2 decimals
      * "0.95"-style string -> parsed as a float, returned as-is
    """
    if pd.isna(x):
        # Bug fix: the old code did float(x) here, which raises TypeError
        # for None; return NaN explicitly for any missing value.
        return np.nan
    if ":" in x:
        threshold, cut = x.split(":")
        a = float(threshold)  # spend threshold (denominator)
        b = a - float(cut)    # amount actually paid (numerator)
        return np.round(b / a, 2)
    return float(x)
# Map raw Discount_rate strings to a numeric discount fraction (see deal_rate).
data_off['Discount_rate_%'] = data_off['Discount_rate'].map(deal_rate)
off_test['Discount_rate_%'] = off_test['Discount_rate'].map(deal_rate)
 
# 门槛
def deal_mk(x):
    """Extract the spend threshold from a raw Discount_rate value.

    * "A:B" full-cut coupons  -> int(A), the minimum spend
    * "0.9"-style discounts   -> NaN (no threshold)
    * NaN / None              -> NaN
    """
    if pd.isna(x):
        # Bug fix: the old code did float(x) here, which raises TypeError
        # for None; return NaN explicitly for any missing value.
        return np.nan
    if ":" in x:
        return int(x.split(":")[0])
    return np.nan
# Spend threshold of each coupon; NaN for straight-discount coupons.
# NOTE(review): the positional 1 is received by Series.apply as convert_dtype
# (deprecated in recent pandas), not as an axis — presumably unintended; verify.
data_off['Discount_rate_mk'] = data_off['Discount_rate'].apply(deal_mk,1)
off_test['Discount_rate_mk'] = off_test['Discount_rate'].apply(deal_mk,1)
data_off.head()

# Label: 1 when the coupon was used within 15 days of receipt, else 0.
# NaN intervals compare False, so unused / coupon-less rows get 0.
data_off['Y'] = data_off['date_interval'].apply(lambda x:1 if x<=15 else 0)
data_off.head()

# Sliding-window dataset construction: each feature window (four months of
# history) is paired with the following month as its label window.
# Window 1: features Jan-Apr -> labels May.
feature1=data_off[((data_off['Date_received']>='2016-01-01')&(data_off['Date_received']<='2016-04-30')) | ((data_off['Date']>='2016-01-01')&(data_off['Date']<='2016-04-30'))]
feature1.reset_index(drop=True,inplace=True)
database1=data_off[((data_off['Date_received']>='2016-05-01')&(data_off['Date_received']<='2016-05-31')) | ((data_off['Date']>='2016-05-01')&(data_off['Date']<='2016-05-31'))]
database1.reset_index(drop=True,inplace=True)
print(' 1-4月数据总计%i行'%len(feature1))
print(' 5月数据总计%i行'%len(database1))
# Window 2: features Feb-May -> labels June.
feature2=data_off[((data_off['Date_received']>='2016-02-01')&(data_off['Date_received']<='2016-05-31')) | ((data_off['Date']>='2016-02-01')&(data_off['Date']<='2016-05-31'))]
feature2.reset_index(drop=True,inplace=True)
database2=data_off[((data_off['Date_received']>='2016-06-01')&(data_off['Date_received']<='2016-06-30')) | ((data_off['Date']>='2016-06-01')&(data_off['Date']<='2016-06-30'))]
database2.reset_index(drop=True,inplace=True)
print(' 2-5月数据总计%i行'%len(feature2))
print(' 6月数据总计%i行'%len(database2))
# Window 3: features Mar-Jun -> labels July (the offline test set).
feature3=data_off[((data_off['Date_received']>='2016-03-01')&(data_off['Date_received']<='2016-06-30')) | ((data_off['Date']>='2016-03-01')&(data_off['Date']<='2016-06-30'))]
feature3.reset_index(drop=True,inplace=True)
database3=off_test
# Bug fix: this window spans March-June; the label previously said "3-5月".
print(' 3-6月数据总计%i行'%len(feature3))
print(' 7月数据总计%i行'%len(database3))

def user_feature(feature):
    """Build per-user aggregate features from a history window.

    Parameters
    ----------
    feature : DataFrame
        Must contain User_id, Merchant_id, Coupon_id, Discount_rate_mk,
        Distance, Date, Date_received and date_interval (day gap between
        receiving and using a coupon; NaN when either date is missing).

    Returns
    -------
    DataFrame with one row per User_id and count / rate / distance /
    threshold statistics. Missing counts are filled with 0; rate and
    statistic columns stay NaN when undefined.

    Bug fix: user_Discount_mk_min and user_Discount_mk_max were previously
    both computed with .mean(); they now use real .min() / .max().
    """
    users = feature[['User_id']].drop_duplicates()

    # Row subsets reused by several features.
    bought = feature[pd.notna(feature['Date'])]
    received = feature[pd.notna(feature['Coupon_id'])]
    used = feature[pd.notna(feature['Date']) & pd.notna(feature['Date_received'])]
    used15 = used[used['date_interval'] <= 15]

    # 1. purchase count (merchants NOT de-duplicated)
    buy_num = bought.groupby('User_id').size().rename('buy_num').reset_index()
    users = pd.merge(users, buy_num, on='User_id', how='left')
    # 2. coupons received per user
    get_num = received.groupby('User_id').size().rename('Coupon_get_num').reset_index()
    users = pd.merge(users, get_num, on='User_id', how='left')
    users['Coupon_get_num'] = users['Coupon_get_num'].fillna(0)
    # 3. purchases made with a coupon
    use_num = used.groupby('User_id').size().rename('Coupon_use_num').reset_index()
    users = pd.merge(users, use_num, on='User_id', how='left')
    users['Coupon_use_num'] = users['Coupon_use_num'].fillna(0)
    # 4. share of purchases that used a coupon
    users['yqgmgl'] = users['Coupon_use_num'] / users['buy_num']
    # 5. redemption rate of received coupons
    users['Coupon_use_rate'] = users['Coupon_use_num'] / users['Coupon_get_num']
    # 6. coupons redeemed within 15 days
    use15 = used15.groupby('User_id').size().rename('Coupon_use_num15').reset_index()
    users = pd.merge(users, use15, on='User_id', how='left')
    users['Coupon_use_num15'] = users['Coupon_use_num15'].fillna(0)
    # 7. 15-day redemption rate
    users['Coupon_use_rate15'] = users['Coupon_use_num15'] / users['Coupon_get_num']
    # 8. distinct merchants the user bought from (merchants de-duplicated)
    merch = bought[['User_id', 'Merchant_id']].drop_duplicates()
    merch_num = merch.groupby('User_id').size().rename('buy_merchant_num').reset_index()
    users = pd.merge(users, merch_num, on='User_id', how='left')
    # 9. receive-to-use gap in days (min / mean)
    gaps = used.groupby('User_id')['date_interval']
    users = pd.merge(users, gaps.min().rename('user_min_interval').reset_index(), on='User_id', how='left')
    users = pd.merge(users, gaps.mean().rename('user_mean_interval').reset_index(), on='User_id', how='left')
    # 10. user-merchant distance on redeemed coupons (max / mean / min)
    dist = used[pd.notna(used['Distance'])].groupby('User_id')['Distance']
    users = pd.merge(users, dist.max().rename('user_distance_max').reset_index(), on='User_id', how='left')
    users = pd.merge(users, dist.mean().rename('user_distance_mean').reset_index(), on='User_id', how='left')
    users = pd.merge(users, dist.min().rename('user_distance_min').reset_index(), on='User_id', how='left')
    # 11. spend thresholds of the user's coupons (mean / min / max)
    mk = feature[pd.notna(feature['Discount_rate_mk'])].groupby('User_id')['Discount_rate_mk']
    users = pd.merge(users, mk.mean().rename('user_Discount_mk_mean').reset_index(), on='User_id', how='left')
    users = pd.merge(users, mk.min().rename('user_Discount_mk_min').reset_index(), on='User_id', how='left')
    users = pd.merge(users, mk.max().rename('user_Discount_mk_max').reset_index(), on='User_id', how='left')

    users['buy_num'] = users['buy_num'].fillna(0)
    users['buy_merchant_num'] = users['buy_merchant_num'].fillna(0)

    return users

def Merchant_feature(feature):
    """Build per-merchant aggregate features from a history window.

    Expects the columns Merchant_id, Date, Date_received, date_interval,
    Distance and Discount_rate_mk. Returns one row per Merchant_id with
    sale/coupon counts, redemption rates, customer-distance and
    coupon-threshold statistics.

    Bug fix: Merchant_mk_max was previously computed with .mean(); it now
    really is the per-merchant maximum redeemed threshold.
    """
    merchants = feature[['Merchant_id']].drop_duplicates()

    # Row subsets reused by several features.
    sold = feature[pd.notna(feature['Date'])]
    issued = feature[pd.notna(feature['Date_received'])]
    redeemed = feature[pd.notna(feature['Date_received']) & pd.notna(feature['Date'])]
    redeemed15 = redeemed[redeemed['date_interval'] <= 15]

    # 1. total number of purchases at the merchant
    sale_num = sold.groupby('Merchant_id').size().rename('Merchant_sale_num').reset_index()
    merchants = pd.merge(merchants, sale_num, on='Merchant_id', how='left')
    merchants['Merchant_sale_num'] = merchants['Merchant_sale_num'].fillna(0)
    # 2. number of coupons issued for the merchant
    coupons_num = issued.groupby('Merchant_id').size().rename('Merchant_coupons_num').reset_index()
    merchants = pd.merge(merchants, coupons_num, on='Merchant_id', how='left')
    merchants['Merchant_coupons_num'] = merchants['Merchant_coupons_num'].fillna(0)
    # 3. purchases made with one of the merchant's coupons
    buy_num = redeemed.groupby('Merchant_id').size().rename('Merchant_coupons_buy_num').reset_index()
    merchants = pd.merge(merchants, buy_num, on='Merchant_id', how='left')
    merchants['Merchant_coupons_buy_num'] = merchants['Merchant_coupons_buy_num'].fillna(0)
    # 4. share of purchases that used a coupon
    merchants['Merchant_user_rate'] = merchants['Merchant_coupons_buy_num'] / merchants['Merchant_sale_num']
    # 5. redemption rate of issued coupons
    merchants['Merchant_rate'] = merchants['Merchant_coupons_buy_num'] / merchants['Merchant_coupons_num']
    # 6. redemptions within 15 days + 15-day redemption rate
    num15 = redeemed15.groupby('Merchant_id').size().rename('Merchant_Coupon_use_num15').reset_index()
    merchants = pd.merge(merchants, num15, on='Merchant_id', how='left')
    merchants['Merchant_Coupon_use_num15'] = merchants['Merchant_Coupon_use_num15'].fillna(0)
    merchants['Merchant_Coupon_use_rate15'] = merchants['Merchant_Coupon_use_num15'] / merchants['Merchant_coupons_num']
    # 7. customer distance on redeemed coupons (mean / max)
    dist = redeemed[pd.notna(redeemed['Distance'])].groupby('Merchant_id')['Distance']
    merchants = pd.merge(merchants, dist.mean().rename('Merchant_distance_mean').reset_index(), on='Merchant_id', how='left')
    merchants = pd.merge(merchants, dist.max().rename('Merchant_distance_max').reset_index(), on='Merchant_id', how='left')
    # 8. spend thresholds of the merchant's redeemed coupons (min / max / mean)
    mk = feature[pd.notna(feature['Discount_rate_mk']) & pd.notna(feature['Date'])].groupby('Merchant_id')['Discount_rate_mk']
    merchants = pd.merge(merchants, mk.min().rename('Merchant_mk_min').reset_index(), on='Merchant_id', how='left')
    merchants = pd.merge(merchants, mk.max().rename('Merchant_mk_max').reset_index(), on='Merchant_id', how='left')
    merchants = pd.merge(merchants, mk.mean().rename('Merchant_mk_mean').reset_index(), on='Merchant_id', how='left')
    # 9. receive-to-use gap of the merchant's coupons (min / mean days)
    gaps = redeemed.groupby('Merchant_id')['date_interval']
    merchants = pd.merge(merchants, gaps.min().rename('Merchant_min_interval').reset_index(), on='Merchant_id', how='left')
    merchants = pd.merge(merchants, gaps.mean().rename('Merchant_mean_interval').reset_index(), on='Merchant_id', how='left')
    return merchants

def couponsType_feature(feature):
    """Build per-coupon-type features, keyed by the raw Discount_rate string.

    Expects Discount_rate, Date, Date_received, date_interval and Distance.
    Returns one row per distinct Discount_rate with receive/redeem counts,
    rates, distance and timing statistics.

    Consistency fix: Coupon15_use_num is now filled with 0 when a coupon
    type was never redeemed within 15 days (it was the only count column
    left as NaN), so Coupons15_Type_rate becomes 0 instead of NaN there.
    """
    coupons = feature[pd.notna(feature['Discount_rate'])][['Discount_rate']].drop_duplicates()

    # Row subsets reused by several features.
    issued = feature[pd.notna(feature['Date_received']) & pd.notna(feature['Discount_rate'])]
    redeemed = feature[pd.notna(feature['Date_received']) & pd.notna(feature['Date'])]
    redeemed15 = redeemed[redeemed['date_interval'] <= 15]

    # 1. times each coupon type was received
    get_num = issued.groupby('Discount_rate').size().rename('Coupons_Type_get_num').reset_index()
    coupons = pd.merge(coupons, get_num, on='Discount_rate', how='left')
    coupons['Coupons_Type_get_num'] = coupons['Coupons_Type_get_num'].fillna(0)
    # 2. times each coupon type was redeemed
    use_num = redeemed.groupby('Discount_rate').size().rename('Coupons_Type_use_num').reset_index()
    coupons = pd.merge(coupons, use_num, on='Discount_rate', how='left')
    coupons['Coupons_Type_use_num'] = coupons['Coupons_Type_use_num'].fillna(0)
    # 3. redemption rate
    coupons['Coupons_Type_rate'] = coupons['Coupons_Type_use_num'] / coupons['Coupons_Type_get_num']
    # 4. redemptions within 15 days (filled with 0 — consistency fix)
    use15 = redeemed15.groupby('Discount_rate').size().rename('Coupon15_use_num').reset_index()
    coupons = pd.merge(coupons, use15, on='Discount_rate', how='left')
    coupons['Coupon15_use_num'] = coupons['Coupon15_use_num'].fillna(0)
    # 5. 15-day redemption rate
    coupons['Coupons15_Type_rate'] = coupons['Coupon15_use_num'] / coupons['Coupons_Type_get_num']
    # 6. distance at redemption (mean / max)
    dist = redeemed[pd.notna(redeemed['Distance'])].groupby('Discount_rate')['Distance']
    coupons = pd.merge(coupons, dist.mean().rename('Coupons_Type_distance_mean').reset_index(), on='Discount_rate', how='left')
    coupons = pd.merge(coupons, dist.max().rename('Coupons_Type_distance_max').reset_index(), on='Discount_rate', how='left')
    # 7. receive-to-use gap (mean / min days)
    gaps = redeemed.groupby('Discount_rate')['date_interval']
    coupons = pd.merge(coupons, gaps.mean().rename('Coupons_Type_interval_mean').reset_index(), on='Discount_rate', how='left')
    coupons = pd.merge(coupons, gaps.min().rename('Coupons_Type_interval_min').reset_index(), on='Discount_rate', how='left')
    return coupons

def User_CouponsType_feature(feature):
    """User x coupon-type cross features.

    For every (User_id, Discount_rate) pair seen in the window: how often
    the user received / redeemed that coupon type, the redemption rates
    (overall and within 15 days) and the receive-to-use gap statistics.
    """
    keys = ['User_id', 'Discount_rate']
    pairs = feature[keys].drop_duplicates()

    got = feature[pd.notna(feature['Date_received'])]
    redeemed = feature[pd.notna(feature['Date_received']) & pd.notna(feature['Date'])]
    redeemed15 = redeemed[redeemed['date_interval'] <= 15]

    def _per_pair(rows, name):
        # Row count per (user, coupon-type) pair.
        return rows.groupby(keys).size().rename(name).reset_index()

    # 1. coupons of this type received by the user
    pairs = pd.merge(pairs, _per_pair(got, 'User_CouponType_get_num'), on=keys, how='left')
    pairs['User_CouponType_get_num'] = pairs['User_CouponType_get_num'].fillna(0)
    # 2. coupons of this type redeemed by the user
    pairs = pd.merge(pairs, _per_pair(redeemed, 'User_CouponType_use_num'), on=keys, how='left')
    pairs['User_CouponType_use_num'] = pairs['User_CouponType_use_num'].fillna(0)
    # 3. redemption rate for this user / type
    pairs['User_Coupons_rate'] = pairs['User_CouponType_use_num'] / pairs['User_CouponType_get_num']
    # 4. redemptions within 15 days
    pairs = pd.merge(pairs, _per_pair(redeemed15, 'User_Coupon15_use_num'), on=keys, how='left')
    pairs['User_Coupon15_use_num'] = pairs['User_Coupon15_use_num'].fillna(0)
    # 5. 15-day redemption rate
    pairs['User_Coupons_rate15'] = pairs['User_Coupon15_use_num'] / pairs['User_CouponType_get_num']
    # 6. receive-to-use gap (mean / min days)
    gaps = redeemed.groupby(keys)['date_interval']
    pairs = pd.merge(pairs, gaps.mean().rename('User_Coupons_Type_interval_mean').reset_index(), on=keys, how='left')
    pairs = pd.merge(pairs, gaps.min().rename('User_Coupons_Type_interval_min').reset_index(), on=keys, how='left')
    return pairs

def User_Merchants_feature(feature):
    """User x merchant cross features.

    For every (User_id, Merchant_id) pair seen in the window: purchase and
    coupon counts, redemption rates (overall and within 15 days) and the
    receive-to-use gap statistics.
    """
    keys = ['User_id', 'Merchant_id']
    pairs = feature[keys].drop_duplicates()

    bought = feature[pd.notna(feature['Date'])]
    got = feature[pd.notna(feature['Date_received'])]
    redeemed = feature[pd.notna(feature['Date_received']) & pd.notna(feature['Date'])]
    redeemed15 = redeemed[redeemed['date_interval'] <= 15]

    def _per_pair(rows, name):
        # Row count per (user, merchant) pair.
        return rows.groupby(keys).size().rename(name).reset_index()

    # 1. purchases at the merchant
    pairs = pd.merge(pairs, _per_pair(bought, 'User_Merchant_buy_num'), on=keys, how='left')
    pairs['User_Merchant_buy_num'] = pairs['User_Merchant_buy_num'].fillna(0)
    # 2. coupons received from the merchant
    pairs = pd.merge(pairs, _per_pair(got, 'User_Merchant_get_num'), on=keys, how='left')
    pairs['User_Merchant_get_num'] = pairs['User_Merchant_get_num'].fillna(0)
    # 3. coupons redeemed at the merchant
    pairs = pd.merge(pairs, _per_pair(redeemed, 'User_Merchant_use_num'), on=keys, how='left')
    pairs['User_Merchant_use_num'] = pairs['User_Merchant_use_num'].fillna(0)
    # 4. redemption rate of received coupons
    pairs['User_Merchants_rate'] = pairs['User_Merchant_use_num'] / pairs['User_Merchant_get_num']
    # 5. share of purchases that used a coupon
    pairs['User_Merchants_user_rate'] = pairs['User_Merchant_use_num'] / pairs['User_Merchant_buy_num']
    # 6. redemptions within 15 days
    pairs = pd.merge(pairs, _per_pair(redeemed15, 'User_Merchant15_use_num'), on=keys, how='left')
    pairs['User_Merchant15_use_num'] = pairs['User_Merchant15_use_num'].fillna(0)
    # 7. 15-day redemption rate
    pairs['User_Merchant_rate15'] = pairs['User_Merchant15_use_num'] / pairs['User_Merchant_get_num']
    # 8. receive-to-use gap (mean / min days)
    gaps = redeemed.groupby(keys)['date_interval']
    pairs = pd.merge(pairs, gaps.mean().rename('User_Merchants_Type_interval_mean').reset_index(), on=keys, how='left')
    pairs = pd.merge(pairs, gaps.min().rename('User_Merchants_Type_interval_min').reset_index(), on=keys, how='left')
    return pairs

def Merchants_CouponsType_feature(feature):
    """Merchant x coupon-type cross features.

    For every (Merchant_id, Discount_rate) pair seen in the window: issue
    and redemption counts, redemption rates (overall and within 15 days)
    and the receive-to-use gap statistics.
    """
    keys = ['Merchant_id', 'Discount_rate']
    pairs = feature[keys].drop_duplicates()

    issued = feature[pd.notna(feature['Date_received'])]
    redeemed = feature[pd.notna(feature['Date_received']) & pd.notna(feature['Date'])]
    redeemed15 = redeemed[redeemed['date_interval'] <= 15]

    def _per_pair(rows, name):
        # Row count per (merchant, coupon-type) pair.
        return rows.groupby(keys).size().rename(name).reset_index()

    # 1. coupons of this type issued by the merchant
    pairs = pd.merge(pairs, _per_pair(issued, 'Merchants_CouponType_get_num'), on=keys, how='left')
    pairs['Merchants_CouponType_get_num'] = pairs['Merchants_CouponType_get_num'].fillna(0)
    # 2. coupons of this type redeemed at the merchant
    pairs = pd.merge(pairs, _per_pair(redeemed, 'Merchants_CouponType_use_num'), on=keys, how='left')
    pairs['Merchants_CouponType_use_num'] = pairs['Merchants_CouponType_use_num'].fillna(0)
    # 3. redemption rate for this merchant / type
    pairs['Merchants_Coupons_rate'] = pairs['Merchants_CouponType_use_num'] / pairs['Merchants_CouponType_get_num']
    # 4. redemptions within 15 days
    pairs = pd.merge(pairs, _per_pair(redeemed15, 'Merchants_CouponsType15_use_num'), on=keys, how='left')
    pairs['Merchants_CouponsType15_use_num'] = pairs['Merchants_CouponsType15_use_num'].fillna(0)
    # 5. 15-day redemption rate
    pairs['Merchants_CouponsType_rate15'] = pairs['Merchants_CouponsType15_use_num'] / pairs['Merchants_CouponType_get_num']
    # 6. receive-to-use gap (mean / min days)
    gaps = redeemed.groupby(keys)['date_interval']
    pairs = pd.merge(pairs, gaps.mean().rename('Merchants_Coupons_Type_interval_mean').reset_index(), on=keys, how='left')
    pairs = pd.merge(pairs, gaps.min().rename('Merchants_Coupons_Type_interval_min').reset_index(), on=keys, how='left')
    return pairs

def M_C_UType_feature(feature):
    """User x merchant x coupon-type cross features.

    For every (Merchant_id, Discount_rate, User_id) triple seen in the
    window: receive / redemption counts, redemption rates (overall and
    within 15 days) and receive-to-use gap statistics.

    Bug fix: the interval features were previously merged into throwaway
    `M_C_Us` variables while the un-merged frame was returned, so they were
    silently dropped; they are now part of the returned frame.
    """
    keys = ['Merchant_id', 'Discount_rate', 'User_id']
    M_C_U = feature[keys].drop_duplicates()

    got = feature[pd.notna(feature['Date_received'])]
    used = feature[pd.notna(feature['Date_received']) & pd.notna(feature['Date'])]
    used15 = used[used['date_interval'] <= 15]

    # 1. coupons received per triple
    get_num = got.groupby(keys).size().rename('M_C_U_get_num').reset_index()
    M_C_U = pd.merge(M_C_U, get_num, on=keys, how='left')
    M_C_U['M_C_U_get_num'] = M_C_U['M_C_U_get_num'].fillna(0)
    # 2. coupons redeemed per triple
    use_num = used.groupby(keys).size().rename('M_C_U_use_num').reset_index()
    M_C_U = pd.merge(M_C_U, use_num, on=keys, how='left')
    M_C_U['M_C_U_use_num'] = M_C_U['M_C_U_use_num'].fillna(0)
    # 3. redemption rate
    M_C_U['M_C_U_rate'] = M_C_U['M_C_U_use_num'] / M_C_U['M_C_U_get_num']
    # 4. redemptions within 15 days
    use15_num = used15.groupby(keys).size().rename('M_C_U15_use_num').reset_index()
    M_C_U = pd.merge(M_C_U, use15_num, on=keys, how='left')
    M_C_U['M_C_U15_use_num'] = M_C_U['M_C_U15_use_num'].fillna(0)
    # 5. 15-day redemption rate
    M_C_U['M_C_UType_rate15'] = M_C_U['M_C_U15_use_num'] / M_C_U['M_C_U_get_num']
    # 6. receive-to-use gap (mean / min days) — now actually returned
    gaps = used.groupby(keys)['date_interval']
    M_C_U = pd.merge(M_C_U, gaps.mean().rename('M_C_Us_Type_interval_mean').reset_index(), on=keys, how='left')
    M_C_U = pd.merge(M_C_U, gaps.min().rename('M_C_Us_Type_interval_min').reset_index(), on=keys, how='left')
    return M_C_U

def leakage(database3):
    """Append same-window "leakage" features to the label-month frame.

    All features are computed on `database3` itself (the prediction window)
    and describe how actively each user collects coupons inside that window.
    Returns a new merged DataFrame; the caller's frame is not mutated
    (every merge rebinds the local name).
    """
    # 1. coupons received per user in this window
    Coupon = database3[pd.notna(database3['Coupon_id'])][['User_id','Coupon_id']]
    Coupon['Coupon_id'] = 1
    Coupon_num = Coupon.groupby(by='User_id').sum('Coupon_id')
    Coupon_num.columns = ['Coupon_get_num']
    database3 = pd.merge(database3,Coupon_num,on=['User_id'],how='left')
    # 2. coupons of each Discount_rate type received per user this month
    User_CouponType_get = database3[pd.notna(database3['Date_received'])][['User_id','Discount_rate']]
    User_CouponType_get['leakage_User_CouponType_get_num'] = 1
    User_CouponType_get = User_CouponType_get.groupby(['User_id','Discount_rate']).sum('leakage_User_CouponType_get_num')
    database3 = pd.merge(database3,User_CouponType_get,on=['User_id','Discount_rate'],how='left')

    # 3. coupons received per (user, merchant) pair
    User_Merchant_get = database3[pd.notna(database3['Date_received'])][['User_id','Merchant_id']]
    User_Merchant_get['leakage_User_Merchant_get_num'] = 1
    User_Merchant_get = User_Merchant_get.groupby(['User_id','Merchant_id']).sum('leakage_User_Merchant_get_num')
    database3 = pd.merge(database3,User_Merchant_get,on=['User_id','Merchant_id'],how='left')
    # 4. coupons received per user per day
    Coupon_day = database3[pd.notna(database3['Coupon_id'])][['User_id','Date_received']]
    Coupon_day['leakage_Coupon_dayget_num'] = 1
    Coupon_num_day = Coupon_day.groupby(['User_id','Date_received']).sum('leakage_Coupon_dayget_num')
    database3 = pd.merge(database3,Coupon_num_day,on=['User_id','Date_received'],how='left')
    # 5. coupons of a given type received per user per day
    Coupon_s_day = database3[pd.notna(database3['Coupon_id'])][['User_id','Date_received','Discount_rate']]
    Coupon_s_day['speleakage_Coupon_dayget_num'] = 1
    Coupon_num_s_day = Coupon_s_day.groupby(['User_id','Date_received','Discount_rate']).sum('speleakage_Coupon_dayget_num')
    database3 = pd.merge(database3,Coupon_num_s_day,on=['User_id','Date_received','Discount_rate'],how='left')
    # For users who received the same coupon type more than once: the last and
    # first day that type was received.
    lekge_user_SpeCouSum_maxday=database3[database3['leakage_User_CouponType_get_num']>1].groupby(['User_id','Discount_rate'])['Date_received'].max().reset_index().rename(columns={'Date_received':'lekge_user_SpeCouSum_maxday'})
    lekge_user_SpeCouSum_minday=database3[database3['leakage_User_CouponType_get_num']>1].groupby(['User_id','Discount_rate'])['Date_received'].min().reset_index().rename(columns={'Date_received':'lekge_user_SpeCouSum_minday'})
    database3=pd.merge(database3,lekge_user_SpeCouSum_maxday,how='left',on=['User_id','Discount_rate'])
    database3=pd.merge(database3,lekge_user_SpeCouSum_minday,how='left',on=['User_id','Discount_rate'])
    # 1 = this receipt is the first (resp. last) of its type for the user,
    # 0 = it is not, -1 = otherwise. NOTE(review): the -1 branch presumably
    # covers missing min/max days (NaT arithmetic, where .days is not a
    # positive-or-zero number) — confirm on the installed pandas version.
    database3['lekge_user_SpeCou_ifirst']=(database3['Date_received']-database3['lekge_user_SpeCouSum_minday']).apply(lambda x:1 if x.days==0 else 0 if x.days>0 else -1)
    database3['lekge_user_SpeCou_iflast']=(database3['lekge_user_SpeCouSum_maxday']-database3['Date_received']).apply(lambda x:1 if x.days==0 else 0 if x.days>0 else -1)
    return database3

def feature_all(feature3,y):
    """Assemble the final modelling table.

    feature3 : DataFrame — history window used to build the aggregate tables.
    y        : DataFrame — label-window records; leakage features are added
               to it, then every aggregate table is left-merged on.
    Returns the merged frame restricted to rows that actually have a coupon.
    """
    # user-level aggregates
    users = user_feature(feature3)
    # merchant-level aggregates
    Merchants = Merchant_feature(feature3)
    # coupon-type aggregates
    Coupons_type = couponsType_feature(feature3)
    # user x merchant
    User_Merchants = User_Merchants_feature(feature3)
    # user x coupon-type
    User_CouponsType = User_CouponsType_feature(feature3)
    # merchant x coupon-type
    Merchants_CouponsType = Merchants_CouponsType_feature(feature3)
    y = leakage(y)
    # NOTE: merge order matters — column names that exist on both sides
    # (e.g. Coupon_get_num from leakage() and from user_feature()) are
    # disambiguated with _x/_y suffixes based on this order.
    feature_final = pd.merge(y,users,on='User_id',how='left')
    feature_final = pd.merge(feature_final,Merchants,on='Merchant_id',how='left')
    feature_final = pd.merge(feature_final,Coupons_type,on='Discount_rate',how='left')
    feature_final = pd.merge(feature_final,User_Merchants,on=['User_id','Merchant_id'],how='left')
    feature_final = pd.merge(feature_final,User_CouponsType,on=['User_id','Discount_rate'],how='left')
    feature_final = pd.merge(feature_final,Merchants_CouponsType,on=['Merchant_id','Discount_rate'],how='left')
    # Keep only coupon rows: NaN != NaN, so this drops rows whose
    # Discount_rate is missing.
    feature_final = feature_final[feature_final['Discount_rate']==feature_final['Discount_rate']]
    # Differences between this record's distance / threshold and the
    # historical statistics of its user, merchant and coupon type.
    feature_final['user_distance_max_interval'] = feature_final['Distance']-feature_final['user_distance_max']
    feature_final['user_distance_mean_interval'] = feature_final['Distance']-feature_final['user_distance_mean']
    feature_final['Merchant_distance_max_interval'] = feature_final['Distance']-feature_final['Merchant_distance_max']
    feature_final['Merchant_distance_mean_interval'] = feature_final['Distance']-feature_final['Merchant_distance_mean']
    feature_final['Coupons_Type_distance_mean_interval'] = feature_final['Distance']-feature_final['Coupons_Type_distance_mean']
    feature_final['Coupons_Type_distance_max_interval'] = feature_final['Distance']-feature_final['Coupons_Type_distance_max']
    feature_final['user_Discount_mk_mean_interval'] = feature_final['Discount_rate_mk']-feature_final['user_Discount_mk_mean']
    feature_final['user_Discount_mk_min_interval'] = feature_final['Discount_rate_mk']-feature_final['user_Discount_mk_min']
    feature_final['user_Discount_mk_max_interval'] = feature_final['Discount_rate_mk']-feature_final['user_Discount_mk_max']
    
#     feature_final = feature_final.replace(np.nan,-99999)
    return feature_final



# Visualization

# Histogram: number of days between receiving a coupon and the recorded
# consumption date (NaN intervals — never used / never received — excluded).
plt.figure(figsize=(10, 6))
interval_days = data_off['date_interval'].dropna()
plt.hist(interval_days, bins=30, edgecolor='k')
plt.title('Distribution of Days from Coupon Received to Used')
plt.xlabel('Days')
plt.ylabel('Frequency')
plt.show()

# Bar chart: coupon receipts per day of the week (1 = Monday ... 7 = Sunday).
plt.figure(figsize=(10, 6))
weekday_counts = data_off['receive_week'].value_counts()
weekday_counts.sort_index().plot(kind='bar')
plt.title('Distribution of Coupon Received Day of the Week')
plt.xlabel('Day of the Week')
plt.ylabel('Frequency')
plt.show()

# Pie chart: share of coupons received on weekdays vs weekends.
# BUG FIX: value_counts() orders categories by frequency, so the previous
# hard-coded labels=['Weekday', 'Weekend'] would be attached to the wrong
# slices whenever weekend receipts outnumbered weekday ones. Sort by the
# index (0 = weekday, 1 = weekend, per receive_isWeekend's definition) and
# derive the labels from the actual index values instead.
plt.figure(figsize=(8, 8))
weekend_counts = data_off['receive_isWeekend'].value_counts().sort_index()
label_map = {0: 'Weekday', 1: 'Weekend'}
weekend_counts.plot(kind='pie', autopct='%1.1f%%', startangle=90,
                    labels=[label_map[i] for i in weekend_counts.index])
plt.title('Coupon Received on Weekday vs Weekend')
plt.ylabel('')
plt.show()

# Histogram of the parsed numeric discount rate (Discount_rate_% is produced
# by deal_rate from the raw "a:b" / fraction strings).
plt.figure(figsize=(10, 6))
rate_values = data_off['Discount_rate_%'].dropna()
plt.hist(rate_values, bins=20, edgecolor='k')
plt.title('Discount Rate Distribution')
plt.xlabel('Discount Rate')
plt.ylabel('Frequency')
plt.show()

# Histogram of the coupon spending threshold (Discount_rate_mk, parsed by
# deal_mk from the raw discount strings).
plt.figure(figsize=(10, 6))
threshold_values = data_off['Discount_rate_mk'].dropna()
plt.hist(threshold_values, bins=20, edgecolor='k')
plt.title('Discount Threshold Distribution')
plt.xlabel('Discount Threshold')
plt.ylabel('Frequency')
plt.show()

# Compare the row counts of the six sliding-window datasets
# (feature windows vs their corresponding label windows).
period_labels = ['January-April Data', 'May Data', 'February-May Data',
                 'June Data', 'March-May Data', 'July Data']
period_sizes = [len(df) for df in (feature1, database1, feature2,
                                   database2, feature3, database3)]

plt.figure(figsize=(12, 6))
plt.bar(period_labels, period_sizes, color='skyblue')
plt.title('Data Count Distribution Across Different Periods')
plt.xlabel('Time Period')
plt.ylabel('Data Count')
plt.show()

import numpy as np
import pandas as pd
from sklearn.impute import SimpleImputer
from sklearn.model_selection import train_test_split
from sklearn.metrics import recall_score, precision_score, roc_auc_score, roc_curve, auc, confusion_matrix, classification_report
import matplotlib.pyplot as plt
import seaborn as sns
import xgboost as xgb
from sklearn.naive_bayes import GaussianNB
from keras.models import Sequential
from keras.layers import Dense
from keras.optimizers import Adam
from sklearn.svm import SVC
from xgboost import plot_importance

# Assemble the merged feature tables for the three sliding windows.
# data1/data2 carry labels ('Y'); data3 is the July prediction window.
data3 = feature_all(feature3, database3)
data2 = feature_all(feature2, database2)
data1 = feature_all(feature1, database1)

# Stack the two labelled windows into one training frame.
combined_train = pd.concat([data2, data1], axis=0)

y_train = combined_train['Y'].values

# Drop identifiers, raw dates, the label, and the 'lekge_*' columns
# (presumably label-leakage features, judging by their names — confirm).
train_drop_cols = ['date_interval', 'Discount_rate', 'Date_received', 'Date',
                   'User_id', 'Merchant_id', 'Coupon_id', 'Y',
                   'lekge_user_SpeCouSum_maxday', 'lekge_user_SpeCouSum_minday']
train_feature_df = combined_train.drop(columns=train_drop_cols)
x_train = train_feature_df.values
print('总计%i个特征' % len(train_feature_df.columns))

# Same treatment for the prediction window (it has no 'Date'/'Y' columns).
pred_drop_cols = ['Discount_rate', 'Date_received', 'User_id', 'Merchant_id',
                  'Coupon_id', 'lekge_user_SpeCouSum_maxday',
                  'lekge_user_SpeCouSum_minday']
pred_feature_df = data3.drop(columns=pred_drop_cols)
x_pred = pred_feature_df.values
print('总计%i个特征' % len(pred_feature_df.columns))

# Train/validation split.
# NOTE(review): test_size=0.8 keeps only 20% of the labelled rows for
# training — confirm this is intentional (possibly done to cap runtime).
(train_x, test_x, train_y, test_y) = train_test_split(x_train, y_train, test_size=0.8, random_state=0)


def _sanitize_matrix(matrix, label):
    """Replace non-finite entries with the largest float64, then verify.

    Deduplicates the identical clean-and-check code that was previously
    repeated for the train, test and prediction matrices.

    Parameters
    ----------
    matrix : numpy.ndarray
        Imputed 2-D feature matrix.
    label : str
        Dataset name used in the error message (e.g. "Train data").

    Returns the cleaned matrix; raises ValueError if NaN/inf remain.
    """
    matrix = np.where(np.isfinite(matrix), matrix, np.finfo('float64').max)
    if np.isnan(matrix).any() or np.isinf(matrix).any():
        raise ValueError(f"{label} contains NaN or infinity after imputation.")
    return matrix


# Mean-impute missing values. The imputer is fitted on the training split
# only, so no statistics leak from the validation or prediction data.
imputer = SimpleImputer(strategy='mean')
train_x = imputer.fit_transform(train_x)
test_x = imputer.transform(test_x)

# Clamp any remaining non-finite values and fail loudly if cleaning failed.
train_x = _sanitize_matrix(train_x, "Train data")
test_x = _sanitize_matrix(test_x, "Test data")

# Apply the identical imputation + clamping to the July prediction matrix.
x_pred = imputer.transform(x_pred)
x_pred = _sanitize_matrix(x_pred, "Prediction data")

# Load the raw July test set. Its row order must match x_pred (both derive
# from offline_test.csv with no re-sorting) so the ids line up with the
# predicted probabilities below.
data33 = pd.read_csv('./项目准备/O2O优惠券使用预测/offline_test.csv')

# Decision tree, Gini impurity criterion.
dt_model_gini = DecisionTreeClassifier(criterion='gini', max_depth=10,
                                       min_samples_split=20,
                                       min_samples_leaf=10, random_state=42)
dt_model_gini.fit(train_x, train_y)

# Evaluate on the held-out split. Predict once and reuse the result instead
# of calling predict() separately for every metric.
test_pred_gini = dt_model_gini.predict(test_x)
print('决策树模型 (基尼) 的召回率为：', recall_score(test_y, test_pred_gini))
print('决策树模型 (基尼) 的精确率为：', precision_score(test_y, test_pred_gini))

# Probability output shape check (must have one column per class).
proba_gini = dt_model_gini.predict_proba(test_x)
print(proba_gini.shape)

# AUC needs the positive-class probability column, which only exists when
# both classes appeared in the training labels.
if proba_gini.shape[1] == 2:
    print('决策树模型 (基尼) 的 AUC 为：', roc_auc_score(test_y, proba_gini[:, 1]))
else:
    print('决策树模型 (基尼) 的 AUC 无法计算，因为预测概率数组的列数不是 2.')

# Score the prediction set and write the submission file
# (User_id, Coupon_id, Date_received, probability), no header/index row.
y_pred_gini = dt_model_gini.predict_proba(x_pred)
print(len(y_pred_gini))
# Slice the positive-class column directly — no need for the former
# pd.DataFrame(y_pred_gini)[1].values round-trip.
a_gini = y_pred_gini[:, 1]
pred_gini = pd.DataFrame({'User_id': data33['User_id'].values,
                          'Coupon_id': data33['Coupon_id'].values,
                          'Date_received': data33['Date_received'].values,
                          'pred': a_gini})
pred_gini.to_csv('./项目准备/O2O优惠券使用预测/result_gini.csv', index=False, header=False)

# Decision tree, information-gain (entropy) criterion — same hyperparameters
# as the Gini model so the two splitting criteria can be compared directly.
dt_model_entropy = DecisionTreeClassifier(criterion='entropy', max_depth=10,
                                          min_samples_split=20,
                                          min_samples_leaf=10, random_state=42)
dt_model_entropy.fit(train_x, train_y)

# Evaluate on the held-out split. Predict once and reuse the result instead
# of calling predict() separately for every metric.
test_pred_entropy = dt_model_entropy.predict(test_x)
print('决策树模型 (熵) 的召回率为：', recall_score(test_y, test_pred_entropy))
print('决策树模型 (熵) 的精确率为：', precision_score(test_y, test_pred_entropy))

# Probability output shape check (must have one column per class).
proba_entropy = dt_model_entropy.predict_proba(test_x)
print(proba_entropy.shape)

# AUC needs the positive-class probability column, which only exists when
# both classes appeared in the training labels.
if proba_entropy.shape[1] == 2:
    print('决策树模型 (熵) 的 AUC 为：', roc_auc_score(test_y, proba_entropy[:, 1]))
else:
    print('决策树模型 (熵) 的 AUC 无法计算，因为预测概率数组的列数不是 2.')

# Score the prediction set and write the submission file
# (User_id, Coupon_id, Date_received, probability), no header/index row.
y_pred_entropy = dt_model_entropy.predict_proba(x_pred)
print(len(y_pred_entropy))
# Slice the positive-class column directly instead of the DataFrame detour.
a_entropy = y_pred_entropy[:, 1]
pred_entropy = pd.DataFrame({'User_id': data33['User_id'].values,
                             'Coupon_id': data33['Coupon_id'].values,
                             'Date_received': data33['Date_received'].values,
                             'pred': a_entropy})
pred_entropy.to_csv('./项目准备/O2O优惠券使用预测/result_entropy.csv', index=False, header=False)

# # XGBoost 模型
# xgb_model = xgb.XGBClassifier(
#     booster='gbtree',
#     objective='binary:logistic',
#     eval_metric='auc',
#     learning_rate=0.03,
#     n_estimators=1000,
#     max_depth=5,
#     min_child_weight=1.1,
#     gamma=0.1,
#     subsample=0.8,
#     colsample_bytree=0.8,
#     seed=10,
#     reg_alpha=0,
#     reg_lambda=0
# )

# xgb_model.fit(train_x, train_y)
# xgb_pred = xgb_model.predict(test_x)
# xgb_pred_proba = xgb_model.predict_proba(test_x)[:, 1]

# print('xgboost模型的召回率为：', recall_score(test_y, xgb_pred))
# print('xgboost模型的精确率为：', precision_score(test_y, xgb_pred))
# print('xgboost模型的auc为：', roc_auc_score(test_y, xgb_pred_proba))

# # 保存XGBoost预测结果
# xgb_pred_proba_all = xgb_model.predict_proba(x_pred)[:, 1]
# xgb_pred_df = pd.DataFrame({'User_id': data33['User_id'].values, 'Coupon_id': data33['Coupon_id'].values, 'Date_received': data33['Date_received'].values, 'pred': xgb_pred_proba_all})
# xgb_pred_df.to_csv('./项目准备/O2O优惠券使用预测/result_xgb.csv', index=None, header=None)

# # 贝叶斯模型
# nb_model = GaussianNB()
# nb_model.fit(train_x, train_y)
# nb_pred = nb_model.predict(test_x)
# nb_pred_proba = nb_model.predict_proba(test_x)[:, 1]

# print('\n高斯朴素贝叶斯模型:')
# print('召回率:', recall_score(test_y, nb_pred))
# print('精确率:', precision_score(test_y, nb_pred))
# print('AUC:', roc_auc_score(test_y, nb_pred_proba))

# # 保存贝叶斯预测结果
# nb_pred_proba_all = nb_model.predict_proba(x_pred)[:, 1]
# nb_pred_df = pd.DataFrame({'User_id': data33['User_id'].values, 'Coupon_id': data33['Coupon_id'].values, 'Date_received': data33['Date_received'].values, 'pred': nb_pred_proba_all})
# nb_pred_df.to_csv('./项目准备/O2O优惠券使用预测/result_nb.csv', index=None, header=None)

# # 神经网络模型
# model = Sequential()
# model.add(Dense(64, input_dim=train_x.shape[1], activation='relu'))
# model.add(Dense(32, activation='relu'))
# model.add(Dense(1, activation='sigmoid'))

# model.compile(optimizer=Adam(learning_rate=0.001), loss='binary_crossentropy', metrics=['accuracy'])
# model.fit(train_x, train_y, epochs=50, batch_size=32, verbose=1)

# nn_pred_proba = model.predict(test_x).flatten()
# nn_pred = (nn_pred_proba > 0.5).astype(int)

# print('\n神经网络模型:')
# print('召回率:', recall_score(test_y, nn_pred))
# print('精确率:', precision_score(test_y, nn_pred))
# print('AUC:', roc_auc_score(test_y, nn_pred_proba))

# # 保存神经网络预测结果
# nn_pred_proba_all = model.predict(x_pred).flatten()
# nn_pred_df = pd.DataFrame({'User_id': data33['User_id'].values, 'Coupon_id': data33['Coupon_id'].values, 'Date_received': data33['Date_received'].values, 'pred': nn_pred_proba_all})
# nn_pred_df.to_csv('./项目准备/O2O优惠券使用预测/result_nn.csv', index=None, header=None)

# # 使用更小的数据集进行训练
# small_train_x, _, small_train_y, _ = train_test_split(train_x, train_y, test_size=0.9, random_state=0)

# # 支持向量机模型
# svm_model = SVC(kernel='rbf', probability=True, random_state=0)
# svm_model.fit(small_train_x, small_train_y)

# svm_pred = svm_model.predict(test_x)
# svm_pred_proba = svm_model.predict_proba(test_x)[:, 1]

# print('\n支持向量机 (SVM) 模型:')
# print('召回率:', recall_score(test_y, svm_pred))
# print('精确率:', precision_score(test_y, svm_pred))
# print('AUC:', roc_auc_score(test_y, svm_pred_proba))

# # 保存支持向量机预测结果
# svm_pred_proba_all = svm_model.predict_proba(x_pred)[:, 1]
# svm_pred_df = pd.DataFrame({'User_id': data33['User_id'].values, 'Coupon_id': data33['Coupon_id'].values, 'Date_received': data33['Date_received'].values, 'pred': svm_pred_proba_all})
# svm_pred_df.to_csv('./项目准备/O2O优惠券使用预测/result_svm.csv', index=None, header=None)

# # 绘制 ROC 曲线
# def plot_roc_curve(test_y, pred_proba, model_name):
#     fpr, tpr, _ = roc_curve(test_y, pred_proba)
#     roc_auc = auc(fpr, tpr)
    
#     plt.plot(fpr, tpr, label=f'{model_name} (AUC = {roc_auc:.2f})')

# plt.figure(figsize=(10, 8))
# plot_roc_curve(test_y, xgb_pred_proba, 'XGBoost')
# plot_roc_curve(test_y, nb_pred_proba, 'GaussianNB')
# plot_roc_curve(test_y, nn_pred_proba, 'Neural Network')
# plot_roc_curve(test_y, svm_pred_proba, 'SVM')

# plt.plot([0, 1], [0, 1], color='navy', lw=2, linestyle='--')
# plt.xlim([0.0, 1.0])
# plt.ylim([0.0, 1.05])
# plt.xlabel('False Positive Rate')
# plt.ylabel('True Positive Rate')
# plt.title('ROC Curve')
# plt.legend(loc="lower right")
# plt.show()

# # 绘制混淆矩阵
# def plot_confusion_matrix(test_y, pred, model_name):
#     cm = confusion_matrix(test_y, pred)
#     sns.heatmap(cm, annot=True, fmt='d', cmap='Blues')
#     plt.title(f'{model_name} Confusion Matrix')
#     plt.xlabel('Predicted')
#     plt.ylabel('Actual')
#     plt.show()

# plt.figure(figsize=(8, 6))
# plot_confusion_matrix(test_y, xgb_pred, 'XGBoost')
# plot_confusion_matrix(test_y, nb_pred, 'GaussianNB')
# plot_confusion_matrix(test_y, nn_pred, 'Neural Network')
# plot_confusion_matrix(test_y, svm_pred, 'SVM')

# # 绘制特征重要性图
# plt.figure(figsize=(10, 6))
# plot_importance(xgb_model, max_num_features=10)
# plt.title('Top 10 Important Features')
# plt.show()