# encoding: utf-8

# Visualization examples
import seaborn as sns
import matplotlib.pyplot as plt

import pandas as pd
from pyspark.sql import SparkSession
from pyspark.sql import Row
from pyspark import SparkFiles

# Build (or reuse) the SparkSession.  In notebook environments a `spark`
# handle is pre-created and getOrCreate() returns that same session; in a
# plain `spark-submit` script the original code raised NameError here.
spark = (SparkSession.builder
         .appName("offer_flag_5g3_visualization")
         .enableHiveSupport()  # queries below read Hive tables
         .getOrCreate())

# Sample up to 1M rows per class (offer_flag_5g3 = 0 vs 1) so the
# pandas conversion below stays bounded in memory.
df1 = spark.sql("select * from idealsh.add_all_month_202110 where offer_flag_5g3 =0 limit 1000000")
df2 = spark.sql("select * from idealsh.add_all_month_202110 where offer_flag_5g3 =1 limit 1000000")

# Pull both samples to the driver and stack them into one DataFrame.
pdf1 = df1.toPandas()
pdf2 = df2.toPandas()
pdf3 = pd.concat([pdf1, pdf2])
print(pdf3.head(10))
print(pdf3.shape)

# int process
# Columns stored as strings in the source table that must be coerced to
# numeric before any statistics or plotting can be done on them.
strtoint = [
    'comp_serv_flag', 'present_1x_duration', 'disct_amount',
    'fair_disct_amount', 'net_flow', 'pre_disct_amount',
    'pre_fair_disct_amount', 'yy_init_val', 'yy_accu_val',
    'll_init_val', 'll_use_val', 'll_accu_val',
    'mon_flux_rate', 'amount_3avg', 'fair_amount_3avg',
    'online_len', 'amount', 'last_amount',
    'last_2_amount', 'amount_without_tax', 'last_amount_without_tax',
    'last_2_amount_without_tax', 'fair_amount', 'fair_last_amount',
    'fair_last_2_amount', 'fair_amount_without_tax',
    'fair_last_amount_without_tax', 'fair_last_2_amount_without_tax',
    'flux_1x', 'active_day_num', 'rate_duration',
    'offs_amount', 'terminal_flag_5g', 'rpice',
    'terminal_price', 'fuka_num', 'arpu_average_3',
    'fair_amount_sum', 'max_fk_num', 'free_fk_limit',
    'coefficient', 'off_value', 'if_only_ty',
    'is_three_null_user', 'area_5',
]


def convert_dtype(j):
    """Coerce a raw cell value to float.

    Values that cannot be parsed as a number (empty strings, None,
    malformed text) are mapped to 0.0 rather than NaN.

    Parameters
    ----------
    j : any
        Raw value read from the source table (usually a string).

    Returns
    -------
    float
        float(j) when parseable, otherwise 0.0.
    """
    try:
        return float(j)
    except (TypeError, ValueError):
        # Catch only conversion failures; the original bare BaseException
        # also swallowed KeyboardInterrupt/SystemExit.
        return 0.0


# Coerce every string-typed numeric column to float, then fill any
# remaining NaNs with that column's median.
for col in strtoint:
    converted = pdf3[col].apply(convert_dtype)
    pdf3[col] = converted
    print('********', col, '----')
    pdf3[col] = converted.fillna(value=converted.median())

print('done')

# Show every column when printing wide frames.
pd.set_option('display.max_columns', None)

# Fee/amount columns to eyeball after the numeric coercion.  A bare
# `.head(10)` expression only renders in a notebook; print() makes the
# preview visible when this runs as a plain script too.
amount_cols = [
    'disct_amount',
    'fair_disct_amount',
    'pre_disct_amount',
    'pre_fair_disct_amount',
    'amount_3avg',
    'fair_amount_3avg',
    'amount',
    'last_amount',
    'last_2_amount',
    'amount_without_tax',
    'last_amount_without_tax',
    'last_2_amount_without_tax',
    'fair_amount',
    'fair_last_amount',
    'fair_last_2_amount',
    'fair_amount_without_tax',
    'fair_last_amount_without_tax',
    'fair_last_2_amount_without_tax',
    'offs_amount',
    'arpu_average_3',
    'off_value',
]
print(pdf3[amount_cols].head(10))

import time
# Date handling
# Reference cut-off used to compute "days until"; kept as a module-level
# constant so it is documented in one place and parsed only when needed.
_REF_TIMESTAMP = "2021-12-01 00:00:00"


def convert_day(day, ref=_REF_TIMESTAMP):
    """Return the number of days from *day* until *ref*.

    Parameters
    ----------
    day : str
        Timestamp formatted '%Y-%m-%d %H:%M:%S'.
    ref : str, optional
        Reference timestamp in the same format.  Defaults to the original
        hard-coded cut-off of 2021-12-01 (generalized into a parameter).

    Returns
    -------
    float or int
        (ref - day) in days (may be fractional/negative); 0 when *day*
        cannot be parsed.
    """
    try:
        t_day = time.strptime(day, '%Y-%m-%d %H:%M:%S')
        t_ref = time.strptime(ref, '%Y-%m-%d %H:%M:%S')
        # mktime interprets both in local time, so the offset cancels out.
        return (int(time.mktime(t_ref)) - int(time.mktime(t_day))) / (24 * 60 * 60)
    except (TypeError, ValueError, OverflowError):
        # Parse/range failures map to 0, as in the original — but without
        # the bare BaseException that swallowed KeyboardInterrupt.
        return 0


# Derive "days before the cut-off" features from the two raw date columns.
for src_col, new_col in (('bill_date', 'bill_date_new'),
                         ('lte_sim_change_date', 'lte_sim_change_date_new')):
    pdf3[new_col] = pdf3[src_col].apply(convert_day)
print('done')

# Histograms of selected features, one facet per target class
# (offer_flag_5g3 = 0 vs 1).
for feature in ('lte_sim_change_date_new', 'fuka_num'):
    g = sns.FacetGrid(pdf3, col='offer_flag_5g3')
    g.map(plt.hist, feature, bins=20)

# Pairwise correlation heatmap over the discount/amount feature subset.
corr_cols = ['disct_amount',
             'fair_disct_amount',
             'pre_disct_amount',
             'pre_fair_disct_amount',
             'amount_3avg',
             'fair_amount_3avg',
             'amount']
df_test = pdf3[corr_cols]
sns.heatmap(df_test.corr(), annot=True, fmt='.1f')

# ARPU distribution faceted by target (cols) x compound-service flag (rows).
# seaborn renamed FacetGrid's `size` parameter to `height` in 0.9 and
# removed `size` entirely in later releases, so the original call fails
# with TypeError on current seaborn.
grid = sns.FacetGrid(pdf3, col='offer_flag_5g3', row='comp_serv_flag',
                     height=2.2, aspect=1.6)
grid.map(plt.hist, 'arpu_average_3', alpha=.5, bins=20)
grid.add_legend()

