# #!/usr/bin/env python
# # -*- encoding: utf-8 -*-
# '''
# @File    :   jdx_match_test_data.py
# @Contact :   pengwei.sun@aihuishou.com
# @License :   (C)Copyright aihuishou
#
# # @Modify Time      @Author       @Version    @Description
# ------------      -----------   --------    -----------
# 2021-09-09 10:45   pengwei.sun      1.0         None
# '''
# import os, sys
#
# sys.path.append(os.getcwd())
# from src.utils.config import logger
# from src.utils.db_processor import presto_processor, mysql_prediction_processor,hive_processor
# import pandas as pd
# from src.utils.feishu_message import feishu_messager
# import numpy as np
# from src.mobile.android_ppvrate.sku2_skulevel_period_price import get_period_price_fun
# from src.utils.util import get_today, format_date
# import datetime
# from src.mobile.android_ppvrate.c2b_config_utils import get_ppv_combine_sql,ppv_rank_df,get_all_combine_data_rank_score,\
#     get_product_base_ppv_fun,query_level_sub_ppv_price,query_process_sql,query_product_ppv_small_version_cnt,get_level_rank
#
#
#
# class C2bppvrateModel:
#     def __init__(self, model_date=None):
#         """
#
#         :param model_date: 建模日期
#         """
#         if model_date is None:
#             self.model_date = get_today()
#         else:
#             self.model_date = format_date(model_date)
#
#         self.end_date = format_date(self.model_date)
#         self.start7_date = self.end_date - datetime.timedelta(7)
#         self.start14_date = self.end_date - datetime.timedelta(14)
#         self.start21_date = self.end_date - datetime.timedelta(21)
#         self.start30_date = self.end_date - datetime.timedelta(30)
#
#         self.combine_df = get_all_combine_data_rank_score()
#         self.ppv_combine_percent_df = get_ppv_combine_sql(self.start30_date, self.start30_date, self.end_date)
#
#         #获取型号维度 ppv组合最近三十天的出货数量 cnt_product 为内存维度的出货数量
#         self.product_base_ppv_df = get_product_base_ppv_fun(self.combine_df, self.start30_date, self.start30_date, self.end_date)
#         self.query_level_sub_ppv_df = query_level_sub_ppv_price(self.start30_date, self.start30_date, self.end_date)
#         self.level_name = get_level_rank()
#
# #     #读取数据，为组合比率的计算提供数据
#     def get_process_data(self,ppv_level_df,startdate,enddate):
#         #获取ppv的原始数据
#         df = presto_processor.load_sql(query_process_sql.format(startdate,startdate,enddate))
#
#         #聚合ppv
#         df['storage']=df['storage'].fillna('unknown')
#         df['purchase_way']=df['purchase_way'].fillna('unknown')
#         df['guarantee']=df['guarantee'].fillna('unknown')
#         df['ppv_combine'] = df['storage'] + '_' + df['purchase_way'] + '_' + df['guarantee']
#
#         #根据ppv组合占等级的比率数据
#
#
#         #获取组合物品数占比最大的组合
#         df_max = ppv_level_df.groupby(by=['product_id','secondary_level_id','storage'])['percent'].max().reset_index()
#         df_max = df_max.rename(columns={"percent": "max_percent"})
#
#         #标识占比最多的组合
#         df2 = df.merge(df_max, how='left', on=['product_id','secondary_level_id','storage'])
#         df2 = df2.merge(self.ppv_combine_percent_df[['product_id','secondary_level_id', 'ppv_combine','total_cnt', 'percent','percent_rank']], how='left', on=['product_id','secondary_level_id', 'ppv_combine'])
#         #确认所有组合中，哪一个是占比最多的组合
#         df2['max_flag'] = 0
#         df2.loc[(df2.max_percent == df2.percent) & (df2.percent_rank==1), 'max_flag'] = 1
#         df2_max = df2.loc[df2['max_flag'] == 1, ['product_id',  'secondary_level_id','storage', 'max_flag',
#                                                  'item_quotation_price_num_avg','cnt']]
#
#         df2_max = df2_max.rename(columns={"item_quotation_price_num_avg": "max_price","cnt":"max_cnt"})
#         df2 = df2.merge(df2_max[['product_id', 'secondary_level_id','storage',  'max_price']], how='left', on=['product_id',  'secondary_level_id','storage'])
#         df2['ppv_rate'] = df2['item_quotation_price_num_avg'] / df2['max_price']
#         df2 = df2.loc[df2.ppv_rate>0]
#         # df2.to_csv('/data/sunpengwei/tmp/c2b_sku2_detail_rate_0907.csv', encoding='utf-8-sig')
#         df2.loc[df2.purchase_way == '美版-无锁（含零售/官修官换/展示机）', ['secondary_level_name', 'ppv_combine', 'item_quotation_price_num_avg', 'max_price','ppv_rate']].sort_values(by='secondary_level_name')
#         return df2,ppv_rank_df
#
#
#     def process_ppv_rate_fun(self,ppv_rank_df,df2,cnt_limit=0):
#         #计算组合的价格比率
#         df3 = df2.loc[df2.ppv_rate > 0]
#         # df3['ppv_rate'] = df3['item_quotation_price_num_avg'] / df3['max_price']
#         # df3=df3.loc[df3.secondary_level_name.isin(['C2','C2'])]
#         #
#         df3 = df3.loc[df3.cnt > cnt_limit]
#
#         df44 = df3.groupby(by=['product_id','product_name','secondary_level_id','secondary_level_name', 'ppv_combine','storage','purchase_way','guarantee'])[
#             'ppv_rate'].agg({'price_mean': 'mean', 'price_count': 'count', 'price_media': 'median'}).reset_index()
#         # df44['price_count']=df44['count']
#         # df_avg_settle = df3.groupby(by=['product_id','secondary_level_name', 'ppv_combine'])['item_quotation_price_num_avg'].agg(
#         #     {'item_quotation_price_num_avg': 'mean'}).reset_index()
#         # df_avg_settle1 = df3.groupby(by=['product_id', 'secondary_level_name', 'ppv_combine'])[['item_quotation_price_num_avg', 'cnt']].apply(avg_price_by_cnt_weight_fun).reset_index()
#
#         # df_avg_max_price = df3.groupby(by=['product_id','secondary_level_name', 'ppv_combine'])['max_price'].agg(
#         #     {'max_price': 'mean'}).reset_index()
#
#         # df44 = df44.merge(df_avg_max_price, on=['product_id','secondary_level_name', 'ppv_combine'])
#         # df44 = df44.merge(df_avg_settle, on=['product_id','secondary_level_name', 'ppv_combine'])
#         # df44['settle_rate'] = df44['item_quotation_price_num_avg'] / df44['max_price']
#
#         df44 = df44.merge(self.combine_df[['product_id', 'ppv_combine', 'rank_score', 'rank_low_score']],
#                       on=['product_id', 'ppv_combine'])
#
#         df44 =df44.merge(self.ppv_combine_percent_df[['product_id', 'secondary_level_id', 'ppv_combine', 'cnt',
#            'total_cnt', 'percent', 'percent_rank']], how='left', on=['product_id','secondary_level_id', 'ppv_combine'])
#         return df44
#
#
#     def first_second_base_fun(self,df444):
#
#         df444['product_level'] = df444['product_id'].astype(str) + '_' + df444['level_sub'].astype(str) + '_' + df444['storage'].astype(str)
#         level_max_cont_df = df444.copy()
#         level_first_cont_df= level_max_cont_df.copy()
#         level_first_cont_df['count_rank'] = level_max_cont_df['sum_cnt'].groupby(df444['product_level']).rank(
#             ascending=False, method='first')
#
#         level_max_cont_df['count_rank'] = level_max_cont_df['sum_cnt'].groupby(df444['product_level']).rank(
#             ascending=False, method='first')
#         leve_first_df = level_first_cont_df.loc[level_first_cont_df.count_rank == 1]
#         leve_first_df = leve_first_df.rename(
#             columns={"price_mean_rate": "first_cnt_rate", 'rank_score': 'rank_score_first'})
#
#         leve_second_df = level_max_cont_df.loc[level_max_cont_df.count_rank == 2]
#         leve_second_df = leve_second_df.rename(
#             columns={"price_mean_rate": "second_cnt_rate", 'rank_score': 'rank_score_second'})
#         return leve_first_df,leve_second_df
#
#     def avg_ppv_rate(self,x):
#         tmp=x
#         price_mean_rate =tmp['ppv_rate1']*tmp['cnt_w1_weight']+tmp['ppv_rate2']*tmp['cnt_w2_weight']+tmp['ppv_rate3']*tmp['cnt_w3_weight']
#
#         return price_mean_rate
#
#     def main_fun(self):
#         ppv_level_df=self.ppv_combine_percent_df
#         df2,ppv_rank_df = self.get_process_data(ppv_level_df,self.start7_date,self.end_date)
#         df2_2,ppv_rank_df = self.get_process_data(ppv_level_df,self.start14_date,self.start7_date)
#         df2_3,ppv_rank_df = self.get_process_data(ppv_level_df,self.start21_date,self.start14_date)
#         df2_1_1  = df2[['product_id', 'secondary_level_id','ppv_combine','secondary_level_name','ppv_rate','cnt']].rename(columns={"ppv_rate":"ppv_rate1","secondary_level_name":"secondary_level_name_w1","cnt":"cnt_w1"})
#         df2_2_1  = df2_2[['product_id', 'secondary_level_id','ppv_combine','secondary_level_name','ppv_rate','cnt']].rename(columns={"ppv_rate":"ppv_rate2","secondary_level_name":"secondary_level_name_w2","cnt":"cnt_w2"})
#         df2_3_1  = df2_3[['product_id', 'secondary_level_id','ppv_combine','secondary_level_name','ppv_rate','cnt']].rename(columns={"ppv_rate":"ppv_rate3","secondary_level_name":"secondary_level_name_w3","cnt":"cnt_w3"})
#
#         df2_merge = ppv_level_df.merge(df2_1_1,how='left',on=['product_id', 'secondary_level_id','ppv_combine'])
#         df2_merge = df2_merge.merge(df2_2_1,how='left',on=['product_id', 'secondary_level_id','ppv_combine'])
#         df2_merge = df2_merge.merge(df2_3_1,how='left',on=['product_id', 'secondary_level_id','ppv_combine'])
#
#         df2_merge['sum_cnt'] = df2_merge[['cnt_w1','cnt_w2','cnt_w3']].sum(axis=1)
#         df2_merge['cnt_w1_weight'] = df2_merge['cnt_w1']/df2_merge['sum_cnt']
#         df2_merge['cnt_w2_weight'] = df2_merge['cnt_w2']/df2_merge['sum_cnt']
#         df2_merge['cnt_w3_weight'] = df2_merge['cnt_w3']/df2_merge['sum_cnt']
#         df2_merge[['ppv_rate1', 'ppv_rate2', 'ppv_rate3','cnt_w1_weight','cnt_w2_weight','cnt_w3_weight']] = df2_merge[['ppv_rate1', 'ppv_rate2', 'ppv_rate3','cnt_w1_weight','cnt_w2_weight','cnt_w3_weight']].fillna(0)
#         df2_merge['ppv_rate'] = df2_merge.apply(self.avg_ppv_rate,axis=1)
#         df2_merge = df2_merge.merge(self.level_name,on=['secondary_level_id'])
#         df2_merge = df2_merge.loc[(df2_merge.sum_cnt>0)&(df2_merge.ppv_rate>0)]
#         df2_merge.loc[df2_merge.product_id == 32290, ['ppv_combine','secondary_level_name', 'secondary_level_name_w1', 'secondary_level_name_w2',
#                                           'secondary_level_name_w3', 'ppv_rate','ppv_rate1', 'ppv_rate2', 'ppv_rate3', 'cnt_w1',
#                                           'cnt_w2', 'cnt_w3','sum_cnt','cnt_w1_weight','cnt_w2_weight','cnt_w3_weight']]
#
#         df44 = self.process_ppv_rate_fun(ppv_rank_df,df2_merge,cnt_limit=0)
#         logger.info('dsd')
#
#         df44['level_ppv_rank'] = df44['product_id'].astype(str) + '_'+ df44['secondary_level_name'] + '_' + df44['storage'].astype(str)
#         df44['product_level'] = df44['product_id'].astype(str) + '_' + df44['secondary_level_name'].astype(str)+ '_' + df44['storage'].astype(str)
#         df44[['rank_count']] = df44[['price_count']].groupby(df44['level_ppv_rank']).rank(ascending=False, method='first')
#         df44 = df44.sort_values(by=['product_id','secondary_level_name', 'rank_score','price_count'], ascending=[True, True, False,False])
#
#         # 此处根据小等级维度获取 基准的比率
#         level_max_cont_df = df44.loc[
#             df44.rank_count == 1, ['product_id','secondary_level_name','storage', 'level_ppv_rank', 'rank_score', 'price_media',
#                                     'price_count', 'rank_count']].sort_values(
#             by=['product_id','secondary_level_name', 'rank_score'], ascending=[True,True, False])
#
#         level_first_cont_df= level_max_cont_df.copy()
#         level_first_cont_df['count_rank'] = level_max_cont_df.loc[level_max_cont_df.price_media==1,'price_count'].groupby(df44['product_level']).rank(
#             ascending=False, method='first')
#
#         level_max_cont_df['count_rank'] = level_max_cont_df['price_count'].groupby(df44['product_level']).rank(
#             ascending=False, method='first')
#         leve_first_df = level_first_cont_df.loc[level_first_cont_df.count_rank == 1]
#         leve_first_df = leve_first_df.rename(
#             columns={"price_media": "first_cnt_rate", 'rank_score': 'rank_score_first'})
#
#         leve_second_df = level_max_cont_df.loc[level_max_cont_df.count_rank == 2]
#         leve_second_df = leve_second_df.rename(
#             columns={"price_media": "second_cnt_rate", 'rank_score': 'rank_score_second'})
#
#         df44 = df44.merge(leve_first_df[['product_id','secondary_level_name', 'storage','first_cnt_rate', 'rank_score_first']], how='left',
#                           on=['product_id','secondary_level_name','storage'])
#         df44 = df44.merge(leve_second_df[['product_id','secondary_level_name', 'storage', 'second_cnt_rate', 'rank_score_second']], how='left',
#                           on=['product_id','secondary_level_name','storage'])
#
#         # df44.loc[~(df44['price_media']>0),'price_media_filter']=df44.loc[~(df44['price_media_filter']>0),'price_media']
#         df44['reverse_rate']=df44['price_media']
#         # df44.loc[~(df44['price_media_filter']>0),'reverse_rate']=df44.loc[~(df44['price_media_filter']>0),'price_media']
#         df44['reverse_rate_bak']=df44['reverse_rate']
#         df44['level_sub']=df44['secondary_level_name'].str[0]
#         # self.save_main_fun(df44)
#         df44.to_csv('/data/sunpengwei/tmp/c2b_sku2_ppv_rate_0831.csv', encoding='utf-8-sig')
#         print('ds')
#         return df44
#
#     def save_main_fun(self,df44):
#         df44['create_date']=self.end_date
#         predict_data=df44
#         secondary_level_rate_columns=['create_date','product_id', 'product_name', 'secondary_level_id',
#        'secondary_level_name', 'ppv_combine', 'storage', 'purchase_way',
#        'guarantee', 'price_mean', 'price_count', 'price_media', 'max_price',
#        'item_quotation_price_num_avg', 'settle_rate', 'rank_score',
#        'rank_low_score', 'cnt', 'total_cnt', 'percent', 'percent_rank',
#         'product_level', 'rank_count',
#        'reverse_rate']
#
#         predict_data = predict_data.fillna(-1)
#
#         delete_rate_sql = """
#         delete from c2b_secondary_level_ppv_rate where create_date=DATE_FORMAT(date_sub(curdate(),interval 30 day),'%Y%m%d')
#         """
#         mysql_prediction_processor.execute_sql(delete_rate_sql)
#         delete_td_rate_sql = """
#                 delete from c2b_secondary_level_ppv_rate where create_date='{}'
#                 """.format(self.end_date)
#         mysql_prediction_processor.execute_sql(delete_td_rate_sql)
#         insert_sql = """
#         INSERT INTO c2b_secondary_level_ppv_rate(create_date,product_id, product_name, secondary_level_id,secondary_level_name, ppv_combine, storage, purchase_way,guarantee,
#                     price_mean, price_count, price_media, max_price,
#                     item_quotation_price_num_avg, settle_rate, rank_score,rank_low_score, cnt, total_cnt, percent, percent_rank, product_level, rank_count, reverse_rate)
#         VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,  %s,%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,  %s)
#         """
#         # predict_data=predict_data.reset_index()
#         mysql_prediction_processor.execute_insert_sql(insert_sql,
#                                                       predict_data[secondary_level_rate_columns].to_records(index=False).tolist())
#         logger.info('saving predict data to mysql done')
#         mysql_price_num = predict_data.shape[0]
#         logger.info('c2b_secondary_level_ppv_rate datasize={}'.format(mysql_price_num))
#
#     def pool_rate(self,x):
#         tmp=x
#         price_mean_rate =(tmp['price_media']*tmp['percent']).sum()/tmp['percent'].sum()
#         sum_cnt=tmp['cnt'].sum()
#         cishu=tmp['cnt'].count()
#         size=tmp.shape[0]
#         # if size>1:
#         #     print(size)
#         # total_cnt=tmp['total_cnt'].sum()
#         cols =['price_mean_rate','sum_cnt','cishu']
#         return pd.Series([price_mean_rate,sum_cnt,cishu],index=cols)
#
#
#     def process_level_sub_ppv_rate_fun(self,df44):
#         # product_base_ppv_df =get_product_base_ppv_fun()
#         product_base_ppv_rate_df = df44.merge(self.product_base_ppv_df[['product_id','ppv_combine','cnt','ppv_base_rank','rank_score_base']],on=['product_id','ppv_combine'])
#         product_base_ppv_rate_df = product_base_ppv_rate_df[['product_id','secondary_level_name', 'storage','price_mean']]
#         product_base_ppv_rate_df=product_base_ppv_rate_df.rename(columns={'price_mean':'price_mean_base'})
#
#         df44 = df44.merge(product_base_ppv_rate_df,on=['product_id','secondary_level_name', 'storage'])
#
#         df44['price_mean'] =df44['price_media']/df44['price_mean_base']
#         df_ret =df44.groupby(by=['product_id','product_name','level_sub', 'storage','ppv_combine','rank_score']).apply(self.pool_rate).reset_index()
#         sum_df =df44[['product_id','product_name','level_sub','secondary_level_name', 'storage','total_cnt']].drop_duplicates()
#         sum_df =sum_df.groupby(by=['product_id','level_sub', 'storage'])['total_cnt'].sum().reset_index()
#         df_ret = df_ret.merge(sum_df,on=['product_id','level_sub', 'storage'])
#         leve_first_df,leve_second_df =self.first_second_base_fun(df_ret)
#         ret_df = df_ret.merge(leve_first_df[['product_id', 'level_sub', 'storage', 'first_cnt_rate', 'rank_score_first']],
#                           how='left',
#                           on=['product_id', 'level_sub', 'storage'])
#         ret_df = ret_df.merge(leve_second_df[['product_id', 'level_sub', 'storage', 'second_cnt_rate', 'rank_score_second']],
#                           how='left',
#                           on=['product_id', 'level_sub', 'storage'])
#         ret_df.loc[pd.isna(ret_df.second_cnt_rate),'second_cnt_rate']=ret_df.loc[pd.isna(ret_df.second_cnt_rate),'first_cnt_rate']
#         ret_df.loc[pd.isna(ret_df.rank_score_second),'rank_score_second']=ret_df.loc[pd.isna(ret_df.rank_score_second),'rank_score_first']-1
#
#         ret_df['diff_score_per_rate'] = (ret_df['first_cnt_rate'] - ret_df['second_cnt_rate']) / (
#                     ret_df['rank_score_first'] - ret_df['rank_score_second'])
#         ret_df['reverse_rate'] = 1 + ret_df['diff_score_per_rate'] * (ret_df['rank_score'] - ret_df['rank_score_first'])
#
#
#         ret_df =ret_df.merge(self.query_level_sub_ppv_df[['product_id','level_sub','ppv_combine','level_item_quotation_avg_price']],how='left',on=['product_id','level_sub','ppv_combine'])
#         product_level_storage_rate = ret_df.loc[ret_df.price_mean_rate == 1, ['product_id', 'level_sub', 'storage','sum_cnt', 'level_item_quotation_avg_price']]
#
#         product_level_storage_rate['product_level'] = product_level_storage_rate['product_id'].astype(str) + '_' + product_level_storage_rate['level_sub'].astype(
#             str)
#         product_level_storage_rate['count_rank'] = product_level_storage_rate['sum_cnt'].groupby(product_level_storage_rate['product_level']).rank(ascending=False,
#                                                                                                             method='first')
#         product_level_storage_rate = product_level_storage_rate.loc[product_level_storage_rate.count_rank==1]
#         product_level_storage_rate = product_level_storage_rate[['product_id', 'level_sub','level_item_quotation_avg_price']].rename(columns={'level_item_quotation_avg_price': 'level_item_quotation_avg_base_price'})
#
#         ret_df = ret_df.merge(product_level_storage_rate, on=['product_id', 'level_sub'])
#         ret_df['base_storage_rate']=ret_df['level_item_quotation_avg_price']*1.000000/ret_df['level_item_quotation_avg_base_price']
#
#         product_level_storage_base_rate = ret_df.loc[ret_df.price_mean_rate == 1]
#
#         product_level_storage_base_rate['count_rank'] = product_level_storage_base_rate['sum_cnt'].groupby(product_level_storage_base_rate['product_level']).rank(ascending=False,method='first')
#
#         product_level_storage_base_rate = product_level_storage_base_rate.loc[ (product_level_storage_base_rate.price_mean_rate == 1)&(product_level_storage_base_rate.count_rank==1), ['product_id', 'level_sub', 'storage', 'base_storage_rate']]
#         product_level_storage_base_rate = product_level_storage_base_rate.rename( columns={'base_storage_rate': 'base_rate'})
#
#         ret_df = ret_df.merge(product_level_storage_base_rate, on=['product_id', 'level_sub','storage'])
#
#         ret_df['final_rate'] = ret_df['base_rate'] * ret_df['price_mean_rate']
#         ret_df.sort_values(by=['product_id', 'level_sub', 'rank_score'],inplace=True)
#         # self.save_process_level_sub_ppv_rate_fun(ret_df)
#         ret_df.to_csv('/data/sunpengwei/tmp/c2b_level_sub_ppv_rate_0824.csv', encoding='utf-8-sig')
#         return ret_df
#
#     def save_process_level_sub_ppv_rate_fun(self,ret_df):
#         ret_df['create_date']=self.end_date
#         predict_data=ret_df
#         levelsub_rate_columns=[ 'create_date','product_id', 'product_name', 'level_sub',  'ppv_combine','storage',
#        'rank_score', 'price_mean_rate', 'sum_cnt', 'cishu', 'total_cnt',
#        'product_level', 'first_cnt_rate', 'rank_score_first',
#        'second_cnt_rate', 'rank_score_second', 'diff_score_per_rate',
#        'reverse_rate', 'level_item_quotation_avg_price',
#        'level_item_quotation_avg_base_price', 'base_storage_rate', 'base_rate','final_rate']
#
#         predict_data = predict_data.fillna(-1)
#
#         delete_rate_sql = """
#         delete from c2b_levelsub_ppv_rate where create_date=DATE_FORMAT(date_sub(curdate(),interval 30 day),'%Y%m%d')
#         """
#         mysql_prediction_processor.execute_sql(delete_rate_sql)
#         delete_td_rate_sql = """
#                 delete from c2b_levelsub_ppv_rate where create_date='{}'
#                 """.format(self.end_date)
#         mysql_prediction_processor.execute_sql(delete_td_rate_sql)
#         insert_sql = """
#         INSERT INTO c2b_levelsub_ppv_rate(create_date,product_id, product_name, level_sub,  ppv_combine,storage,
#        rank_score, price_mean_rate, sum_cnt, cishu, total_cnt,
#        product_level, first_cnt_rate, rank_score_first,
#        second_cnt_rate, rank_score_second, diff_score_per_rate,
#        reverse_rate, level_item_quotation_avg_price,
#        level_item_quotation_avg_base_price, base_storage_rate, base_rate,final_rate)
#         VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,  %s,%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
#         """
#         # predict_data=predict_data.reset_index()
#         mysql_prediction_processor.execute_insert_sql(insert_sql,
#                                                       predict_data[levelsub_rate_columns].to_records(index=False).tolist())
#         logger.info('saving predict data to mysql done')
#         mysql_price_num = predict_data.shape[0]
#         logger.info('c2b_levelsub_ppv_rate datasize={}'.format(mysql_price_num))
#
#
#     def reverse_mean_rate_fun(self,grouped,resDf):
#
#         totalCnt = grouped.count().shape[0]
#         iter=0
#         for name, group in grouped:
#             # product_key = group[0]
#             group = group.sort_values(by=['rank_score'],ascending=[ False])
#             group.reset_index(drop=True, inplace=True)
#             size = group.shape[0]
#             if size<2:
#                 resDf = resDf.append(group.copy())
#                 continue
#             for i in range(size-1):
#                 if group.at[i,'reverse_rate']<group.at[i+1,'reverse_rate']:
#                     final_rate=group.at[i + 1, 'reverse_rate']
#                     storage_purchase=group.at[i + 1, 'storage_purchase']
#                     storage_guarantee=group.at[i + 1, 'storage_guarantee']
#                     sum_cnt=group.at[i + 1, 'sum_cnt']
#                     flag=0
#
#                     for j in range(i,-1,-1):
#                         if group.at[j, 'storage_purchase']==storage_purchase and  group.at[j, 'reverse_rate']<=final_rate:
#                             discount=1.00
#                             if group.at[i + 1, 'sum_cnt'] >= 10 and group.at[j, 'sum_cnt'] >= 10:
#                                 discount=0.2
#                             if sum_cnt<=group.at[j, 'sum_cnt']:
#                                 flag=1
#                                 up_rate=group.at[i + 1, 'reverse_rate']-(group.at[i + 1, 'reverse_rate']-group.at[i + 1, 'rank_score']*1.000000/group.at[j, 'rank_score']*group.at[j, 'reverse_rate'])*discount
#                                 group.at[i + 1, 'reverse_rate']=min(up_rate,group.at[i, 'reverse_rate']-0.01)
#                                 group.at[i + 1, 'reverse_flag']=1
#                                 break
#                             else:
#                                 flag = 1
#                                 index_j_rate = group.at[j, 'rank_score'] * 1.000000 / group.at[ i + 1, 'rank_score'] * group.at[i + 1, 'reverse_rate']
#                                 index_j_rate = max(index_j_rate, group.at[i + 1, 'reverse_rate'] + 0.01)
#                                 if j>0:
#                                     up_rate = group.at[j-1, 'reverse_rate']
#                                     if index_j_rate>up_rate:
#                                         index_j_rate=up_rate-0.01
#                                 group.at[j, 'reverse_rate'] =index_j_rate
#                                 group.at[j, 'reverse_flag'] = 1
#                                 break
#                     if flag==0:
#                         for j in range(i,-1,-1):
#                             if group.at[j, 'storage_guarantee']==storage_guarantee and group.at[j, 'reverse_rate']<=final_rate:
#                                 discount = 1.00
#                                 if group.at[i + 1, 'sum_cnt'] >= 10 and group.at[j, 'sum_cnt'] >= 10:
#                                     discount = 0.2
#                                 if sum_cnt <= group.at[j, 'sum_cnt']:
#                                     flag = 1
#                                     up_rate = group.at[i + 1, 'reverse_rate']-(group.at[i + 1, 'reverse_rate']-group.at[i + 1, 'rank_score'] * 1.000000 / group.at[ j, 'rank_score'] * group.at[j, 'reverse_rate'])*discount
#                                     group.at[i + 1, 'reverse_rate'] = max(min(up_rate,group.at[i, 'reverse_rate']-0.01),group.at[i, 'reverse_rate']-0.05)
#                                     group.at[i + 1, 'reverse_flag'] = 1
#                                     break
#                                 else:
#                                     flag = 1
#                                     index_j_rate = group.at[j, 'reverse_rate']-(group.at[j, 'reverse_rate']-group.at[j, 'rank_score'] * 1.000000 / group.at[i + 1, 'rank_score'] *  group.at[i + 1, 'reverse_rate'])*discount
#                                     index_j_rate=max(index_j_rate,group.at[i+1, 'reverse_rate']+0.01)
#                                     if j > 0:
#                                         up_rate = group.at[j - 1, 'reverse_rate']
#                                         if index_j_rate > up_rate:
#                                             index_j_rate = up_rate - 0.01
#                                     group.at[j, 'reverse_rate'] = index_j_rate
#                                     group.at[j, 'reverse_flag'] = 1
#                     if flag==0:
#                         for j in range(i,-1,-1):
#                             if group.at[j, 'purchase_way_guarantee']==storage_guarantee and group.at[j, 'reverse_rate']<=final_rate:
#                                 discount = 1.00
#                                 if group.at[i + 1, 'sum_cnt'] >= 10 and group.at[j, 'sum_cnt'] >= 10:
#                                     discount = 0.2
#                                 if sum_cnt <= group.at[j, 'sum_cnt']:
#                                     flag = 1
#                                     up_rate = group.at[i, 'reverse_rate']-(group.at[i + 1, 'reverse_rate']-group.at[i + 1, 'rank_score'] * 1.000000 / group.at[ j, 'rank_score'] * group.at[j, 'reverse_rate'])*discount
#                                     group.at[i + 1, 'reverse_rate'] = max(min(up_rate,group.at[i, 'reverse_rate']-0.01),group.at[i, 'reverse_rate']-0.05)
#                                     group.at[i + 1, 'reverse_flag'] = 1
#                                     break
#                                 else:
#                                     flag = 1
#                                     index_j_rate = group.at[j, 'reverse_rate']-(group.at[j, 'reverse_rate']-group.at[j, 'rank_score'] * 1.000000 / group.at[i + 1, 'rank_score'] *  group.at[i + 1, 'reverse_rate'])*discount
#                                     index_j_rate = max(index_j_rate, group.at[i + 1, 'reverse_rate'] + 0.01)
#                                     if j > 0:
#                                         up_rate = group.at[j - 1, 'reverse_rate']
#                                         if index_j_rate > up_rate:
#                                             index_j_rate = up_rate - 0.01
#                                     group.at[j, 'reverse_rate'] = index_j_rate
#                                     group.at[j, 'reverse_flag'] = 1
#                     flag=0
#                 # print(i)
#             resDf = resDf.append(group.copy())
#             iter += 1
#             iter += 1
#             resDf.loc[(resDf.product_id == 43511) & (resDf.level_sub == 'B'), ['ppv_combine', 'rank_score', 'final_rate',
#                                                                                'reverse_flag', 'reverse_rate', 'sum_cnt',
#                                                                                'storage_purchase',
#                                                                                'storage_guarantee']].sort_values(
#                 by='rank_score', ascending=False)
#             group[['ppv_combine', 'rank_score', 'final_rate', 'reverse_flag', 'reverse_rate', 'sum_cnt', 'storage_purchase',
#                    'storage_guarantee']]
#             logger.info('totalCnt= {},product_key={},iter = {},size={}'.format(totalCnt, str(name), iter, size))
#         return resDf
#
#     def repair_reverse_rate_fun(self):
#         c2b_level_sub_ppv_rate_df = pd.read_csv('/data/sunpengwei/tmp/c2b_level_sub_ppv_rate_0824.csv', encoding='utf-8-sig')
#         c2b_level_sub_ppv_rate_df['purchase_way']=c2b_level_sub_ppv_rate_df['ppv_combine'].apply(lambda x:x.split('_')[1])
#         c2b_level_sub_ppv_rate_df['guarantee']=c2b_level_sub_ppv_rate_df['ppv_combine'].apply(lambda x:x.split('_')[2])
#
#         c2b_level_sub_ppv_rate_df['storage_purchase']= c2b_level_sub_ppv_rate_df['storage'] + '_' + c2b_level_sub_ppv_rate_df['purchase_way'].astype(str)
#         c2b_level_sub_ppv_rate_df['storage_guarantee']= c2b_level_sub_ppv_rate_df['storage'] + '_' + c2b_level_sub_ppv_rate_df['guarantee'].astype(str)
#         c2b_level_sub_ppv_rate_df['purchase_way_guarantee']= c2b_level_sub_ppv_rate_df['purchase_way'] + '_' + c2b_level_sub_ppv_rate_df['guarantee'].astype(str)
#         c2b_level_sub_ppv_rate_df['reverse_flag']=0
#         c2b_level_sub_ppv_rate_df['reverse_rate']=c2b_level_sub_ppv_rate_df['final_rate']
#         # c2b_level_sub_ppv_rate_df = c2b_level_sub_ppv_rate_df.loc[(c2b_level_sub_ppv_rate_df.product_id==36045)&(c2b_level_sub_ppv_rate_df.level_sub=='A')]
#         resDf = pd.DataFrame(columns=c2b_level_sub_ppv_rate_df.columns.tolist())
#         grouped = c2b_level_sub_ppv_rate_df.groupby(by=['product_id', 'product_name','storage', 'level_sub'])
#         storage_resDf = self.reverse_mean_rate_fun(grouped,resDf)
#         # resDf=storage_resDf
#         grouped = storage_resDf.groupby(by=['product_id','product_name','level_sub'])
#         resDf = pd.DataFrame(columns=storage_resDf.columns.tolist())
#         resDf = self.reverse_mean_rate_fun(grouped, resDf)
#
#         resDf['price_mean_rate_reverse']=resDf['reverse_rate']/resDf['base_rate']
#         resDf.to_csv('/data/sunpengwei/tmp/c2b_level_sub_ppv_reverse_rate_0824.csv', encoding='utf-8-sig')
#         return resDf
#
#
#
#     def get_sale_price_to_ppv_rate_price_fun(self,md_date):
#         query_sql="""
#         select sku2level.product_sku_name as product_sku_name,sku2level.product_id as product_id,sku2level.product_name as product_name,sku2level.product_brand_id as product_brand_id,
#         sku2level.level_template_id as level_template_id,
#         sku2level.product_brand_name as product_brand_name,sku2level.product_level_name as product_level_name,sku2level.product_sku_id as product_sku_id,
#         sku2level.product_level_id as product_level_id,sku2level.rank as rank,
#         case when sku2level.price_3>0 then cast(sku2level.price_3 as int) else -1 end price_3,
#         case when sku2level.price_2>0 then cast(sku2level.price_2 as int) else -1 end price_2,
#         case when sku2level.price_1>0 then cast(sku2level.price_1 as int) else -1 end price_1,
#          cast(sku2level.saleprice as int) as saleprice,
#         case when qty='' then 0 else CAST(sku2level.qty as int) end qty,sku2level.up_date as up_date,
#         dpk.memory_name as storage ,
#         dpk.purchase_channel_name as purchase_way ,
#         dpk.network_standard_name as guarantee,
#         dpk.small_model_name as small_version
#         from c2b.BasePriceData_CategoryPhone sku2level
#         inner join dim.dim_product_sku dpk
#         on sku2level.product_sku_id=dpk.product_sku_id
#         where sku2level.up_date=cast('{}' as date)  and sku2level.product_category_parent_id=1
#         and dpk.partition_flag=1
#         and sku2level.product_brand_id !=52
#         and sku2level.product_id in (31767, 39143, 37600, 34097, 23906, 39004, 35515, 34688, 39025, 36687, 36248, 33498, 32850, 39165, 37524, 35622,
# 35164, 38972, 36247, 23866, 36045, 33009, 39177, 37703, 34402, 34053, 37109, 37557, 35557, 34352, 39064, 36300, 37598, 34694, 34086, 27551,
#  35941, 35884, 36047, 36046, 26422, 27784, 37330, 34397, 33012, 42861, 35623, 36439, 28421, 30175, 22706, 37680, 35480, 37051, 35963, 20692,
#   35100, 37650, 34748, 17425, 38297, 27877, 34464, 29116, 34403, 37512, 24390, 22441, 17527, 36246, 27822, 36939, 37589, 28760, 35106,
#   38394, 37217, 35700, 34687, 35290, 29637, 28121, 2246,2247,17455,17457,17460,17458,17459,20079,34701,43512,43513,43511,43510,
#   39142, 39087, 38660, 38564, 38257, 37677, 37676, 37664, 37627, 37604, 37519, 37376,
#   37094, 37019, 36985, 36805, 36744, 36708, 36510, 36493, 36249, 36095, 36044, 35585, 35584, 35547, 35246, 34948, 34809, 34756, 34755,
#   34754, 34753, 34708, 34576, 34575, 34504, 34401, 34324, 34286, 34164, 33468, 33281, 32955, 32835, 32588, 32461, 32460, 32292, 32291,
#   32290, 32068, 32066, 32050, 31945, 31008, 30173, 30044, 29705, 29693, 29607, 29291, 29261, 29260, 29115, 29023, 29002, 29000, 28999,
#   28968, 28829, 28612, 28495, 28168, 27860, 27859, 27782, 27781, 27640, 27639, 27637, 27627, 27550, 27512, 27344, 27301, 27280, 27253,
#   27036, 26959, 26913, 26912, 26707, 26613, 26536, 26512, 26491, 26467, 26464, 26431, 26428, 26381, 26170, 26102, 26099, 26078, 25998,
#   25950, 25827, 25820, 25806, 25731, 25680, 25679, 25677, 25676, 25519, 25240, 25170, 25097, 24799, 24348, 24347, 24240, 24217, 23819,
#   23788, 23756, 23668, 23640, 23423, 23422, 23049, 20421, 20122, 19664, 17895, 17752, 17726, 17462, 17461, 15080,
#   9478, 15080, 17252, 17338, 17507, 17578, 17887, 17895, 17896, 19664, 19665, 20122, 20421, 22187, 22441, 22706, 23049, 23077, 23125,
#    23534, 23587, 23614, 23640, 23668, 23760, 23788, 23814, 23819, 23862, 23866, 24217, 24240, 24390, 24992, 25231, 25432, 25519, 25641,
#     25682, 25820, 25985, 26062, 26129, 26165, 26167, 26170, 26422, 26490, 26491, 26493, 26513, 26662, 27035, 27036, 27541, 28760, 29348,
#     36744, 29260, 21885, 23841, 24552, 26808, 27253, 36246, 32955, 17698, 26098, 34576, 26912, 32588, 35884, 30173, 28495, 20499, 25616,
#     25765, 34756, 35762, 27344, 34754, 20751, 21769, 22117, 32827, 34376, 32835, 27550, 36633, 23375, 25400, 25422, 26078, 26536, 27512,
#     27782, 29637, 27860, 27781, 34286, 28614, 26431, 19657, 23968, 26671, 29261, 23756, 28168, 34053, 24348, 32244, 26613, 28829, 27980,
#     26197, 29003, 32089, 34755, 27301, 29117, 35290, 32890, 27600, 34948, 25232, 19534, 26913, 30175, 17304, 19343, 23865, 24239, 29116,
#     31008, 32066, 35547, 36784, 24347, 34590, 32460, 32461, 27549, 27627, 26464, 25097, 34575, 27280, 33280, 17616, 17967, 25401, 27859,
#     28779, 29693, 36510, 26959, 33416, 29118, 28612, 27764, 20173, 20174, 25239, 25731, 26099, 36249, 26467, 26492, 29263, 27602, 27110,
#     28502, 25806, 29443, 32068, 34164, 34753, 26102, 29291, 36628, 17795, 21036, 24993, 29836, 35164, 39142, 29222, 26428, 27822, 35963,
#     36493, 20204, 24232, 32581, 34504, 36804, 34687, 25950, 26958, 32019, 24799, 25170, 26476, 27877, 37018, 37205, 34686, 30044, 34096,
#     26707, 26266, 27995, 33009, 38676, 37066, 29023, 33415, 36610, 20606, 20742, 23346, 25240, 29705, 32850, 35271, 35163, 34574, 37519,
#     35564, 28480, 17657, 23257, 24349, 25998, 27643, 28121, 32022, 35044, 36687, 33194, 36394, 37376, 37676, 47987, 25676, 35100, 24996,
#     27861, 28999, 29208, 31007, 29246, 30174, 33007, 27170, 32884, 35585, 31445, 34748, 17793, 25677, 28253, 34808, 27646, 23906, 34809,
#     27006, 33436, 34054, 35006, 14990, 17259, 17912, 20692, 23460, 23619, 23797, 24995, 25948, 26911, 26927, 28443, 36708, 38660, 17746,
#     20144, 32716, 35322, 39087, 38600, 35584, 27908, 29666, 35197, 26512, 34324, 29002, 37077, 37677, 38257, 39001, 29777, 36749, 27551,
#     29807, 33498, 37600, 25303, 27893, 28968, 38564, 31097, 20658, 22440, 27270, 32988, 35527, 39141, 37589, 17669, 26780, 30384,
#     35349, 24966, 35106, 34086, 23535, 35246, 33468, 34287, 33281, 34734, 34097, 29115, 17325, 31945, 36250, 28525, 36805, 14972,
#     29706, 29937, 36291, 27190, 36101, 17489, 27297, 29366, 32528, 36093, 36398, 17337, 20603, 23758, 29500, 27783, 35830, 17488,
#     21581, 30602, 37687, 29607, 27793, 35809, 23891, 34795, 17425, 18030, 23281, 33363, 34401, 34709, 27882, 27011, 26380, 17886,
#     22115, 36789, 17521, 17903, 32472, 37019, 38696, 39143, 32972, 36248, 35623, 34598, 34448, 37627, 17321, 17345, 17493, 17614,
#     17712, 17857, 17869, 36985, 23776, 25752, 27334, 29413, 36462, 29845, 34431, 17579, 20618, 33012, 29534, 37487, 34397, 37330,
#     25853, 17647, 17988, 29000, 26809, 34403, 30985, 27756, 35451, 17615, 24497, 34733, 25040, 37377, 17610, 23289, 23613, 34365,
#     35582, 17611, 28466, 35008, 34708, 37596, 37650, 32055, 37604, 36677, 30385, 36939, 19533, 42701, 26001, 34896, 35941, 37664, 38584,
#     30423, 34730, 9208, 17320, 23740, 26899, 34443, 26783, 35005, 37679, 20092, 32634, 32821, 37535, 29135, 42519, 37051, 38884, 42595,
#     27778, 32632, 36692, 34526, 32958, 25521, 38250, 34402, 39165, 24341, 26716, 27405, 31440, 17486, 25717, 28684, 33425, 20143, 27693,
#     43241, 42705, 34405, 23286, 17527, 17686, 31567, 31550, 31767, 42861, 34897, 43692, 27372, 27784, 35671, 36494, 38630, 39236, 32872,
#     34464, 37094, 17787, 35007, 37680, 27491, 34688, 43220, 20418, 28625, 36300, 36352, 10025, 17494, 17713, 36095, 28566, 26027, 17733, 27988,
#     35699, 15054, 17875, 23764, 26381, 28478, 32511, 37581,66428,
#     38959, 43695, 54816, 37656, 37255, 35833, 43408, 23820, 43693, 76793, 20210, 55383, 42879, 42880, 42860, 57425, 52555, 57426, 43349, 32776, 35181, 52548, 52379, 58585, 43694, 38329)
#         """
#         # and sku2level.product_id in (34701, 27637, 25679, 20079, 17726, 17461)
#         df = hive_processor.load_sql(query_sql.format(self.end_date))
#         # fetch the ppv rank-score data (small_version join below)
#         df = df.merge(ppv_rank_df.loc[ppv_rank_df.column_name == 'small_version', ['column_value_name', 'ppv_rank', 'low_ppv_rank']],
#                           how='left', left_on='small_version', right_on='column_value_name')
#         df = df.rename(columns={"ppv_rank": "small_version_rank"})
#
#         df['small_version_rank']=df['small_version_rank'].fillna(100)
#         df['small_version_rank_rate'] = df['small_version_rank']*1.0000/100
#
#
#         # aggregate ppv: fill missing attributes and build the ppv_combine key
#         df['storage']=df['storage'].fillna('unknown')
#         df['purchase_way']=df['purchase_way'].fillna('unknown')
#         df['guarantee']=df['guarantee'].fillna('unknown')
#
#         df['ppv_combine'] = df['storage'] + '_' + df['purchase_way'] + '_' + df['guarantee']
#         df['level_sub'] = df['product_level_name'].str[0]
#         df = df.merge(self.combine_df[['product_id', 'ppv_combine', 'rank_score', 'rank_low_score','storage_rank','purchase_way_rank','guarantee_rank']],
#                       on=['product_id', 'ppv_combine'])
#         df = df.rename(columns={"rank_score": "rank_score_price", "rank_low_score": "rank_low_score_price"})
#
#
#         perid_df = get_period_price_fun(md_date, flag=False)
#         df = df.merge(perid_df, how='left', left_on=['product_sku_id', 'product_level_id'],
#                         right_on=['product_sku_key', 'product_level_key'])
#
#
#
#         query_product_ppv_small_version_cnt_df = presto_processor.load_sql(query_product_ppv_small_version_cnt.format(self.start7_date,self.start7_date,self.end_date))
#         query_product_ppv_small_version_cnt_df['small_version'] = query_product_ppv_small_version_cnt_df['small_version'].fillna('unknown')
#         query_product_ppv_small_version_cnt_df['storage'] = query_product_ppv_small_version_cnt_df['storage'].fillna('unknown')
#         query_product_ppv_small_version_cnt_df['purchase_way'] = query_product_ppv_small_version_cnt_df['purchase_way'].fillna('unknown')
#         query_product_ppv_small_version_cnt_df['guarantee'] = query_product_ppv_small_version_cnt_df['guarantee'].fillna('unknown')
#         query_product_ppv_small_version_cnt_df = query_product_ppv_small_version_cnt_df.merge(ppv_rank_df.loc[ppv_rank_df.column_name == 'small_version', ['column_value_name', 'ppv_rank']],
#                       how='left', left_on='small_version', right_on='column_value_name')
#         query_product_ppv_small_version_cnt_df = query_product_ppv_small_version_cnt_df.rename(columns={"ppv_rank": "small_version_rank"})
#         query_product_ppv_small_version_cnt_df['small_version_rank'] = query_product_ppv_small_version_cnt_df['small_version_rank'].fillna(100)
#
#         query_product_ppv_small_version_cnt_df['ppv_combine'] = query_product_ppv_small_version_cnt_df['storage'] + '_' + query_product_ppv_small_version_cnt_df[
#             'purchase_way'] + '_' + query_product_ppv_small_version_cnt_df['guarantee']
#         small_version_df = query_product_ppv_small_version_cnt_df.groupby(by=['product_id','ppv_combine','small_version_rank'])['cnt'].sum().reset_index()
#
#         small_version_100_df = small_version_df.loc[small_version_df.small_version_rank==100,['product_id','ppv_combine','small_version_rank','cnt']]
#         small_version_100_df=small_version_100_df.rename(columns={"cnt": "cnt_100"})
#         small_version_98_df = small_version_df.loc[small_version_df.small_version_rank==98,['product_id','ppv_combine','small_version_rank','cnt']]
#         small_version_98_df=small_version_98_df.rename(columns={"cnt": "cnt_98"})
#
#         # determine the baseline sku within each model
#         base_storage_sku_df = df.groupby(by=['product_id','ppv_combine','storage','product_sku_id'])['sale_num_0_7'].sum().reset_index()
#
#         base_storage_sku_df['cnt_rank'] = base_storage_sku_df.groupby(by=['product_id', 'storage'])['sale_num_0_7'].rank(ascending=False, method='first')
#
#         base_storage_sku_df = base_storage_sku_df.loc[base_storage_sku_df.cnt_rank == 1]
#         base_storage_sku_df = base_storage_sku_df.rename(columns={"sale_num_0_7": 'qty_sum'})
#
#         # ppv combination ratio processing
#         level_sub_ppv_rate_df =pd.read_csv('/data/sunpengwei/tmp/c2b_all_level_sub_ppv_reverse_rate_0927.csv',encoding='utf-8-sig')
#         level_sub_ppv_rate_df['price_mean_rate'] = level_sub_ppv_rate_df['reverse_rate']*1.000000/level_sub_ppv_rate_df['base_rate']
#         level_sub_ppv_rate_df['price_mean_rate_reverse'] = level_sub_ppv_rate_df['reverse_rate']*1.000000/level_sub_ppv_rate_df['base_rate']
#
#         level_sub_base_ppv_combine = level_sub_ppv_rate_df[['product_id','level_sub','ppv_combine','storage','price_mean_rate_reverse']].merge(base_storage_sku_df[['product_id','ppv_combine']],on=['product_id','ppv_combine'])
#         level_sub_base_ppv_combine = level_sub_base_ppv_combine[['product_id','level_sub','storage','price_mean_rate_reverse']].rename(columns={"price_mean_rate_reverse": "product_base_mean_rate"})
#         level_sub_ppv_rate_df = level_sub_ppv_rate_df.merge(level_sub_base_ppv_combine,on=['product_id','level_sub','storage'])
#         level_sub_ppv_rate_df['price_mean_rate_reverse']=level_sub_ppv_rate_df['price_mean_rate_reverse']/level_sub_ppv_rate_df['product_base_mean_rate']
#
#         base_storage_sku_df['product_id'] = base_storage_sku_df['product_id'].astype(np.int64)
#         level_sub_ppv_rate_df['product_id'] = level_sub_ppv_rate_df['product_id'].astype(np.int64)
#         base_storage_mean_rate = level_sub_ppv_rate_df.merge(base_storage_sku_df[['product_id','ppv_combine']],on=['product_id','ppv_combine'])
#         base_storage_mean_rate = base_storage_mean_rate.rename(columns={"price_mean_rate_reverse": "storage_base_mean_rate"})
#         level_sub_ppv_rate_df = level_sub_ppv_rate_df.merge(base_storage_mean_rate[['product_id','level_sub','storage','storage_base_mean_rate']],on=['product_id','level_sub','storage'])
#         level_sub_ppv_rate_df['price_mean_rate_reverse']=level_sub_ppv_rate_df['price_mean_rate_reverse']/level_sub_ppv_rate_df['storage_base_mean_rate']
#         level_sub_ppv_rate_df['price_mean_rate']=level_sub_ppv_rate_df['price_mean_rate']/level_sub_ppv_rate_df['storage_base_mean_rate']
#
#         level_sub_ppv_rate_df = level_sub_ppv_rate_df.merge(small_version_100_df[['product_id','ppv_combine','cnt_100']],how='left',on=['product_id','ppv_combine'])
#         level_sub_ppv_rate_df = level_sub_ppv_rate_df.merge(small_version_98_df[['product_id','ppv_combine','cnt_98']],how='left',on=['product_id','ppv_combine'])
#         level_sub_ppv_rate_df['cnt_100']=level_sub_ppv_rate_df['cnt_100'].fillna(0)
#         level_sub_ppv_rate_df['cnt_98']=level_sub_ppv_rate_df['cnt_98'].fillna(0)
#
#         level_sub_ppv_rate_df['small_version_flag']=1
#         level_sub_ppv_rate_df.loc[level_sub_ppv_rate_df.cnt_100<level_sub_ppv_rate_df.cnt_98,'small_version_flag']=0
#
#         # weighted aggregation of the level ratios
#         product_level_sub_ppv_rate_df = level_sub_ppv_rate_df
#         product_level_sub_ppv_rate_df.loc[product_level_sub_ppv_rate_df.sum_cnt==0,'sum_cnt'] = 0.001
#         product_level_sub_ppv_rate_df['weight_price_mean_rate_reverse'] = product_level_sub_ppv_rate_df['price_mean_rate_reverse']*product_level_sub_ppv_rate_df['sum_cnt']
#
#         product_ppv_avg_sum_rate_df = product_level_sub_ppv_rate_df.groupby(by=['product_id','product_name','ppv_combine'])['weight_price_mean_rate_reverse'].sum().reset_index()
#         product_ppv_avg_sum_cnt_df = product_level_sub_ppv_rate_df.groupby(by=['product_id','ppv_combine'])['sum_cnt'].sum().reset_index()
#
#         product_ppv_avg_rate_df = product_ppv_avg_sum_rate_df.merge(product_ppv_avg_sum_cnt_df,on=['product_id','ppv_combine'])
#         product_ppv_avg_rate_df['price_mean_avg_mean'] = product_ppv_avg_rate_df['weight_price_mean_rate_reverse']/product_ppv_avg_rate_df['sum_cnt']
#
#
#         # product_ppv_avg_rate_df.rename(columns={'price_mean_rate_reverse':'price_mean_avg_mean'},inplace=True)
#
#         df = df.merge(level_sub_ppv_rate_df[['product_id','level_sub', 'ppv_combine','price_mean_rate_reverse','small_version_flag']],how='left',on=['product_id','level_sub','ppv_combine'])
#         df = df.merge(product_ppv_avg_rate_df[['product_id','ppv_combine','price_mean_avg_mean']],how='left',on=['product_id','ppv_combine'])
#
#         product_base_ppv_df_tmp=self.product_base_ppv_df.copy()
#         product_base_ppv_df_tmp = product_base_ppv_df_tmp.rename(columns={"ppv_combine":"ppv_combine_base"})
#
#         df=df.merge(product_base_ppv_df_tmp,how='left',on=['product_id','storage'])
#         # query_sku_sale_cnt_df = presto_processor.load_sql(query_sku_sale_cnt.format(start7_date,start7_date,end_date))
#
#
#
#         base_product_df = df.merge(base_storage_sku_df[['product_sku_id', 'qty_sum']], on='product_sku_id')
#         base_product_df=base_product_df.rename(columns={"saleprice":'saleprice_base'})
#
#         tmp = df.merge(base_product_df[['product_id','product_level_id','storage','saleprice_base']],how='left',on=['product_id','product_level_id','storage'])
#         tmp['score_rate'] = tmp['rank_score_price']*1.000000/tmp['rank_score_base']
#
#         tmp['rate']=tmp['price_mean_avg_mean'] # switched to using the mean value
#         # tmp['rate']=tmp['price_mean_rate_reverse'] # switched to using the mean value
#         tmp.loc[tmp.small_version_flag==0,'small_version_rank_rate']=tmp.loc[tmp.small_version_flag==0,'small_version_rank']*1.0000/98
#
#         tmp.loc[pd.isna(tmp.price_mean_rate_reverse),'rate'] = tmp.loc[pd.isna(tmp.price_mean_rate_reverse),'price_mean_avg_mean']
#         tmp.loc[pd.isna(tmp.rate),'rate'] = tmp.loc[pd.isna(tmp.rate),'score_rate']
#         tmp['rate'] = tmp['rate']*tmp['small_version_rank_rate']
#         tmp['ppv_rate_price'] = tmp['saleprice_base']*tmp['rate']
#
#         # query_sku_sale_cnt_df = query_sku_sale_cnt_df.loc[query_sku_sale_cnt_df.cnt_rank==1]
#         tmp['diff_ppv_price'] =0
#         tmp['diff_saleprice'] =0
#         tmp.loc[tmp.sale_num_0_7>0,'diff_ppv_price']=(tmp.loc[tmp.sale_num_0_7>0,'ppv_rate_price']-tmp.loc[tmp.sale_num_0_7>0,'price_0_7'])*tmp.loc[tmp.sale_num_0_7>0,'sale_num_0_7']
#         tmp.loc[tmp.sale_num_0_7>0,'diff_saleprice']=(tmp.loc[tmp.sale_num_0_7>0,'saleprice']-tmp.loc[tmp.sale_num_0_7>0,'price_0_7'])*tmp.loc[tmp.sale_num_0_7>0,'sale_num_0_7']
#         tmp.loc[tmp.sale_num_0_7>0,'sum_ppvprice']=tmp.loc[tmp.sale_num_0_7>0,'ppv_rate_price']*tmp.loc[tmp.sale_num_0_7>0,'sale_num_0_7']
#         tmp.loc[tmp.sale_num_0_7>0,'sum_saleprice']=tmp.loc[tmp.sale_num_0_7>0,'saleprice']*tmp.loc[tmp.sale_num_0_7>0,'sale_num_0_7']
#         tmp['abs_diff_ppv_price'] = abs(tmp['diff_ppv_price'])
#         tmp['abs_diff_saleprice'] = abs(tmp['diff_saleprice'])
#         tmp.to_csv('/data/sunpengwei/tmp/df_ppv_rate_price_0825.csv', encoding='utf-8-sig')
#         level_sub_ppv_rate_df.to_csv('/data/sunpengwei/tmp/c2b_all_level_change_rate_0929.csv', encoding='utf-8-sig')
#         product_ppv_avg_rate_df.to_csv('/data/sunpengwei/tmp/product_ppv_avg_rate_df_1020.csv', encoding='utf-8-sig')
#         # self.save_process_all_level_sub_ppv_rate_fun(level_sub_ppv_rate_df)
#         # self.save_ppv_rate_price_detail_fun(tmp)
#         # self.save_product_ppv_avg_rate_fun(product_ppv_avg_rate_df)
#         print(tmp.shape)
#
#
#     def save_process_all_level_sub_ppv_rate_fun(self,ret_df):
#         ret_df['create_date']=self.end_date
#         predict_data=ret_df
#         all_levelsub_rate_columns=['create_date','product_id', 'product_name',
#        'level_sub','ppv_combine','storage', 'rank_score',
#        'price_mean_rate', 'price_mean_rate_reverse', 'sum_cnt', 'cishu',
#        'total_cnt', 'product_level', 'first_cnt_rate', 'rank_score_first',
#        'second_cnt_rate', 'rank_score_second', 'diff_score_per_rate',
#        'reverse_rate',   'reverse_flag', 'sum_cnt_flag', 'base_rate','ppv_rate', 'final_rate',
#        'product_base_mean_rate', 'storage_base_mean_rate', 'cnt_100', 'cnt_98',
#        'small_version_flag', 'weight_price_mean_rate_reverse']
#
#         predict_data = predict_data.fillna(-1)
#
#         delete_rate_sql = """
#         delete from c2b_all_levelsub_reverse_ppv_rate where create_date=DATE_FORMAT(date_sub(curdate(),interval 30 day),'%Y%m%d')
#         """
#
#         mysql_prediction_processor.execute_sql(delete_rate_sql)
#         delete_td_rate_sql = """
#                 delete from c2b_all_levelsub_reverse_ppv_rate where create_date='{}'
#                 """.format(self.end_date)
#
#         mysql_prediction_processor.execute_sql(delete_td_rate_sql)
#         insert_sql = """
#         INSERT INTO c2b_all_levelsub_reverse_ppv_rate(create_date,product_id, product_name,
#            level_sub,ppv_combine,storage, rank_score,
#            price_mean_rate, price_mean_rate_reverse, sum_cnt, cishu,
#            total_cnt, product_level, first_cnt_rate, rank_score_first,
#            second_cnt_rate, rank_score_second, diff_score_per_rate,
#            reverse_rate,   reverse_flag, sum_cnt_flag, base_rate,ppv_rate, final_rate,
#            product_base_mean_rate, storage_base_mean_rate, cnt_100, cnt_98,
#            small_version_flag, weight_price_mean_rate_reverse)
#         VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
#         """
#         # predict_data=predict_data.reset_index()
#         mysql_prediction_processor.execute_insert_sql(insert_sql,
#                                                       predict_data[all_levelsub_rate_columns].to_records(index=False).tolist())
#         logger.info('saving predict data to mysql done')
#         mysql_price_num = predict_data.shape[0]
#         logger.info('c2b_all_levelsub_reverse_ppv_rate datasize={}'.format(mysql_price_num))
#
#     def save_ppv_rate_price_detail_fun(self, ret_df):
#         ret_df['create_date'] = self.end_date
#         predict_data = ret_df
#         all_detail_price_columns = [ 'create_date','product_brand_id',
#       'product_brand_name', 'level_template_id', 'product_id', 'product_name',
#        'product_sku_id','product_sku_name',  'product_level_id', 'rank', 'price_3', 'price_2',
#        'price_1',  'qty',  'up_date', 'storage', 'purchase_way',
#        'guarantee', 'small_version',  'small_version_rank',
#        'small_version_rank_rate', 'ppv_combine', 'level_sub',
#        'rank_score_price', 'storage_rank',
#        'purchase_way_rank', 'guarantee_rank',
#         'sale_num_0_3', 'price_0_3',
#        'price_8_14', 'sale_num_8_14',  'thisprice', 'price_mean_rate_reverse',
#        'small_version_flag', 'ppv_combine_base', 'cnt',
#        'ppv_base_rank', 'rank_score_base',
#         'score_rate', 'product_level_name', 'price_mean_avg_mean','rate', 'saleprice_base','saleprice','ppv_rate_price','price_0_7', 'sale_num_0_7',
#        'diff_ppv_price', 'diff_saleprice', 'sum_ppvprice', 'sum_saleprice',
#        'abs_diff_ppv_price', 'abs_diff_saleprice']
#
#         predict_data = predict_data.fillna(-1)
#
#         delete_rate_sql = """
#            delete from c2b_ppv_rate_price_detail where create_date=DATE_FORMAT(date_sub(curdate(),interval 30 day),'%Y%m%d')
#            """
#
#         mysql_prediction_processor.execute_sql(delete_rate_sql)
#         delete_td_rate_sql = """
#                    delete from c2b_ppv_rate_price_detail where create_date='{}'
#                    """.format(self.end_date)
#
#         mysql_prediction_processor.execute_sql(delete_td_rate_sql)
#         insert_sql = """
#            INSERT INTO c2b_ppv_rate_price_detail(create_date,product_brand_id,
#       product_brand_name, level_template_id, product_id, product_name,
#        product_sku_id,product_sku_name,  product_level_id, rank, price_3, price_2,
#        price_1,  qty,  up_date, storage, purchase_way,
#        guarantee, small_version,  small_version_rank,
#        small_version_rank_rate, ppv_combine, level_sub,
#        rank_score_price, storage_rank,
#        purchase_way_rank, guarantee_rank,
#         sale_num_0_3, price_0_3,
#        price_8_14, sale_num_8_14,  thisprice, price_mean_rate_reverse,
#        small_version_flag, ppv_combine_base, cnt,
#        ppv_base_rank, rank_score_base,
#         score_rate, product_level_name, price_mean_avg_mean,rate, saleprice_base,saleprice,ppv_rate_price,price_0_7, sale_num_0_7,
#        diff_ppv_price, diff_saleprice, sum_ppvprice, sum_saleprice,
#        abs_diff_ppv_price, abs_diff_saleprice)
#            VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,%s, %s, %s, %s, %s,
#             %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
#            """
#         # predict_data=predict_data.reset_index()
#         mysql_prediction_processor.execute_insert_sql(insert_sql,
#                                                       predict_data[all_detail_price_columns].to_records(
#                                                           index=False).tolist())
#         logger.info('saving predict data to mysql done')
#         mysql_price_num = predict_data.shape[0]
#         logger.info('c2b_ppv_rate_price_detail datasize={}'.format(mysql_price_num))
#
#     def save_product_ppv_avg_rate_fun(self,ret_df):
#         ret_df['create_date']=self.end_date
#         predict_data=ret_df
#         all_levelsub_rate_columns=['create_date','product_id', 'product_name','ppv_combine', 'weight_price_mean_rate_reverse',
#        'sum_cnt', 'price_mean_avg_mean']
#
#         predict_data = predict_data.fillna(-1)
#
#         delete_rate_sql = """
#         delete from c2b_product_ppv_avg_rate where create_date=DATE_FORMAT(date_sub(curdate(),interval 30 day),'%Y%m%d')
#         """
#
#         mysql_prediction_processor.execute_sql(delete_rate_sql)
#         delete_td_rate_sql = """
#                 delete from c2b_product_ppv_avg_rate where create_date='{}'
#                 """.format(self.end_date)
#
#         mysql_prediction_processor.execute_sql(delete_td_rate_sql)
#         insert_sql = """
#         INSERT INTO c2b_product_ppv_avg_rate(create_date,product_id, product_name,ppv_combine, weight_price_mean_rate_reverse,
#        sum_cnt, price_mean_avg_mean)
#         VALUES(%s, %s, %s, %s, %s, %s, %s)
#         """
#         # predict_data=predict_data.reset_index()
#         mysql_prediction_processor.execute_insert_sql(insert_sql,
#                                                       predict_data[all_levelsub_rate_columns].to_records(index=False).tolist())
#         logger.info('saving predict data to mysql done')
#         mysql_price_num = predict_data.shape[0]
#         logger.info('c2b_all_levelsub_reverse_ppv_rate datasize={}'.format(mysql_price_num))
#
#
#     def ppv_price_evluate_test(self):
#         detail_df = pd.read_csv('/data/sunpengwei/tmp/df_ppv_rate_price_0825.csv', encoding='utf-8-sig')
#         level_sub_ppv_rate_df = pd.read_csv('/data/sunpengwei/tmp/c2b_all_level_sub_ppv_reverse_rate_0927.csv', encoding='utf-8-sig')
#         product_ppv_avg_rate_df = pd.read_csv('/data/sunpengwei/tmp/product_ppv_avg_rate_df_1020.csv', encoding='utf-8-sig')
#         df = detail_df
#         df = detail_df.loc[~(detail_df.rate==1)]
#         diff_ppv_sum_df =df.groupby(by=['product_id','product_name'])['abs_diff_ppv_price'].sum().reset_index()
#         ppv_sum_df =df.groupby(by=['product_id','product_name'])['sum_ppvprice'].sum().reset_index()
#         diff_saleprice_sum_df =df.groupby(by=['product_id','product_name'])['abs_diff_saleprice'].sum().reset_index()
#         saleprice_sum_df =df.groupby(by=['product_id','product_name'])['sum_saleprice'].sum().reset_index()
#         num07_sum_df =df.groupby(by=['product_id','product_name'])['sale_num_0_7'].sum().reset_index()
#
#         sum_df = diff_ppv_sum_df.merge(ppv_sum_df,on=['product_id','product_name'])
#         sum_df = sum_df.merge(diff_saleprice_sum_df,on=['product_id','product_name'])
#         sum_df = sum_df.merge(saleprice_sum_df,on=['product_id','product_name'])
#         sum_df = sum_df.merge(num07_sum_df,on=['product_id','product_name'])
#
#         sum_df['abs_ppv_diff_rate'] = sum_df['abs_diff_ppv_price']/sum_df['sum_ppvprice']
#         sum_df['abs_saleprice_diff_rate'] = sum_df['abs_diff_saleprice']/sum_df['sum_saleprice']
#         sum_df['diff_rate'] = sum_df['abs_saleprice_diff_rate']- sum_df['abs_ppv_diff_rate']
#         aa = sum_df.sort_values(by='sale_num_0_7', ascending=False)
#         aa1 = aa[['abs_diff_ppv_price', 'sum_ppvprice','abs_diff_saleprice', 'sum_saleprice', 'sale_num_0_7']].sum()
#         aa1['ppv_rate'] = aa1['abs_diff_ppv_price'] / aa1['sum_ppvprice']
#         aa1['saleprice_rate'] = aa1['abs_diff_saleprice'] / aa1['sum_saleprice']
#
#         writer = pd.ExcelWriter("/data/sunpengwei/tmp/df_ppv_rate_price_1101_detail.xlsx")
#         detail_df.to_excel(excel_writer=writer, sheet_name='detail_df')
#         aa.to_excel(excel_writer=writer, sheet_name='型号汇总数据')
#         aa1.to_excel(excel_writer=writer, sheet_name='汇总数据')
#         level_sub_ppv_rate_df.to_excel(excel_writer=writer, sheet_name='型号大等级维度比率')
#         product_ppv_avg_rate_df.to_excel(excel_writer=writer, sheet_name='型号维度比率')
#         writer.save()  # NOTE(review): ExcelWriter.save() was removed in pandas 2.0 — close() alone suffices there
#         writer.close()
#         # aa.to_csv('/data/sunpengwei/tmp/df_ppv_rate_price_1018_detail.csv', encoding='utf-8-sig')
#         logger.info(aa1)
#
#
#     def reverse_completion_mean_rate_fun(self,grouped,resDf):
#
#         totalCnt = grouped.count().shape[0]
#         iter=0
#         for name, group in grouped:
#             # product_key = group[0]
#             group = group.sort_values(by=['sum_cnt_flag','rank_score'],ascending=[ False,False])
#             group.reset_index(drop=True, inplace=True)
#             size = group.shape[0]
#             if size<2:
#                 resDf = resDf.append(group.copy())
#                 continue
#             for i in range(size-1):
#                 if group.at[i+1,'sum_cnt']==0:
#                     final_rate=group.at[i + 1, 'reverse_rate']
#                     storage_purchase=group.at[i + 1, 'storage_purchase']
#                     storage_guarantee=group.at[i + 1, 'storage_guarantee']
#                     sum_cnt=group.at[i + 1, 'sum_cnt']
#                     flag=0
#
#                     for j in range(i,-1,-1):
#                         if group.at[j, 'storage_purchase']==storage_purchase and group.at[i+1,'sum_cnt']==0:
#                             if sum_cnt<=group.at[j, 'sum_cnt']:
#                                 flag=1
#                                 group.at[i + 1, 'reverse_rate']=group.at[i + 1, 'rank_score']*1.000000/group.at[j, 'rank_score']*group.at[j, 'reverse_rate']
#                                 group.at[i + 1, 'reverse_flag']=1
#                                 break
#                             else:
#                                 flag = 1
#                                 index_j_rate = group.at[j, 'rank_score'] * 1.000000 / group.at[ i + 1, 'rank_score'] * group.at[i + 1, 'reverse_rate']
#                                 if j>0:
#                                     up_rate = group.at[j-1, 'reverse_rate']
#                                     if index_j_rate>up_rate:
#                                         index_j_rate=up_rate-0.01
#                                 group.at[j, 'reverse_rate'] =index_j_rate
#                                 group.at[j, 'reverse_flag'] = 1
#                                 break
#                     if flag==0:
#                         for j in range(i,-1,-1):
#                             if group.at[j, 'storage_guarantee']==storage_guarantee and group.at[i+1,'sum_cnt']==0:
#                                 if sum_cnt <= group.at[j, 'sum_cnt']:
#                                     flag = 1
#                                     group.at[i + 1, 'reverse_rate'] = group.at[i + 1, 'rank_score'] * 1.000000 / group.at[ j, 'rank_score'] * group.at[j, 'reverse_rate']
#                                     group.at[i + 1, 'reverse_flag'] = 1
#                                     break
#                                 else:
#                                     flag = 1
#                                     index_j_rate = group.at[j, 'rank_score'] * 1.000000 / group.at[i + 1, 'rank_score'] *  group.at[i + 1, 'reverse_rate']
#                                     if j > 0:
#                                         up_rate = group.at[j - 1, 'reverse_rate']
#                                         if index_j_rate > up_rate:
#                                             index_j_rate = up_rate - 0.01
#                                     group.at[j, 'reverse_rate'] = index_j_rate
#                                     group.at[j, 'reverse_flag'] = 1
#                                     # NOTE(review): unlike the storage_purchase loop above, this else-branch has no
#                                     # `break`, so the scan continues after adjusting row j — confirm this is intentional
#                     if flag==0:
#                         for j in range(i,-1,-1):
#                             # NOTE(review): this compares the 'purchase_way_guarantee' column against the
#                             # `storage_guarantee` value — looks like a copy-paste slip from the branch above;
#                             # a `purchase_way_guarantee` comparison value was probably intended — confirm
#                             if group.at[j, 'purchase_way_guarantee']==storage_guarantee and group.at[i+1,'sum_cnt']==0:
#                                 if sum_cnt <= group.at[j, 'sum_cnt']:
#                                     flag = 1
#                                     group.at[i + 1, 'reverse_rate'] = group.at[i + 1, 'rank_score'] * 1.000000 / group.at[ j, 'rank_score'] * group.at[j, 'reverse_rate']
#                                     group.at[i + 1, 'reverse_flag'] = 1
#                                     break
#                                 else:
#                                     flag = 1
#                                     index_j_rate = group.at[j, 'rank_score'] * 1.000000 / group.at[i + 1, 'rank_score'] *  group.at[i + 1, 'reverse_rate']
#                                     if j > 0:
#                                         up_rate = group.at[j - 1, 'reverse_rate']
#                                         if index_j_rate > up_rate:
#                                             index_j_rate = up_rate - 0.01
#                                     group.at[j, 'reverse_rate'] = index_j_rate
#                                     group.at[j, 'reverse_flag'] = 1
#                                     # NOTE(review): no `break` in this else-branch either (the first fallback loop
#                                     # breaks here) — confirm the asymmetry is deliberate
#                     flag=0
#                 # print(i)
#             group = group.sort_values(by=['rank_score'], ascending=[ False])
#             group.reset_index(drop=True, inplace=True)
#             resDf = resDf.append(group.copy())
#             iter += 1  # NOTE(review): original incremented `iter` twice here (duplicated line), so the logged counter advanced by 2 per group
#             resDf.loc[(resDf.product_id == 43511) & (resDf.level_sub == 'B'), ['ppv_combine', 'rank_score',
#                                                                                'reverse_flag', 'reverse_rate', 'sum_cnt',
#                                                                                'storage_purchase',
#                                                                                'storage_guarantee']].sort_values(
#                 by='rank_score', ascending=False)
#             group[['ppv_combine', 'rank_score',  'reverse_flag', 'reverse_rate', 'sum_cnt', 'storage_purchase',
#                    'storage_guarantee']]
#             logger.info('totalCnt= {},product_key={},iter = {},size={}'.format(totalCnt, str(name), iter, size))
#         return resDf
#
#
#
#     def get_all_combine_fun(self,):
#         level_sub_ppv_rate_df = pd.read_csv('/data/sunpengwei/tmp/c2b_level_sub_ppv_reverse_rate_0824.csv', encoding='utf-8-sig')
#         combine_df = self.combine_df
#
#         combine_df_D_df=combine_df.copy()
#         combine_df_D_df['level_sub']='D'
#         combine_df_A_df=combine_df.copy()
#         combine_df_A_df['level_sub']='A'
#         combine_df_B_df=combine_df.copy()
#         combine_df_B_df['level_sub']='B'
#         combine_df_C_df=combine_df.copy()
#         combine_df_C_df['level_sub']='C'
#         all_level_sub_combine_df = combine_df_A_df
#         # all_level_sub_combine_df =all_level_sub_combine_df.append(combine_df_A_df)
#         all_level_sub_combine_df =all_level_sub_combine_df.append(combine_df_B_df)
#         all_level_sub_combine_df =all_level_sub_combine_df.append(combine_df_C_df)
#         # all_level_sub_combine_df =all_level_sub_combine_df.append(combine_df_D_df)
#
#         tmp = all_level_sub_combine_df.merge(level_sub_ppv_rate_df[['product_id','level_sub','ppv_combine','price_mean_rate','price_mean_rate_reverse','sum_cnt','cishu', 'total_cnt',]],how='left',on=['product_id','level_sub','ppv_combine'])
#         tmp['sum_cnt'] = tmp['sum_cnt'].fillna(0)
#         tmp['cishu'] = tmp['cishu'].fillna(0)
#         tmp['total_cnt'] = tmp['total_cnt'].fillna(0)
#         tmp['price_mean_rate'] = tmp['price_mean_rate'].fillna(1.00000)
#         df_ret=tmp
#
#         leve_first_df,leve_second_df = self.first_second_base_fun(tmp)
#         ret_df = df_ret.merge(leve_first_df[['product_id', 'level_sub', 'storage', 'first_cnt_rate', 'rank_score_first']],
#                           how='left',
#                           on=['product_id', 'level_sub', 'storage'])
#         ret_df = ret_df.merge(leve_second_df[['product_id', 'level_sub', 'storage', 'second_cnt_rate', 'rank_score_second']],
#                           how='left',
#                           on=['product_id', 'level_sub', 'storage'])
#         ret_df.loc[pd.isna(ret_df.second_cnt_rate),'second_cnt_rate']=ret_df.loc[pd.isna(ret_df.second_cnt_rate),'first_cnt_rate']
#         ret_df.loc[pd.isna(ret_df.rank_score_second),'rank_score_second']=ret_df.loc[pd.isna(ret_df.rank_score_second),'rank_score_first']-1
#
#         ret_df['diff_score_per_rate'] = (ret_df['first_cnt_rate'] - ret_df['second_cnt_rate']) / (
#                     ret_df['rank_score_first'] - ret_df['rank_score_second'])
#         ret_df['reverse_rate'] = 1 + ret_df['diff_score_per_rate'] * (ret_df['rank_score'] - ret_df['rank_score_first'])
#         ret_df.loc[ret_df.sum_cnt==0,'price_mean_rate']=ret_df.loc[ret_df.sum_cnt==0,'reverse_rate']
#         ret_df.loc[ret_df.sum_cnt==0,'price_mean_rate_reverse']=ret_df.loc[ret_df.sum_cnt==0,'reverse_rate']
#         ret_df['storage_purchase']= ret_df['storage'] + '_' + ret_df['purchase_way'].astype(str)
#         ret_df['storage_guarantee']= ret_df['storage'] + '_' + ret_df['guarantee'].astype(str)
#         ret_df['purchase_way_guarantee']= ret_df['purchase_way'] + '_' + ret_df['guarantee'].astype(str)
#
#         # Fill in missing data along the storage (memory-size) dimension
#         ret_df['reverse_flag']=0
#         ret_df['reverse_rate']=ret_df['price_mean_rate_reverse']
#         ret_df['sum_cnt_flag']=0
#         ret_df.loc[ret_df.sum_cnt>0,'sum_cnt_flag']=1
#         # ret_df = ret_df.loc[(ret_df.product_id==25827)&(ret_df.level_sub=='A')]
#         # ret_df = ret_df.loc[(ret_df.product_id==25827)]
#         resDf = pd.DataFrame(columns=ret_df.columns.tolist())
#         grouped = ret_df.groupby(by=['product_id', 'product_name','storage', 'level_sub'])
#         storage_resDf = self.reverse_completion_mean_rate_fun(grouped,resDf)
#         storage_resDf['product_id'] = storage_resDf['product_id'].astype(np.int64)
#
#         # Get the conversion data between storage sizes for the combination ratios
#         ret_df = storage_resDf.merge(self.query_level_sub_ppv_df[['product_id', 'level_sub', 'ppv_combine', 'level_item_quotation_avg_price']],  how='left', on=['product_id', 'level_sub', 'ppv_combine'])
#         product_level_storage_rate = ret_df.loc[  ret_df.reverse_rate == 1, ['product_id', 'level_sub', 'storage', 'sum_cnt', 'level_item_quotation_avg_price']]
#
#         product_level_storage_rate['product_level'] = product_level_storage_rate['product_id'].astype(str) + '_' + \
#                                                       product_level_storage_rate['level_sub'].astype(
#                                                           str)
#         product_level_storage_rate['count_rank'] = product_level_storage_rate['sum_cnt'].groupby(
#             product_level_storage_rate['product_level']).rank(ascending=False,
#                                                               method='first')
#         product_level_storage_rate = product_level_storage_rate.loc[product_level_storage_rate.count_rank == 1]
#         product_level_storage_rate = product_level_storage_rate[
#             ['product_id', 'level_sub', 'level_item_quotation_avg_price']].rename(
#             columns={'level_item_quotation_avg_price': 'level_item_quotation_avg_base_price'})
#
#         ret_df = ret_df.merge(product_level_storage_rate, on=['product_id', 'level_sub'])
#         ret_df['base_storage_rate'] = ret_df['level_item_quotation_avg_price'] * 1.000000 / ret_df['level_item_quotation_avg_base_price']
#         product_level_storage_base_rate = ret_df.loc[ ret_df.price_mean_rate == 1, ['product_id', 'level_sub', 'storage','sum_cnt', 'base_storage_rate']]
#
#         product_level_storage_base_rate['product_level'] = product_level_storage_base_rate['product_id'].astype(str) + '_' +  product_level_storage_base_rate['level_sub'].astype(str)+'_'+ product_level_storage_base_rate['storage'].astype(str)
#         product_level_storage_base_rate['count_rank'] = product_level_storage_base_rate['sum_cnt'].groupby( product_level_storage_base_rate['product_level']).rank(ascending=False,method='first')
#         product_level_storage_base_rate = product_level_storage_base_rate.loc[product_level_storage_base_rate.count_rank == 1,['product_id', 'level_sub', 'storage', 'base_storage_rate']]
#
#         product_level_storage_base_rate = product_level_storage_base_rate.rename(columns={'base_storage_rate': 'base_rate'})
#
#         ret_df = ret_df.merge(product_level_storage_base_rate, on=['product_id', 'level_sub', 'storage'])
#
#         ret_df.loc[ret_df.reverse_rate == 1, ['product_id', 'level_sub', 'storage', 'base_rate']]
#         # product_level_storage_base_rate = storage_resDf.loc[storage_resDf.reverse_rate == 1, ['product_id', 'level_sub', 'storage', 'base_rate']]
#         # storage_resDf = storage_resDf.merge(product_level_storage_base_rate, on=['product_id', 'level_sub', 'storage'])
#         product_level_storage_base_rate_df = ret_df[['product_id', 'product_name', 'level_sub', 'storage', 'base_rate', 'storage_rank']].drop_duplicates().sort_values(by=['product_id', 'level_sub', 'base_rate','storage_rank'])
#         product_level_storage_base_rate_df['base_rate_bak']=product_level_storage_base_rate_df['base_rate']
#         # product_level_storage_base_rate_df=product_level_storage_base_rate_df.loc[(product_level_storage_base_rate_df.level_sub == 'A') & (product_level_storage_base_rate_df.product_id == 32290)]
#
#         res_base_rate_df = pd.DataFrame(columns=product_level_storage_base_rate_df.columns.tolist())
#         grouped = product_level_storage_base_rate_df.groupby(by=['product_id','level_sub'])
#         totalCnt = grouped.count().shape[0]
#         iter = 0
#         for name, group in grouped:
#             group.reset_index(drop=True, inplace=True)
#             size = group.shape[0]
#             # if size < 2:
#             #     res_base_rate_df = res_base_rate_df.append(group.copy())
#             #     continue
#             if group.at[0, 'product_id']==25827 and group.at[0, 'level_sub']=='A' :
#                 print(1)
#             for i in range(size):
#                 if np.isnan(group.at[i, 'base_rate']):
#                     if i==0:
#                         group.loc[i, 'base_rate']=1.00000
#                     else:
#                         group.loc[i, 'base_rate'] = group.at[i-1, 'base_rate']/group.at[i-1, 'storage_rank']*group.at[i, 'storage_rank']
#
#             group=group.sort_values(by='storage_rank')
#             group.reset_index(drop=True, inplace=True)
#             if size>1:
#                 for i in range(size-1):
#                     if group.at[i, 'base_rate']>group.at[i+1, 'base_rate']:
#                         if np.isnan(group.at[i, 'base_rate_bak']) and i>0 :
#                             group.at[i, 'base_rate'] = group.at[i-1, 'base_rate'] / group.at[i-1, 'storage_rank'] * group.at[i, 'storage_rank']
#                         elif  group.at[i+1, 'base_rate']!=1:
#                             group.at[i + 1, 'base_rate']=group.at[i, 'base_rate']/group.at[i, 'storage_rank']*group.at[i+1, 'storage_rank']
#                         else:
#                             group.at[i, 'base_rate'] = group.at[i +1, 'base_rate'] / group.at[i +1, 'storage_rank'] * group.at[i, 'storage_rank']
#
#
#             res_base_rate_df = res_base_rate_df.append(group.copy())
#
#         res_base_rate_df.sort_values(by=['product_id', 'level_sub', 'base_rate'])
#         res_base_rate_df['product_id'] = res_base_rate_df['product_id'].astype(np.int64)
#
#         # Join the combination ratios with the storage-level inspection conversion ratios
#         storage_resDf = storage_resDf.merge(res_base_rate_df[['product_id', 'level_sub', 'storage', 'base_rate']],on=['product_id', 'level_sub', 'storage'])
#         storage_resDf['ppv_rate']=storage_resDf['reverse_rate']
#         storage_resDf['final_rate'] = storage_resDf['ppv_rate']*storage_resDf['base_rate']
#         storage_resDf['reverse_rate']=storage_resDf['final_rate']
#
#         resDf = pd.DataFrame(columns=storage_resDf.columns.tolist())
#         grouped = storage_resDf.groupby(by=['product_id', 'product_name','level_sub'])
#         storage_resDf = self.reverse_mean_rate_fun(grouped, resDf)
#         storage_resDf['price_mean_rate_reverse'] = storage_resDf['reverse_rate'] / storage_resDf['base_rate']
#         level_sub_ppv_rate_df = level_sub_ppv_rate_df.rename(columns={"price_mean_rate":'price_mean_rate_bak'})
#
#         storage_resDf['product_id'] = storage_resDf['product_id'].astype(np.int64)
#         storage_resDf = storage_resDf.merge(level_sub_ppv_rate_df[
#                                                  ['product_id', 'level_sub', 'ppv_combine', 'price_mean_rate_bak' ]],
#                                              how='left', on=['product_id', 'level_sub', 'ppv_combine'])
#
#
#         storage_resDf.to_csv('/data/sunpengwei/tmp/c2b_all_level_sub_ppv_reverse_rate_0927.csv', encoding='utf-8-sig')
#
#
#         print(1)
#
#     def launch_model(self):
#         df44 = self.main_fun()
#         ret_df =self.process_level_sub_ppv_rate_fun(df44)  # get the PPV combination ratios at the major-level granularity
#         self.repair_reverse_rate_fun()
#         self.get_all_combine_fun()
#
#         price_df = self.get_sale_price_to_ppv_rate_price_fun(self.end_date)
#         self.ppv_price_evluate_test()
#
# if __name__=='__main__':
#     model = C2bppvrateModel(model_date='2022-11-01')
#     model.launch_model()
#
