#!/usr/bin/env python
# -*- encoding: utf-8 -*-
'''
@File    :   jdx_match_test_data.py
@Contact :   pengwei.sun@aihuishou.com
@License :   (C)Copyright aihuishou

@Modify Time      @Author       @Version    @Description
------------      -----------   --------    -----------
2021-09-09 10:45   pengwei.sun      1.0         None
'''
import os, sys

sys.path.append(os.getcwd())
from src.utils.config import logger
from src.utils.db_processor import presto_processor, mysql_prediction_processor
import pandas as pd
from src.utils.feishu_message import feishu_messager
import numpy as np
from src.tablet.levelrate.reverse.sku2_skulevel_period_price import get_period_price_fun
from src.utils.util import get_today, format_date
import datetime
from src.tablet.levelrate.tablet_ppvrate.c2b_config_utils1 import get_ppv_combine_sql,ppv_rank_df,get_all_combine_data_rank_score,\
    get_product_base_ppv_fun,query_level_sub_ppv_price,query_process_sql,query_product_ppv_small_version_cnt,get_level_rank,andoriod_product_ids

# Legacy product ids kept for reference; not referenced anywhere in this chunk — TODO confirm before removing.
old_product_ids=(29186,17881)

class C2bppvrateModel:
    def __init__(self, model_date=None):
        """
        Prepare all shared inputs for the tablet C2B ppv-rate model.

        :param model_date: modelling date; defaults to today when None.
        """
        if model_date is None:
            self.model_date = get_today()
        else:
            self.model_date = format_date(model_date)

        # Rolling lookback anchors relative to the modelling date.
        self.end_date = format_date(self.model_date)
        self.start7_date = self.end_date - datetime.timedelta(7)
        self.start14_date = self.end_date - datetime.timedelta(14)
        self.start21_date = self.end_date - datetime.timedelta(21)
        self.start30_date = self.end_date - datetime.timedelta(30)

        # Rank scores per (product_id, ppv_combine).
        self.combine_df = get_all_combine_data_rank_score()
        # Share of each ppv combination within its secondary level over the 21-day
        # window. The start date is passed twice — presumably the SQL template
        # consumes it twice (same pattern as query_process_sql below); confirm.
        self.ppv_combine_percent_df = get_ppv_combine_sql(self.start21_date, self.start21_date, self.end_date)

        # Per-model shipment counts of ppv combinations over the last ~30 days;
        # cnt_product is the shipment count at the storage dimension.
        self.product_base_ppv_df = get_product_base_ppv_fun(self.combine_df, self.start21_date, self.start21_date, self.end_date)
        self.query_level_sub_ppv_df = query_level_sub_ppv_price(self.start21_date, self.start21_date, self.end_date)
        # Mapping / ordering of secondary level names.
        self.level_name = get_level_rank()

    # Load the raw data that feeds the combination-rate calculation.
    def get_process_data(self, ppv_level_df, startdate, enddate):
        """Load raw ppv quotation data for one date window and compute each
        combination's price ratio against the dominant (largest-share) combination.

        :param ppv_level_df: per (product, level, combine) share data
                             (normally self.ppv_combine_percent_df)
        :param startdate: window start (inclusive)
        :param enddate: window end
        :return: (df2, ppv_rank_df) — df2 holds rows with a positive ppv_rate;
                 ppv_rank_df is the module-level rank table, passed through
                 unchanged for the caller's convenience.
        """
        # Raw ppv data; the SQL template consumes the window twice, hence the
        # repeated arguments.
        df = presto_processor.load_sql(query_process_sql.format(
            startdate, startdate, enddate, andoriod_product_ids,
            startdate, startdate, enddate, andoriod_product_ids))

        # Normalise missing ppv attributes before concatenating them into a key.
        df['storage'] = df['storage'].fillna('unknown')
        df['purchase_way'] = df['purchase_way'].fillna('unknown')
        df['guarantee'] = df['guarantee'].fillna('unknown')
        df['networks'] = df['networks'].fillna('unknown')
        df['ppv_combine'] = df['storage'] + '_' + df['purchase_way'] + '_' + df['guarantee'] + '_' + df['networks']

        # Largest combination share per (product, level, storage).
        df_max = ppv_level_df.groupby(by=['product_id', 'secondary_level_id', 'storage'])['percent'].max().reset_index()
        df_max = df_max.rename(columns={"percent": "max_percent"})

        # Attach the max share and each row's own share/rank.
        df2 = df.merge(df_max, how='left', on=['product_id', 'secondary_level_id', 'storage'])
        df2 = df2.merge(self.ppv_combine_percent_df[['product_id', 'secondary_level_id', 'ppv_combine', 'total_cnt', 'percent', 'percent_rank']],
                        how='left', on=['product_id', 'secondary_level_id', 'ppv_combine'])
        # Flag the single dominant combination (share equals the max AND rank 1).
        df2['max_flag'] = 0
        df2.loc[(df2.max_percent == df2.percent) & (df2.percent_rank == 1), 'max_flag'] = 1
        df2_max = df2.loc[df2['max_flag'] == 1, ['product_id', 'secondary_level_id', 'storage', 'max_flag',
                                                 'item_quotation_price_num_avg', 'cnt']]

        df2_max = df2_max.rename(columns={"item_quotation_price_num_avg": "max_price", "cnt": "max_cnt"})
        df2 = df2.merge(df2_max[['product_id', 'secondary_level_id', 'storage', 'max_price']],
                        how='left', on=['product_id', 'secondary_level_id', 'storage'])
        # Each combination's average quote relative to the dominant combination.
        df2['ppv_rate'] = df2['item_quotation_price_num_avg'] / df2['max_price']
        df2 = df2.loc[df2.ppv_rate > 0]
        # (Removed a leftover debug statement: a .loc selection on the
        # '美版-无锁…' purchase_way whose sorted result was computed and discarded.)
        return df2, ppv_rank_df


    def process_ppv_rate_fun(self,ppv_rank_df,df2,cnt_limit=0):
        #计算组合的价格比率
        df3 = df2.loc[df2.ppv_rate > 0]
        # df3['ppv_rate'] = df3['item_quotation_price_num_avg'] / df3['max_price']
        # df3=df3.loc[df3.secondary_level_name.isin(['C2','C2'])]
        #
        df3 = df3.loc[df3.cnt > cnt_limit]

        df44 = df3.groupby(by=['product_id','product_name','secondary_level_id','secondary_level_name', 'ppv_combine','storage','purchase_way','guarantee','networks'])[
            'ppv_rate'].agg({'price_mean': 'mean', 'price_count': 'count', 'price_media': 'median'}).reset_index()
        # df44['price_count']=df44['count']
        # df_avg_settle = df3.groupby(by=['product_id','secondary_level_name', 'ppv_combine'])['item_quotation_price_num_avg'].agg(
        #     {'item_quotation_price_num_avg': 'mean'}).reset_index()
        # df_avg_settle1 = df3.groupby(by=['product_id', 'secondary_level_name', 'ppv_combine'])[['item_quotation_price_num_avg', 'cnt']].apply(avg_price_by_cnt_weight_fun).reset_index()

        # df_avg_max_price = df3.groupby(by=['product_id','secondary_level_name', 'ppv_combine'])['max_price'].agg(
        #     {'max_price': 'mean'}).reset_index()

        # df44 = df44.merge(df_avg_max_price, on=['product_id','secondary_level_name', 'ppv_combine'])
        # df44 = df44.merge(df_avg_settle, on=['product_id','secondary_level_name', 'ppv_combine'])
        # df44['settle_rate'] = df44['item_quotation_price_num_avg'] / df44['max_price']

        df44 = df44.merge(self.combine_df[['product_id', 'ppv_combine', 'rank_score', 'rank_low_score']],
                      on=['product_id', 'ppv_combine'])

        df44 =df44.merge(self.ppv_combine_percent_df[['product_id', 'secondary_level_id', 'ppv_combine', 'cnt',
           'total_cnt', 'percent', 'percent_rank']], how='left', on=['product_id','secondary_level_id', 'ppv_combine'])
        return df44


    def first_second_base_fun(self,df444):

        df444['product_level'] = df444['product_id'].astype(str) + '_' + df444['level_sub'].astype(str) + '_' + df444['storage'].astype(str)
        level_max_cont_df = df444.copy()
        level_first_cont_df= level_max_cont_df.copy()
        level_first_cont_df['count_rank'] = level_max_cont_df['sum_cnt'].groupby(df444['product_level']).rank(
            ascending=False, method='first')

        level_max_cont_df['count_rank'] = level_max_cont_df['sum_cnt'].groupby(df444['product_level']).rank(
            ascending=False, method='first')
        leve_first_df = level_first_cont_df.loc[level_first_cont_df.count_rank == 1]
        leve_first_df = leve_first_df.rename(
            columns={"price_mean_rate": "first_cnt_rate", 'rank_score': 'rank_score_first'})

        leve_second_df = level_max_cont_df.loc[level_max_cont_df.count_rank == 2]
        leve_second_df = leve_second_df.rename(
            columns={"price_mean_rate": "second_cnt_rate", 'rank_score': 'rank_score_second'})
        return leve_first_df,leve_second_df

    def avg_ppv_rate(self, x):
        """Count-weighted blend of the three windowed ppv rates.

        :param x: row (Series/dict) with ppv_rate1..3 and cnt_w1..3_weight
        :return: the weighted average rate
        """
        rate_w1 = x['ppv_rate1'] * x['cnt_w1_weight']
        rate_w2 = x['ppv_rate2'] * x['cnt_w2_weight']
        rate_w3 = x['ppv_rate3'] * x['cnt_w3_weight']
        return rate_w1 + rate_w2 + rate_w3

    def main_fun(self):
        """Build the secondary-level ppv rate table.

        Blends three adjacent weekly windows (0-7 / 7-14 / 14-21 days back) into
        a count-weighted ppv_rate per combination, aggregates per combination,
        derives the first/second baseline rates per (product, level, storage),
        and dumps the result to a hard-coded CSV path.

        :return: df44 frame with reverse_rate and the baseline columns attached
        """
        ppv_level_df=self.ppv_combine_percent_df
        # Same extraction over three adjacent weekly windows.
        df2,ppv_rank_df = self.get_process_data(ppv_level_df,self.start7_date,self.end_date)
        df2_2,ppv_rank_df = self.get_process_data(ppv_level_df,self.start14_date,self.start7_date)
        df2_3,ppv_rank_df = self.get_process_data(ppv_level_df,self.start21_date,self.start14_date)
        # Suffix each window's rate/count columns so they can coexist after merging.
        df2_1_1  = df2[['product_id', 'secondary_level_id','ppv_combine','secondary_level_name','ppv_rate','cnt']].rename(columns={"ppv_rate":"ppv_rate1","secondary_level_name":"secondary_level_name_w1","cnt":"cnt_w1"})
        df2_2_1  = df2_2[['product_id', 'secondary_level_id','ppv_combine','secondary_level_name','ppv_rate','cnt']].rename(columns={"ppv_rate":"ppv_rate2","secondary_level_name":"secondary_level_name_w2","cnt":"cnt_w2"})
        df2_3_1  = df2_3[['product_id', 'secondary_level_id','ppv_combine','secondary_level_name','ppv_rate','cnt']].rename(columns={"ppv_rate":"ppv_rate3","secondary_level_name":"secondary_level_name_w3","cnt":"cnt_w3"})

        df2_merge = ppv_level_df.merge(df2_1_1,how='left',on=['product_id', 'secondary_level_id','ppv_combine'])
        df2_merge = df2_merge.merge(df2_2_1,how='left',on=['product_id', 'secondary_level_id','ppv_combine'])
        df2_merge = df2_merge.merge(df2_3_1,how='left',on=['product_id', 'secondary_level_id','ppv_combine'])

        # Weight each window by its share of the total shipment count.
        df2_merge['sum_cnt'] = df2_merge[['cnt_w1','cnt_w2','cnt_w3']].sum(axis=1)
        df2_merge['cnt_w1_weight'] = df2_merge['cnt_w1']/df2_merge['sum_cnt']
        df2_merge['cnt_w2_weight'] = df2_merge['cnt_w2']/df2_merge['sum_cnt']
        df2_merge['cnt_w3_weight'] = df2_merge['cnt_w3']/df2_merge['sum_cnt']
        df2_merge[['ppv_rate1', 'ppv_rate2', 'ppv_rate3','cnt_w1_weight','cnt_w2_weight','cnt_w3_weight']] = df2_merge[['ppv_rate1', 'ppv_rate2', 'ppv_rate3','cnt_w1_weight','cnt_w2_weight','cnt_w3_weight']].fillna(0)
        df2_merge['ppv_rate'] = df2_merge.apply(self.avg_ppv_rate,axis=1)
        df2_merge = df2_merge.merge(self.level_name,on=['secondary_level_id'])
        df2_merge = df2_merge.loc[(df2_merge.sum_cnt>0)&(df2_merge.ppv_rate>0)]
        # NOTE(review): the next expression is a leftover debug probe — its
        # result is discarded (no assignment), so it has no effect.
        df2_merge.loc[df2_merge.product_id == 32290, ['ppv_combine','secondary_level_name', 'secondary_level_name_w1', 'secondary_level_name_w2',
                                          'secondary_level_name_w3', 'ppv_rate','ppv_rate1', 'ppv_rate2', 'ppv_rate3', 'cnt_w1',
                                          'cnt_w2', 'cnt_w3','sum_cnt','cnt_w1_weight','cnt_w2_weight','cnt_w3_weight']]

        df44 = self.process_ppv_rate_fun(ppv_rank_df,df2_merge,cnt_limit=0)
        # NOTE(review): debug log marker left in place.
        logger.info('dsd')

        # Composite keys: per-level (with level name) and per-product-level.
        df44['level_ppv_rank'] = df44['product_id'].astype(str) + '_'+ df44['secondary_level_name'] + '_' + df44['storage'].astype(str)
        df44['product_level'] = df44['product_id'].astype(str) + '_' + df44['secondary_level_name'].astype(str)+ '_' + df44['storage'].astype(str)
        # Rank combinations within each level key by observation count.
        df44[['rank_count']] = df44[['price_count']].groupby(df44['level_ppv_rank']).rank(ascending=False, method='first')
        df44 = df44.sort_values(by=['product_id','secondary_level_name', 'rank_score','price_count'], ascending=[True, True, False,False])

        # Derive the baseline rate at the sub-level dimension.
        level_max_cont_df = df44.loc[
            df44.rank_count == 1, ['product_id','secondary_level_name','storage', 'level_ppv_rank', 'rank_score', 'price_media',
                                    'price_count', 'rank_count']].sort_values(
            by=['product_id','secondary_level_name', 'rank_score'], ascending=[True,True, False])

        level_first_cont_df= level_max_cont_df.copy()
        # NOTE(review): unlike the second ranking below, this one only ranks rows
        # where price_media == 1 (the rows whose median rate is exactly the
        # baseline); presumably intentional — confirm.
        level_first_cont_df['count_rank'] = level_max_cont_df.loc[level_max_cont_df.price_media==1,'price_count'].groupby(df44['product_level']).rank(
            ascending=False, method='first')

        level_max_cont_df['count_rank'] = level_max_cont_df['price_count'].groupby(df44['product_level']).rank(
            ascending=False, method='first')
        # Most-observed baseline row per product_level ...
        leve_first_df = level_first_cont_df.loc[level_first_cont_df.count_rank == 1]
        leve_first_df = leve_first_df.rename(
            columns={"price_media": "first_cnt_rate", 'rank_score': 'rank_score_first'})

        # ... and the second-most-observed row.
        leve_second_df = level_max_cont_df.loc[level_max_cont_df.count_rank == 2]
        leve_second_df = leve_second_df.rename(
            columns={"price_media": "second_cnt_rate", 'rank_score': 'rank_score_second'})

        df44 = df44.merge(leve_first_df[['product_id','secondary_level_name', 'storage','first_cnt_rate', 'rank_score_first']], how='left',
                          on=['product_id','secondary_level_name','storage'])
        df44 = df44.merge(leve_second_df[['product_id','secondary_level_name', 'storage', 'second_cnt_rate', 'rank_score_second']], how='left',
                          on=['product_id','secondary_level_name','storage'])

        # df44.loc[~(df44['price_media']>0),'price_media_filter']=df44.loc[~(df44['price_media_filter']>0),'price_media']
        # The published rate is the per-combination median; keep a backup copy.
        df44['reverse_rate']=df44['price_media']
        # df44.loc[~(df44['price_media_filter']>0),'reverse_rate']=df44.loc[~(df44['price_media_filter']>0),'price_media']
        df44['reverse_rate_bak']=df44['reverse_rate']
        # Coarse level bucket = first character of the level name (e.g. 'C2' -> 'C').
        df44['level_sub']=df44['secondary_level_name'].str[0]
        # self.save_main_fun(df44)
        # NOTE(review): hard-coded debug dump path; writes fail if the directory
        # does not exist — confirm whether this belongs in production.
        df44.to_csv('/data/sunpengwei/tmp/c2b_tablet_sku2_ppv_rate_0831.csv', encoding='utf-8-sig')
        print('ds')
        return df44

    def save_main_fun(self, df44):
        """Persist the secondary-level ppv rate table to MySQL.

        Deletes the 30-day-old partition and any rows already written for today
        (making the insert idempotent), then bulk-inserts the current run's rows.

        NOTE(review): this method is currently disabled at its call site in
        main_fun; df44 produced there does not appear to contain 'settle_rate',
        'max_price' or 'item_quotation_price_num_avg' — verify the column set
        before re-enabling, or the column selection below will raise KeyError.

        :param df44: result frame from main_fun containing every column listed
                     in secondary_level_rate_columns
        """
        df44['create_date'] = self.end_date
        predict_data = df44
        # This order defines the tuple order fed to the INSERT statement below.
        secondary_level_rate_columns = ['create_date', 'product_id', 'product_name', 'secondary_level_id',
       'secondary_level_name', 'ppv_combine', 'storage', 'purchase_way', 'networks',
       'guarantee', 'price_mean', 'price_count', 'price_media', 'max_price',
       'item_quotation_price_num_avg', 'settle_rate', 'rank_score',
       'rank_low_score', 'cnt', 'total_cnt', 'percent', 'percent_rank',
        'product_level', 'rank_count',
       'reverse_rate']

        # -1 is the sentinel for missing values in the published table.
        predict_data = predict_data.fillna(-1)

        # Drop the partition that has aged out (30 days back).
        delete_rate_sql = """
        delete from c2b_secondary_level_ppv_rate where create_date=DATE_FORMAT(date_sub(curdate(),interval 30 day),'%Y%m%d') 
        """
        mysql_prediction_processor.execute_sql(delete_rate_sql)
        # Drop any rows already written for today so re-runs are idempotent.
        delete_td_rate_sql = """
                delete from c2b_secondary_level_ppv_rate where create_date='{}' 
                """.format(self.end_date)
        mysql_prediction_processor.execute_sql(delete_td_rate_sql)
        # BUGFIX: the original statement listed 24 columns and 24 placeholders
        # while the data tuples carry the 25 columns above ('networks' was
        # missing between purchase_way and guarantee), so every tuple was
        # misaligned from 'networks' onward and the placeholder count mismatched.
        # NOTE(review): assumes the table has a 'networks' column — confirm schema.
        insert_sql = """
        INSERT INTO c2b_secondary_level_ppv_rate(create_date,product_id, product_name, secondary_level_id,secondary_level_name, ppv_combine, storage, purchase_way, networks, guarantee,
                    price_mean, price_count, price_media, max_price,
                    item_quotation_price_num_avg, settle_rate, rank_score,rank_low_score, cnt, total_cnt, percent, percent_rank, product_level, rank_count, reverse_rate)
        VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
        """
        mysql_prediction_processor.execute_insert_sql(insert_sql,
                                                      predict_data[secondary_level_rate_columns].to_records(index=False).tolist())
        logger.info('saving predict data to mysql done')
        mysql_price_num = predict_data.shape[0]
        logger.info('c2b_secondary_level_ppv_rate datasize={}'.format(mysql_price_num))

    def pool_rate(self,x):
        tmp=x
        price_mean_rate =(tmp['price_media']*tmp['percent']).sum()/tmp['percent'].sum()
        sum_cnt=tmp['cnt'].sum()
        cishu=tmp['cnt'].count()
        size=tmp.shape[0]
        # if size>1:
        #     print(size)
        # total_cnt=tmp['total_cnt'].sum()
        cols =['price_mean_rate','sum_cnt','cishu']
        return pd.Series([price_mean_rate,sum_cnt,cishu],index=cols)


    def process_level_sub_ppv_rate_fun(self,df44):
        """Collapse the secondary-level rates to the coarse level_sub dimension.

        Normalises each combination's median rate by the per-level baseline,
        pools the three-window rates with pool_rate, derives first/second
        baseline rates, guards against inverted rate/score ordering, converts
        storage-level baselines into cross-storage rates, and dumps the result
        to a hard-coded CSV path.

        :param df44: output of main_fun
        :return: ret_df with price_mean_rate, base_rate and final_rate columns
        """
        # product_base_ppv_df =get_product_base_ppv_fun()
        # Baseline price per (product, level, storage): the combination flagged as
        # base in product_base_ppv_df supplies price_mean_base.
        product_base_ppv_rate_df = df44.merge(self.product_base_ppv_df[['product_id','ppv_combine','cnt','ppv_base_rank','rank_score_base']],on=['product_id','ppv_combine'])
        product_base_ppv_rate_df = product_base_ppv_rate_df[['product_id','secondary_level_name', 'storage','price_mean']]
        product_base_ppv_rate_df=product_base_ppv_rate_df.rename(columns={'price_mean':'price_mean_base'})

        df44 = df44.merge(product_base_ppv_rate_df,on=['product_id','secondary_level_name', 'storage'])

        # Re-express the median rate relative to the baseline combination.
        # NOTE(review): this overwrites the 'price_mean' column with a ratio.
        df44['price_mean'] =df44['price_media']/df44['price_mean_base']
        # Pool across secondary levels into the coarse level_sub dimension.
        df_ret =df44.groupby(by=['product_id','product_name','level_sub', 'storage','ppv_combine','rank_score']).apply(self.pool_rate).reset_index()
        sum_df =df44[['product_id','product_name','level_sub','secondary_level_name', 'storage','total_cnt']].drop_duplicates()
        sum_df =sum_df.groupby(by=['product_id','level_sub', 'storage'])['total_cnt'].sum().reset_index()
        df_ret = df_ret.merge(sum_df,on=['product_id','level_sub', 'storage'])
        # Baseline (first) and runner-up (second) rates per (product, level_sub, storage).
        leve_first_df,leve_second_df =self.first_second_base_fun(df_ret)
        ret_df = df_ret.merge(leve_first_df[['product_id', 'level_sub', 'storage', 'first_cnt_rate', 'rank_score_first']],
                          how='left',
                          on=['product_id', 'level_sub', 'storage'])
        ret_df = ret_df.merge(leve_second_df[['product_id', 'level_sub', 'storage', 'second_cnt_rate', 'rank_score_second']],
                          how='left',
                          on=['product_id', 'level_sub', 'storage'])
        # With only one row per key there is no runner-up: fall back to the first
        # rate and a score one point lower.
        ret_df.loc[pd.isna(ret_df.second_cnt_rate),'second_cnt_rate']=ret_df.loc[pd.isna(ret_df.second_cnt_rate),'first_cnt_rate']
        ret_df.loc[pd.isna(ret_df.rank_score_second),'rank_score_second']=ret_df.loc[pd.isna(ret_df.rank_score_second),'rank_score_first']-1

        ret_df['diff_f_s_rate'] = ret_df['first_cnt_rate'] - ret_df['second_cnt_rate']
        ret_df['diff_f_s_score'] = ret_df['rank_score_first'] - ret_df['rank_score_second']

        # Rate change per unit of rank score between the two baseline rows.
        ret_df['diff_score_per_rate'] = (ret_df['first_cnt_rate'] - ret_df['second_cnt_rate']) / (
                ret_df['rank_score_first'] - ret_df['rank_score_second'])
        # Repair inverted ordering: when a higher score maps to a lower rate, pin
        # the second-ranked row's rate just above/below the first rate.
        if ret_df.loc[ret_df['diff_score_per_rate'] < 0, 'diff_score_per_rate'].shape[0]>0:
            ret_df.loc[
                (ret_df['diff_score_per_rate'] < 0) & (ret_df['rank_score_first'] < ret_df['rank_score_second']) & (
                            ret_df['rank_score'] == ret_df['rank_score_second']), 'price_mean_rate'] = ret_df.loc[
                (ret_df['diff_score_per_rate'] < 0) & (ret_df['rank_score_first'] < ret_df['rank_score_second']) & (
                            ret_df['rank_score'] == ret_df['rank_score_second']), 'first_cnt_rate'] + 0.01
            ret_df.loc[
                (ret_df['diff_score_per_rate'] < 0) & (ret_df['rank_score_first'] >= ret_df['rank_score_second']) & (
                            ret_df['rank_score'] == ret_df['rank_score_second']), 'price_mean_rate'] = ret_df.loc[
                (ret_df['diff_score_per_rate'] < 0) & (ret_df['rank_score_first'] >= ret_df['rank_score_second']) & (
                            ret_df['rank_score'] == ret_df['rank_score_second']), 'first_cnt_rate'] - 0.01

            # Tiny epsilon so later divisions by diff_score_per_rate stay finite.
            ret_df.loc[ret_df['diff_score_per_rate'] < 0, 'diff_score_per_rate'] = 0.0000000000000000000000000001
            # ret_df.loc[(ret_df['diff_score_per_rate'] < 0)&(ret_df['rank_score']==ret_df['rank_score_second']), 'price_mean_rate'] = ret_df.loc[(ret_df['diff_score_per_rate'] < 0)&(ret_df['rank_score']==ret_df['rank_score_second']), 'first_cnt_rate']+0.01


        ret_df.loc[ret_df['diff_f_s_rate'] == 0, 'diff_score_per_rate'] = 0.0000000000000000000000000001
        ret_df.loc[ret_df['diff_f_s_score'] == 0, 'diff_score_per_rate'] = 0.0000000000000000000000000001


        # Average quoted price at the (product, level_sub, combine) dimension.
        ret_df =ret_df.merge(self.query_level_sub_ppv_df[['product_id','level_sub','ppv_combine','level_item_quotation_avg_price']],how='left',on=['product_id','level_sub','ppv_combine'])
        # product_level_storage_rate = ret_df.loc[ret_df.price_mean_rate == 1, ['product_id', 'level_sub', 'storage','sum_cnt', 'level_item_quotation_avg_price']]
        # NOTE(review): no '_' between level_sub and storage here, unlike every
        # other composite key in this file — possible key-collision bug; confirm.
        ret_df['product_level_storage'] = ret_df['product_id'].astype(str) + '_' +ret_df['level_sub'].astype(str) +ret_df['storage'].astype(str)
        # Compound sort key: shipment count dominates, rank_score breaks ties.
        ret_df['sum_cnt_score'] = ret_df['sum_cnt']*1000000000000+ret_df['rank_score']
        ret_df['sum_cnt_score_rank'] = ret_df['sum_cnt_score'].groupby(ret_df['product_level_storage']).rank(ascending=False,method='first')

        # Representative row per (product, level_sub, storage).
        product_level_storage_rate = ret_df.loc[ret_df.sum_cnt_score_rank == 1, ['product_id', 'level_sub', 'rank_score','storage','sum_cnt', 'level_item_quotation_avg_price']] #EE

        product_level_storage_rate['product_level'] = product_level_storage_rate['product_id'].astype(str) + '_' + product_level_storage_rate['level_sub'].astype(
            str)
        product_level_storage_rate['count_rank'] = product_level_storage_rate['sum_cnt'].groupby(product_level_storage_rate['product_level']).rank(ascending=False,
                                                                                                            method='first')
        # Keep the busiest storage as the base price for the whole level_sub.
        product_level_storage_rate = product_level_storage_rate.loc[product_level_storage_rate.count_rank==1]
        product_level_storage_rate = product_level_storage_rate[['product_id', 'level_sub','level_item_quotation_avg_price']].rename(columns={'level_item_quotation_avg_price': 'level_item_quotation_avg_base_price'})

        ret_df = ret_df.merge(product_level_storage_rate, on=['product_id', 'level_sub'])
        # Each storage's average price relative to the base storage's price.
        ret_df['base_storage_rate']=ret_df['level_item_quotation_avg_price']*1.000000/ret_df['level_item_quotation_avg_base_price']

        # NOTE(review): assigning into this .loc slice below triggers pandas'
        # SettingWithCopyWarning; it works here because a copy is intended — confirm.
        product_level_storage_base_rate = ret_df.loc[ret_df.sum_cnt_score_rank == 1]

        product_level_storage_base_rate['count_rank'] = product_level_storage_base_rate['sum_cnt'].groupby(product_level_storage_base_rate['product_level']).rank(ascending=False,method='first')

        # NOTE(review): the condition duplicates (count_rank == 1) twice —
        # presumably one term was meant to be something else; confirm.
        product_level_storage_base_rate = product_level_storage_base_rate.loc[ (product_level_storage_base_rate.count_rank == 1)&(product_level_storage_base_rate.count_rank==1), ['product_id', 'level_sub', 'storage', 'base_storage_rate']]
        product_level_storage_base_rate = product_level_storage_base_rate.rename( columns={'base_storage_rate': 'base_rate'})

        ret_df = ret_df.merge(product_level_storage_base_rate, on=['product_id', 'level_sub','storage'])

        # Final publishable rate: cross-storage base times the pooled combination rate.
        ret_df['final_rate'] = ret_df['base_rate'] * ret_df['price_mean_rate']
        ret_df.sort_values(by=['product_id', 'level_sub', 'rank_score'],inplace=True)
        # self.save_process_level_sub_ppv_rate_fun(ret_df)
        # NOTE(review): hard-coded debug dump path — confirm it belongs in production.
        ret_df.to_csv('/data/sunpengwei/tmp/c2b_level_sub_ppv_rate_tablet_0824.csv', encoding='utf-8-sig')
        return ret_df

    def save_process_level_sub_ppv_rate_fun(self,ret_df):
        """Persist the level_sub ppv rate table to MySQL.

        Deletes the 30-day-old partition and today's partition, then bulk-inserts
        the current run's rows.

        NOTE(review): this method is currently disabled at its call site in
        process_level_sub_ppv_rate_fun, and ret_df produced there does not
        appear to contain a 'reverse_rate' column — verify before re-enabling,
        or the column selection below will raise KeyError.

        :param ret_df: result frame from process_level_sub_ppv_rate_fun
        """
        ret_df['create_date']=self.end_date
        predict_data=ret_df
        # This order defines the tuple order fed to the INSERT statement below
        # (23 columns, matching the 23 placeholders).
        levelsub_rate_columns=[ 'create_date','product_id', 'product_name', 'level_sub',  'ppv_combine','storage',
       'rank_score', 'price_mean_rate', 'sum_cnt', 'cishu', 'total_cnt',
       'product_level', 'first_cnt_rate', 'rank_score_first',
       'second_cnt_rate', 'rank_score_second', 'diff_score_per_rate',
       'reverse_rate', 'level_item_quotation_avg_price',
       'level_item_quotation_avg_base_price', 'base_storage_rate', 'base_rate','final_rate']

        # -1 is the sentinel for missing values in the published table.
        predict_data = predict_data.fillna(-1)

        # Drop the partition that has aged out (30 days back).
        delete_rate_sql = """
        delete from c2b_levelsub_ppv_rate where create_date=DATE_FORMAT(date_sub(curdate(),interval 30 day),'%Y%m%d') 
        """
        mysql_prediction_processor.execute_sql(delete_rate_sql)
        # Drop any rows already written for today so re-runs are idempotent.
        delete_td_rate_sql = """
                delete from c2b_levelsub_ppv_rate where create_date='{}' 
                """.format(self.end_date)
        mysql_prediction_processor.execute_sql(delete_td_rate_sql)
        insert_sql = """
        INSERT INTO c2b_levelsub_ppv_rate(create_date,product_id, product_name, level_sub,  ppv_combine,storage,
       rank_score, price_mean_rate, sum_cnt, cishu, total_cnt,
       product_level, first_cnt_rate, rank_score_first,
       second_cnt_rate, rank_score_second, diff_score_per_rate,
       reverse_rate, level_item_quotation_avg_price,
       level_item_quotation_avg_base_price, base_storage_rate, base_rate,final_rate)
        VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,  %s,%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
        """
        # predict_data=predict_data.reset_index()
        mysql_prediction_processor.execute_insert_sql(insert_sql,
                                                      predict_data[levelsub_rate_columns].to_records(index=False).tolist())
        logger.info('saving predict data to mysql done')
        mysql_price_num = predict_data.shape[0]
        logger.info('c2b_levelsub_ppv_rate datasize={}'.format(mysql_price_num))


    def reverse_mean_rate_fun(self,grouped,resDf):

        totalCnt = grouped.count().shape[0]
        iter=0
        for name, group in grouped:
            # product_key = group[0]
            group = group.sort_values(by=['rank_score'],ascending=[ False])
            group.reset_index(drop=True, inplace=True)
            size = group.shape[0]
            if group.loc[group.product_id == 38541].shape[0] > 0 and group.loc[group.storage == '256G'].shape[0] > 0:
                logger.info('sd')
            if size<2:
                resDf = resDf.append(group.copy())
                continue
            for i in range(size-1):
                if group.loc[i,'reverse_rate']<=group.loc[i+1,'reverse_rate']:
                    final_rate=group.loc[i + 1, 'reverse_rate']
                    # storage_purchase=group.loc[i + 1, 'storage_purchase']
                    storage_guarantee_networks=group.loc[i + 1, 'storage_guarantee_networks']
                    storage_purchase_networks=group.loc[i + 1, 'storage_purchase_networks']
                    storage_purchase_guarantee=group.loc[i + 1, 'storage_purchase_guarantee']
                    purchase_guarantee_networks=group.loc[i + 1, 'purchase_guarantee_networks']
                    sum_cnt=group.loc[i + 1, 'sum_cnt']
                    flag=0
                    # if group.loc[group.product_id==37676].shape[0]>0:
                    #     logger.info('sd')
                    for j in range(i,-1,-1):
                        if group.loc[j, 'storage_purchase_networks']==storage_purchase_networks and  group.loc[j, 'reverse_rate']<=final_rate:
                            discount=1.00
                            if group.loc[i + 1, 'sum_cnt'] >= 10 and group.loc[j, 'sum_cnt'] >= 10:
                                discount=0.2
                            if sum_cnt<=group.loc[j, 'sum_cnt']:
                                flag=1
                                up_rate=group.loc[i + 1, 'reverse_rate']-(group.loc[i + 1, 'reverse_rate']-group.loc[i + 1, 'rank_score']*1.000000/group.loc[j, 'rank_score']*group.loc[j, 'reverse_rate'])*discount
                                if sum_cnt == 0:
                                    group.loc[i + 1, 'reverse_rate'] = up_rate
                                else:
                                    group.loc[i + 1, 'reverse_rate']=min(up_rate,group.loc[i, 'reverse_rate']-0.02)
                                group.loc[i + 1, 'reverse_flag']=1
                                break
                            else:
                                flag = 1
                                index_j_rate = group.loc[j, 'rank_score'] * 1.000000 / group.loc[ i + 1, 'rank_score'] * group.loc[i + 1, 'reverse_rate']
                                index_j_rate = max(index_j_rate, group.loc[i + 1, 'reverse_rate'] + 0.01)
                                if j>0:
                                    up_rate = group.loc[j-1, 'reverse_rate']
                                    if index_j_rate>up_rate:
                                        index_j_rate=up_rate-0.01
                                group.loc[j, 'reverse_rate'] =index_j_rate
                                group.loc[j, 'reverse_flag'] = 1
                                break
                    if flag==0:
                        for j in range(i,-1,-1):
                            if group.loc[j, 'storage_guarantee_networks']==storage_guarantee_networks and group.loc[j, 'reverse_rate']<=final_rate:
                                discount = 1.00
                                if group.loc[i + 1, 'sum_cnt'] >= 10 and group.loc[j, 'sum_cnt'] >= 10:
                                    discount = 0.2
                                if sum_cnt <= group.loc[j, 'sum_cnt']:
                                    flag = 1
                                    up_rate = group.loc[i + 1, 'reverse_rate']-(group.loc[i + 1, 'reverse_rate']-group.loc[i + 1, 'rank_score'] * 1.000000 / group.loc[ j, 'rank_score'] * group.loc[j, 'reverse_rate'])*discount
                                    if sum_cnt==0:
                                        group.loc[i + 1, 'reverse_rate']=up_rate
                                    else:
                                        group.loc[i + 1, 'reverse_rate'] = max(min(up_rate,group.loc[i, 'reverse_rate']-0.01),group.loc[i, 'reverse_rate']-0.05)
                                    group.loc[i + 1, 'reverse_flag'] = 1
                                    break
                                else:
                                    flag = 1
                                    index_j_rate = group.loc[j, 'reverse_rate']-(group.loc[j, 'reverse_rate']-group.loc[j, 'rank_score'] * 1.000000 / group.loc[i + 1, 'rank_score'] *  group.loc[i + 1, 'reverse_rate'])*discount
                                    index_j_rate=max(index_j_rate,group.loc[i+1, 'reverse_rate']+0.01)
                                    if j > 0:
                                        up_rate = group.loc[j - 1, 'reverse_rate']
                                        if index_j_rate > up_rate:
                                            index_j_rate = up_rate - 0.01
                                    group.loc[j, 'reverse_rate'] = index_j_rate
                                    group.loc[j, 'reverse_flag'] = 1
                    if flag==0:
                        for j in range(i,-1,-1):
                            if group.loc[j, 'storage_purchase_guarantee']==storage_purchase_guarantee and group.loc[j, 'reverse_rate']<=final_rate:
                                discount = 1.00
                                if group.loc[i + 1, 'sum_cnt'] >= 10 and group.loc[j, 'sum_cnt'] >= 10:
                                    discount = 0.2
                                if sum_cnt <= group.loc[j, 'sum_cnt']:
                                    flag = 1
                                    up_rate = group.loc[i + 1, 'reverse_rate']-(group.loc[i + 1, 'reverse_rate']-group.loc[i + 1, 'rank_score'] * 1.000000 / group.loc[ j, 'rank_score'] * group.loc[j, 'reverse_rate'])*discount
                                    if sum_cnt==0:
                                        group.loc[i + 1, 'reverse_rate']=up_rate
                                    else:
                                        group.loc[i + 1, 'reverse_rate'] = max(min(up_rate,group.loc[i, 'reverse_rate']-0.01),group.loc[i, 'reverse_rate']-0.05)
                                    group.loc[i + 1, 'reverse_flag'] = 1
                                    break
                                else:
                                    flag = 1
                                    index_j_rate = group.loc[j, 'reverse_rate']-(group.loc[j, 'reverse_rate']-group.loc[j, 'rank_score'] * 1.000000 / group.loc[i + 1, 'rank_score'] *  group.loc[i + 1, 'reverse_rate'])*discount
                                    index_j_rate=max(index_j_rate,group.loc[i+1, 'reverse_rate']+0.01)
                                    if j > 0:
                                        up_rate = group.loc[j - 1, 'reverse_rate']
                                        if index_j_rate > up_rate:
                                            index_j_rate = up_rate - 0.01
                                    group.loc[j, 'reverse_rate'] = index_j_rate
                                    group.loc[j, 'reverse_flag'] = 1
                    if flag==0:
                        for j in range(i,-1,-1):
                            if group.loc[j, 'purchase_guarantee_networks']==purchase_guarantee_networks and group.loc[j, 'reverse_rate']<=final_rate:
                                discount = 1.00
                                if group.loc[i + 1, 'sum_cnt'] >= 10 and group.loc[j, 'sum_cnt'] >= 10:
                                    discount = 0.2
                                if sum_cnt <= group.loc[j, 'sum_cnt']:
                                    flag = 1
                                    up_rate = group.loc[i, 'reverse_rate']-(group.loc[i + 1, 'reverse_rate']-group.loc[i + 1, 'rank_score'] * 1.000000 / group.loc[ j, 'rank_score'] * group.loc[j, 'reverse_rate'])*discount
                                    if sum_cnt == 0:
                                        group.loc[i + 1, 'reverse_rate'] = up_rate
                                    else:
                                        group.loc[i + 1, 'reverse_rate'] = max(min(up_rate,group.loc[i, 'reverse_rate']-0.01),group.loc[i, 'reverse_rate']-0.05)
                                    group.loc[i + 1, 'reverse_flag'] = 1
                                    break
                                else:
                                    flag = 1
                                    index_j_rate = group.loc[j, 'reverse_rate']-(group.loc[j, 'reverse_rate']-group.loc[j, 'rank_score'] * 1.000000 / group.loc[i + 1, 'rank_score'] *  group.loc[i + 1, 'reverse_rate'])*discount
                                    index_j_rate = max(index_j_rate, group.loc[i + 1, 'reverse_rate'] + 0.01)
                                    if j > 0:
                                        up_rate = group.loc[j - 1, 'reverse_rate']
                                        if index_j_rate > up_rate:
                                            index_j_rate = up_rate - 0.01
                                    group.loc[j, 'reverse_rate'] = index_j_rate
                                    group.loc[j, 'reverse_flag'] = 1
                    flag=0
                # print(i)
            resDf = resDf.append(group.copy())
            iter += 1
            iter += 1
            # resDf.loc[(resDf.product_id == 43511) & (resDf.level_sub == 'B'), ['ppv_combine', 'rank_score', 'final_rate',
            #                                                                    'reverse_flag', 'reverse_rate', 'sum_cnt',
            #                                                                    'storage_purchase',
            #                                                                    'storage_guarantee']].sort_values(
            #     by='rank_score', ascending=False)
            # group[['ppv_combine', 'rank_score', 'final_rate', 'reverse_flag', 'reverse_rate', 'sum_cnt', 'storage_purchase',
            #        'storage_guarantee']]
            logger.info('totalCnt= {},product_key={},iter = {},size={}'.format(totalCnt, str(name), iter, size))
        return resDf

    def avg_weight_ppv_rate_fun(self):
        """Compute the count-weighted average ppv-combine rate per product.

        Reads the level_sub-granularity ppv rate csv, normalizes each rate by
        the rate of the product's base ppv combine, weights the normalized
        rate by shipment count (sum_cnt), aggregates up to
        (product_id, ppv_combine) granularity and writes the result to csv.
        """
        rate_df = pd.read_csv('/data/sunpengwei/tmp/c2b_level_sub_ppv_rate_tablet_0824.csv', encoding='utf-8-sig')

        # Base rate per (product_id, level_sub, storage): the price_mean_rate of
        # the rows matching the product's base ppv combine.
        base_cols = ['product_id', 'ppv_combine', 'cnt', 'ppv_base_rank', 'rank_score_base']
        base_df = (rate_df
                   .merge(self.product_base_ppv_df[base_cols], on=['product_id', 'ppv_combine'])
                   [['product_id', 'level_sub', 'storage', 'price_mean_rate']]
                   .rename(columns={'price_mean_rate': 'price_mean_base'}))

        rate_df = rate_df.merge(base_df, on=['product_id', 'level_sub', 'storage'])

        # Normalize both rate columns against the base combine's rate.
        rate_df['price_mean_rate_reverse'] = rate_df['price_mean_rate'] / rate_df['price_mean_base']
        rate_df['price_mean_rate'] = rate_df['price_mean_rate'] / rate_df['price_mean_base']

        # Weighted roll-up of the level rate: zero counts get a tiny positive
        # weight so those rows still contribute (and avoid dividing by zero).
        rate_df.loc[rate_df.sum_cnt == 0, 'sum_cnt'] = 0.01
        rate_df['weight_price_mean_rate_reverse'] = rate_df['price_mean_rate'] * rate_df['sum_cnt']

        sum_rate_df = rate_df.groupby(by=['product_id', 'product_name', 'ppv_combine'])['weight_price_mean_rate_reverse'].sum().reset_index()
        sum_cnt_df = rate_df.groupby(by=['product_id', 'ppv_combine'])['sum_cnt'].sum().reset_index()

        avg_rate_df = sum_rate_df.merge(sum_cnt_df, on=['product_id', 'ppv_combine'])
        avg_rate_df['price_mean_rate'] = avg_rate_df['weight_price_mean_rate_reverse'] / avg_rate_df['sum_cnt']
        avg_rate_df.to_csv('/data/sunpengwei/tmp/c2b_tablet_product_ppv_avg_rate_1103.csv', encoding='utf-8-sig')

    # def repair_reverse_rate_fun(self):
    #     c2b_level_sub_ppv_rate_df = pd.read_csv('/data/sunpengwei/tmp/c2b_level_sub_ppv_rate_tablet_0824.csv', encoding='utf-8-sig')
    #     c2b_level_sub_ppv_rate_df['purchase_way']=c2b_level_sub_ppv_rate_df['ppv_combine'].apply(lambda x:x.split('_')[1])
    #     c2b_level_sub_ppv_rate_df['guarantee']=c2b_level_sub_ppv_rate_df['ppv_combine'].apply(lambda x:x.split('_')[2])
    #
    #     c2b_level_sub_ppv_rate_df['storage_purchase']= c2b_level_sub_ppv_rate_df['storage'] + '_' + c2b_level_sub_ppv_rate_df['purchase_way'].astype(str)
    #     c2b_level_sub_ppv_rate_df['storage_guarantee']= c2b_level_sub_ppv_rate_df['storage'] + '_' + c2b_level_sub_ppv_rate_df['guarantee'].astype(str)
    #     c2b_level_sub_ppv_rate_df['purchase_way_guarantee']= c2b_level_sub_ppv_rate_df['purchase_way'] + '_' + c2b_level_sub_ppv_rate_df['guarantee'].astype(str)
    #     c2b_level_sub_ppv_rate_df['reverse_flag']=0
    #     c2b_level_sub_ppv_rate_df['reverse_rate']=c2b_level_sub_ppv_rate_df['final_rate']
    #     # c2b_level_sub_ppv_rate_df = c2b_level_sub_ppv_rate_df.loc[(c2b_level_sub_ppv_rate_df.product_id==36045)&(c2b_level_sub_ppv_rate_df.level_sub=='A')]
    #     resDf = pd.DataFrame(columns=c2b_level_sub_ppv_rate_df.columns.tolist())
    #     grouped = c2b_level_sub_ppv_rate_df.groupby(by=['product_id', 'product_name','storage', 'level_sub'])
    #     storage_resDf = self.reverse_mean_rate_fun(grouped,resDf)
    #     # resDf=storage_resDf
    #     grouped = storage_resDf.groupby(by=['product_id','product_name','level_sub'])
    #     resDf = pd.DataFrame(columns=storage_resDf.columns.tolist())
    #     resDf = self.reverse_mean_rate_fun(grouped, resDf)
    #
    #     resDf['price_mean_rate_reverse']=resDf['reverse_rate']/resDf['base_rate']
    #     resDf.to_csv('/data/sunpengwei/tmp/c2b_level_sub_ppv_reverse_rate_0824.csv', encoding='utf-8-sig')
    #     return resDf



    def get_sale_price_to_ppv_rate_price_fun(self,md_date):
        """Derive ppv-rate based prices per (sku, level) and evaluate them.

        Pipeline: load sku/level base data from presto and the latest level-2
        predicted prices from mysql; attach small_version rank scores and
        build the ppv_combine key; pick a base sku per product/storage group;
        scale the base sku's predicted price by the reversed ppv rate to get
        ppv_rate_price; compare both ppv_rate_price and the original sale
        price with the last 7 days of deals; dump the detail to csv and
        persist results through the save_* helpers.

        :param md_date: model date handed to get_period_price_fun.
        """
        # Base sku/level attributes and current level prices for tablet
        # (product_category_id=6), Apple brand (product_brand_id=52).
        query_sql="""
        select dpk.product_sku_name as product_sku_name,sku2level.product_id as product_id,dpk.product_name as product_name,dpk.product_brand_id as product_brand_id,
        0  as level_template_id,
        dpk.product_brand_name as product_brand_name,ln.product_level_name as product_level_name,sku2level.product_sku_id as product_sku_id,
        sku2level.product_level_id as product_level_id,
        case when sku2level.price_3>0 then cast(sku2level.price_3 as int) else -1 end price_3,
        case when sku2level.price_2>0 then cast(sku2level.price_2 as int) else -1 end price_2,
        case when sku2level.price_1>0 then cast(sku2level.price_1 as int) else -1 end price_1,
         cast(sku2level.saleprice as int) as saleprice,
        case when sku2level.qty is null then 0 else  cast(sku2level.qty as int) end as qty,
        dpk.memory_capacity_name as storage , 
        dpk.purchase_channel_name as purchase_way ,
        dpk.warranty_duration_name as guarantee,
        dpk.network_standard_name as networks,
        dpk.small_model_name as small_version
        from algo.algo_source_sku_level_saleprice sku2level
        inner join dim.dim_product_sku dpk 
        on sku2level.product_sku_id=dpk.product_sku_id
        left join dim.dim_product_level ln 
        on sku2level.product_level_id=ln.product_level_id
        where  sku2level.product_category_id=6
        and dpk.partition_flag='1'
        and dpk.product_brand_id =52
        """

        # and sku2level.product_id in (34701, 27637, 25679, 20079, 17726, 17461)
        # NOTE(review): query_sql contains no '{}' placeholder, so this
        # .format(self.end_date) call is a no-op — confirm whether a date
        # filter was intended.
        price_df = presto_processor.load_sql(query_sql.format(self.end_date))

        # Level-2 predicted prices for tomorrow's date partition.
        query_android_sql="""
            select a.product_sku_key, a.product_level_key, a.template_rank as rank,a.y_pred as y_pred_android,a.forecast_reference_price as y_pred_inverse
            from price_prediction_level2_price_reverse_tablet_v1 a 
            where a.date = DATE_FORMAT(date_add(curdate(),interval 1 day),'%Y%m%d')
        """
        query_android_df = mysql_prediction_processor.load_sql(query_android_sql)
        df = price_df.merge(query_android_df,left_on=['product_sku_id','product_level_id'],right_on=['product_sku_key','product_level_key'])
        # Keep the original sale price for evaluation; replace the working
        # saleprice with the model prediction.
        df['salepricebak'] = df['saleprice'].copy()
        df['saleprice']=df['y_pred_android']
        # Attach the ppv rank score data for the small_version dimension.
        df = df.merge(ppv_rank_df.loc[ppv_rank_df.column_name == 'small_version', ['column_value_name', 'ppv_rank', 'low_ppv_rank']],
                          how='left', left_on='small_version', right_on='column_value_name')
        df = df.rename(columns={"ppv_rank": "small_version_rank"})

        # Unranked small versions default to the full score of 100.
        df['small_version_rank']=df['small_version_rank'].fillna(100)
        df['small_version_rank_rate'] = df['small_version_rank']*1.0000/100


        # Aggregate ppv: fill missing ppv fields and build the combine key.
        df['storage']=df['storage'].fillna('unknown')
        df['purchase_way']=df['purchase_way'].fillna('unknown')
        df['guarantee']=df['guarantee'].fillna('unknown')
        df['networks']=df['networks'].fillna('unknown')

        df['ppv_combine'] = df['storage'] + '_' + df['purchase_way'] + '_' + df['guarantee'] + '_' + df['networks']
        # level_sub is the leading letter of the level name (e.g. 'A' from 'A1').
        df['level_sub'] = df['product_level_name'].str[0]
        df = df.merge(self.combine_df[['product_id', 'ppv_combine', 'rank_score', 'rank_low_score','storage_rank','purchase_way_rank','guarantee_rank','networks_rank']],
                      on=['product_id', 'ppv_combine'])
        df = df.rename(columns={"rank_score": "rank_score_price", "rank_low_score": "rank_low_score_price"})


        # Period deal data per sku/level — presumably supplies price_0_7 /
        # sale_num_0_7 etc. used below; confirm in get_period_price_fun.
        perid_df = get_period_price_fun(md_date, flag=True)
        df = df.merge(perid_df, how='left', left_on=['product_sku_id', 'product_level_id'],
                        right_on=['product_sku_key', 'product_level_key'])



        # Counts per (product, ppv fields, small_version) over the 21-day window.
        query_product_ppv_small_version_cnt_df = presto_processor.load_sql(query_product_ppv_small_version_cnt.format(self.start21_date,self.start21_date,self.end_date,andoriod_product_ids,self.start21_date,self.start21_date,self.end_date,andoriod_product_ids))
        query_product_ppv_small_version_cnt_df['small_version'] = query_product_ppv_small_version_cnt_df['small_version'].fillna('unknown')
        query_product_ppv_small_version_cnt_df['storage'] = query_product_ppv_small_version_cnt_df['storage'].fillna('unknown')
        query_product_ppv_small_version_cnt_df['purchase_way'] = query_product_ppv_small_version_cnt_df['purchase_way'].fillna('unknown')
        query_product_ppv_small_version_cnt_df['guarantee'] = query_product_ppv_small_version_cnt_df['guarantee'].fillna('unknown')
        query_product_ppv_small_version_cnt_df['networks'] = query_product_ppv_small_version_cnt_df['networks'].fillna('unknown')
        query_product_ppv_small_version_cnt_df = query_product_ppv_small_version_cnt_df.merge(ppv_rank_df.loc[ppv_rank_df.column_name == 'small_version', ['column_value_name', 'ppv_rank']],
                      how='left', left_on='small_version', right_on='column_value_name')
        query_product_ppv_small_version_cnt_df = query_product_ppv_small_version_cnt_df.rename(columns={"ppv_rank": "small_version_rank"})
        query_product_ppv_small_version_cnt_df['small_version_rank'] = query_product_ppv_small_version_cnt_df['small_version_rank'].fillna(100)

        query_product_ppv_small_version_cnt_df['ppv_combine'] = query_product_ppv_small_version_cnt_df['storage'] + '_' + query_product_ppv_small_version_cnt_df[
            'purchase_way'] + '_' + query_product_ppv_small_version_cnt_df['guarantee'] + '_' + query_product_ppv_small_version_cnt_df['networks']
        small_version_df = query_product_ppv_small_version_cnt_df.groupby(by=['product_id','ppv_combine','small_version_rank'])['cnt'].sum().reset_index()

        # Split counts for the two rank buckets (100 vs 98) — used below to
        # decide which bucket dominates a combine (small_version_flag).
        small_version_100_df = small_version_df.loc[small_version_df.small_version_rank==100,['product_id','ppv_combine','small_version_rank','cnt']]
        small_version_100_df=small_version_100_df.rename(columns={"cnt": "cnt_100"})
        small_version_98_df = small_version_df.loc[small_version_df.small_version_rank==98,['product_id','ppv_combine','small_version_rank','cnt']]
        small_version_98_df=small_version_98_df.rename(columns={"cnt": "cnt_98"})

        # Work out the base sku within each product/storage group.
        # NOTE(review): groupby(...)['a','b'] tuple selection is deprecated in
        # newer pandas (use a list) — works on the pinned version.
        base_storage_sku_df = df.groupby(by=['product_id','ppv_combine','storage','product_sku_id'])['sale_num_0_7','qty'].sum().reset_index()



        # ppv combine rate processing
        level_sub_ppv_rate_df =pd.read_csv('/data/sunpengwei/tmp/c2b_tablet_all_product_ppv_reverse_rate_1103.csv',encoding='utf-8-sig')


        level_sub_ppv_rate_df = level_sub_ppv_rate_df.merge(small_version_100_df[['product_id','ppv_combine','cnt_100']],how='left',on=['product_id','ppv_combine'])
        level_sub_ppv_rate_df = level_sub_ppv_rate_df.merge(small_version_98_df[['product_id','ppv_combine','cnt_98']],how='left',on=['product_id','ppv_combine'])
        level_sub_ppv_rate_df['cnt_100']=level_sub_ppv_rate_df['cnt_100'].fillna(0)
        level_sub_ppv_rate_df['cnt_98']=level_sub_ppv_rate_df['cnt_98'].fillna(0)

        # small_version_flag=0 means the 98-rank bucket outweighs the 100-rank
        # bucket for that combine (rebased in the rate computation below).
        level_sub_ppv_rate_df['small_version_flag']=1
        level_sub_ppv_rate_df.loc[level_sub_ppv_rate_df.cnt_100<level_sub_ppv_rate_df.cnt_98,'small_version_flag']=0

        # Base sku = top-ranked combine (sum_rank==1); within a storage group,
        # huansuan_cnt makes combine-level sum_cnt dominate and recent sku
        # sales (sale_num_0_7) break ties.
        base_storage_sku_df = base_storage_sku_df.merge(level_sub_ppv_rate_df[['product_id','ppv_combine','sum_cnt','sum_rank']],how='left',on=['product_id','ppv_combine'])
        base_storage_sku_df = base_storage_sku_df.loc[base_storage_sku_df.sum_rank==1]
        base_storage_sku_df['huansuan_cnt'] = base_storage_sku_df['sum_cnt']*100000+base_storage_sku_df['sale_num_0_7']
        base_storage_sku_df['cnt_rank'] = base_storage_sku_df.groupby(by=['product_id', 'storage'])['huansuan_cnt'].rank(ascending=False, method='first')
        # base_storage_sku_df.loc[base_storage_sku_df.product_id == 34464]
        base_storage_sku_df = base_storage_sku_df.loc[base_storage_sku_df.cnt_rank == 1]
        base_storage_sku_df = base_storage_sku_df.rename(columns={"sale_num_0_7": 'qty_sum'})

        # Normalize so the base combine's rate becomes 1
        base_level_sub_rate_df  = level_sub_ppv_rate_df[['product_id','storage','ppv_combine','price_mean_rate_reverse']].merge(base_storage_sku_df[['product_id','ppv_combine']],on=['product_id','ppv_combine'])
        base_level_sub_rate_df = base_level_sub_rate_df.rename(columns={"price_mean_rate_reverse": 'storage_base_mean_rate'})
        level_sub_ppv_rate_df = level_sub_ppv_rate_df.merge(base_level_sub_rate_df[['product_id','storage','storage_base_mean_rate']],how='left',on=['product_id','storage'])
        level_sub_ppv_rate_df['storage_base_mean_rate'].fillna(1,inplace=True)
        level_sub_ppv_rate_df['price_mean_rate_reverse'] = level_sub_ppv_rate_df['price_mean_rate_reverse']/level_sub_ppv_rate_df['storage_base_mean_rate']

        # df = df.merge(level_sub_ppv_rate_df[['product_id','level_sub', 'ppv_combine','price_mean_rate_reverse','small_version_flag']],how='left',on=['product_id','level_sub','ppv_combine'])
        df = df.merge(level_sub_ppv_rate_df[['product_id','ppv_combine','price_mean_rate_reverse','small_version_flag']],how='left',on=['product_id','ppv_combine'])

        # Attach base-combine metadata (renamed to *_base to avoid clashes).
        product_base_ppv_df_tmp=self.product_base_ppv_df.copy()
        product_base_ppv_df_tmp = product_base_ppv_df_tmp.rename(columns={"ppv_combine":"ppv_combine_base"})

        df=df.merge(product_base_ppv_df_tmp,how='left',on=['product_id','storage'])
        # query_sku_sale_cnt_df = presto_processor.load_sql(query_sku_sale_cnt.format(start7_date,start7_date,end_date))



        # Predicted price of the base sku per (product, level, storage).
        base_product_df = df.merge(base_storage_sku_df[['product_sku_id', 'qty_sum']], on='product_sku_id')
        base_product_df=base_product_df.rename(columns={"saleprice":'saleprice_base'})

        tmp = df.merge(base_product_df[['product_id','product_level_id','storage','saleprice_base']],how='left',on=['product_id','product_level_id','storage'])
        # Fallback rate from rank scores when no reversed rate is available.
        tmp['score_rate'] = tmp['rank_score_price']*1.000000/tmp['rank_score_base']

        tmp['rate']=tmp['price_mean_rate_reverse'] # switched to the averaged rate
        # tmp['rate']=tmp['price_mean_rate_reverse'] #改为用平均值
        # Where the 98-rank bucket dominates, rebase the small_version rate on 98.
        tmp.loc[tmp.small_version_flag==0,'small_version_rank_rate']=tmp.loc[tmp.small_version_flag==0,'small_version_rank']*1.0000/98

        # NOTE(review): the next line is a no-op — where price_mean_rate_reverse
        # is NaN it assigns NaN back into 'rate'; the effective fallback is the
        # score_rate line that follows.
        tmp.loc[pd.isna(tmp.price_mean_rate_reverse),'rate'] = tmp.loc[pd.isna(tmp.price_mean_rate_reverse),'price_mean_rate_reverse']
        tmp.loc[pd.isna(tmp.rate),'rate'] = tmp.loc[pd.isna(tmp.rate),'score_rate']
        tmp['rate'] = tmp['rate']*tmp['small_version_rank_rate']
        tmp['ppv_rate_price'] = tmp['saleprice_base']*tmp['rate']
        #tmp.loc[tmp.product_id == 108423]
        # query_sku_sale_cnt_df = query_sku_sale_cnt_df.loc[query_sku_sale_cnt_df.cnt_rank==1]
        # Evaluation: sale-volume-weighted deviations of both prices against
        # the last 7 days of actual deal prices (price_0_7).
        tmp['diff_ppv_price'] =0
        tmp['diff_saleprice'] =0
        tmp.loc[tmp.sale_num_0_7>0,'diff_ppv_price']=(tmp.loc[tmp.sale_num_0_7>0,'ppv_rate_price']-tmp.loc[tmp.sale_num_0_7>0,'price_0_7'])*tmp.loc[tmp.sale_num_0_7>0,'sale_num_0_7']
        tmp.loc[tmp.sale_num_0_7>0,'diff_saleprice']=(tmp.loc[tmp.sale_num_0_7>0,'salepricebak']-tmp.loc[tmp.sale_num_0_7>0,'price_0_7'])*tmp.loc[tmp.sale_num_0_7>0,'sale_num_0_7']
        tmp.loc[tmp.sale_num_0_7>0,'sum_ppvprice']=tmp.loc[tmp.sale_num_0_7>0,'ppv_rate_price']*tmp.loc[tmp.sale_num_0_7>0,'sale_num_0_7']
        tmp.loc[tmp.sale_num_0_7>0,'sum_saleprice']=tmp.loc[tmp.sale_num_0_7>0,'salepricebak']*tmp.loc[tmp.sale_num_0_7>0,'sale_num_0_7']
        tmp['abs_diff_ppv_price'] = abs(tmp['diff_ppv_price'])
        tmp['abs_diff_saleprice'] = abs(tmp['diff_saleprice'])
        tmp.to_csv('/data/sunpengwei/tmp/df_ppv_rate_tablet_price_0825.csv', encoding='utf-8-sig')
        # level_sub_ppv_rate_df.to_csv('/data/sunpengwei/tmp/c2b_all_level_change_rate_0929.csv', encoding='utf-8-sig')
        # level_sub_ppv_rate_df.to_csv('/data/sunpengwei/tmp/product_ppv_avg_rate_tablet_df_1020.csv', encoding='utf-8-sig')

        # level_sub_ppv_rate_df = pd.read_csv('/data/sunpengwei/tmp/c2b_level_sub_ppv_rate_tablet_0824.csv', encoding='utf-8-sig')
        # self.save_process_all_level_sub_ppv_rate_fun(level_sub_ppv_rate_df)
        self.save_ppv_rate_price_detail_fun(tmp)
        self.save_product_ppv_avg_rate_fun(level_sub_ppv_rate_df)
        print(tmp.shape)


    def save_process_all_level_sub_ppv_rate_fun(self,ret_df):
        """Persist the per-level_sub reversed ppv rates into mysql.

        Stamps *ret_df* with the model date, clears the 30-day-old partition
        and today's partition of c2b_all_levelsub_reverse_ppv_rate_tablet,
        then bulk-inserts the rows (NaNs replaced with -1).

        :param ret_df: rate dataframe; mutated in place (create_date added).
        """
        ret_df['create_date'] = self.end_date

        # Column order is bound positionally to the INSERT column list below —
        # keep the two lists in sync.
        all_levelsub_rate_columns = [
            'create_date', 'product_id', 'product_name',
            'level_sub', 'ppv_combine', 'storage', 'rank_score',
            'price_mean_rate', 'price_mean_rate_reverse', 'sum_cnt', 'cishu',
            'total_cnt', 'product_level', 'first_cnt_rate', 'rank_score_first',
            'second_cnt_rate', 'rank_score_second', 'diff_score_per_rate',
            'reverse_rate', 'reverse_flag', 'sum_cnt_flag', 'base_rate', 'ppv_rate', 'final_rate',
            'product_base_mean_rate', 'storage_base_mean_rate', 'cnt_100', 'cnt_98',
            'small_version_flag', 'weight_price_mean_rate_reverse',
        ]

        # fillna returns a copy, so the caller's frame keeps its NaNs.
        payload = ret_df.fillna(-1)

        # Roll off the partition written 30 days ago.
        delete_rate_sql = """
        delete from c2b_all_levelsub_reverse_ppv_rate_tablet where create_date=DATE_FORMAT(date_sub(curdate(),interval 30 day),'%Y%m%d') 
        """
        mysql_prediction_processor.execute_sql(delete_rate_sql)

        # Remove today's partition so a re-run does not duplicate rows.
        delete_td_rate_sql = """
                delete from c2b_all_levelsub_reverse_ppv_rate_tablet where create_date='{}' 
                """.format(self.end_date)
        mysql_prediction_processor.execute_sql(delete_td_rate_sql)

        insert_sql = """
        INSERT INTO c2b_all_levelsub_reverse_ppv_rate_tablet(create_date,product_id, product_name,
           level_sub,ppv_combine,storage, rank_score,
           price_mean_rate, price_mean_rate_reverse, sum_cnt, cishu,
           total_cnt, product_level, first_cnt_rate, rank_score_first,
           second_cnt_rate, rank_score_second, diff_score_per_rate,
           reverse_rate,   reverse_flag, sum_cnt_flag, base_rate,ppv_rate, final_rate,
           product_base_mean_rate, storage_base_mean_rate, cnt_100, cnt_98,
           small_version_flag, weight_price_mean_rate_reverse)
        VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
        """
        rows = payload[all_levelsub_rate_columns].to_records(index=False).tolist()
        mysql_prediction_processor.execute_insert_sql(insert_sql, rows)
        logger.info('saving predict data to mysql done')
        mysql_price_num = payload.shape[0]
        logger.info('c2b_all_levelsub_reverse_ppv_rate_tablet datasize={}'.format(mysql_price_num))

    def save_ppv_rate_price_detail_fun(self, ret_df):
        """Persist the per-sku ppv-rate price evaluation detail to mysql.

        Adds bookkeeping columns (create_date/up_date/price_mean_avg_mean and
        product_category_id=6), clears the 30-day-old partition and today's
        partition of c2b_ppv_rate_price_tablet_detail for category 6, then
        bulk-inserts the rows (NaNs replaced with -1).

        :param ret_df: detail dataframe built in
            get_sale_price_to_ppv_rate_price_fun; mutated in place.
        """
        ret_df['create_date'] = self.end_date
        ret_df['up_date'] = self.end_date
        # The detail table stores the reversed mean rate under this name.
        ret_df['price_mean_avg_mean'] = ret_df['price_mean_rate_reverse']
        predict_data = ret_df
        # Column order is bound positionally to the INSERT column list below —
        # keep the two lists in sync.
        all_detail_price_columns = [ 'create_date','product_category_id','product_brand_id',
      'product_brand_name', 'level_template_id', 'product_id', 'product_name',
       'product_sku_id','product_sku_name',  'product_level_id', 'rank', 'price_3', 'price_2',
       'price_1',  'qty',  'up_date', 'storage', 'purchase_way',
       'guarantee', 'small_version',  'small_version_rank',
       'small_version_rank_rate', 'ppv_combine', 'level_sub',
       'rank_score_price', 'storage_rank',
       'purchase_way_rank', 'guarantee_rank',
        'sale_num_0_3', 'price_0_3',
       'price_8_14', 'sale_num_8_14',  'thisprice', 'price_mean_rate_reverse',
       'small_version_flag', 'ppv_combine_base', 'cnt',
       'ppv_base_rank', 'rank_score_base',
        'score_rate', 'product_level_name', 'price_mean_avg_mean','rate', 'saleprice_base','saleprice','ppv_rate_price','price_0_7', 'sale_num_0_7',
       'diff_ppv_price', 'diff_saleprice', 'sum_ppvprice', 'sum_saleprice',
       'abs_diff_ppv_price', 'abs_diff_saleprice']

        predict_data = predict_data.fillna(-1)
        predict_data['product_category_id']=6
        # Roll off the partition written 30 days ago (tablet category only).
        delete_rate_sql = """
           delete from c2b_ppv_rate_price_tablet_detail where create_date=DATE_FORMAT(date_sub(curdate(),interval 30 day),'%Y%m%d') and product_category_id=6
           """

        mysql_prediction_processor.execute_sql(delete_rate_sql)
        # Remove today's partition so a re-run does not duplicate rows.
        delete_td_rate_sql = """
                   delete from c2b_ppv_rate_price_tablet_detail where create_date='{}'  and product_category_id=6
                   """.format(self.end_date)

        mysql_prediction_processor.execute_sql(delete_td_rate_sql)
        insert_sql = """
           INSERT INTO c2b_ppv_rate_price_tablet_detail(create_date,product_category_id,product_brand_id,
      product_brand_name, level_template_id, product_id, product_name, 
       product_sku_id,product_sku_name,  product_level_id, rank, price_3, price_2,
       price_1,  qty,  up_date, storage, purchase_way,
       guarantee, small_version,  small_version_rank,
       small_version_rank_rate, ppv_combine, level_sub,
       rank_score_price, storage_rank,
       purchase_way_rank, guarantee_rank, 
        sale_num_0_3, price_0_3,
       price_8_14, sale_num_8_14,  thisprice, price_mean_rate_reverse,
       small_version_flag, ppv_combine_base, cnt,
       ppv_base_rank, rank_score_base, 
        score_rate, product_level_name, price_mean_avg_mean,rate, saleprice_base,saleprice,ppv_rate_price,price_0_7, sale_num_0_7,
       diff_ppv_price, diff_saleprice, sum_ppvprice, sum_saleprice,
       abs_diff_ppv_price, abs_diff_saleprice)
           VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,%s, %s, %s, %s, %s, %s,
            %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
           """
        # predict_data=predict_data.reset_index()
        mysql_prediction_processor.execute_insert_sql(insert_sql,
                                                      predict_data[all_detail_price_columns].to_records(
                                                          index=False).tolist())
        logger.info('saving predict data to mysql done')
        mysql_price_num = predict_data.shape[0]
        logger.info('c2b_ppv_rate_price_tablet_detail datasize={}'.format(mysql_price_num))

    def save_product_ppv_avg_rate_fun(self,ret_df):
        """Persist the product-level averaged ppv rates into mysql.

        Stamps *ret_df* with the model date, clears the 30-day-old partition
        and today's partition of c2b_tablet_product_ppv_avg_rate, then
        bulk-inserts the rows (NaNs replaced with -1).

        :param ret_df: averaged-rate dataframe keyed by product_id/ppv_combine;
            mutated in place (create_date column added).
        """
        ret_df['create_date']=self.end_date
        predict_data=ret_df
        # Bound positionally to the INSERT column list below: 'reverse_rate'
        # feeds weight_price_mean_rate_reverse and 'price_mean_rate_reverse'
        # feeds price_mean_avg_mean.
        # NOTE(review): that cross-naming looks deliberate but is worth
        # confirming against the table definition.
        all_levelsub_rate_columns=['create_date','product_id', 'product_name','ppv_combine', 'reverse_rate',
       'sum_cnt', 'price_mean_rate_reverse']

        predict_data = predict_data.fillna(-1)

        # Roll off the partition written 30 days ago.
        delete_rate_sql = """
        delete from c2b_tablet_product_ppv_avg_rate where create_date=DATE_FORMAT(date_sub(curdate(),interval 30 day),'%Y%m%d') 
        """

        mysql_prediction_processor.execute_sql(delete_rate_sql)
        # Remove today's partition so a re-run does not duplicate rows.
        delete_td_rate_sql = """
                delete from c2b_tablet_product_ppv_avg_rate where create_date='{}' 
                """.format(self.end_date)

        mysql_prediction_processor.execute_sql(delete_td_rate_sql)
        insert_sql = """
        INSERT INTO c2b_tablet_product_ppv_avg_rate(create_date,product_id, product_name,ppv_combine, weight_price_mean_rate_reverse,
       sum_cnt, price_mean_avg_mean)
        VALUES(%s, %s, %s, %s, %s, %s, %s)
        """
        mysql_prediction_processor.execute_insert_sql(insert_sql,
                                                      predict_data[all_levelsub_rate_columns].to_records(index=False).tolist())
        logger.info('saving predict data to mysql done')
        mysql_price_num = predict_data.shape[0]
        # Bug fix: previously logged the sibling table's name
        # (c2b_all_levelsub_reverse_ppv_rate_tablet) — a copy-paste slip.
        logger.info('c2b_tablet_product_ppv_avg_rate datasize={}'.format(mysql_price_num))


    def ppv_price_evluate_test(self):
        """Evaluate ppv-rate-derived prices against real sale prices.

        Loads the detail file written by get_sale_price_to_ppv_rate_price_fun,
        aggregates absolute price deviations per product and overall, dumps
        everything into an excel workbook and pushes the overall deviation
        summary to feishu.
        """
        detail_df = pd.read_csv('/data/sunpengwei/tmp/df_ppv_rate_tablet_price_0825.csv', encoding='utf-8-sig')

        # Tag legacy products so they can be filtered downstream.
        detail_df['old_product_flag'] = 0
        detail_df.loc[detail_df.product_id.isin(old_product_ids), 'old_product_flag'] = 1

        level_sub_ppv_rate_df = pd.read_csv('/data/sunpengwei/tmp/c2b_level_sub_ppv_rate_tablet_0824.csv', encoding='utf-8-sig')
        product_ppv_avg_rate_df = pd.read_csv('/data/sunpengwei/tmp/c2b_tablet_all_product_ppv_reverse_rate_1103.csv', encoding='utf-8-sig')

        df = detail_df
        keys = ['product_id', 'product_name']
        # abs_diff_ppv_price is summed only over this focus product list; the
        # inner merges below therefore restrict the whole table to these ids.
        focus_ids = [108420, 24505, 26470, 32411, 35819, 43598, 15145, 29186, 35818, 66429,
                     17853, 29185, 43514, 25399, 28925, 17882, 28924, 34572, 25471, 38508,
                     27969, 108422, 34571, 38541, 108423]
        sum_df = df.loc[df.product_id.isin(focus_ids)].groupby(by=keys)['abs_diff_ppv_price'].sum().reset_index()
        for col in ['sum_ppvprice', 'abs_diff_saleprice', 'sum_saleprice', 'sale_num_0_7']:
            sum_df = sum_df.merge(df.groupby(by=keys)[col].sum().reset_index(), on=keys)

        # Per-product relative deviation of each price against actual deals.
        sum_df['abs_ppv_diff_rate'] = sum_df['abs_diff_ppv_price'] / sum_df['sum_ppvprice']
        sum_df['abs_saleprice_diff_rate'] = sum_df['abs_diff_saleprice'] / sum_df['sum_saleprice']
        sum_df['diff_rate'] = sum_df['abs_saleprice_diff_rate'] - sum_df['abs_ppv_diff_rate']

        aa = sum_df.sort_values(by='sale_num_0_7', ascending=False)
        # Overall deviation rates across all focus products.
        aa1 = aa[['abs_diff_ppv_price', 'sum_ppvprice', 'abs_diff_saleprice', 'sum_saleprice', 'sale_num_0_7']].sum()
        aa1['ppv_rate'] = aa1['abs_diff_ppv_price'] / aa1['sum_ppvprice']
        aa1['saleprice_rate'] = aa1['abs_diff_saleprice'] / aa1['sum_saleprice']

        writer = pd.ExcelWriter("/data/sunpengwei/tmp/df_ppv_rate_price_tablet_1205_detail.xlsx")
        detail_df.to_excel(excel_writer=writer, sheet_name='detail_df')
        aa.to_excel(excel_writer=writer, sheet_name='型号汇总数据')
        aa1.to_excel(excel_writer=writer, sheet_name='汇总数据')
        level_sub_ppv_rate_df.to_excel(excel_writer=writer, sheet_name='型号大等级维度比率')
        product_ppv_avg_rate_df.to_excel(excel_writer=writer, sheet_name='型号维度比率')
        writer.save()
        writer.close()
        feishu_messager.send_message('平板苹果总体偏差率对比:{}'.format(aa1))
        logger.info('平板苹果总体偏差率对比:{}'.format(aa1))



    def get_all_combine_fun(self,):
        """Back-fill the product-level PPV-combination rate table.

        Steps:
          1. Left-join the observed per-product PPV average rates (CSV) onto
             the full combination table; unseen combinations get sum_cnt=0
             and a neutral rate of 1.0.
          2. Interpolate a rate for unseen combinations from the first/second
             rank-score anchors (linear in rank_score).
          3. Build composite dimension keys (storage/purchase/guarantee/
             networks concatenations) used by downstream grouping.
          4. Run the storage-dimension back-fill (reverse_mean_rate_fun) and
             persist the result CSV consumed by later pricing steps.

        Side effects only: reads one CSV, writes one CSV, logs completion.
        """
        level_sub_ppv_rate_df = pd.read_csv('/data/sunpengwei/tmp/c2b_tablet_product_ppv_avg_rate_1103.csv', encoding='utf-8-sig')
        combine_df = self.combine_df

        all_level_sub_combine_df = combine_df.copy()

        # Attach observed mean price rate per (product_id, ppv_combine);
        # combinations never sold fall back to sum_cnt=0 / rate=1.0 below.
        tmp = all_level_sub_combine_df.merge(level_sub_ppv_rate_df[['product_id','ppv_combine','price_mean_rate','sum_cnt']],how='left',on=['product_id','ppv_combine'])
        tmp['sum_cnt'] = tmp['sum_cnt'].fillna(0)
        tmp['level_sub'] = 'unknown'
        # tmp['total_cnt'] = tmp['total_cnt'].fillna(0)
        tmp['price_mean_rate'] = tmp['price_mean_rate'].fillna(1.00000)
        tmp['reverse_rate'] = tmp['price_mean_rate']
        tmp['price_mean_rate_reverse'] = tmp['price_mean_rate']
        df_ret=tmp

        # First/second rank anchors per (product_id, level_sub, storage);
        # used to linearly interpolate rates for combinations with no sales.
        leve_first_df,leve_second_df = self.first_second_base_fun(tmp)
        ret_df = df_ret.merge(leve_first_df[['product_id', 'level_sub', 'storage', 'first_cnt_rate', 'rank_score_first']],
                          how='left',
                          on=['product_id', 'level_sub', 'storage'])
        ret_df = ret_df.merge(leve_second_df[['product_id', 'level_sub', 'storage', 'second_cnt_rate', 'rank_score_second']],
                          how='left',
                          on=['product_id', 'level_sub', 'storage'])
        # When no second anchor exists, degrade to the first anchor with a
        # rank score one lower so the slope below stays well-defined.
        ret_df.loc[pd.isna(ret_df.second_cnt_rate),'second_cnt_rate']=ret_df.loc[pd.isna(ret_df.second_cnt_rate),'first_cnt_rate']
        ret_df.loc[pd.isna(ret_df.rank_score_second),'rank_score_second']=ret_df.loc[pd.isna(ret_df.rank_score_second),'rank_score_first']-1

        ret_df['diff_f_s_rate'] = ret_df['first_cnt_rate'] - ret_df['second_cnt_rate']
        ret_df['diff_f_s_score'] = ret_df['rank_score_first'] - ret_df['rank_score_second']

        # Rate change per unit of rank score (interpolation slope).
        ret_df['diff_score_per_rate'] = (ret_df['first_cnt_rate'] - ret_df['second_cnt_rate']) / (
                ret_df['rank_score_first'] - ret_df['rank_score_second'])
        if ret_df.loc[ret_df['diff_score_per_rate'] < 0, 'diff_score_per_rate'].shape[0]>0:
            # Negative slope means the anchors are inverted; nudge the rate of
            # the second-ranked row past the first anchor (+/- 0.01) to
            # restore monotonic ordering before zeroing the slope.
            ret_df.loc[(ret_df['diff_score_per_rate'] < 0)& (ret_df['rank_score_first']<ret_df['rank_score_second']) &(ret_df['rank_score']==ret_df['rank_score_second']), 'price_mean_rate'] = ret_df.loc[(ret_df['diff_score_per_rate'] < 0)& (ret_df['rank_score_first']<ret_df['rank_score_second'])&(ret_df['rank_score']==ret_df['rank_score_second']), 'first_cnt_rate']+0.01
            ret_df.loc[(ret_df['diff_score_per_rate'] < 0)& (ret_df['rank_score_first']>=ret_df['rank_score_second']) &(ret_df['rank_score']==ret_df['rank_score_second']), 'price_mean_rate'] = ret_df.loc[(ret_df['diff_score_per_rate'] < 0)& (ret_df['rank_score_first']>=ret_df['rank_score_second'])&(ret_df['rank_score']==ret_df['rank_score_second']), 'first_cnt_rate']-0.01

            ret_df.loc[ret_df['diff_score_per_rate'] < 0, 'diff_score_per_rate'] = 0.0000000000000000000000000001

        # Degenerate anchors (equal rates or equal scores): use an epsilon
        # slope so reverse_rate below collapses to ~1.
        ret_df.loc[ret_df['diff_f_s_rate'] == 0, 'diff_score_per_rate'] = 0.0000000000000000000000000001
        ret_df.loc[ret_df['diff_f_s_score'] == 0, 'diff_score_per_rate'] = 0.0000000000000000000000000001

        # Linear interpolation from the first anchor along rank_score.
        ret_df['reverse_rate'] = 1 + ret_df['diff_score_per_rate'] * (ret_df['rank_score'] - ret_df['rank_score_first'])
        ret_df.loc[ret_df.sum_cnt==0,'price_mean_rate']=ret_df.loc[ret_df.sum_cnt==0,'reverse_rate']
        ret_df.loc[ret_df.sum_cnt==0,'price_mean_rate_reverse']=ret_df.loc[ret_df.sum_cnt==0,'reverse_rate']
        # Composite dimension keys for downstream grouping.
        # NOTE(review): storage_purchase/storage_purchase_networks and
        # storage_guarantee/storage_guarantee_networks are built identically;
        # looks like intentional aliases, but confirm against consumers.
        ret_df['storage_purchase']= ret_df['storage'] + '_' + ret_df['purchase_way'].astype(str) + '_' + ret_df['networks'].astype(str)
        ret_df['storage_guarantee']= ret_df['storage'] + '_' + ret_df['guarantee'].astype(str) + '_' + ret_df['networks'].astype(str)

        ret_df['purchase_way_guarantee']= ret_df['purchase_way'] + '_' + ret_df['guarantee'].astype(str) + '_' + ret_df['networks'].astype(str)
        ret_df['storage_guarantee_networks']= ret_df['storage'] + '_' + ret_df['guarantee'].astype(str) + '_' + ret_df['networks'].astype(str)
        ret_df['storage_purchase_networks']= ret_df['storage'] + '_' + ret_df['purchase_way'].astype(str) + '_' + ret_df['networks'].astype(str)
        ret_df['storage_purchase_guarantee']= ret_df['storage'] + '_' + ret_df['purchase_way'].astype(str) + '_' + ret_df['guarantee'].astype(str)
        # FIX: the last component was ret_df['guarantee'] duplicated; the
        # column name promises networks as the third component, matching the
        # pattern of every sibling key above.
        ret_df['purchase_guarantee_networks']=  ret_df['purchase_way'].astype(str) + '_' + ret_df['guarantee'].astype(str) + '_' + ret_df['networks'].astype(str)

        # Back-fill at the storage (memory) dimension.
        ret_df['reverse_flag']=0
        ret_df['reverse_rate']=ret_df['price_mean_rate_reverse']
        ret_df['sum_cnt_flag']=0
        ret_df.loc[ret_df.sum_cnt>0,'sum_cnt_flag']=1
        # ret_df = ret_df.loc[(ret_df.product_id==25827)&(ret_df.level_sub=='A')]
        # ret_df = ret_df.loc[(ret_df.product_id==25827)]
        resDf = pd.DataFrame(columns=ret_df.columns.tolist())
        grouped = ret_df.groupby(by=['product_id', 'product_name','storage'])
        storage_resDf = self.reverse_mean_rate_fun(grouped,resDf)
        storage_resDf['product_id'] = storage_resDf['product_id'].astype(np.int64)
        storage_resDf['price_mean_rate_reverse'] = storage_resDf['reverse_rate']
        # Flag each product's highest-volume purchase_way_guarantee bucket
        # (sum_rank==1) and join that flag back onto the detail rows.
        storage_rank_resDf = storage_resDf[ ['product_id', 'purchase_way_guarantee', 'sum_cnt']].groupby(
            by=['product_id', 'purchase_way_guarantee']).sum().reset_index()

        storage_rank_resDf['sum_rank'] = storage_rank_resDf['sum_cnt'].groupby(storage_rank_resDf['product_id']).rank(
            ascending=False, method='first')
        storage_rank_resDf = storage_rank_resDf.loc[storage_rank_resDf.sum_rank==1]
        storage_resDf = storage_resDf.merge(storage_rank_resDf[['product_id', 'purchase_way_guarantee','sum_rank']],how='left',on=['product_id','purchase_way_guarantee'])
        # storage_resDf
        storage_resDf.to_csv('/data/sunpengwei/tmp/c2b_tablet_all_product_ppv_reverse_rate_1103.csv', encoding='utf-8-sig')


        logger.info('补数完成 总数据量：{}'.format(storage_resDf.shape))

    def launch_model(self):
        """Run the full PPV-rate modelling pipeline end to end.

        Side effects only: each step persists its output (CSV/Excel) or
        sends notifications; nothing is returned to the caller. The unused
        intermediate bindings (ret_df, price_df) from the original were
        dropped — only main_fun's frame feeds a later step.
        """
        df44 = self.main_fun()
        # Derive big-level (level_sub) PPV combination rates from the base frame.
        self.process_level_sub_ppv_rate_fun(df44)
        self.avg_weight_ppv_rate_fun()
        self.get_all_combine_fun()
        # Convert sale prices into PPV-rate prices as of the model end date.
        self.get_sale_price_to_ppv_rate_price_fun(self.end_date)
        self.ppv_price_evluate_test()

if __name__ == '__main__':
    # Build the model for today's date and run the whole pipeline.
    # To backfill a specific date, pass model_date='YYYY-MM-DD' to the ctor.
    C2bppvrateModel().launch_model()

