# #!/usr/bin/env python
# # -*- encoding: utf-8 -*-
# '''
# @File    :   level_rate_config.py
# @Contact :   pengwei.sun@aihuishou.com
# @License :   (C)Copyright aihuishou
#
# # @Modify Time      @Author       @Version    @Description
# ------------      -----------   --------    -----------
# 2021-01-10 16:48   pengwei.sun      1.0         None
# '''
#
# import datetime
# import pandas as pd
# from sklearn.preprocessing import OneHotEncoder
# import numpy as np
# import os,sys
# sys.path.append(os.getcwd())
# from src.utils.config import logger
# from src.utils.dingding_message import dingding_messager
# from src.utils.db_processor import postgre_processor,mysql_prediction_processor
# from src.utils.util import get_today, check_date_str, format_date_string,format_date
# from src.mobile.levelrate.model_template_level2_rate import process_template_level_rate_final
# import time
# import pickle
#
# """
# 取数逻辑 ：
# # 按天汇总sku level的价格：取平均数
#  --b:筛选出具有S,A,B等级，并且等级数存在数大于1的sku
#  -- and a.product_id  in (24347,24348,26464,26536,29261,29260,34576,35106,34575)
# """
# SETTLE_DATA = """
# select a.product_id,a.mapping_product_sku_id as product_sku_id,CONCAT(a.product_id,'_',a.mapping_product_sku_id) as product_sku,
# a.mapping_product_level_id as product_level_id,a.mapping_product_level_name as product_level_name,substring(a.mapping_product_level_name,1,1) as level_sub,
# avg(a.real_sell_price) as price,
# COUNT(*) AS sale_num,
# count(distinct shop_out_date) as cnt_date
#  from product_price_info  a
# inner join (
# 	select product_id,mapping_product_sku_id,
# 	count(distinct mapping_product_level_id) AS cnt,
# 	count(distinct product_level_id_2) AS cnt1
# 	from
# 	(
# 	select distinct product_id,mapping_product_sku_id ,mapping_product_level_name ,
# 	mapping_product_level_id,
# 	case when substring(mapping_product_level_name,1,1) in ('S','A','B')
# 	THEN mapping_product_level_id ELSE null end product_level_id_2
# 	 from product_price_info
# 	where product_brand_id in (52,9,4,7,16,24,484,184,357,661)  and shop_out_date between DATE_FORMAT(date_sub(curdate(),interval 7 day),'%Y%m%d') and DATE_FORMAT(date_sub(curdate(),interval 0 day),'%Y%m%d')  and product_category_id=1 and mapping_product_level_id>0
# 	) a
# 	GROUP BY 1,2
# 	having  count(distinct product_level_id_2)>0 and count(distinct mapping_product_level_id)>=2
# )b
# on  a.product_id=b.product_id and a.mapping_product_sku_id=b.mapping_product_sku_id
#
# # where a.product_brand_id in (52,9,4,7,16,24,484,184,357,661)  and a.shop_out_date between DATE_FORMAT(date_sub(curdate(),interval 7 day),'%Y%m%d') and DATE_FORMAT(date_sub(curdate(),interval 0 day),'%Y%m%d') and a.product_category_id=1
#
# GROUP BY 1,2,3,4,5,6
# order by 1,2,3,4
# """
#
# SETTLE_DATA_2 = """
# select a.product_id,a.mapping_product_sku_id as product_sku_id,CONCAT(a.product_id,'_',a.mapping_product_sku_id) as product_sku,
# a.mapping_product_level_id as product_level_id,a.mapping_product_level_name as product_level_name,substring(a.mapping_product_level_name,1,1) as level_sub,
# avg(a.real_sell_price) as price,
# COUNT(*) AS sale_num,
# count(distinct shop_out_date) as cnt_date
#  from product_price_info  a
# inner join (
# 	select product_id,mapping_product_sku_id,
# 	count(distinct mapping_product_level_id) AS cnt,
# 	count(distinct product_level_id_2) AS cnt1
# 	from
# 	(
# 	select distinct product_id,mapping_product_sku_id ,mapping_product_level_name ,
# 	mapping_product_level_id,
# 	case when substring(mapping_product_level_name,1,1) in ('S','A','B')
# 	THEN mapping_product_level_id ELSE null end product_level_id_2
# 	 from product_price_info
# 	where product_brand_id in (52,9,4,7,16,24,484,184,357,661)
# 	and shop_out_date between DATE_FORMAT(date_sub(curdate(),interval 14 day),'%Y%m%d')  and DATE_FORMAT(date_sub(curdate(),interval 8 day),'%Y%m%d')
# 	and product_category_id=1  and mapping_product_level_id>0
# 	) a
# 	GROUP BY 1,2
# 	having  count(distinct product_level_id_2)>0 and count(distinct mapping_product_level_id)>=2
# )b
# on  a.product_id=b.product_id and a.mapping_product_sku_id=b.mapping_product_sku_id
#
# where a.product_brand_id in (52,9,4,7,16,24,484,184,357,661)  and a.shop_out_date between DATE_FORMAT(date_sub(curdate(),interval 14 day),'%Y%m%d')  and DATE_FORMAT(date_sub(curdate(),interval 8 day),'%Y%m%d')
#  and a.product_category_id=1
#
# GROUP BY 1,2,3,4,5,6
# order by 1,2,3,4
# """
#
# SETTLE_DATA_3 = """
# select a.product_id,a.mapping_product_sku_id as product_sku_id,CONCAT(a.product_id,'_',a.mapping_product_sku_id) as product_sku,
# a.mapping_product_level_id as product_level_id,a.mapping_product_level_name as product_level_name,substring(a.mapping_product_level_name,1,1) as level_sub,
# avg(a.real_sell_price) as price,
# COUNT(*) AS sale_num,
# count(distinct shop_out_date) as cnt_date
#  from product_price_info  a
# inner join (
# 	select product_id,mapping_product_sku_id,
# 	count(distinct mapping_product_level_id) AS cnt,
# 	count(distinct product_level_id_2) AS cnt1
# 	from
# 	(
# 	select distinct product_id,mapping_product_sku_id ,mapping_product_level_name ,
# 	mapping_product_level_id,
# 	case when substring(mapping_product_level_name,1,1) in ('S','A','B')
# 	THEN mapping_product_level_id ELSE null end product_level_id_2
# 	 from product_price_info
# 	where product_brand_id in (52,9,4,7,16,24,484,184,357,661)  and shop_out_date between DATE_FORMAT(date_sub(curdate(),interval 21 day),'%Y%m%d')  and DATE_FORMAT(date_sub(curdate(),interval 15 day),'%Y%m%d')
# 	  and product_category_id=1  and mapping_product_level_id>0
# 	) a
# 	GROUP BY 1,2
# 	having  count(distinct product_level_id_2)>0 and count(distinct mapping_product_level_id)>=2
# )b
# on  a.product_id=b.product_id and a.mapping_product_sku_id=b.mapping_product_sku_id
#
# where a.product_brand_id in (52,9,4,7,16,24,484,184,357,661)
# and a.shop_out_date between DATE_FORMAT(date_sub(curdate(),interval 21 day),'%Y%m%d')  and DATE_FORMAT(date_sub(curdate(),interval 15 day),'%Y%m%d')
#  and a.product_category_id=1
#
# GROUP BY 1,2,3,4,5,6
# order by 1,2,3,4
# """
#
# #--and a.product_id=32291 S,A,B基准价格
# BASE_PRICE_DATA = """
# SELECT level_name,price as base_price FROM c_level_base_price
# """
# #等级预先排序
# BASE_LEVEL_RANK="""
# SELECT
# product_level_id,product_level_order_rank
# FROM dim.dim_product_level
# WHERE product_category_id = 1 AND is_product_level_active_flag = 1
# """
#
# #型号的全量等级
# product_level_sql="""
# select distinct a.*,level_template.*,level_template.product_level_id as level_id,
# level_template.secondary_level_template_id ||'_' ||a.product_brand_id as template_brand,
# level.product_level_name from
# (
# SELECT distinct
#     dp.product_brand_id,
#     dp.product_name,
#     dp.product_id
# FROM dim.dim_product dp JOIN dim.dim_product_sku_channel_mapping dps ON dp.product_id = dps.product_id  and dps.business_channel_id=1
# WHERE dp.product_category_id = 1 and dp.product_id_status_id=2 and dp.product_brand_id in (52,9,4,7,16,24,484,184,357,661)
# ) a
# left join
# ods.ods_opt_foundation_secondary_product_template_mapping otpm
# on a.product_id=otpm.product_id
# left join ods.ods_opt_foundation_secondary_product_level_template_level level_template
# on otpm.secondary_level_template_id=level_template.secondary_level_template_id
# left join dim.dim_product_level level
# on level_template.product_level_id=level.product_level_id
# where  otpm.business_channel = 1
# """
#
# predict_price_sql="""
# select a.*,substring(a.product_level_name,1,1) as level_sub,CONCAT(a.product_key,'_',a.product_sku_key) as product_sku,
# CONCAT(b.level_template_id,'_',b.product_brand_id) as property_template_brand,
# b.product_id,
# b.rank,b.price_3,b.price_2,b.price_1,b.saleprice,b.qty,
# a.forecast_reference_price as bi_price,
# case when c.avg_sell_price is null then  b.saleprice else c.avg_sell_price end as avg_sell_price,
# case when c.min_sell_price is null then  0 else c.min_sell_price end as min_sell_price,
# case when c.sale_num is null then  b.qty else c.sale_num end as sale_num
# from  price_prediction_train a
# inner join `imp_mysql_base_pricedata_category_phone`  b
# on a.`product_sku_key`  =b.product_sku_id and a.product_level_key=b.product_level_id
# left join (
#         select a.product_id,a.mapping_product_sku_id,CONCAT(a.product_id,'_',a.mapping_product_sku_id) as product_sku,
#         a.mapping_product_level_id as product_level_id,a.mapping_product_level_name as product_level_name,substring(a.mapping_product_level_name,1,1) as level_sub,
#         avg(a.real_sell_price) as avg_sell_price,
#         min(a.real_sell_price) as min_sell_price,
#         COUNT(*) AS sale_num,
#         count(distinct shop_out_date) as cnt_date
#          from product_price_info  a
#         inner join (
#             select product_id,mapping_product_sku_id,
#             count(distinct product_level_id) AS cnt,
#             count(distinct product_level_id_2) AS cnt1
#             from
#             (
#             select distinct product_id,mapping_product_sku_id,mapping_product_level_name as product_level_name,
#             mapping_product_level_id as product_level_id,
#             case when substring(mapping_product_level_name,1,1) in ('S','A','B')
#             THEN product_level_id ELSE null end product_level_id_2
#              from product_price_info
#             where product_brand_id in (52,9,4,7,16,24,484,184,357,661)  and shop_out_date>=DATE_FORMAT(date_sub(curdate(),interval 29 day),'%Y%m%d')  and product_category_id=1  and mapping_product_level_id>0
#             ) a
#             GROUP BY 1,2
#             having  count(distinct product_level_id_2)>0 and count(distinct product_level_id)>=2
#         )b
#         on  a.product_id=b.product_id and a.mapping_product_sku_id=b.mapping_product_sku_id
#
#         where a.product_brand_id in (52,9,4,7,16,24,484,184,357,661)
#         and a.shop_out_date>=DATE_FORMAT(date_sub(curdate(),interval 29 day),'%Y%m%d')
#         and a.product_category_id=1
#         GROUP BY 1,2,3,4,5,6
#         order by 1,2,3,4
#
# )c
# on a.`product_sku_key`  =c.mapping_product_sku_id and a.product_level_key=c.product_level_id
# where a.product_brand_id in (52,9,4,7,16,24,484,184,357,661)
# """
#
# product_avg_level_rate_sql = """
# select product_id,level_id,avg(mean_rate) as mean_rate,
# avg(template_mean_rate) as template_mean_rate
#  from
# product_sku2_level_rate_final_brand
# where date>DATE_FORMAT(date_sub(curdate(),interval 7 day),'%Y%m%d')
# and mean_rate>0
# group by 1,2
# having avg(mean_rate)>0
# """
#
# INSERT_LEVEL_RATE_PRICE_SQL = """
# INSERT INTO price_prediction_level2_rate_price_brand(date, product_sku_key, product_sku_name, product_level_key,
# product_level_name, product_key, product_name, product_category_id, product_category_name, product_brand_id,
# product_brand_name, predict_origin, forecast_reference_price,is_new_product,POLY_pred_price,
# rank,price_3,price_2,price_1,saleprice,qty,mean_rate,score,base_price,min_sale_price,sale_num,level_rate_price,avg_predict_basep_price,
# predict_level_price_rate,saleprice_level_price_rate,mean_price_rate,process_price,flag,diff_rate)
# VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,  %s,%s, %s, %s, %s, %s, %s, %s,  %s,%s, %s, %s, %s , %s,%s,
#  %s, %s, %s, %s, %s, %s, %s, %s)
# """
#
# class productLevelRate:
#     def __init__(self,sql):
#
#         # sql = SETTLE_DATA
#         #获取型号对应的skulevel聚合后的数据
#         if sql is None:
#             self.query_sql=SETTLE_DATA
#         else:
#             self.query_sql=sql
#
#         self.data = mysql_prediction_processor.load_sql(self.query_sql)
#         #基础等级价格
#         self.base_price_df = mysql_prediction_processor.load_sql(BASE_PRICE_DATA)
#         self.level_rank_df =postgre_processor.load_sql(BASE_LEVEL_RANK)
#         self.product_level_df =postgre_processor.load_sql(product_level_sql)
#
#     def fun1(self):
#         #根据售卖数量和等级计算每个等级得分，以便后续sku 筛选:高等级的等级id较小，所以采取1000-等级id，使得，相同数量的情况下，高等级的优先选择
#         self.data['score']=1000-self.data.product_level_id+self.data.sale_num*1000
#         #首先筛选出sku中 有S,A,B等级数据
#         data_sku_base_level_df=self.data.loc[self.data.level_sub.isin(['S','A','B'])]
#
#         # a= data_sku_base_level_df.groupby(by=['product_id','product_sku_id'], as_index=False)['score'].max()
#         # index= data_sku_base_level_df.groupby(by=['product_id','product_sku_id'])['score'].idxmax()
#
#         #找出数据中score得分最高的level
#         base_level=data_sku_base_level_df.loc[data_sku_base_level_df.sort_values(['product_sku','score']).drop_duplicates('product_sku',keep='last').index]
#
#         base_level.rename(columns={'price':'price_base','score':'score_base','sale_num':'base_sale_num'},inplace=True)
#
#         # base_level
#         #统计出满足条件的型号下 sku的数量
#         base_product_sku_size=data_sku_base_level_df[['product_id','product_sku_id']].groupby('product_id').agg({'product_sku_id': pd.Series.nunique}).reset_index()
#         base_product_sku_size.rename(columns={'product_sku_id':'product_sku_num'},inplace=True)
#
#
#         #给筛选出的基础数据拼接上其对应的基础价格
#         base_level=base_level.merge(self.base_price_df,left_on=['product_level_name'],right_on=['level_name'])
#         base_base_level_sale_num=base_level.groupby(by=['product_id'])['base_sale_num'].agg({'base_level_sale_sum':'sum'})
#         base_level=base_level.merge(base_base_level_sale_num,on='product_id')
#         base_level['sku_weight']=base_level.base_sale_num/base_level.base_level_sale_sum
#
#         # 给数据拼接上他们各自的基础信息
#         df_all = self.data.merge(base_level[['product_sku', 'price_base', 'score_base', 'base_price']], on=['product_sku'])
#
#         base_product_sale_num=df_all.groupby('product_id')['sale_num'].agg({'product_sale_sum':'sum' }).reset_index()
#         base_product_info=base_product_sku_size.merge(base_product_sale_num,left_on=['product_id'],right_on=['product_id'])
#         return base_level,base_product_info
#
#     def fun2(self,base_level,base_product_info):
#         # 给数据拼接上他们各自的基础信息
#         df_all = self.data.merge(base_level[['product_sku', 'price_base', 'score_base', 'base_price','sku_weight','base_level_sale_sum','base_sale_num']],
#                                  on=['product_sku'])
#
#         #计算等级比率，并还原成基础价格
#         df_all['level_rate']=df_all.price/df_all.price_base
#         df_all['to_base_price']=df_all.base_price*df_all.level_rate
#         df_all=df_all.loc[df_all.product_level_id > 0]
#         df_all['product_level_id']=df_all['product_level_id'].astype(int)
#         #型号维度的汇总
#         df_all['product_level_id_name']=df_all['product_id'].astype(str)+'_'+df_all['product_level_id'].astype(str)+'_'+df_all['product_level_name']
#
#         #求出汇总后的数据的，平均价格，平均售卖数量等信息
#         result1=df_all.groupby(by='product_level_id_name')['to_base_price'].agg({'price_mean':'mean','price_max':'max','price_min':'min','price_media':'median'}).reset_index()#这里
#         #型号下 各个等级的 售卖数量相关信息
#         cnt_res=df_all.groupby(by='product_level_id_name')['sale_num'].agg({'sale_sum':'sum','sku_cnt':'count'}).reset_index()#这里
#
#         df_all=df_all.merge(cnt_res,on='product_level_id_name')
#
#         #等级价格汇总时 ，是以此等级售卖出的数量占此等级的比例为权重，进行加权计算
#         df_all['level_weight']=df_all['sale_num']/df_all['sale_sum']
#         df_all['sku_base_level_weight']=df_all['base_sale_num']/df_all['base_level_sale_sum']
#         df_all['zh_level_weight']=df_all['sku_base_level_weight']*df_all['level_weight']
#         # df_all['zh_level_weight'](by='product_level_id_name').agg({'price_mean':'sum'})
#
#         weight_res = df_all.groupby(by='product_level_id_name')['zh_level_weight'].agg(
#             {'weight_sum': 'sum'}).reset_index()  # 这里
#
#
#         df_all=df_all.merge(weight_res[['product_level_id_name','weight_sum']],on=['product_level_id_name'])
#         df_all['weight']=df_all['zh_level_weight']/df_all.weight_sum
#
#         df_all['to_base_price_weight']=df_all['weight']*df_all['to_base_price']
#         result=df_all.groupby(by='product_level_id_name')['to_base_price_weight'].agg({'price_mean':'sum','price_max':'sum','price_min':'sum','price_media':'sum'}).reset_index()#这里
#
#         resulttmp=result.merge(result1,on='product_level_id_name')
#
#         # result=result.sort_values('price_media',ascending=False).reset_index()
#         result['product_id']=result['product_level_id_name'].str.split('_',expand=True)[0].astype(np.int64)
#         result['level_id']=result['product_level_id_name'].str.split('_',expand=True)[1].astype(np.int64)
#
#         result=result.merge(base_product_info,how='inner',left_on=['product_id'],right_on=['product_id'])
#         result=result.merge(self.level_rank_df,left_on=['level_id'],right_on=['product_level_id'])
#
#         result_t=result.merge(cnt_res,on='product_level_id_name')
#
#         #求出汇总后的等级比率和sku占比
#         result_t['rate']=result_t.price_media/10000
#
#         result_t['sku_rate']=result_t.sku_cnt/result_t.product_sku_num
#
#
#
#         result_t=result_t.sort_values('product_level_order_rank',ascending=True)
#         result_t['rate_f']=result_t['rate']
#         result_t=result_t.reset_index()
#         # result_t_tmp=result_t
#         # size=result_t.shape[0]
#         resDf = pd.DataFrame(columns=result_t.columns.tolist())
#         grouped = result_t.groupby('product_id')
#         for name, group in grouped:
#             group.reset_index(drop=True, inplace=True)
#             group=group.sort_values('product_level_order_rank', ascending=True)
#             size = group.shape[0]
#             for index in range(size):
#                 result_t=group
#                 if index==0 :
#                     if result_t.loc[index, 'sku_rate']>0.3:
#                         continue
#                     for next in range(1,size,1):
#                         if result_t.loc[next, 'sku_rate']>0.3:
#                             break
#                     #s=1.34 a=0.9 满足下列条件的为 next=4 ,针对此类差距太大的特殊处理，修改阀值，以避免此种情况
#                     if (next-index)>=2:
#                         if (result_t.loc[index, 'rate_f']-result_t.loc[next, 'rate_f'])/(next-index)>0.05:
#                             # for i in range(next-1,index,-1):
#                             result_t.loc[index, 'rate_f']=result_t.loc[next, 'rate_f']+0.05*(next-index)
#                     else:
#                         if (result_t.loc[index, 'rate_f']-result_t.loc[next, 'rate_f'])/(next-index)>0.1:
#                             # for i in range(next-1,index,-1):
#                             result_t.loc[index, 'rate_f']=result_t.loc[next, 'rate_f']+0.1*(next-index)
#
#                     # if result_t.loc[index,'rate_f']>1.2:
#                         # result_t.loc[index, 'rate_f']=1.2
#                     continue
#
#                 # if index==size-1:
#                 #     break
#                 if (result_t.loc[index,'product_level_id_name'].find('S')>0 \
#                     or result_t.loc[index,'product_level_id_name'].find('A')>0 \
#                     or result_t.loc[index, 'product_level_id_name'].find('B') > 0):
#
#                     if result_t.loc[index, 'rate_f']-result_t.loc[index-1,'rate_f']>=0 and result_t.loc[index, 'sku_rate']<0.3:
#                         result_t.loc[index, 'rate_f'] = result_t.loc[index-1, 'rate_f']-0.01
#                     if result_t.loc[index, 'rate_f']-result_t.loc[index-1,'rate_f']>=0 and result_t.loc[index, 'sku_rate']>=0.3 :
#                         rate_flag=result_t.loc[index - 1, 'rate_f']
#                         flag_index=index - 1
#                         x=index-1
#                         for x in range(index-1, 0, -1):
#                             if result_t.loc[x, 'sku_rate']>=0.3:
#                                 rate_flag=result_t.loc[x, 'rate_f']
#                                 flag_index=x
#                                 break
#                         if index-x>1 and rate_flag-result_t.loc[index, 'rate_f']>0:
#                             rate_bulk=(rate_flag - result_t.loc[index, 'rate_f'])/(index-x)
#                             for bulk in range(flag_index+1,index-1,1):
#                                 result_t.loc[bulk, 'rate_f'] = result_t.loc[bulk + 1, 'rate_f'] - rate_bulk
#                     continue
#                 if result_t.loc[index, 'rate_f']-result_t.loc[index-1,'rate_f']>0 \
#                     and result_t.loc[index, 'sku_rate']<0.3:
#                     result_t.loc[index, 'rate_f'] = result_t.loc[index - 1, 'rate_f'] - 0.02
#             resDf = resDf.append(group.copy())
#         resDf['product_id']=resDf.product_id.astype(np.int64)
#         resDf['level_id']=resDf.level_id.astype(np.int64)
#         product_level_df=self.product_level_df.merge(resDf,how='left',left_on=['product_id','level_id'],right_on=['product_id','level_id'])
#
#         product_level_df=product_level_df.merge(self.level_rank_df,left_on=['level_id'],right_on=['product_level_id'])
#         product_level_df=product_level_df.sort_values(['product_id','product_level_order_rank_y'],ascending=True)
#
#         print(1)
#         return product_level_df
#
# def fun3(process_df,rate_f='mean_rate',w='w_mean'):
#     process_df=process_df.reset_index()
#     process_df[rate_f]=process_df[rate_f].fillna(-1)
#     resDf = pd.DataFrame(columns=process_df.columns.tolist())
#
#     grouped = process_df.groupby('product_id')
#     for name, group in grouped:
#         size=group.shape[0]
#         group.reset_index(drop=True, inplace=True)
#         for i in range(size):
#             if group.loc[i,rate_f]==-1:
#                 if i==0:
#                     for next in range(i+1,size,1):
#                         if group.loc[next, rate_f]>0:
#                             break
#                     group.loc[i, rate_f]=group.loc[next, rate_f]+next*0.03
#                     continue
#                 if i>0:
#                     next=size
#                     for next in range(i+1,size,1):
#                         if group.loc[next, rate_f]>0 and group.loc[next, w]>=0.5:
#                             break
#                     if next<size-1:
#                         group.loc[i, rate_f]=group.loc[i-1, rate_f]-(group.loc[i-1, rate_f]-group.loc[next, rate_f])/(next-i+1)
#
#                     else:
#                         varience = (group.loc[i - 1, rate_f] - 0.1) / (next - i + 1)
#                         group.loc[i, rate_f] = group.loc[i - 1, rate_f]-0.05-varience
#                     continue
#             else:
#                 continue
#         resDf = resDf.append(group.copy())
#     print(1)
#     return resDf
#
#
# def fun33(process_df,rate_f='mean_rate',w='w_mean',template_rate='template_mean_rate'):
#     process_df=process_df.reset_index()
#     process_df[rate_f]=process_df[rate_f].fillna(-1)
#     resDf = pd.DataFrame(columns=process_df.columns.tolist())
#
#     grouped = process_df.groupby('product_id')
#     for name, group in grouped:
#         size=group.shape[0]
#         group.reset_index(drop=True, inplace=True)
#         for i in range(size):
#             if group.loc[i,rate_f]==-1:
#                 if i==0:
#
#                     for next in range(i+1,size,1):
#                         if group.loc[next, rate_f]>0:
#                             break
#                     tem_rate=group.loc[i, template_rate]
#                     if tem_rate>group.loc[next, rate_f]:
#                         group.loc[i, rate_f]=group.loc[i, template_rate]
#                     else:
#                         group.loc[i, rate_f]=group.loc[next, rate_f]+next*0.03
#                     continue
#                 if i>0:
#                     next=size
#                     for next in range(i+1,size,1):
#                         if group.loc[next, rate_f]>0 and group.loc[next, w]>=0.5:
#                             break
#                     if next<size-1:
#                         tem_rate = group.loc[i, template_rate]
#                         if tem_rate > group.loc[next, rate_f] and tem_rate<=group.loc[i-1, rate_f]:
#                             group.loc[i, rate_f] = group.loc[i, template_rate]
#                         else:
#                             group.loc[i, rate_f]=group.loc[i-1, rate_f]-(group.loc[i-1, rate_f]-group.loc[next, rate_f])/(next-i+1)
#
#                     else:
#                         tem_rate = group.loc[i, template_rate]
#                         if tem_rate <group.loc[i-1, rate_f]:
#                             group.loc[i, rate_f] = group.loc[i, template_rate]
#                         else:
#                             # varience = (group.loc[i - 1, rate_f] - 0.1) / (next - i + 1)
#                             # group.loc[i, rate_f] = group.loc[i - 1, rate_f]-0.05-varience
#                             rate=max(min(0.8,group.loc[i, template_rate]/group.loc[i-1, template_rate]),1)
#                             group.loc[i, rate_f] = group.loc[i - 1, rate_f]*rate
#                     continue
#             else:
#                 continue
#         resDf = resDf.append(group.copy())
#     print(1)
#     return resDf
#
#
# def save_complement_data(predict_data):
#     """
#     保存补全数据
#     :return:
#     """
#     if predict_data is not None:
#         logger.info('saving predict data...')
#         dt = datetime.datetime.strptime(time.strftime('%Y-%m-%d'), '%Y-%m-%d') + datetime.timedelta(days=1)
#         predict_data['date'] = dt.strftime('%Y-%m-%d')
#         predict_data=predict_data.fillna(-1)
#         predict_data['predict_level_price_rate']=predict_data['predict_level_price_rate'].apply(lambda x:round(x,4))
#         predict_data['saleprice_level_price_rate']=predict_data['saleprice_level_price_rate'].apply(lambda x:round(x,4))
#         predict_data['mean_price_rate']=predict_data['mean_price_rate'].apply(lambda x:round(x,4))
#         predict_data['diff_rate']=predict_data['diff_rate'].apply(lambda x:round(x,4))
#         predict_data['saleprice_level_price_rate']=predict_data['saleprice_level_price_rate'].apply(lambda x:round(x,4))
#         predict_data['process_price']=predict_data['process_price'].apply(lambda x:round(x,4))
#         predict_data['POLY_pred_price']=predict_data['process_price'].apply(lambda x:round(x,0))
#         predict_data['diff_rate'][np.isinf(predict_data['diff_rate'])] = -1
#         delete_rate_sql="""
#         delete from price_prediction_level2_rate_price_brand where date=DATE_FORMAT(date_sub(curdate(),interval 10 day),'%Y%m%d')
#         """
#         mysql_prediction_processor.execute_sql(delete_rate_sql)
#         delete_td_rate_sql = """
#                 delete from price_prediction_level2_rate_price_brand where date=DATE_FORMAT(date_add(curdate(),interval 1 day),'%Y%m%d')
#                 """
#         mysql_prediction_processor.execute_sql(delete_td_rate_sql)
#         insert_sql = """
#         INSERT INTO price_prediction_level2_rate_price_brand(date, product_sku_key, product_sku_name, product_level_key,
#         product_level_name, product_key, product_name, product_category_id, product_category_name, product_brand_id,
#         product_brand_name, predict_origin, forecast_reference_price,is_new_product,POLY_pred_price,
#         rank,price_3,price_2,price_1,saleprice,qty,mean_rate,score,base_price,min_sale_price,sale_num,level_rate_price,avg_predict_basep_price,
#         predict_level_price_rate,saleprice_level_price_rate,mean_price_rate,process_price,flag,diff_rate)
#         VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,  %s,%s, %s, %s, %s, %s, %s, %s,  %s,%s, %s, %s, %s , %s,%s,
#          %s, %s, %s, %s, %s, %s, %s, %s)
#         """
#
#         # predict_data=predict_data.reset_index()
#         mysql_prediction_processor.execute_insert_sql(insert_sql,
#                                            predict_data[
#                                                ['date', 'product_sku_key', 'product_sku_name', 'product_level_key',
#                                                 'product_level_name', 'product_key', 'product_name',
#                                                 'product_category_id', 'product_category_name','product_brand_id',
#                                                 'product_brand_name', 'predict_origin', 'forecast_reference_price','is_new_product',
#                                                 'POLY_pred_price',
#                                                 'rank', 'price_3', 'price_2', 'price_1', 'saleprice', 'qty', 'mean_rate', 'score',
#                                                 'base_price','min_sell_price','sale_num', 'level_rate_price', 'avg_predict_basep_price',
#                                                 'predict_level_price_rate', 'saleprice_level_price_rate', 'mean_price_rate',
#                                                 'process_price', 'flag', 'diff_rate'
#                                                 ]
#                                            ].to_records(index=False).tolist())
#         logger.info('saving predict data to mysql done')
#         mysql_price_num = predict_data.shape[0]
#         save_data_to_gp(predict_data)
#         return mysql_price_num
#
# def save_data_to_gp(predict_data):
#     # dt = datetime.datetime.strptime(time.strftime('%Y-%m-%d'), '%Y-%m-%d') + datetime.timedelta(days=1)
#     # predict_data['date'] = dt.strftime('%Y-%m-%d')
#     insert_sql = """
#     INSERT INTO ods.ods_algorithm_price_prediction_level2_rate_price(date, product_sku_key, product_sku_name, product_level_key,
#     product_level_name, product_key, product_name, product_category_id, product_category_name, product_brand_id,
#     product_brand_name, predict_origin, forecast_reference_price,is_new_product,POLY_pred_price,
#     rank,price_3,price_2,price_1,saleprice,qty,mean_rate,score,base_price,min_sale_price,sale_num,level_rate_price,avg_predict_basep_price,
#     predict_level_price_rate,saleprice_level_price_rate,mean_price_rate,process_price,flag,diff_rate)
#     VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,  %s,%s, %s, %s, %s, %s, %s, %s,  %s,%s, %s, %s, %s , %s,%s,
#      %s, %s, %s, %s, %s, %s, %s, %s)
#     """
#
#     postgre_processor.execute_insert_sql(insert_sql,
#                                                   predict_data[
#                                                       ['date', 'product_sku_key', 'product_sku_name',
#                                                        'product_level_key',
#                                                        'product_level_name', 'product_key', 'product_name',
#                                                        'product_category_id', 'product_category_name',
#                                                        'product_brand_id',
#                                                        'product_brand_name', 'predict_origin',
#                                                        'forecast_reference_price', 'is_new_product',
#                                                        'POLY_pred_price',
#                                                        'rank', 'price_3', 'price_2', 'price_1', 'saleprice',
#                                                        'qty', 'mean_rate', 'score',
#                                                        'base_price', 'min_sell_price', 'sale_num',
#                                                        'level_rate_price', 'avg_predict_basep_price',
#                                                        'predict_level_price_rate', 'saleprice_level_price_rate',
#                                                        'mean_price_rate',
#                                                        'process_price', 'flag', 'diff_rate'
#                                                        ]
#                                                   ].to_records(index=False).tolist())
#
#     logger.info('saving predict data size={} to gp done'.format(predict_data.shape[0]))
#
#
#
#
# def save_product_level_rate(product_level_rate):
#     """
#     保存补全数据
#     :return:
#     """
#     if product_level_rate is not None:
#         logger.info('saving predict data...')
#         dt = datetime.datetime.strptime(time.strftime('%Y-%m-%d'), '%Y-%m-%d') + datetime.timedelta(days=1)
#         product_level_rate['date'] = dt.strftime('%Y-%m-%d')
#         product_level_rate=product_level_rate.fillna(-1)
#         product_level_rate['rate_f1']=product_level_rate['rate_f1'].apply(lambda x:round(x,5))
#         product_level_rate['rate_f2']=product_level_rate['rate_f2'].apply(lambda x:round(x,5))
#         product_level_rate['rate_f3']=product_level_rate['rate_f3'].apply(lambda x:round(x,5))
#         product_level_rate['sku_rate1']=product_level_rate['sku_rate1'].apply(lambda x:round(x,5))
#         product_level_rate['sku_rate2']=product_level_rate['sku_rate2'].apply(lambda x:round(x,5))
#         product_level_rate['sku_rate3']=product_level_rate['sku_rate3'].apply(lambda x:round(x,5))
#         product_level_rate['mean_rate']=product_level_rate['mean_rate'].apply(lambda x:round(x,5))
#         product_level_rate['template_mean_rate']=product_level_rate['template_mean_rate'].apply(lambda x:round(x,5))
#         product_level_rate['mean_rate_tmp']=product_level_rate['mean_rate_tmp'].apply(lambda x:round(x,5))
#
#         # delete_rate_sql="""
#         # delete from price_prediction_level_rate_price where date=DATE_FORMAT(date_sub(curdate(),interval 10 day),'%Y%m%d')
#         # """
#         # mysql_prediction_processor.execute_sql(delete_rate_sql)
#         delete_td_rate_sql = """
#                 delete from product_sku2_level_rate_final_brand where date=DATE_FORMAT(date_add(curdate(),interval 1 day),'%Y%m%d')
#                 """
#         mysql_prediction_processor.execute_sql(delete_td_rate_sql)
#         insert_sql = """
#         INSERT INTO product_sku2_level_rate_final_brand(date, template_brand, product_id, level_id,
#         product_name, sku_rate1, rate_f1, sku_rate2, rate_f2, sku_rate3,
#         rate_f3, w_f1, w_f2,w_f3,mean_rate,
#         w_mean,template_mean_rate,mean_rate_tmp)
#         VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,  %s,%s, %s, %s, %s, %s, %s)
#         """
#
#
#         mysql_prediction_processor.execute_insert_sql(insert_sql,
#                                            product_level_rate[
#                                                ['date','template_brand', 'product_id', 'level_id', 'product_name',
#                                                 'sku_rate1', 'rate_f1', 'sku_rate2', 'rate_f2', 'sku_rate3', 'rate_f3',
#                                                 'w_f1','w_f2', 'w_f3', 'mean_rate', 'w_mean',
#                                                 'template_mean_rate', 'mean_rate_tmp']
#                                            ].to_records(index=False).tolist())
#         logger.info('saving predict data to mysql done')
#         mysql_price_num = product_level_rate.shape[0]
#         return mysql_price_num
#
# def merge_avg_product_level_rate(product_level_rate):
#     tmp=product_level_rate
#     product_avg_level_rate_df=mysql_prediction_processor.load_sql(product_avg_level_rate_sql)
#     product_level_rate.rename(columns={'mean_rate': 'mean_rate_td', 'template_mean_rate': 'template_mean_rate_td'}, inplace=True)
#     product_avg_level_rate_df['product_id']=product_avg_level_rate_df.product_id.astype(np.int64)
#     product_level_rate['product_id']=product_level_rate.product_id.astype(np.int64)
#     product_avg_level_rate_df['level_id']=product_avg_level_rate_df.level_id.astype(np.int64)
#     product_level_rate['level_id']=product_level_rate.level_id.astype(np.int64)
#     product_level_rate=product_level_rate.merge(product_avg_level_rate_df,left_on=['product_id','level_id'],right_on=['product_id','level_id'])
#     product_level_rate = product_level_rate.fillna(-1)
#     return product_level_rate
#
# def process_product_level_rate(sql):
#
#     model=productLevelRate(sql)
#     base_level,base_product_info=model.fun1()
#     product_level_df=model.fun2(base_level,base_product_info)
#     # product_level_df=model.fun3(product_level_df)
#     return product_level_df
#
# def process_level_rate_final():
#     product_level_df1=process_product_level_rate(SETTLE_DATA)
#     product_level_df2=process_product_level_rate(SETTLE_DATA_2)
#     product_level_df3=process_product_level_rate(SETTLE_DATA_3)
#
#     tmp1=product_level_df1[['template_brand','product_id','level_id','product_name','sku_rate','rate_f']]
#     tmp1.rename(columns={'rate_f':'rate_f1','sku_rate':'sku_rate1'},inplace=True)
#     tmp2=product_level_df2[['product_id','level_id','sku_rate','rate_f']]
#     tmp2.rename(columns={'rate_f':'rate_f2','sku_rate':'sku_rate2'},inplace=True)
#
#
#     tmp3=product_level_df3[['product_id','level_id','sku_rate','rate_f']]
#     tmp3.rename(columns={'rate_f':'rate_f3','sku_rate':'sku_rate3'},inplace=True)
#
#     tmp1=tmp1.merge(tmp2,left_on=['product_id','level_id'],right_on=['product_id','level_id'])
#     tmp1=tmp1.merge(tmp3,on=['product_id','level_id'])
#     # tmp1['mean_rate']=tmp1[['rate_f1','rate_f2','rate_f3']].mean(axis=1)
#
#     tmp1.loc[tmp1.rate_f1>0 ,'w_f1']=0.5
#     tmp1.loc[tmp1.rate_f2>0 ,'w_f2']=0.3
#     tmp1.loc[tmp1.rate_f3>0 ,'w_f3']=0.2
#
#     tmp1['rate_f1_w']=tmp1.w_f1*tmp1.rate_f1
#     tmp1['rate_f2_w']=tmp1.w_f2*tmp1.rate_f2
#     tmp1['rate_f3_w']=tmp1.w_f3*tmp1.rate_f3
#     tmp1['mean_rate'] = tmp1[['rate_f1_w', 'rate_f2_w', 'rate_f3_w']].sum(axis=1)/tmp1[['w_f1', 'w_f2', 'w_f3']].sum(axis=1)
#     tmp1['w_mean'] = tmp1[['w_f1', 'w_f2', 'w_f3']].sum(axis=1)
#     template_level_rate=process_template_level_rate_final()
#     template_level_rate.rename(columns={'mean_rate':'template_mean_rate'},inplace=True)
#     template_level_rate.level_id= template_level_rate['level_id'].astype(np.int64)
#     level_rate_merge_df=pd.merge(tmp1,template_level_rate[['template_brand','level_id','template_mean_rate']],left_on=['template_brand','level_id'],right_on=['template_brand','level_id'])
#     level_rate_merge_df['mean_rate_tmp']=level_rate_merge_df.mean_rate
#     result_df=fun33(level_rate_merge_df)
#     # result_df_tmp=fun33(level_rate_merge_df)
#     result_df.to_csv('/data/sunpengwei/tmp/product_level_rate.csv', encoding='utf-8-sig')
#     return result_df
#
# def predict_price_data(result_df):
#     # predict_price_sql
#     price_df=mysql_prediction_processor.load_sql(predict_price_sql)
#     price_df['product_id'] = price_df.product_id.astype(np.int64)
#     price_df['product_level_key'] = price_df.product_level_key.astype(np.int64)
#
#     result_df['product_id'] = result_df.product_id.astype(np.int64)
#     result_df['level_id'] = result_df.level_id.astype(np.int64)
#
#     price_df=price_df.merge(result_df[['product_id','level_id','mean_rate']],left_on=['product_id','product_level_key'],right_on=['product_id','level_id'])
#
#     price_df['score']=1000-price_df.product_level_key+price_df.sale_num*1000
#     data_sku_base_level_df = price_df.loc[price_df.level_sub.isin([ 'A', 'B','D'])]
#
#     # a= data_sku_base_level_df.groupby(by=['product_id','product_sku_id'], as_index=False)['score'].max()
#     # index= data_sku_base_level_df.groupby(by=['product_id','product_sku_id'])['score'].idxmax()
#     # debug only: price_df=price_df.loc[price_df.product_sku_key==114662]
#
#     # 找出数据中score得分最高的level
#     base_level = data_sku_base_level_df.loc[
#         data_sku_base_level_df.sort_values(['product_sku', 'score']).drop_duplicates('product_sku', keep='last').index]
#     base_level['base_price']=base_level['avg_sell_price']/base_level['mean_rate']
#
#     price_df=price_df.merge(base_level[['product_sku','base_price']],on='product_sku')
#     price_df.loc[(price_df.product_level_name.isin(['S'])) & (price_df.min_sell_price>0),'forecast_reference_price']=price_df.loc[(price_df.product_level_name.isin(['S'])) & (price_df.min_sell_price>0),'min_sell_price']
#
#     price_df['level_rate_price']=price_df.base_price*price_df.mean_rate
#     price_df.loc[(price_df.product_level_name.isin(['S'])) & (price_df.min_sell_price > 0), 'level_rate_price'] = \
#             price_df.loc[(price_df.product_level_name.isin(['S'])) & (price_df.min_sell_price > 0), 'min_sell_price']
#
#     # price_df['level_rate_price']=price_df.base_price*price_df.mean_rate
#     price_df['avg_predict_basep_price'] = price_df[['saleprice', 'forecast_reference_price']].mean(axis=1)
#
#     price_df['predict_level_price_rate']=abs(price_df['forecast_reference_price']-price_df['level_rate_price'])/price_df.level_rate_price
#     price_df['saleprice_level_price_rate']=abs(price_df['saleprice']-price_df['level_rate_price'])/price_df.level_rate_price
#     price_df['mean_price_rate']=abs(price_df['avg_predict_basep_price']-price_df['level_rate_price'])/price_df.level_rate_price
#     price_df['qty']=price_df['qty'].astype(np.int64)
#
#     price_df['forecast_reference_pricet'] = price_df['forecast_reference_price']
#     price_df['salepricet'] = price_df['saleprice']
#     price_df['qtyt'] = price_df['qty']
#
#     price_df['process_price'] = price_df['forecast_reference_price']
#     price_df['process_price_f'] = price_df['forecast_reference_price']
#     price_df['flag'] = 0
#     price_df['diff_rate']=price_df['saleprice_level_price_rate']
#     # price_df=price_df.loc[price_df.product_sku_key==2612812]
#     resDf = pd.DataFrame(columns=price_df.columns.tolist())
#
#     grouped = price_df.groupby(['product_id','product_sku_key'])
#     for name, group in grouped:
#         size=group.shape[0]
#         if group.mean_rate.min()<=0:
#             continue
#
#         group=group.sort_values(['mean_rate','level_id'],ascending=[False,True])
#
#         # group['levelname']=group.product_level_name
#         group.reset_index(drop=True, inplace=True)
#
#         if group.loc[0,'product_level_name']=='S':
#             flag_a=group.loc[group.product_level_name=='A+']
#             if not flag_a.empty:
#                 a_inndex=flag_a.index[0]
#                 if flag_a.loc[a_inndex,'sale_num']>0:
#                     for idex in range(a_inndex-1,-1,-1):
#                         if group.loc[idex,'level_rate_price']<group.loc[idex+1,'level_rate_price']:
#                             process_price =group.loc[idex+1,'level_rate_price']/group.loc[idex+1,'mean_rate']*group.loc[idex,'mean_rate']
#                             group.loc[idex, 'level_rate_price']=process_price
#                             group.loc[idex, 'process_price']=process_price
#
#         for i in range(size):
#             predict_rate=group.loc[i,'predict_level_price_rate']
#             saleprice_rate=group.loc[i,'saleprice_level_price_rate']
#             mean_price_rate=group.loc[i,'mean_price_rate']
#             level_sub=group.loc[i,'level_sub']
#             n_p=1
#             if group.loc[i,'forecast_reference_price']-group.loc[i,'level_rate_price']<0:
#                 n_p=-1
#
#             qty=group.loc[i,'qty']
#             if level_sub in ['S','A','B'] and (predict_rate>0.03 or (level_sub in ['S'] and group.loc[0, 'sale_num']>0)):
#                 if i==0: #第一个等级大于异常范围时：有少量售卖记录则限制为下一等级预测价*1.5，否则取本级预测价*(1±0.03)与下一等级预测价*1.15的较小值（原注释中的 *1.05 与代码不符）
#                     if group.loc[i, 'sale_num']>=5 :
#                         continue
#                     elif group.loc[i, 'sale_num']>0:
#                         group.loc[i, 'process_price'] = min(group.loc[i, 'level_rate_price'],
#                                                             group.loc[i + 1, 'level_rate_price'] * (1 + 0.5))
#                         continue
#
#                     #     group.loc[i, 'process_price'] = group.loc[i, 'saleprice']
#                     # else:
#                     group.loc[i, 'process_price'] = min(group.loc[i, 'level_rate_price'] * (1 + 0.03 * n_p),group.loc[i+1, 'level_rate_price'] * (1 + 0.15))
#
#                 elif predict_rate>saleprice_rate:
#                     if saleprice_rate<0.03:
#                         if saleprice_rate>mean_price_rate:
#                             group.loc[i,'process_price']=group.loc[i,'avg_predict_basep_price']
#                         else:
#
#                             group.loc[i, 'process_price'] = group.loc[i, 'saleprice']
#                     else:
#                         # if qty>0:#售卖数量大于0，则基准价可信，
#                         #     group.loc[i, 'process_price'] = group.loc[i, 'saleprice']*0.95
#                         # else:
#                         group.loc[i, 'process_price']=group.loc[i, 'level_rate_price'] * (1+0.03*n_p)
#                 else:
#                     # if qty > 0:
#                     #     group.loc[i, 'process_price'] = group.loc[i, 'saleprice'] * 0.95
#                     # else:
#                     group.loc[i, 'process_price'] = group.loc[i, 'level_rate_price'] * (1+0.03*n_p)
#                 group.loc[i,'flag']=1
#
#             elif level_sub  in ['C','D','E'] and predict_rate>0.06:
#                 if i==0:
#                     group.loc[i, 'process_price'] = group.loc[i, 'saleprice']
#                 elif predict_rate>saleprice_rate:
#                     if saleprice_rate<0.06:
#                         if saleprice_rate>mean_price_rate:
#                             group.loc[i,'process_price']=group.loc[i,'avg_predict_basep_price']
#                         else:
#
#                             group.loc[i, 'process_price'] = group.loc[i, 'saleprice']
#                     else:
#                         # if qty>0:
#                         #     group.loc[i, 'process_price'] = group.loc[i, 'saleprice']*0.95
#                         # else:
#                         group.loc[i, 'process_price']=group.loc[i, 'level_rate_price'] * (1+0.05*n_p)
#                 else:
#                     # if qty > 0:
#                     #     group.loc[i, 'process_price'] = group.loc[i, 'saleprice'] * 0.95
#                     # else:
#                     group.loc[i, 'process_price'] = group.loc[i, 'level_rate_price'] * (1+0.05*n_p)
#                 group.loc[i,'flag']=1
#             elif level_sub not in ['S','A','B','C', 'D', 'E'] and predict_rate > 0.12:
#                 if i == 0:
#                     group.loc[i, 'process_price'] = group.loc[i, 'saleprice']
#                 elif predict_rate > saleprice_rate:
#                     if saleprice_rate < 0.1:
#                         if saleprice_rate > mean_price_rate:
#                             group.loc[i, 'process_price'] = group.loc[i, 'avg_predict_basep_price']
#                         else:
#
#                             group.loc[i, 'process_price'] = group.loc[i, 'saleprice']
#                     else:
#                         # if qty > 0:
#                         #     group.loc[i, 'process_price'] = group.loc[i, 'saleprice'] * 0.95
#                         # else:
#                         group.loc[i, 'process_price'] = group.loc[i, 'level_rate_price'] * (1+0.07*n_p)
#                 else:
#                     # if qty > 0:
#                     #     group.loc[i, 'process_price'] = group.loc[i, 'saleprice'] * 0.95
#                     # else:
#                     group.loc[i, 'process_price'] = group.loc[i, 'level_rate_price'] * (1+0.07*n_p)
#                 group.loc[i, 'flag'] = 1
#             # print('dsd')
#         group['process_price_f'] = group['process_price']
#         # logger.info('group product_sku_key:={}'.format(group.loc[0, 'product_sku_key']))
#         for i in range(size):
#             level_sub = group.loc[i, 'level_sub']
#
#             if i==size-1:#bugfix:J,B排序时，走最后一步时出现 数组越界
#                 if group.loc[i, 'process_price_f'] > group.loc[i - 1, 'process_price_f']:
#                     group.loc[i, 'process_price_f'] = group.loc[i - 1, 'process_price_f'] * 0.98
#                 continue
#
#             if i>0 and level_sub in ['S','A','B'] and group.loc[i, 'process_price_f'] < group.loc[i + 1, 'process_price_f'] :
#                 # if group.loc[i, 'process_price_f']>group.loc[i-1, 'process_price_f']:
#                 #     group.loc[i, 'process_price_f']=group.loc[i-1, 'process_price_f']-10
#
#                 if group.loc[i, 'process_price_f'] < group.loc[i - 1, 'process_price_f']:
#                     next=i+1
#                     # for next in range(i+1,size):
#                     #     if group.loc[next, 'flag']!=1 or level_sub not in ['S','A','B']:
#                     #         break
#                     if next<size-1:
#                         group.loc[i, 'process_price_f']=group.loc[next, 'process_price_f']+(group.loc[i-1, 'process_price_f']-group.loc[next, 'process_price_f'])/(next-i+1)
#                     else:
#                         group.loc[i, 'process_price_f'] = group.loc[i-1, 'process_price_f']*0.98
#         group['diff_rate']=group['process_price_f']/group['saleprice']-1
#         resDf=resDf.append(group)
#     return resDf
#
#
# def insert_data_to_incre(date):
#     dt = check_date_str(date) + datetime.timedelta(days=1)
#     # today = datetime.date.today();
#     today=format_date_string(dt)
#     query_sql = """select DATE_FORMAT(date_add(curdate(),interval 1 day),'%Y%m%d') as date, product_sku_key, product_sku_name, product_level_key,
#                         product_level_name, product_key, product_name, product_category_id, product_category_name, product_brand_id,
#                         product_brand_name, predict_origin, forecast_reference_price,is_new_product,POLY_pred_price,
#                         rank,price_3,price_2,price_1,saleprice,qty,mean_rate,score,base_price,min_sale_price as min_sell_price,sale_num,level_rate_price,avg_predict_basep_price,
#                         predict_level_price_rate,saleprice_level_price_rate,mean_price_rate,process_price,flag,diff_rate
#                         from price_prediction_level2_rate_price_brand where date=DATE_FORMAT(date_add(curdate(),interval 0 day),'%Y%m%d') """
#     lsql=query_sql
#     df_all_data = mysql_prediction_processor.load_sql(lsql)
#     if(df_all_data.shape[0]>0):
#         mysql_prediction_processor.execute_sql(
#             "delete from price_prediction_level2_rate_price_brand where date=DATE_FORMAT(date_add(curdate(),interval 1 day),'%Y%m%d')")
#
#
#         # df_all_data['create_date']=today
#         df_all_data = df_all_data.fillna(-1)
#         mysql_prediction_processor.execute_insert_sql(INSERT_LEVEL_RATE_PRICE_SQL, df_all_data[
#                                                ['date', 'product_sku_key', 'product_sku_name', 'product_level_key',
#                                                 'product_level_name', 'product_key', 'product_name',
#                                                 'product_category_id', 'product_category_name','product_brand_id','product_brand_name',
#                                                 'predict_origin', 'forecast_reference_price','is_new_product','POLY_pred_price',
#                                                 'rank','price_3','price_2','price_1','saleprice','qty','mean_rate','score','base_price',
#                                                 'min_sell_price','sale_num','level_rate_price','avg_predict_basep_price',
#                                                  'predict_level_price_rate','saleprice_level_price_rate','mean_price_rate',
#                                                 'process_price','flag','diff_rate' ]].to_records(index=False).tolist())
#         df_size=df_all_data.shape[0]
#         logger.info('(通知消息)sku2手机数据等级比率不处理，拷贝数据完成! 时间:{} date={} 数据 category_id=1 ，dataSize={}'.format(today,date, df_all_data.shape[0]))
#         # dingding_messager.send_message('(通知消息)sku2手机数据插入完成! 时间:{} date={} category_id@{} ，dataSize={} '.format(today,date,category_id, df_all_data.shape[0]))
#         save_data_to_gp(df_all_data)
#         return df_size
#     else:
#         logger.info('(通知消息)sku2手机数据插入表数为空! 时间@{}  date={}数据  category_id=1 ，dataSize={} '.format(today,date, df_all_data.shape[0]))
#         # dingding_messager.send_message(
#         #     '(通知消息)sku2手机数据插入表数为空! 时间@{}  date={} category_id@{} ，dataSize={} '.format(today,date, category_id, df_all_data.shape[0]))
#     return 0
#
# def query_price_sku2_level_data():
#     query_sql = """select DATE_FORMAT(date_add(curdate(),interval 1 day),'%Y%m%d') as date, product_sku_key, product_sku_name, product_level_key,
#                            product_level_name, product_key, product_name, product_category_id, product_category_name, product_brand_id,
#                            product_brand_name, predict_origin, forecast_reference_price,is_new_product,POLY_pred_price,
#                            rank,price_3,price_2,price_1,saleprice,qty,mean_rate,score,base_price,min_sale_price as min_sell_price,sale_num,level_rate_price,avg_predict_basep_price,
#                            predict_level_price_rate,saleprice_level_price_rate,mean_price_rate,process_price,flag,diff_rate
#                            from price_prediction_level2_rate_price_brand where date=DATE_FORMAT(date_add(curdate(),interval 0 day),'%Y%m%d') """
#     lsql = query_sql
#     df_all_data = mysql_prediction_processor.load_sql(lsql)
#     save_data_to_gp(df_all_data)
#
# def main():
#     try:
#         # result_df = process_level_rate_final()
#         # save_product_level_rate(result_df)
#         rs_file='result_df.pkl'
#         rs_price_file = 'result_price_df.pkl'
#         result_df=None
#         flag=1
#         if flag==1:
#             result_df = process_level_rate_final()
#             save_product_level_rate(result_df)
#             ##存入pickle
#             train_data = open(rs_file, 'wb')
#             pickle.dump(result_df,train_data)
#             train_data.close()
#         else:
#             train_data = open(rs_file, 'rb')
#             result_df = pickle.load(train_data)
#             print('dsd')
#         if (len(sys.argv) > 1):
#             date = sys.argv[1]
#             today = format_date(date)
#         else:
#             today = get_today();
#         # today = get_today()
#         md_date = format_date_string(today)
#         mysql_price_num=0
#         if (today.weekday() in [0, 3]):
#             result_df = merge_avg_product_level_rate(result_df)
#             df = predict_price_data(result_df)
#             price_data = open(rs_price_file, 'wb')
#             pickle.dump(df, price_data)
#             train_data.close()
#
#             # train_data = open(rs_price_file, 'rb')
#             # df = pickle.load(train_data)
#
#             mysql_price_num = save_complement_data(df)
#             # df.loc[df.product_sku_key==1977248]
#             logger.info('skulevel手机模型执行完成 end！！！')
#         else:
#             mysql_price_num=insert_data_to_incre(md_date)
#             print(1)
#         # df.to_csv('/data/sunpengwei/tmp/predict_price_level_rate.csv', encoding='utf-8-sig')
#         print('1')
#         dingding_messager.send_message('c端sku2等级比率处理数据完成，处理完成={}条'.format(mysql_price_num))
#         logger.info('等级比率处理数据 {}'.format(mysql_price_num))
#         exit(0)
#     except Exception as e:
#         logger.info('error {}'.format(e))
#         dingding_messager.send_message('c端sku2等级比率处理数据失败：{}'.format(e))
#         raise TypeError('等级比率报错:') from e
#         exit(1)
#
# if __name__ == '__main__':
#     main()
#     # query_price_sku2_level_data()
#
#
#
