# coding=utf-8
# encoding:utf-8
import datetime
import pandas as pd
import sys
from pyspark import SparkConf
from pyspark.sql import SparkSession, DataFrame, Window, functions, Row
from pyspark.sql.functions import to_date, udf, datediff, lead
from pyspark.sql.types import ArrayType, DateType

from features.Seasonality import Seasonality, Util


class FeatureEngineering:
    """Builds the daily per-segment seasonality features from cleaned order data."""

    def __init__(self):
        pass

    @staticmethod
    def main_process(df_clean):
        # type: (DataFrame) -> DataFrame
        """Aggregate cleaned orders per segment/day, fill date gaps, and attach
        the within-year seasonality rate (daily quantity / yearly segment mean).

        :param df_clean: cleaned order DataFrame with at least the segment
            columns, a ``date`` column and an integer ``quantity`` column.
        :return: persisted DataFrame with ``quantity``, ``avg_year_segment``
            and ``seasonality_rate`` columns added.
        """
        segment_cols = ['sub_geo', 'business_type', 'category', 'category_seasonality']

        # Daily total quantity per segment; rename the aggregate back to 'quantity'.
        daily = (df_clean
                 .groupBy(*(segment_cols + ['date']))
                 .sum('quantity')
                 .withColumnRenamed("sum(quantity)", 'quantity'))

        print("生成segment范围内的当日总量 ———— sum(quantity) ——> quantity")
        filled = FeatureTool.fill_missing_date(daily, segment_cols, debug=False)

        # Yearly mean per segment, then the day-over-year ratio.
        enriched = FeatureTool.calculate_yearly_avg_and_week_sum_by_segment(filled, segment_cols, debug=True)
        enriched = enriched.withColumn('seasonality_rate',
                                       enriched['quantity'] / enriched['avg_year_segment'])
        print("生成segment范围内的当日比年内变动趋势 —— seasonality_rate")

        enriched.persist()
        enriched.show()
        print(enriched.count())
        # enriched.toPandas().to_csv('output_season2.csv', header=True, index=False, encoding='utf-8')
        return enriched


class FeatureTool:
    """Reusable Spark feature-engineering helpers (gap filling, yearly averages)."""

    def __init__(self):
        pass

    @staticmethod
    def flat_map_genarate_seasonality_by_segment(key, rows, segments, dt):
        """Generate seasonality for one segment group.

        :param key: '@'-joined segment values, e.g. ``'PRC@Other@Other@unknown'``.
        :param rows: iterable of Rows belonging to this segment.
        :param segments: list of segment column names matching ``key``'s parts.
        :param dt: reference date passed through to ``Seasonality``.
        """
        parts = key.split("@")
        print(parts, segments)
        df = Util.generate_df_from_rows(rows).sort_values(['date']).reset_index(drop=True)

        s = Seasonality()
        print(dt)
        df = s.generate_seasonality(df, dt)
        print(df)
        # NOTE(review): debug short-circuit — terminates the process here, so this
        # function currently never returns rows. Remove once debugging is done.
        exit(0)

    @staticmethod
    def fill_missing_date(df, cols_list, debug=False, interval=7):
        # type: (DataFrame, list, bool, int) -> DataFrame
        """Insert zero-quantity rows for the `interval`-day dates missing between
        consecutive observed dates inside each segment.

        :param df: DataFrame containing ``cols_list`` + ``date`` + ``quantity``.
        :param cols_list: segment columns that define independent date sequences.
        :param debug: when True, show the filled result next to the original.
        :param interval: expected step (in days) between consecutive dates.
        :return: original rows unioned with the generated zero rows.
        """
        select_cols = cols_list + ['date', 'quantity']
        df = df.select(*select_cols)

        def udf_s_e(start_datetime, excluded_diff_from_next_date):
            # type: (datetime.datetime, int) -> list
            # Dates strictly between this row's date and the next observed date,
            # stepping by `interval` days.
            date_list = []
            if excluded_diff_from_next_date > 1:
                # BUG FIX: floor division. On Python 3 `/` yields a float and
                # range() would raise TypeError; `//` behaves the same on both
                # Python 2 and 3 for positive ints.
                for i in range((excluded_diff_from_next_date // interval) - 1):
                    date_list.append(start_datetime + datetime.timedelta(days=(i + 1) * interval))
                return date_list
            else:
                return []

        fill_dates = udf(udf_s_e, ArrayType(DateType()))

        # BUG FIX: partition the window by the segment columns instead of a single
        # global ordering. With only orderBy, lead() could pair the last date of
        # one segment with the first date of the next segment and fabricate fill
        # rows across segment boundaries; with partitionBy, boundary rows get a
        # null diff and are dropped by the filter below.
        w = Window.partitionBy(*cols_list).orderBy('date')
        temp_df = df.withColumn("diff", datediff(lead("date", 1).over(w), "date")).filter("diff > 1")
        temp_df = temp_df.withColumn("next_dates", fill_dates("date", "diff"))
        temp_df = temp_df \
            .withColumn("quantity", functions.lit(0)) \
            .withColumn("date", functions.explode("next_dates"))

        result = df.union(temp_df.select(*select_cols)).orderBy("date")

        if debug:
            print("union missing date into original data")
            result.sort(cols_list + ['date']).show(140)
            print("Compare to original data")
            df.sort(cols_list + ['date']).show(140)
        return result

    @staticmethod
    def calculate_yearly_avg_and_week_sum_by_segment(df, cols_segment, debug=False):
        # type: (DataFrame, list, bool) -> DataFrame
        """Attach each row's yearly average quantity within its segment.

        :param df: DataFrame with ``cols_segment`` + ``date`` + ``quantity``;
            ``date`` must render as an ISO string (year = first 4 characters).
        :param cols_segment: segment columns that scope the yearly average.
        :param debug: when True, also print per-year averages and the result.
        :return: input columns plus ``year`` and ``avg_year_segment``.
        """
        # First four characters of the date string give the calendar year.
        df = df.withColumn('year', df['date'][:4])

        if debug:
            df.groupBy(*tuple(cols_segment + ["year"])).avg('quantity').sort(['year'] + cols_segment).show(50)
        # =============================== main function ==============================================
        # Unbounded frame over each (segment, year) partition -> yearly mean on every row.
        window_yearly = Window.partitionBy(*tuple(cols_segment + ["year"])) \
            .orderBy("date") \
            .rowsBetween(Window.unboundedPreceding, Window.unboundedFollowing)

        df = df.select(*tuple(cols_segment +
                              [df.date, df.quantity, df.year,
                               functions.avg('quantity').over(window_yearly).alias('avg_year_segment')])
                       )
        # ============================================================================================
        if debug:
            df.sort(['date'] + cols_segment).show(200)
        return df


class TestFeatureEngineering:
    """Ad-hoc, manually-driven tests for FeatureTool (run from __main__, not pytest)."""

    def __init__(self):
        pass

    @staticmethod
    def test_featuretool_fill_missing_date(spark):
        # type: (SparkSession) -> None
        """Check fill_missing_date on a small two-segment fixture with 2-day gaps.

        Expects 9 zero-quantity rows to be inserted (8 original + 9 = 17) and
        the generated 2016-09-12 row to carry quantity 0.
        """
        dfp = pd.DataFrame({'date': ['2016-09-10 00:00:00',
                                     '2016-09-11 00:00:00',
                                     '2016-09-14 00:00:00',
                                     '2016-09-16 00:00:00',
                                     '2016-09-17 00:00:00',
                                     '2016-09-20 00:00:00',
                                     '2016-08-10 00:00:00',
                                     '2016-08-24 00:00:00'],
                            'quantity': [1, 2, 6, 1, 4, 2, 9, 18],
                            'col1': ['b', 'a', 'b', 'a', 'b', 'a', 'b', 'b'],
                            'col2': ['h', 'h', 'h', 'h', 'h', 'h', 'i', 'i']})
        df = spark.createDataFrame(dfp)
        df = df.withColumn('date', to_date('date'))
        df = df.withColumn('quantity', df['quantity'].cast('Int'))
        df_return = FeatureTool.fill_missing_date(df, ['col1', 'col2'], debug=False, interval=2)
        assert df_return.count() == 17
        # df_return.sort(['col1', 'col2', 'date']).show()
        test_row = df_return.filter('date="2016-09-12"').sort(['col1', 'col2', 'date']).first()
        assert test_row['quantity'] == 0

    @staticmethod
    def test_calculate_yearly_avg_and_week_sum_by_segment(spark):
        # type: (SparkSession) -> None
        """Smoke-test the yearly-average calculation on a local CSV dump.

        NOTE(review): depends on a hard-coded local file path (G:/ drive) —
        only runnable on the original author's machine; inspect the debug
        output manually, there are no assertions.
        """
        df = spark.read.csv('file:///G:/!Data/clean_order_20180219_0.csv', header=True)
        df = df.withColumn('date', to_date('date'))
        df = df.withColumn('quantity', df['quantity'].cast('Int'))
        df = df.withColumn("category_seasonality", functions.lit('unknown'))

        FeatureTool.calculate_yearly_avg_and_week_sum_by_segment(df,
                                                                 ['sub_geo', 'business_type',
                                                                  'category', 'category_seasonality'],
                                                                 debug=True)

    @staticmethod
    def test_flat_map_genarate_seasonality():
        """Drive flat_map_genarate_seasonality_by_segment with a captured fixture.

        The rows below are a real captured sample for the single segment key
        'PRC@Other@Other@unknown' (weekly dates from 2016-07 to 2017-11).
        No assertions — the callee currently prints and exits for debugging.
        """
        dt = '2018-02-19'
        rows = [Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-05-08', sum=13553017, seasonality_rate=0.7858451076902032),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-04-17', sum=19799943, seasonality_rate=1.1480608590024557),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-01-30', sum=0, seasonality_rate=0.0),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-09-18', sum=6454763, seasonality_rate=0.37426677210319587),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-07-17', sum=39374694, seasonality_rate=2.283064401579279),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-07-10', sum=32618592, seasonality_rate=1.8913250786111167),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-03-06', sum=457, seasonality_rate=2.6498248634560324e-05),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-03-13', sum=54259, seasonality_rate=0.003146101690727809),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-04-24', sum=11130789, seasonality_rate=0.6453969680980942),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-11-06', sum=7, seasonality_rate=4.058812701136155e-07),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-02-20', sum=100, seasonality_rate=5.798303858765935e-06),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-06-26', sum=3558293, seasonality_rate=0.20632064032519817),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-01-02', sum=4, seasonality_rate=2.319321543506374e-07),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-01-09', sum=7, seasonality_rate=4.058812701136155e-07),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-02-06', sum=637, seasonality_rate=3.693519558033901e-05),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-10-23', sum=0, seasonality_rate=0.0),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-08-28', sum=53713402, seasonality_rate=3.114466260840459),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-05-15', sum=4737114, seasonality_rate=0.27467226385614135),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-06-19', sum=4156350, seasonality_rate=0.24099780243381796),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-09-25', sum=14736254, seasonality_rate=0.8544527843195495),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-03-20', sum=41888374, seasonality_rate=2.428815206016307),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-07-03', sum=12348252, seasonality_rate=0.7159891722061418),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-09-04', sum=20, seasonality_rate=1.159660771753187e-06),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-10-16', sum=2, seasonality_rate=1.159660771753187e-07),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-06-05', sum=231024460, seasonality_rate=13.395500178873165),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-08-21', sum=25070560, seasonality_rate=1.453667247894229),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-07-24', sum=86374057, seasonality_rate=5.008230280003689),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-02-27', sum=476, seasonality_rate=2.7599926367725853e-05),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-01-23', sum=45, seasonality_rate=2.609236736444671e-06),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-02-13', sum=13, seasonality_rate=7.537795016395716e-07),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-04-03', sum=0, seasonality_rate=0.0),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-06-12', sum=6358034, seasonality_rate=0.36865813076365017),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-08-07', sum=8792451, seasonality_rate=0.509813025613104),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-01-16', sum=2, seasonality_rate=1.159660771753187e-07),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-10-30', sum=0, seasonality_rate=0.0),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-09-11', sum=326297, seasonality_rate=0.018919691542037483),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-03-27', sum=4380381, seasonality_rate=0.2539878005516499),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-10-02', sum=0, seasonality_rate=0.0),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-04-10', sum=511, seasonality_rate=2.962933271829393e-05),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-05-29', sum=16010334, seasonality_rate=0.9283278141233146),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-07-31', sum=42929828, seasonality_rate=2.489201873485579),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-10-09', sum=540, seasonality_rate=3.131084083733605e-05),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-05-22', sum=8472167, seasonality_rate=0.4912419860820942),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-08-14', sum=88222482, seasonality_rate=5.115407578105082),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2017-05-01', sum=1059, seasonality_rate=6.140403786433126e-05),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2016-11-28', sum=747, seasonality_rate=4.925745412844037),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2016-10-03', sum=0, seasonality_rate=0.0),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2016-09-19', sum=6, seasonality_rate=0.039564220183486244),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2016-09-26', sum=231, seasonality_rate=1.5232224770642202),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2016-11-07', sum=642, seasonality_rate=4.233371559633028),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2016-10-31', sum=110, seasonality_rate=0.7253440366972478),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2016-08-29', sum=48, seasonality_rate=0.31651376146788995),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2016-11-14', sum=24, seasonality_rate=0.15825688073394498),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2016-12-19', sum=0, seasonality_rate=0.0),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2016-12-26', sum=43, seasonality_rate=0.2835435779816514),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2016-10-24', sum=607, seasonality_rate=4.002580275229358),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2016-09-05', sum=4, seasonality_rate=0.026376146788990827),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2016-08-01', sum=2, seasonality_rate=0.013188073394495414),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2016-08-08', sum=0, seasonality_rate=0.0),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2016-08-22', sum=48, seasonality_rate=0.31651376146788995),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2016-11-21', sum=776, seasonality_rate=5.116972477064221),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2016-07-25', sum=2, seasonality_rate=0.013188073394495414),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2016-12-05', sum=0, seasonality_rate=0.0),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2016-10-17', sum=56, seasonality_rate=0.36926605504587157),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2016-08-15', sum=0, seasonality_rate=0.0),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2016-10-10', sum=142, seasonality_rate=0.9363532110091743),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2016-12-12', sum=0, seasonality_rate=0.0),
                Row(sub_geo=u'PRC', business_type=u'Other', category=u'Other', category_seasonality=u'unknown',
                    date=u'2016-09-12', sum=0, seasonality_rate=0.0)]
        segments_seasonality = ['sub_geo', 'business_type', 'category', 'category_seasonality']
        key = 'PRC@Other@Other@unknown'
        FeatureTool.flat_map_genarate_seasonality_by_segment(key, rows, segments_seasonality, Util.get_date(dt))


if __name__ == '__main__':
    # NOTE(review): this call ends with exit(0), so everything below is
    # currently unreachable — it is a debug toggle, not production flow.
    TestFeatureEngineering.test_flat_map_genarate_seasonality()
    exit(0)

    # Toggle between a local workstation run and a cluster run.
    local = True

    conf = SparkConf()
    if local:
        conf = conf.setMaster("local[8]")
        conf = conf.set('spark.sql.warehouse.dir', 'file:///d:/tmp')
    sparkSession = SparkSession.builder.appName("Test PredictionTool").config(conf=conf).getOrCreate()
    sc = sparkSession.sparkContext
    sc.setLogLevel("ERROR")

    # TestFeatureEngineering.test_featuretool_fill_missing_date(sparkSession)
    # TestFeatureEngineering.test_calculate_yearly_avg_and_week_sum_by_segment(sparkSession)

    # Load cleaned orders: local CSV (hard-coded Windows path) or HDFS parquet.
    if local:
        df_main_clean = sparkSession.read.csv('file:///G:/!Data/clean_order_20180219_0.csv', header=True)
        df_main_clean = df_main_clean.withColumn('quantity', df_main_clean['quantity'].cast('Int'))
    else:
        df_main_clean = sparkSession.read.parquet('/user/proj_scp/prep/sellin/clean_data/clean_order_20180219_0')

    # Restrict to a sane date window and add the constant seasonality marker
    # expected by the pipeline's segment columns.
    df_main_clean = df_main_clean.withColumn('date', to_date('date'))
    df_main_clean = df_main_clean.filter('date>"2014-01-01" and date<"2019-01-01"')
    df_main_clean = df_main_clean.withColumn("category_seasonality", functions.lit('unknown'))
    # df_main_clean = df_main_clean.filter('sub_geo="ANZ" and business_type="Commercial" and category="DT"')
    FeatureEngineering.main_process(df_main_clean)
    # -------------- 12476 rows --------------
