import pyspark

from Modules.mod_spark import Spark
from pyspark.sql.types import StructField, StructType, StringType, IntegerType


class Task_2_1:
    """Export a reference table of Olympic disciplines, count athletes per
    discipline from ``Task_2/Athletes.csv``, and join the two results.

    Outputs (all under ``Task_2/export/``):
      * ``Olympic disciplines by season.csv`` — the static DATA table.
      * ``Athletes.parquet``   — athlete count per discipline.
      * ``unification.parquet`` — counts joined with discipline/season.
    """

    # Static reference rows: (row_id, discipline, season).
    DATA: list = [(1, 'Equestrian', 'summer'),
                  (2, 'Artistic Gymnastics', 'summer'),
                  (3, 'Athletics', 'summer'),
                  (4, 'Badminton', 'summer'),
                  (5, 'Boxing', 'summer'),
                  (6, 'Cycling Road', 'summer'),
                  (7, 'Diving', 'summer'),
                  (8, 'Judo', 'summer'),
                  (9, 'Tennis', 'summer'),
                  (10, 'Weightlifting', 'summer'),
                  (11, 'Alpine Skiing', 'winter'),
                  (12, 'Biathlon', 'winter'),
                  (13, 'Figure Skating', 'winter'),
                  (14, 'Nordic Combined', 'winter'),
                  (15, 'Luge', 'winter'),
                  (16, 'Short Track Speed Skating', 'winter'),
                  (17, 'Speed Skating', 'winter'),
                  (18, 'Ski Mountaineering', 'winter'),
                  (19, 'Snowboard', 'winter'),
                  (20, 'Skeleton', 'winter')
                  ]

    # Schema for DATA; every column is non-nullable.
    HEADER: StructType = StructType([
        StructField('row_id', IntegerType(), False),
        StructField('discipline', StringType(), False),
        StructField('season', StringType(), False)
    ])

    def start(self) -> None:
        """Run the full export/aggregate/join pipeline.

        The Spark session is always stopped, even if any pipeline step
        raises, so repeated runs do not leak sessions.
        """
        spark = Spark()  # single wrapper instance, reused for both calls
        session = spark.create_session('Study')
        try:
            dataframe = spark.create_df(session, self.DATA, self.HEADER)
            # NOTE(review): lineterminator='\t' writes all rows on one line
            # separated by tabs — confirm this is the intended CSV layout
            # (a plain '\n' would be the conventional choice).
            dataframe.toPandas().to_csv(
                path_or_buf=r'Task_2/export/Olympic disciplines by season.csv',
                sep=';',
                lineterminator='\t'
            )
            # Source CSV is ';'-delimited with a header row.
            reading = session.read.options(header=True, delimiter=';').csv(r'Task_2/Athletes.csv')
            reading.createTempView('ATHLETES')
            counter = session.sql('select Discipline, count(*) as count from ATHLETES group by Discipline')
            counter.write.parquet(path=r'Task_2/export/Athletes.parquet', mode='overwrite')
            counter.createOrReplaceTempView('COUNTER')
            dataframe.createOrReplaceTempView('DATAFRAME')
            # Explicit ANSI join (equivalent to the comma-join + WHERE form).
            unification = session.sql('select C.count, DF.discipline, DF.season from COUNTER C '
                                      'join DATAFRAME DF on C.Discipline = DF.discipline')
            unification.write.parquet(path=r'Task_2/export/unification.parquet', mode='overwrite')
            unification.show()
        finally:
            session.stop()
