import findspark
import pyspark
from pyspark.sql import DataFrame, SparkSession
from pyspark.sql.types import StructType


class Spark:
    """Helper for creating a SparkSession and building DataFrames from it.

    When ``HOST`` is ``'localhost'`` the local Spark distribution at
    ``PATH_SPARK_BIN`` is registered via ``findspark`` and the session uses
    Spark's default master; otherwise ``HOST`` is passed as the master URL
    and ``findspark`` is skipped (assumes a remote/cluster deployment where
    Spark is already on the path — TODO confirm).
    """

    # Local Spark installation used by findspark when running on localhost.
    PATH_SPARK_BIN: str = r'/home/alex/spark-3.3.1-bin-hadoop3'
    # Spark master URL; the sentinel 'localhost' selects the findspark path.
    HOST: str = 'localhost'

    def create_session(self, name_application: str) -> SparkSession:
        """Create (or reuse) a Hive-enabled SparkSession.

        :param name_application: application name shown in the Spark UI.
        :return: the shared SparkSession (``getOrCreate`` semantics).
        """
        if self.HOST == 'localhost':
            # Make the local Spark install importable before building the session.
            findspark.init(self.PATH_SPARK_BIN)
            builder = SparkSession.builder
        else:
            builder = SparkSession.builder.master(self.HOST)
        # Common tail of the builder chain, shared by both branches.
        return (builder
                .appName(name_application)
                .enableHiveSupport()
                .getOrCreate())

    def create_df(self, session: SparkSession, data: list,
                  schema: StructType) -> DataFrame:
        """Build a DataFrame from in-memory rows and an explicit schema.

        :param session: session to create the DataFrame with.
        :param data: list of rows (tuples/Rows) matching ``schema``.
        :param schema: StructType describing the column names and types.
        :return: the resulting Spark DataFrame.
        """
        return session.createDataFrame(data, schema)
