import os

from Modules.mod_spark import Spark
from Modules.mod_psql import PSQLDB
from Modules.mod_logging import Logging
from Task_1.Task_1_1.modules.module_insert_data import ImportFromDB


def insert_or_update_db(data: list, name_table: str, name_schema: str, pk):
    """
    Upsert a list of rows into ``name_schema.name_table``.

    Each row is inserted with an ``ON CONFLICT (pk) DO UPDATE`` clause so that
    rows matching on the primary-key columns are overwritten with the new
    values (via ``EXCLUDED``).

    :param data: list of dictionaries, one per row; all dicts are expected to
                 share the key set of the first element (those keys become the
                 column list)
    :param name_table: target table name
    :param name_schema: target schema name
    :param pk: iterable of primary-key column names for the conflict clause
    :return: None
    """
    if not data:
        # Nothing to insert; also guards the data[0] access below.
        return
    db = PSQLDB()  # reuse a single wrapper instead of one per operation
    connect_db = db.connection()
    try:
        columns: list = list(data[0].keys())
        exclude: str = ', '.join('EXCLUDED.' + h for h in columns)
        for row in data:
            values_insert: list = []
            # Read each value by column name so the VALUES tuple always lines
            # up with the column list, regardless of dict insertion order.
            for column in columns:
                value = row[column]
                if value is None:
                    values_insert.append('null')
                else:
                    # NOTE(review): values are interpolated into the SQL text;
                    # escape single quotes so legal data cannot break the
                    # statement. For untrusted input the proper fix is a
                    # parameterized query through the PSQLDB API.
                    escaped = str(value).replace("'", "''")
                    values_insert.append(f"'{escaped}'")
            query: str = f"""insert into {name_schema}.{name_table}({', '.join(columns)})
                            values
                            ({', '.join(values_insert)})
                            on conflict({', '.join(pk)})
                            do update set({', '.join(columns)}) = ({exclude});"""
            err = db.insert(connect=connect_db, sql=query)
            if err == 0:
                Logging().write_log_to_file(f"{name_schema}.{name_table}: {values_insert}", 'info')
            else:
                Logging().write_log_to_file(f"{name_schema}.{name_table}: {values_insert}", 'error')
    finally:
        # Release the connection even when an insert raises.
        db.exit_db(connect_db)


class Delta:
    def __init__(self, path: str):
        """
        :param path: path to the directory containing delta sub-directories.
        A Spark session (named 'DELTA') is created automatically.
        """
        self.name_schema: str = 'mirror'
        self.path = path
        self.session = Spark().create_session('DELTA')

    def _jdbc_table(self, name_table: str):
        """Return a Spark DataFrame reading ``self.name_schema.name_table`` over JDBC."""
        db = PSQLDB()
        return self.session.read.format('jdbc'). \
            option("url", f"jdbc:postgresql://{db.HOST}:5432/{db.DATABASE}"). \
            option("dbtable", f"{self.name_schema}.{name_table}"). \
            option("user", "postgres"). \
            option("password", db.PASSWORD). \
            option("driver", "org.postgresql.Driver"). \
            load()

    def upload_pgsql(self, name_table: str, primary_keys: list = None):
        """
        Upload delta CSV files for ``name_table`` into PostgreSQL.

        Walks the delta directories under ``self.path``, skips the ones whose
        id is already recorded in the ``LOG`` table, upserts rows not yet
        present in the mirror table, and finally dumps the resulting table to
        a CSV snapshot next to the deltas.

        :param name_table: the name of the table to load the data into
        :param primary_keys: primary-key column names of the table; when None,
                             they are looked up via ``ImportFromDB.get_keys()``
        """
        # Check for the presence of the schema we need (may prompt the user).
        self.check_schema()
        # Previously loaded deltas are recorded by id in <schema>.LOG.
        history: list = self._jdbc_table('LOG').collect()
        ids_history: list = [str(row['id']) for row in history]
        for _id in list(os.walk(self.path))[0][1]:
            if _id in ids_history:
                continue
            postgresql = self._jdbc_table(name_table)
            read = self.session.read.options(header=True, delimiter=';').csv(
                f"{self.path}/{_id}/{name_table}.csv")
            # Lower-case column names on both sides so subtract() compares the
            # same columns regardless of CSV header casing.
            lower_column: dict = {h[0].lower(): h[0] for h in read.dtypes}
            read = read.withColumns(lower_column)
            postgresql = postgresql.withColumns(lower_column)
            unique = read.subtract(postgresql)
            if primary_keys is None:
                primary_keys = ImportFromDB(self.name_schema).get_keys()[f"{self.name_schema}.{name_table}"]
            # Collect once (the original collected twice, running the Spark
            # job a second time just to re-read the same rows).
            rows = unique.collect()
            if rows:
                insert_or_update_db([r.asDict() for r in rows],
                                    name_table,
                                    self.name_schema,
                                    primary_keys)
                # Record the applied delta so it is skipped on the next run.
                con = PSQLDB().connection()
                Logging().set_log(con, f"{_id};{name_table}", 'inserted', self.name_schema)
                PSQLDB().exit_db(con)
        # Save the up-to-date mirror table to a csv file.
        self._jdbc_table(name_table).toPandas(). \
            to_csv(path_or_buf=f"{self.path}/mirror_{name_table}.csv", sep=";", index=False)

    def check_schema(self) -> bool:
        """
        Verify that ``self.name_schema`` exists in the database; while it does
        not, prompt the user for another schema name (overwriting
        ``self.name_schema``).

        :return: True once an existing schema name is set
        """
        while bool(PSQLDB(). \
                           select_db(
            f"""select exists (select * from pg_catalog.pg_namespace where nspname = '{self.name_schema}');""")[0][
                       0]) is False:
            self.name_schema = input('Please provide schema name: ')
        return True
