from abc import ABC

from common.database.abs_database import AbsDataBase
from common.log.log_handler import LogHandler
from config.config import *

import io
import pandas as pd
import psycopg2.pool

# Module-level log handler shared by everything in this module.
logger = LogHandler()


class GreenplumDB(AbsDataBase, ABC):
    """Greenplum access layer backed by a process-wide psycopg2 connection pool.

    The pool lives on the class (``GreenplumDB._pool``) so every instance
    shares the same connections; it is created lazily by the first
    ``__init__`` using the GP_* settings from ``config.config``.
    """

    # Shared connection pool; created on first instantiation.
    _pool = None

    @property
    def pool(self):
        """Class-wide connection pool (``None`` until first ``__init__``)."""
        return GreenplumDB._pool

    @pool.setter
    def pool(self, pool):
        GreenplumDB._pool = pool

    def __init__(self):
        # Lazily create the shared pool exactly once per process.
        if GreenplumDB._pool is None:
            logger.gp(logger.INFO, "start to init greenplum connection pool.")
            try:
                GreenplumDB.pool = psycopg2.pool.SimpleConnectionPool(
                    GP_MIN_CONN, GP_MAX_CONN,
                    user=GP_USER,
                    password=GP_PASSWORD,
                    host=GP_HOST,
                    port=GP_PORT,
                    database=GP_DBNAME)
                logger.gp(logger.INFO, "greenplum connection pool created successfully.")
            except (Exception, psycopg2.DatabaseError) as error:
                # Fixed: log through the same logger.gp(level, msg) channel as
                # the rest of this class (was logger.error(...)).
                logger.gp(logger.ERROR, "failed to connect greenplum.")
                raise error

    def _get_connection(self):
        """Borrow a connection from the shared pool."""
        return GreenplumDB.pool.getconn()

    def _recycle_connection(self, connection):
        """Return a borrowed connection to the pool."""
        return GreenplumDB.pool.putconn(connection)

    def close_connection(self):
        """No-op: pooled connections are recycled per call, never closed here."""
        pass

    def execute_query(self, sql):
        """Run *sql* and return the result as a DataFrame, or ``None`` on error.

        NOTE(review): *sql* is executed verbatim — callers must not build it
        from untrusted input.
        """
        df = None
        connection = None
        try:
            connection = self._get_connection()
            if connection:
                df = pd.read_sql(sql, connection)
        except (Exception, psycopg2.DatabaseError) as error:
            # Best-effort: log and fall through, returning None.
            logger.gp(logger.ERROR, "Error while connecting to PostgreSQL")
        finally:
            # Fixed: only recycle when a connection was actually borrowed.
            # putconn(None) raises inside the finally block and would mask
            # the original failure.
            if connection:
                self._recycle_connection(connection)
                logger.gp(logger.INFO, "recycled a greenplum connection.")
        return df

    def execute_update(self, sql):
        pass

    def save_dataframe(self, table_name, df):
        """Replace *table_name* with the contents of DataFrame *df* via COPY.

        The target table is dropped and re-created from the frame's dtypes
        (float -> numeric(30,8), int -> BIGINT, anything else -> varchar).
        Returns 1 (legacy success flag) regardless of outcome; errors are
        logged, not raised.
        """
        connection = None
        cursor = None
        try:
            connection = self._get_connection()
            cursor = connection.cursor()
            cursor.execute("drop table if exists {}".format(table_name))

            # Map pandas dtypes onto Greenplum column types.
            dtypes = []
            for dtype in df.dtypes:
                if dtype == float:
                    dtypes.append("numeric(30,8)")
                elif dtype == int:
                    dtypes.append("BIGINT")
                else:
                    dtypes.append("varchar")
            columns = ["\"" + column + "\"" + " " + dtype
                       for column, dtype in zip(df.columns, dtypes)]
            cursor.execute("create table {}({})".format(table_name, ",".join(columns)))

            # Stream the frame through COPY — one round trip instead of
            # per-row INSERTs.
            data_io = io.StringIO()
            df.to_csv(data_io, sep="|", index=False)
            data_io.seek(0)
            copy_cmd = "COPY %s FROM STDIN HEADER DELIMITER '|' CSV" % table_name
            cursor.copy_expert(copy_cmd, data_io)

            connection.commit()
            logger.gp(logger.INFO, "put away a greenplum connection. output df saved.")
        except (Exception, psycopg2.DatabaseError) as error:
            # Fixed: roll back the aborted transaction so the connection goes
            # back to the pool clean (otherwise the next borrower inherits an
            # "in failed transaction" state), and log the actual error.
            if connection:
                connection.rollback()
            logger.gp(logger.ERROR,
                      "error while saving dataframe into greenplum. {}".format(error))
        finally:
            if cursor:
                cursor.close()
            if connection:
                self._recycle_connection(connection)
            logger.gp(logger.INFO, "recycled a greenplum connection.")

        return 1

    def query_column_name(self, table_name):
        """Return the column names of schema-qualified *table_name* as a list.

        Fixed: the original indexed ``.values`` (a numpy ndarray) with the
        label ``'column_name'``, which raises; index the DataFrame itself.
        Returns [] when the metadata query fails (execute_query gave None).
        """
        schema, table = table_name.split('.')
        df = self.execute_query(
            "select column_name from information_schema.columns "
            "where table_name='{}' and table_schema='{}'".format(table, schema))
        if df is None:
            return []
        return df['column_name'].tolist()
