from abc import ABC

import pymysql
from common.utils.format_util import py_to_java
from config.config import *
from common.database.abs_database import AbsDataBase
from common.log.log_handler import LogHandler
from urllib.parse import quote_plus as urlquote
import json
import pandas as pd
from sqlalchemy import create_engine

# Shared module-level log handler (project-local LogHandler); presumably
# `logger.mysql(level, msg)` routes to a MySQL-specific log channel — TODO confirm.
logger = LogHandler()


class MysqlDB(AbsDataBase, ABC):
    """MySQL-backed implementation of AbsDataBase.

    Opens a fresh pymysql connection per operation (no real pooling despite
    the ``pool`` property, which is kept for the AbsDataBase contract) and
    provides helpers that return query results as pandas DataFrames and
    update ``model``/``task`` records.

    Errors are logged, never raised: ``execute_query`` returns ``None`` on
    failure and ``execute_update`` fails silently apart from the log entry.
    """

    # Class-level slot shared by all instances; unused by the per-call
    # connection helpers below but exposed via the pool property.
    _pool = None

    @property
    def pool(self):
        return MysqlDB._pool

    @pool.setter
    def pool(self, pool):
        MysqlDB._pool = pool

    def __init__(self):
        pass

    def _get_connection(self):
        """Open and return a new pymysql connection using config constants."""
        return pymysql.connect(db=MYSQL_DBNAME,
                               user=MYSQL_USER,
                               password=MYSQL_PASSWORD,
                               host=MYSQL_HOST,
                               port=int(MYSQL_PORT))

    def _recycle_connection(self, connection):
        """Close a connection when the caller is done with it.

        Fixed: the original created a brand-new cursor only to close it
        (``connection.cursor().close()``) — a pointless allocation; closing
        the connection alone releases all its resources.
        """
        connection.close()

    def close_connection(self):
        pass

    def execute_query(self, sql, params=None):
        """Run a SELECT and return the full result set as a DataFrame.

        :param sql: query text; use ``%s`` placeholders for values.
        :param params: optional sequence/tuple bound by pymysql — prefer this
            over string-formatting values into ``sql`` (SQL injection risk).
        :return: ``pd.DataFrame`` with the cursor's column names, or ``None``
            when execution fails (the error is logged, not raised).
        """
        conn = self._get_connection()
        cursor = conn.cursor()
        df = None
        try:
            cursor.execute(sql, params)
            rows = cursor.fetchall()
            columns = [desc[0] for desc in cursor.description]
            df = pd.DataFrame(rows, columns=columns)
        except Exception as error:
            logger.mysql(logger.ERROR, f"error while executing sql: {error}")
        finally:
            cursor.close()
            conn.close()
        return df

    def execute_update(self, sql, params=None):
        """Run an INSERT/UPDATE/DELETE and commit.

        :param sql: statement text; use ``%s`` placeholders for values.
        :param params: optional sequence/tuple bound by pymysql.
        Failures are logged and swallowed; the connection is always closed.
        """
        conn = self._get_connection()
        cursor = conn.cursor()
        logger.mysql(logger.INFO, "going to execute sql -> " + sql)
        try:
            cursor.execute(sql, params)
            conn.commit()
        except Exception as error:
            logger.mysql(logger.ERROR, f"error while executing sql: {error}")
        finally:
            cursor.close()
            conn.close()

    def save_dataframe(self, table_name, df):
        """Write ``df`` to ``table_name``, replacing the table if it exists.

        Uses a throwaway SQLAlchemy engine; the password is URL-quoted so
        special characters survive the connection URL. Errors are logged.
        """
        connect_url = f'mysql+pymysql://{MYSQL_USER}:{urlquote(MYSQL_PASSWORD)}@{MYSQL_HOST}:{MYSQL_PORT}/{MYSQL_DBNAME}?charset=utf8'
        mysql_connect = create_engine(connect_url)
        try:
            # DataFrame.to_sql replaces the deprecated pd.io.sql.to_sql alias.
            df.to_sql(table_name, mysql_connect, if_exists="replace")
            logger.mysql(logger.INFO, f"uploaded table: {table_name}")
        except Exception as error:
            logger.mysql(logger.ERROR, f"error while saving dataframe into mysql: {error}")

    def query_by_model_id(self, model_id):
        """Return the last matching row of ``model`` for ``model_id`` as a Series.

        Raises AttributeError if the query itself fails (execute_query
        returns None) — unchanged from the original contract.
        """
        return self.execute_query("select * from model where id = %s",
                                  (model_id,)).iloc[-1]

    def get_task_data_json(self, task_id):
        """Fetch the ``data_json`` column of the last matching task row,
        decoded from its JSON string into a Python object."""
        series = self.execute_query("select * from task where id = %s",
                                    (task_id,)).iloc[-1]
        return json.loads(series["data_json"])

    def get_progress_id(self, model_id):
        """Return the ``progress_id`` of the first matching model row.

        Fixed: the original ``df[0][0]`` raised KeyError because the result
        column is named ``progress_id``, not ``0``; positional ``iloc`` is
        what was intended.
        """
        df = self.execute_query("select progress_id from model where id = %s",
                                (model_id,))
        return df.iloc[0, 0]

    def update_record(self, model_id, record, table):
        """Update columns of ``table`` row ``model_id`` from a dict.

        Values are bound as placeholders (previously they were quoted into
        the SQL string — an injection risk); column and table names cannot
        be parameterized and are still interpolated, so they must come from
        trusted code, not user input.
        """
        assignments = ",".join("{}=%s".format(column) for column in record)
        sql = "update {} set {} where id=%s ".format(table, assignments)
        self.execute_update(sql, list(record.values()) + [model_id])

    def update_task(self, data_json, task_id):
        """Serialize ``data_json`` via py_to_java and store it on the task row.

        The payload is bound as a placeholder instead of being hand-quoted
        into the statement, so embedded quotes no longer break the SQL.
        """
        data = py_to_java(str(data_json))
        self.execute_update("update task set data_json=%s where id=%s ",
                            (data, task_id))
