import base64
import configparser
import json
import os
import sys
from collections import Counter
from io import BytesIO
from typing import List, Tuple, Dict, Any, Union, Optional

from langchain.text_splitter import RecursiveCharacterTextSplitter
from PIL import Image
from sqlalchemy import create_engine
from sqlalchemy import text

from config import DATABASES
from config import DATASET_DIR
from database_init import initialize_database
from database_schema import DatabaseSchema

def get_database_connection(
        database_name: str,
    ):
    """ Get a SQLAlchemy AUTOCOMMIT connection to the given PostgreSQL database.

    Credentials (HOST/PORT/USER_NAME/PASSWD) are read from the [postgresql]
    section of `db_config.ini` in the current working directory.

    @param:
        database_name: str, database name
    @return:
        an open SQLAlchemy Connection in AUTOCOMMIT mode
    @raise:
        Exception: re-raised when the connection attempt fails. (Previously
        the failure was swallowed and the function crashed with NameError on
        the unbound `conn` instead.)
    """
    config = configparser.ConfigParser()
    config.read('db_config.ini')

    DB_NAME = database_name
    HOST = config['postgresql']['HOST']
    PORT = config['postgresql']['PORT']
    USER_NAME = config['postgresql']['USER_NAME']
    PASSWD = config['postgresql']['PASSWD']

    # Connect via the psycopg2 driver.
    conn_string = f'postgresql+psycopg2://{USER_NAME}:{PASSWD}@{HOST}:{PORT}/{DB_NAME}'
    db = create_engine(conn_string, client_encoding='utf8')
    try:
        # AUTOCOMMIT is required from SQLAlchemy v2.0 so statements are
        # committed without an explicit COMMIT.
        # See https://docs.sqlalchemy.org/en/20/core/connections.html
        db_auto = db.execution_options(isolation_level="AUTOCOMMIT")
        conn = db_auto.connect()
        print("connection to '%s'@'%s' success!" % (DB_NAME, HOST))
    except Exception as e:
        print("connection to '%s'@'%s' failed." % (DB_NAME, HOST))
        print(e)
        raise  # surface the failure instead of returning an unbound name

    return conn

class DatabasePopulation():
    """ Populate the database and vectorstore with real data.

    Wraps a `DatabaseSchema` plus a live PostgreSQL connection and loads the
    per-database JSON/image assets found under `DATASET_DIR` into the
    corresponding tables.
    """
    def __init__(self,
                 database: Optional[str] = None,
                 database_path: Optional[str] = None,
                 connect_to_db: bool = True,
                 from_scratch: bool = False
        ) -> None:
        """ Initialize the database/vectorstore population object.
        @param:
            database: str, target database name, required
            database_path: str, unused placeholder kept for interface parity
            connect_to_db: bool, open a live DB connection and load the schema
            from_scratch: bool, (re-)initialize the tables before populating
        """
        super(DatabasePopulation, self).__init__()
        assert database is not None, "Database must be provided."
        self.database = database
        self.database_schema: Optional["DatabaseSchema"] = DatabaseSchema(self.database) if connect_to_db else None
        self.database_conn = get_database_connection(self.database) if connect_to_db else None
        # `initialize_database` comes from `database_init` (see imports).
        if from_scratch and connect_to_db:
            initialize_database(self.database_conn, self.database_schema)

    def close(self):
        """ Close the opened DB connection for safety."""
        if self.database_conn is not None and hasattr(self.database_conn, 'close'):
            self.database_conn.close()

    @staticmethod
    def get_database_connection(database_name: str):
        """ Class-level alias for the module-level `get_database_connection`.

        Declared @staticmethod because the original definition had no `self`
        parameter: it only worked when called on the class, and an instance
        call would have passed the instance as `database_name`. Delegating
        also removes a near-verbatim copy of the module-level function.

        @param:
            database_name: str, database name
        @return:
            an open SQLAlchemy Connection in AUTOCOMMIT mode
        """
        return get_database_connection(database_name)

    def populate(self,
            on_conflict: str = 'ignore',
            verbose: bool = False
        ):
        """ Populate the database with data according to the config.

        Reads `<database>_tables.json` from the dataset directory, attaches
        any matching images under `tables_image/` (base64-encoded), and
        inserts the rows into the `tables` table.

        @param:
            on_conflict: str, conflict policy forwarded to `get_insert_sql`
                ('raise' / 'ignore' / 'replace')
            verbose: bool, print the gathered values and the generated SQL
                (previously these were printed unconditionally)
        """
        root = os.path.join(DATASET_DIR, self.database, "tables_image")
        for table in self.database_schema.database_schema["database_schema"]:
            table_name = table['table']['table_name']
            columns = [col['column_name'] for col in table['columns']]
            if table_name != 'tables':
                continue
            json_path = os.path.join(DATASET_DIR, self.database, f"{self.database}_tables.json")
            with open(json_path, 'r', encoding="utf-8") as fp:
                schema = json.load(fp)
            values = []
            for i, entry in enumerate(schema):
                images = self._collect_table_images(root, entry['table_title'])
                values.append([i, entry["table_content"], entry['table_title'],
                               entry['table_description'], entry['table_use'], images])
            insert_sql = self.get_insert_sql(
                values=values,
                table_name=table_name,
                columns=columns,
                on_conflict=on_conflict
            )
            if verbose:
                print(values)
                print(insert_sql)
            self.insert_values_to_database(insert_sql=insert_sql, table_name=table_name,
                                           values=values, verbose=verbose, columns=columns)
        return

    def _collect_table_images(self, root: str, table_title: str) -> List[str]:
        """ Collect base64-encoded images under `root` whose filename contains
        the (slash-stripped) table title.

        NOTE(review): the needle keeps its original case while filenames are
        lowercased, so titles containing uppercase letters never match —
        confirm the JSON titles are lowercase.
        """
        needle = table_title.replace("/", "")
        encoded: List[str] = []
        for dirpath, _, files in os.walk(root):
            for fname in files:
                lower = fname.lower()
                if needle in lower and lower.endswith((".png", ".jpg", ".jpeg")):
                    with open(os.path.join(dirpath, fname), "rb") as img:
                        encoded.append(base64.b64encode(img.read()).decode("utf-8"))
        return encoded

    def _validate_insert_sql_arguments(self, table_name: str, column_names: List[str], values: List[List[Any]]) -> None:
        """ Validate that the table exists, values are non-empty, and the
        column list matches both the value width and the table schema.
        """
        assert table_name in self.database_schema.tables, f"Table {table_name} not found in the database schema of {self.database}."
        assert len(values) > 0, "Values must contain at least one row."
        assert len(column_names) == len(values[0]), f"Column names and values must have the same length, but got {len(column_names)} columns and {len(values[0])} values."
        columns = self.database_schema.table2column(table_name)
        assert all([col in columns for col in column_names]), f"Column names must be in the table {table_name}, but got {column_names}."
        return

    def get_insert_sql(
            self,
            values: List[List[Any]],
            table_name: str,
            columns: Optional[List[str]] = None,
            on_conflict: str = 'ignore'
        ) -> str:
        """ Given the table name, columns and values, return a parameterized
        INSERT INTO SQL statement (PostgreSQL ON CONFLICT syntax).
        @param:
            `values`: List[List[Any]], values, num_rows x num_columns, please use 2-dim List even with a single value, required
            `table_name`: str, table name, which table to insert, required
            `columns`: List[str], column names, optional, if not provided, insert all columns of the current table in the database
                (default changed from a shared mutable `[]` to None; behavior is identical)
            `on_conflict`: str, ON CONFLICT clause when primary key conflicts occur, default 'ignore',
                chosen from 'raise' (no clause, let the violation propagate — previously crashed
                with NameError), 'ignore' (DO NOTHING), 'replace' (DO UPDATE of all non-PK
                columns — previously identical to 'ignore')
        @return:
            ```sql
                INSERT INTO table_name (column1, column2, ...)
                VALUES
                (:column1, :column2, ...)
                [ON CONFLICT (pk) DO NOTHING / DO UPDATE SET ...];
            ```
        """
        assert on_conflict in ['raise', 'ignore', 'replace'], f"on_conflict argument must be chosen from 'raise', 'ignore', 'replace', but got {on_conflict}."
        assert table_name in self.database_schema.tables, f"Table {table_name} not found in the database schema of {self.database}."

        if not columns:
            columns = self.database_schema.table2column(table_name)
        self._validate_insert_sql_arguments(table_name, columns, values)
        pk = self.database_schema.get_primary_keys(table_name)
        # Note: the inserted values must strictly follow the order of the columns.
        if on_conflict == 'ignore':
            conflict_str = f"ON CONFLICT ({pk[0]}) DO NOTHING"
        elif on_conflict == 'replace':
            # Upsert: overwrite every non-PK column with the incoming row.
            updates = ', '.join(f"{col} = EXCLUDED.{col}" for col in columns if col not in pk)
            if updates:
                conflict_str = f"ON CONFLICT ({pk[0]}) DO UPDATE SET {updates}"
            else:  # only PK columns inserted: nothing to update
                conflict_str = f"ON CONFLICT ({pk[0]}) DO NOTHING"
        else:  # 'raise': no clause, a PK violation raises in the driver
            conflict_str = ""

        column_str = ', '.join(columns)
        value_str = ', '.join(f':{col}' for col in columns)
        insert_sql = f"INSERT INTO {table_name} ({column_str})\nVALUES\n({value_str})\n{conflict_str};"
        return insert_sql

    def insert_values_to_database(self, insert_sql: str, table_name: str, values: List[List[Any]], verbose: bool = False, columns: Optional[List[str]] = None) -> None:
        """ Insert parsed cell values into the database.
        @param:
            insert_sql: str, parameterized INSERT statement (named :col params)
            table_name: str, target table
            values: List[List[Any]], rows in the same column order as the SQL
            verbose: bool, print the bound parameter dicts before executing
            columns: List[str], column order matching `values`; defaults to the
                full schema column list (the previous behavior, which silently
                misaligned rows when the INSERT used a column subset)
        """
        if columns is None:
            columns = self.database_schema.table2column(table_name)
        data_dicts = [dict(zip(columns, row)) for row in values]
        if verbose:
            print(data_dicts)
        try:
            self.database_conn.execute(text(insert_sql), data_dicts)
        except Exception as e:
            # Best-effort: report and continue rather than abort population.
            print(f"Error inserting values into database {self.database}: {e}")
        return
if __name__ == "__main__":
    data = DatabasePopulation(database="DHF_reservoir", from_scratch=True)
    data.populate(on_conflict='ignore', verbose=True)