import logging
import os
import sys
import pandas as pd
import sqlalchemy
import shutil
import configparser
from sqlalchemy import Integer, MetaData, PrimaryKeyConstraint, Table, Column, String, Double, TIMESTAMP, UniqueConstraint, inspect
from sqlalchemy.dialects.postgresql import insert
from sqlalchemy.dialects.mysql import Insert
from sqlalchemy.orm import sessionmaker

if __name__ == "__main__":
    # Load runtime configuration (logging path, DB credentials) from config.ini.
    config = configparser.ConfigParser()
    config.read('config.ini')
    log_file_path = config['LOGGING']['log_file_path']

    # Configure logging to the file named in config.ini.
    logging.basicConfig(filename=log_file_path, level=logging.INFO)
    logger = logging.getLogger(__name__)

    # Database connection parameters.
    db_engine = config['DATABASE']['engine']
    db_host = config['DATABASE']['host']
    db_port = config['DATABASE']['port']
    db_name = config['DATABASE']['name']
    db_user = config['DATABASE']['user']
    db_password = config['DATABASE']['password']

    # Construct the database URL and bind an engine to it.
    db_url = f'{db_engine}://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}'
    engine = sqlalchemy.create_engine(db_url)
    metadata = MetaData()
    # Reflect the target table definition from the live database.
    five_ghg_table = Table('five_ghg_table3', metadata, autoload_with=engine)

    # FIX: guard the command-line argument instead of crashing with IndexError.
    # The script expects a query date, e.g. 2024-01-15, mapped to a directory
    # layout of ./DataLog_User/2024/01/15.
    if len(sys.argv) < 2:
        logger.error("usage: %s <YYYY-MM-DD>", sys.argv[0])
        sys.exit(1)
    qdate = sys.argv[1]
    directory = './DataLog_User/' + qdate.replace("-", "/")
    print(directory)

    # FIX: build the session factory once, outside the per-file loop,
    # instead of re-creating it for every file.
    Session = sessionmaker(bind=engine)

    for filename in os.listdir(directory):
        file_path = os.path.join(directory, filename)
        if not os.path.isfile(file_path):
            continue
        df_2401 = pd.read_fwf(file_path)
        # Convert UTC epoch seconds to naive 'Asia/Shanghai' local time.
        df_2401['DATE'] = (
            pd.to_datetime(df_2401['EPOCH_TIME'], unit='s', utc=True)
            .dt.tz_convert('Asia/Shanghai')
            .dt.tz_localize(None)
        )
        # Normalize instrument column names to the DB schema's names.
        df_2401 = df_2401.rename(columns={'CO2_dry': 'CO2_CORR', 'CH4_dry': 'CH4_CORR',
                                          'MPVPosition': 'MULTI_POS_VALVE',
                                          'solenoid_valves': 'SOLENOID_VALVES'})
        # Duplicate the valve state into THIS_VALUE (schema requires both).
        df_2401['THIS_VALUE'] = df_2401['SOLENOID_VALVES']
        # Use the UTC timestamp as a time-series index for resampling.
        df_2401['timestamp'] = pd.to_datetime(df_2401['EPOCH_TIME'], unit='s', utc=True)
        print(df_2401)
        print("+++++++++")
        df_2401 = df_2401.set_index('timestamp')
        # Resample to 5-minute bins ("5min" replaces the deprecated "5T" alias):
        # numeric columns average the LAST TWO samples of each bin
        # (explicit .iloc instead of fallback positional slicing).
        numeric_result = df_2401[["CO", "CO2", "CO2_CORR", "CH4", "CH4_CORR", "H2O"]] \
            .resample("5min").apply(lambda x: x.iloc[-2:].mean())
        # Non-numeric (text/state) columns keep the LAST value in each interval.
        non_numeric_result = df_2401[["DATE", "MULTI_POS_VALVE",
                                      "SOLENOID_VALVES", "THIS_VALUE"]].resample("5min").last()

        # Combine the numeric and non-numeric halves back into one frame.
        result = pd.concat([numeric_result, non_numeric_result], axis=1)
        # Tag every row with the station code.
        result['STATION_CODE'] = '58450'
        print(result)
        print("-----")

        # FIX: the session is now closed deterministically via the
        # SQLAlchemy 1.4+ context manager (the original leaked it).
        with Session() as session:
            # Upsert each 5-minute row: MySQL INSERT ... ON DUPLICATE KEY UPDATE.
            for index, row in result.iterrows():
                data_dict = row.to_dict()
                stmt = Insert(five_ghg_table).values(**data_dict).on_duplicate_key_update(data_dict)
                session.execute(stmt)
            session.commit()

    logger.info("save to five db finished.")
