import logging
import os
import sys
import pandas as pd
import sqlalchemy
import bz2
import shutil
import configparser
from sqlalchemy import Integer, MetaData, PrimaryKeyConstraint, Table, Column, String, Double, TIMESTAMP, UniqueConstraint, inspect
from sqlalchemy.dialects.postgresql import insert
from sqlalchemy.dialects.mysql import Insert
from sqlalchemy.orm import sessionmaker

def unzip_bz2(bz2_file, output_file):
    """Decompress *bz2_file* and write the raw bytes to *output_file*.

    Streams via shutil.copyfileobj, so arbitrarily large archives are
    handled without loading them fully into memory.
    """
    with bz2.open(bz2_file, 'rb') as src, open(output_file, 'wb') as dst:
        shutil.copyfileobj(src, dst)

def read_fwf(fwf_path, station_id, inst_id):
    """Load a fixed-width instrument file and prepend identification columns.

    The header row is skipped and column names are ignored entirely, because
    the P1301 bzip2 export carries a malformed last column name.  Three
    columns are inserted at the front: STATION_ID, INST_ID and TS, where TS
    is built from the file's local date (col 0, %m/%d/%y) and time
    (col 1, %H:%M:%S.%f), interpreted as UTC+08:00 and converted to UTC,
    floored to whole seconds.
    """
    frame = pd.read_fwf(fwf_path, header=None, skiprows=[0])
    frame.insert(0, column='STATION_ID', value=station_id)
    frame.insert(1, column='INST_ID', value=inst_id)
    # After the two inserts, positional columns 2 and 3 are the original
    # date and time fields; tag them with the +08:00 offset and normalise.
    local_stamp = frame.iloc[:, 2].astype(str) + ' ' + frame.iloc[:, 3].astype(str) + ' +08:00'
    utc_ts = pd.to_datetime(local_stamp, format='%m/%d/%y %H:%M:%S.%f %z', utc=True)
    frame.insert(2, column='TS', value=utc_ts.apply(lambda t: t.floor('s')))
    return frame

def transfer_2401(df, type):
    """Re-map a raw instrument frame onto the rdt_2401 table column layout.

    Parameters
    ----------
    df : pandas.DataFrame
        Frame produced by read_fwf(): 'STATION_ID', 'INST_ID', 'TS' followed
        by the instrument's positional columns (integer labels 0, 1, ...).
    type : str
        Instrument model, '1301' (CO2/CH4) or '1302' (CO2/CO); decides which
        positional columns carry the gas readings.  (The name shadows the
        builtin but is kept for backward compatibility with callers.)

    Returns
    -------
    pandas.DataFrame
        Only the rows whose solenoid_valves value is 0, 2 or 3.

    Raises
    ------
    ValueError
        If *type* is not a supported model.  Previously an unknown value
        silently produced a frame missing the gas columns.
    """
    if type not in ('1301', '1302'):
        raise ValueError(f"unsupported instrument type: {type!r}")

    df_2401 = pd.DataFrame()
    # Identification / timing columns shared by both instrument models.
    df_2401['STATION_ID'] = df['STATION_ID']
    df_2401['INST_ID'] = df['INST_ID']
    df_2401['TS'] = df['TS']
    df_2401['DATE'] = df[0]
    df_2401['TIME'] = df[1]
    df_2401['FRAC_DAYS_SINCE_JAN1'] = df[2]
    # Fields absent from the fixed-width export are stored as NULL.
    df_2401['FRAC_HRS_SINCE_JAN1'] = None
    df_2401['JULIAN_DAYS'] = None
    df_2401['EPOCH_TIME'] = None
    df_2401['ALARM_STATUS'] = None
    df_2401['INST_STATUS'] = None
    df_2401['CavityPressure'] = df[4]
    df_2401['CavityTemp'] = df[3]
    df_2401['DasTemp'] = df[5]
    df_2401['EtalonTemp'] = df[6]
    df_2401['WarmBoxTemp'] = None
    df_2401['species'] = None
    df_2401['MPVPosition'] = df[11]
    df_2401['OutletValve'] = df[9]
    df_2401['solenoid_valves'] = df[10]
    if type == '1301':
        # Model 1301: CO2/CH4 analyser — no CO channel.
        df_2401['CO'] = None
        df_2401['CO2'] = df[14]
        df_2401['CO2_dry'] = df[15]
        df_2401['CH4'] = df[20]
        df_2401['CH4_dry'] = df[16]
        df_2401['peak84_raw'] = None
        df_2401['b_h2o_pct'] = None
        df_2401['H2O'] = df[17]
    elif type == '1302':
        # Model 1302: CO2/CO analyser — no CH4 channel.
        df_2401['CO'] = df[16]
        df_2401['CO2'] = df[14]
        df_2401['CO2_dry'] = df[15]
        df_2401['CH4'] = None
        df_2401['CH4_dry'] = None
        df_2401['H2O'] = df[20]
        df_2401['b_h2o_pct'] = df[57]
        df_2401['peak84_raw'] = df[54]

    df_2401['h2o_reported'] = None
    df_2401['peak_14'] = None

    # Keep only measurement rows: solenoid valve positions 0, 2 and 3.
    filtered_df = df_2401[df_2401['solenoid_valves'].isin([0, 2, 3])]
    return filtered_df


def create_2401Table(bind=None):
    """Define and create the ``rdt_2401`` table, returning its Table object.

    Parameters
    ----------
    bind : optional
        SQLAlchemy engine/connection to create the table against.  Defaults
        to the module-level ``engine`` (set up in ``__main__``) so existing
        zero-argument callers keep working.

    Returns
    -------
    sqlalchemy.Table
        The table definition, including the (STATION_ID, INST_ID, TS)
        uniqueness constraint ``rdt2401_uix`` used for upserts.
    """
    metadata = MetaData()

    # Define the table structure.
    data_table = Table('rdt_2401', metadata,
                       Column('id', Integer, primary_key=True, autoincrement=True),
                       Column('STATION_ID', String(10)),
                       Column('INST_ID', String(10)),
                       Column('TS', TIMESTAMP(timezone=True)),
                       Column('DATE', String(16)),
                       Column('TIME', String(16)),
                       Column('FRAC_DAYS_SINCE_JAN1', Double),
                       Column('FRAC_HRS_SINCE_JAN1', Double),
                       Column('JULIAN_DAYS', Double),
                       Column('EPOCH_TIME', Double),
                       Column('ALARM_STATUS', Double),
                       Column('INST_STATUS', Double),
                       Column('CavityPressure', Double),
                       Column('CavityTemp', Double),
                       Column('DasTemp', Double),
                       Column('EtalonTemp', Double),
                       Column('WarmBoxTemp', Double),
                       Column('species', Double),
                       Column('MPVPosition', Double),
                       Column('OutletValve', Double),
                       Column('solenoid_valves', Double),
                       Column('CO', Double),
                       Column('CO2', Double),
                       Column('CO2_dry', Double),
                       Column('CH4', Double),
                       Column('CH4_dry', Double),
                       Column('H2O', Double),
                       Column('h2o_reported', Double),
                       Column('b_h2o_pct', Double),
                       Column('peak_14', Double),
                       Column('peak84_raw', Double),
                       UniqueConstraint("STATION_ID", "INST_ID", "TS", name="rdt2401_uix"))
    # Create the table in the database (create_all is a no-op for tables
    # that already exist, so this is safe to call repeatedly).
    metadata.create_all(bind if bind is not None else engine)
    return data_table

def start(file_path):
    """Decompress one station bzip2 archive and convert it to the rdt_2401 layout.

    The station id, instrument id and gas type are parsed from fixed
    positions in the CMA-style file name, e.g.
    ``Z_CAWN_I_58448_20221104100000_O_GHG-FLD-CO2CO-CRDS-S024.bz2``.

    Returns the converted DataFrame, or ``None`` if the gas type is
    unrecognised or any step fails (the error is logged, not raised).
    """
    file_name = os.path.basename(file_path)
    # parse filename to get station_id, inst_id, gas_type (fixed positions)
    station_id = file_name[9:14]
    inst_id = file_name[51:55]
    gas_type = file_name.split('-')[-3]  # 'CO2CH4' or 'CO2CO'
    logger.info(f"get bzip2 info: {station_id}, {inst_id}, {gas_type}")
    try:
        logger.info(f"Starting decompress file:{file_path}")
        # NOTE(review): fixed scratch filename in the CWD — not safe if two
        # instances of this job run concurrently.
        output_file = "bz2_unzipped"
        unzip_bz2(file_path, output_file)
        logger.info(f"Decompress file successfully!, output file:{output_file}")

        df_fwf = read_fwf(output_file, station_id, inst_id)
        if gas_type == 'CO2CH4':
            t_df = transfer_2401(df_fwf, '1301')
        elif gas_type == 'CO2CO':
            t_df = transfer_2401(df_fwf, '1302')
        else:
            # Previously an unknown gas type left t_df unbound, so the
            # return raised UnboundLocalError (swallowed by the except).
            logger.warning(f"Unknown gas type {gas_type!r}; skipping {file_path}")
            return None
        logger.info(f"convert to 2401 df Done!")
        return t_df
    except Exception as e:
        logger.exception(f'An error occurred: {str(e)}')
        return None
        

if __name__ == "__main__":
    # ---- configuration ---------------------------------------------------
    config = configparser.ConfigParser()
    config.read('config.ini')
    # Extract log file path
    log_file_path = config['LOGGING']['log_file_path']

    # Configure logging
    logging.basicConfig(filename=log_file_path, level=logging.INFO)
    # Get logger instance (also used by start()/the helper functions above)
    logger = logging.getLogger(__name__)

    # Extract database connection parameters
    db_engine = config['DATABASE']['engine']
    db_host = config['DATABASE']['host']
    db_port = config['DATABASE']['port']
    db_name = config['DATABASE']['name']
    db_user = config['DATABASE']['user']
    db_password = config['DATABASE']['password']

    # Construct the database URL (db_engine is a SQLAlchemy dialect string,
    # e.g. 'postgresql' or 'mysql+pymysql' — see the upsert branch below)
    db_url = f'{db_engine}://{db_user}:{db_password}@{db_host}:{db_port}/{db_name}'

    # Create an engine and bind it to the database; create_2401Table() relies
    # on this module-level `engine` global.
    engine = sqlalchemy.create_engine(db_url)
    metadata = MetaData()
    inspector = inspect(engine)
    if (inspector.has_table('rdt_2401')):
        logger.info(f"Table rdt_2401 exists in the database.")
        # Reflect the existing table rather than redefining it.
        rdt_table = Table('rdt_2401', metadata, autoload_with=engine)
    else:
        rdt_table = create_2401Table()

    # NOTE(review): the bz2 path via start() is disabled; this branch reads
    # already-decompressed DataLog_User files directly instead.
    ##file_path = './1302/Z_CAWN_I_58448_20221104100000_O_GHG-FLD-CO2CO-CRDS-S024.bz2'
    #file_path = './1301/Z_CAWN_I_58448_20221016130000_O_GHG-FLD-CO2CH4-CRDS-S024.bz2'
    ##df_2401 = start(file_path)
    # Date argument like '2023-01-31' maps to directory ./DataLog_User/2023/01/31
    qdate = sys.argv[1]
    directory = './DataLog_User/'+qdate.replace("-", "/")
    print(directory)
    for filename in os.listdir(directory):
      file_path = os.path.join(directory, filename)
      if (os.path.isfile(file_path)):
        df_2401 = pd.read_fwf(file_path)
        # add 3 new columns
        # NOTE(review): station/instrument ids are hard-coded here, unlike
        # start() which parses them from the file name — confirm intended.
        station_id='58450'
        inst_id='S024'
        df_2401.insert(0, column='STATION_ID', value=station_id)
        df_2401.insert(1, column='INST_ID', value=inst_id)
        # convert datetime to UTC
        #postgres  t_ts = pd.to_datetime(df_2401.iloc[:, 2].astype(str) + ' ' + df_2401.iloc[:, 3].astype(str), format='%Y-%m-%d %H:%M:%S.%f', utc=True).apply(lambda x: x.floor('s'))
        # Appending 'Z' marks the strings as UTC, then the stamp is converted
        # to Asia/Shanghai and the tzinfo dropped, because MariaDB TIMESTAMP
        # columns store naive local times (Postgres variant kept above).
        t_ts = pd.to_datetime(df_2401.iloc[:, 2].astype(str) + ' ' + df_2401.iloc[:, 3].astype(str) +'Z') #mariadb
        t_ts = t_ts.dt.tz_convert('Asia/Shanghai').dt.tz_localize(None)
        df_2401.insert(2, column='TS', value=t_ts)

        # Create a session
        # NOTE(review): a new Session is created per file and never closed;
        # consider one sessionmaker outside the loop and `with Session() as s:`.
        Session = sessionmaker(bind=engine)
        session = Session()

        # Iterate over the DataFrame and upsert each row to the database
        # NOTE(review): one execute() per row — bulk insert of the whole
        # frame would be substantially faster for large files.
        for index, row in df_2401.iterrows():
            # Create a dictionary with column names and values
            data_dict = row.to_dict()

            # Create an upsert statement using SQLAlchemy's insert() function
            if db_engine == 'postgresql':
              # NOTE(review): constraint name 'rdt2401_pk' does not match the
              # UniqueConstraint 'rdt2401_uix' defined in create_2401Table();
              # verify against the live schema before relying on this path.
              stmt = insert(rdt_table).values(**data_dict).on_conflict_do_update(constraint='rdt2401_pk', set_=data_dict)
            elif db_engine == 'mysql+pymysql':
              stmt = Insert(rdt_table).values(**data_dict).on_duplicate_key_update(data_dict)
            # Execute the statement (stmt is unset for any other dialect —
            # a third db_engine value would raise NameError here)
            session.execute(stmt)
        session.commit()

    logger.info("save to db finished.")
