#!/usr/bin/env python3

import os
import re
from urllib import parse
import argparse
import pandas as pd
import psycopg2
import psycopg2.extras
import pendulum
import concurrent.futures

def parse_time(string):
    """Parse a YYYYMMDD[HH[MM]] string into a pendulum datetime.

    Accepts exactly 8, 10 or 12 digits. Raises argparse.ArgumentTypeError
    on anything else so argparse can report a clean usage error instead of
    the AttributeError the old `re.match(...).group()` chain produced.
    """
    match = re.fullmatch(r'(\d{8})(\d{2})?(\d{2})?', string)
    if match is None:
        raise argparse.ArgumentTypeError(
            f'invalid time {string!r}, expected YYYYMMDD[HH[MM]]')
    if match.group(3):
        return pendulum.from_format(string, 'YYYYMMDDHHmm')
    if match.group(2):
        return pendulum.from_format(string, 'YYYYMMDDHH')
    return pendulum.from_format(string, 'YYYYMMDD')

def check_table(engine):
    """Check that this month's sounding table exists (creation disabled).

    Only derives the per-month table name from the module-level ``args``
    namespace; the SQLAlchemy-based table creation (id/time/stnid/lon/lat
    columns) that used to follow is currently switched off.
    """
    table_name = f"tb_n_sound_{args.time.format('YYYYMM')}"
    # NOTE(review): previously this inspected `engine` for the table and, if
    # missing, created it via SQLAlchemy MetaData/Table; that code is disabled.

def write_pg(df_stnid, tbl_name, tbl_data_name):
    """Write one station's observation rows to the database.

    Looks up (time, stnid) in the station table ``tbl_name``, inserting the
    station row first if it is missing, then bulk-inserts every observation
    row for that station into ``tbl_data_name`` keyed by the station's id.
    Relies on the module-level psycopg2 ``cur`` / ``conn`` handles.
    """
    df_stnid = df_stnid.reset_index()
    time_val = df_stnid.loc[0, 'time']
    stnid = str(df_stnid.loc[0, 'stnid'])

    # Table names come from a trusted internal format string; all data values
    # are passed as query parameters instead of being interpolated into the
    # SQL text, so quotes in station ids can no longer break the statement.
    select_sql = "SELECT * FROM {} WHERE time=%s and stnid=%s".format(tbl_name)
    cur.execute(select_sql, (time_val, stnid))
    row = cur.fetchone()
    if row is None:
        print(f"INSERT INTO {tbl_name}: time={time_val} stnid={stnid}")
        cur.execute(
            "INSERT INTO {} (time,stnid,lon,lat) VALUES (%s,%s,%s,%s)".format(tbl_name),
            (time_val, stnid, df_stnid.loc[0, 'lon'], df_stnid.loc[0, 'lat']))
        # Re-read to pick up the auto-generated station id.
        cur.execute(select_sql, (time_val, stnid))
        row = cur.fetchone()
    if row is None:
        print(f'[Warning]: failed {stnid}')
        return
    vendor_id = row[0]  # first column is the station table's generated id

    # Insert the new data rows; `miss` marks fields with no value available.
    print("INSERT data")
    miss = 0
    tuples = [(vendor_id, r['var'].lower(), miss, str(r['pres']), str(r['obs']),
               str(r['inv']), miss, miss, str(r['inc']), r['time'])
              for _, r in df_stnid.iterrows()]
    query = "INSERT INTO {} (id,var,lev,pres,obs,inv,qc,error,inc,time) VALUES %s".format(tbl_data_name)
    psycopg2.extras.execute_values(cur, query, tuples)

    conn.commit()

def run(df_type, tbl_type):
    """Replace this analysis time's rows for one observation type.

    Column layout of ``df_type`` (from the EFSO result file):
    'obs_type','var','stnid','lon','lat','pres','inv','obs','inc','time'
    Relies on the module-level ``args`` and psycopg2 ``cur`` handles.
    """
    tbl_name = "tb_n_{}_{}".format(tbl_type, args.time.format('YYYYMM'))
    tbl_data_name = "tb_n_{}data_{}".format(tbl_type, args.time.format('YYYYMM'))

    df_type = df_type.reset_index()
    time_val = df_type.loc[0, 'time']
    # Delete any rows previously written for this time; the time value is
    # parameterized rather than formatted into the SQL text.
    cur.execute("DELETE FROM {} WHERE time=%s".format(tbl_name), (time_val,))
    cur.execute("DELETE FROM {} WHERE time=%s".format(tbl_data_name), (time_val,))
    # Group by station id and write each station's profile.
    grouped_df = df_type.groupby('stnid')
    print(f'[Notice]: {tbl_type} station number: {len(grouped_df)}')
    # NOTE: the previous ProcessPoolExecutor(max_workers=1) provided no
    # parallelism while sharing the module-level psycopg2 connection across a
    # fork, which corrupts the connection protocol; write serially instead.
    for _name, group in grouped_df:
        write_pg(group, tbl_name, tbl_data_name)
 
if __name__ == '__main__':
    # Write EFSO observation-impact results into the PostgreSQL database.
    parser = argparse.ArgumentParser(description='write efso result to pg database...')
    parser.add_argument('-t', '--time', help='file time (YYYYMMDDHH[MM]).', type=parse_time, required=True)
    parser.add_argument('-i', '--input', help='EFSO result file.', required=True)
    parser.add_argument('-ip', help='Database ip address.', required=True)
    args = parser.parse_args()

    # Read impact file: whitespace-separated columns, no header row.
    type_dic = {'RAOB': 'sound', 'AMDAR': 'airep', 'PROFILER': 'profiler'}
    col_name = ['obs_type', 'var', 'stnid', 'lon', 'lat', 'pres', 'inv', 'obs', 'inc']
    df = pd.read_csv(args.input, names=col_name, sep=r'\s+')
    if df.empty:
        print(f'[Error]: Empty file {args.input}!')
        raise SystemExit(1)
    df['time'] = args.time.format('YYYYMMDDHH')
    # Keep levels in (100, 1000] hPa, then convert pressure hPa -> Pa.
    df = df[(df['pres'] > 100) & (df['pres'] <= 1000)]
    df['pres'] = df['pres'] * 100
    # Open database connection.
    # SECURITY(review): credentials are hard-coded in the source; move them to
    # environment variables or a config file outside version control.
    conn = psycopg2.connect(host=args.ip, database="efso", user="fso", password="fSO@2017")
    cur = conn.cursor()

    try:
        for obs_type in df['obs_type'].drop_duplicates():
            print(obs_type)
            if obs_type not in type_dic:
                # Previously an unmapped type raised a bare KeyError mid-run.
                print(f'[Warning]: unknown obs_type {obs_type}, skipped')
                continue
            df_type = df.query(f'obs_type == "{obs_type}"')
            run(df_type, type_dic[obs_type])
    finally:
        # Always release the cursor/connection, even if a write fails.
        cur.close()
        conn.close()
