import re
import time
import boto3

from framework.common.pt_logger import logger

# Shared boto3 handles for the whole module; they pick up credentials and
# region from the default session (env vars / ~/.aws config / instance role).
s3_resource = boto3.resource('s3')
athena = boto3.client('athena')
glue = boto3.client('glue')


def fregex(pattern, text, index=0):
    """Case-insensitively search *text* for *pattern*.

    Returns capture group *index* of the first match (group 0 is the
    whole match), or the empty string when nothing matches.
    """
    found = re.search(pattern, text, flags=re.IGNORECASE)
    return found.group(index) if found else ''


def convert_to_list(athena_result, include_header=True):
    """Flatten an Athena ``ResultSet`` dict into a list of row-value lists.

    Each entry of ``athena_result["Rows"]`` carries a ``Data`` list whose
    items are single-key dicts (e.g. ``{'VarCharValue': 'x'}``); only the
    values are kept, in order.

    Args:
        athena_result: the ``ResultSet`` dict returned by
            ``athena.get_query_results``.
        include_header: Athena returns the column names as row 0; pass
            False to drop that header row.

    Returns:
        A list with one inner list of cell values per (kept) row.
    """
    rows = athena_result["Rows"]
    # Bug fix: the flag used to be ignored (its guard was commented out),
    # so the header row was returned even with include_header=False.
    if not include_header:
        rows = rows[1:]
    return [[value for cell in row["Data"] for value in cell.values()]
            for row in rows]


class QueryExecutor(object):
    """Runs Athena queries and polls them to completion.

    Uses the module-level ``athena``, ``glue`` and ``s3_resource`` boto3
    handles and the shared ``logger``.
    """

    def __init__(self, temp_output_s3_path="s3://testjimmy/datalake/query_result/", interval=3):
        # S3 prefix where Athena writes its own bookkeeping/result files.
        self.temp_output_s3_path = temp_output_s3_path
        # Seconds to wait between status polls.
        self.interval = interval
        self.queryExecutionId = None

    def _start_query(self, query_string):
        """Submit *query_string* to Athena and remember its execution id.

        Shared by cats_query/adhoc_query (the submission code was
        previously duplicated in both).
        """
        resp = athena.start_query_execution(
            QueryString=query_string,
            ResultConfiguration={
                'OutputLocation': self.temp_output_s3_path,
            },
        )
        self.queryExecutionId = resp['QueryExecutionId']

    def cats_query(self, sql_statement, s3_path):
        """Run *sql_statement* via CTAS, writing JSON output under *s3_path*.

        Empties the destination prefix first (Athena refuses to write a
        CTAS into a non-empty external_location) and drops the temporary
        table from Glue database 'default' afterwards.

        Args:
            sql_statement: the SELECT to materialize.
            s3_path: ``s3://bucket/prefix`` destination for the JSON output.

        Returns:
            The Athena ``ResultSet`` of the CTAS statement (previously the
            result was discarded and None returned).

        Raises:
            Exception: if the query fails or is cancelled.
        """
        timestamp = str(time.time()).replace(".", "_")
        cats_query_statement = f"""
CREATE TABLE output_{timestamp}
WITH(
format='JSON',
external_location='{s3_path}') AS {sql_statement}
"""
        # CTAS requires an empty external_location, so clear it up front.
        logger.info(f"ready to clean s3 folder {s3_path}")
        bucket_name = fregex(r"s3://(.+?)/(.+)", s3_path, 1)
        key_prefix = fregex(r"s3://(.+?)/(.+)", s3_path, 2)
        s3_resource.Bucket(bucket_name).objects.filter(Prefix=key_prefix).delete()
        logger.info("done")

        self._start_query(cats_query_statement)
        result = self.polling()
        logger.info(f"execute successful, please go to {s3_path} check the result")
        # CTAS leaves a table registered in Glue; drop it so repeated runs
        # don't accumulate output_* tables in 'default'.
        logger.info(f"cleaning table output_{timestamp} in database 'default'")
        glue.delete_table(DatabaseName="default", Name=f"output_{timestamp}")
        logger.info(f"deleted")
        logger.info("done")
        return result

    def adhoc_query(self, sql_statement):
        """Run *sql_statement* and return its Athena ``ResultSet``.

        Raises:
            Exception: if the query fails or is cancelled.
        """
        self._start_query(sql_statement)
        return self.polling()

    def polling(self):
        """Block until the current query finishes; return its ``ResultSet``.

        Athena states (see Athena.Client.get_query_execution docs):
        QUEUED/RUNNING keep polling; SUCCEEDED returns the results;
        FAILED or CANCELLED raises.

        Raises:
            Exception: on FAILED or CANCELLED terminal states.
        """
        while True:
            time.sleep(self.interval)
            getQueryExecutionResp = athena.get_query_execution(
                QueryExecutionId=self.queryExecutionId
            )
            status = getQueryExecutionResp['QueryExecution']['Status']
            state = status['State']
            # typo fix: "stauts" -> "status"
            logger.info(f"QueryExecution {self.queryExecutionId} status is {state}...")
            if state == "SUCCEEDED":
                break
            if state == "FAILED":
                if 'StateChangeReason' in status:
                    logger.critical(f"exception: {status['StateChangeReason']}")
                raise Exception(f"QueryExecution {self.queryExecutionId} has failed")
            if state == "CANCELLED":
                raise Exception(f"QueryExecution {self.queryExecutionId} has been cancelled...")

        # The loop only exits via break on SUCCEEDED, so no re-check needed
        # (the original trailing if/else was unreachable in its else branch).
        getQueryResultResp = athena.get_query_results(
            QueryExecutionId=self.queryExecutionId
        )
        return getQueryResultResp['ResultSet']


if __name__ == "__main__":
    # Demo: CTAS a filtered slice of the sample table into a fresh,
    # timestamped S3 prefix, then print whatever cats_query returns.
    athena_sql = 'SELECT * FROM "coreproducts"."manual_testingttt_2021_07_01" where rawnodeid=\'12897221\''
    run_id = str(time.time()).replace(".", "_")
    destination = f"s3://testjimmy/datalake/cats_result/{run_id}"
    executor = QueryExecutor()
    print(executor.cats_query(athena_sql, destination))