import json
import boto3
import requests


def lambda_handler(event, context):
    """
    Monitor spiders based on the job status stored in DynamoDB.

    Triggered every 5 minutes:
    1. fetch the spider group jobs (table SpiderProcessQueue) via the jobs API
    2. for every spider still "In Progress", treat an empty SQS queue as done
       and mark its status "Completed" in DynamoDB (plus a MongoDB job log)
    3. if any spider of a group is not completed, the schedule simply fires
       this function again in 5 minutes
    4. once every spider in a group is completed, delete the group job and,
       for AMZ_Core jobs, trigger the AWS Glue / EMR step via EventBridge

    :param event: Lambda trigger event (only printed for tracing)
    :param context: Lambda context object (unused)
    :return: API-Gateway style dict with statusCode 200 and a success body
    """
    print(event)
    for spider_group_job in get_spider_group_jobs():
        job_id = spider_group_job['jobId']
        spider_type = spider_group_job['type']

        # Reset per group job. The original module-wide flag let a single
        # unfinished job block the cleanup of every job checked after it.
        all_spiders_completed = True

        for spider in spider_group_job['spiderList']:
            status = str(spider['status']).lower()
            spider_code = spider['spiderCode']
            sqs_queue_name = spider['sqsQueueName']

            if status == "in progress":
                if check_sqs_empty(sqs_queue_name):
                    # queue drained -> mark this spider completed in DynamoDB
                    update_spider_group_job(json.dumps({
                        'jobId': job_id,
                        'spiderCode': spider_code,
                        'sqsQueueName': sqs_queue_name,
                        'status': 'Completed',
                    }))

                    # log the updated status in MongoDB
                    if spider_type == 'AMZ_Core':
                        payload = """
                        {
                            jobId: '%s',
                            currentStep: 'Data',
                            spiderStep:{
                              spiderCode: '%s',
                              status:'Succeeded'
                            },
                            dataStep:{
                              status: 'In Progress'
                            }
                        }""" % (job_id, spider_code)
                        log_update_spider(payload)
                else:
                    all_spiders_completed = False
            elif status == "not started":
                all_spiders_completed = False

        if all_spiders_completed:
            # every spider in this group is done -> delete job in DynamoDB
            print('before delete_spider_group_job ')
            delete_spider_group_job(job_id)
            print('after delete_spider_group_job ')

            payload = ""
            if spider_type == "AMZ_BrowseTree":
                payload = "{\"jobId\": \"%s\",\"spiderStep\":{\"spiderCode\": \"AMZ_BrowseTree\",\"status\":\"Completed\" }} " % job_id
                print('spider_type:', spider_type, 'payload:', str(payload))
            elif spider_type == 'AMZ_Core':
                # update the log in MongoDB
                payload = "{\"jobId\": \"%s\", \"currentStep\":\"Data\", \"jobStatus\":{\"status\":  \"Succeeded\"},\"spiderStep\":{\"spiderCode\": \"BestSeller\", \"status\":\"Succeeded\"},\"dataStep\":{\"status\": \"In Progress\"}}" % job_id
                # downstream data processing is triggered only for AMZ_Core
                trigger_aws_glue(job_id, "")

            if payload:  # skip the MongoDB log call for unknown job types
                print('before log_update_spider ')
                log_update_spider(payload)
                print('after log_update_spider ')
    return {
        "statusCode": 200,
        "body": json.dumps({
            "message": "success",
        }),
    }


def trigger_aws_glue(job_id, last_job_id):
    """
    Send an event to EventBridge in order to invoke the EMR / Glue step.

    :param job_id: batchId passed along from the SQS job
    :param last_job_id: previous job id passed along from the SQS job
    :return: None
    """
    print('Start to send eventBridge message')
    entries = [
        {
            "DetailType": "Spider State-change Notification",
            "Source": "pt.spider",
            "Detail": "{\"state\":\"COMPLETED\",\"metadata\":{\"SpiderName\":\"CoreProductsSpider\"},\"payload\":{\"tt\":\"AllCoreProducts\",\"batchId\": \"%s\", \"lastBatchId\": \"%s\"}}" % (job_id, last_job_id)
        }
    ]
    print("putting success event to EventBus, event detail is {}...".format(entries))
    event_bridge_client = boto3.client('events')
    # put_events response is not inspected; delivery failures surface in logs
    event_bridge_client.put_events(Entries=entries)
    print("End to send eventBridge message")


def get_spider_group_jobs():
    """
    Fetch every spider group job currently queued in DynamoDB.

    :return: the parsed JSON payload returned by the jobs API
    """
    endpoint = "https://api.amzn-plus.com/prod/analytics/jobs/getAllSQSJobs"
    resp = requests.get(endpoint)
    print(resp.content)
    return json.loads(resp.content)


def update_spider_group_job(spider_group_job):
    """
    Persist an updated spider group job back to DynamoDB via the jobs API.

    :param spider_group_job: JSON string with the fields to update
    :return: the literal string "success"
    """
    endpoint = "https://api.amzn-plus.com/prod/analytics/jobs/updateSQSJob"
    requests.put(endpoint, spider_group_job)
    return "success"


def delete_spider_group_job(spider_group_job):
    """
    Remove a spider group job from DynamoDB by its group job id.

    :param spider_group_job: the group job id to delete
    :return: the literal string "success"
    """
    endpoint = f"https://api.amzn-plus.com/prod/analytics/jobs/deleteSQSJob?jobId={spider_group_job}"
    resp = requests.delete(endpoint)
    print(resp)
    return "success"


def log_update_spider(payload):
    """
    Log the spider status update (MongoDB job log) via the job-log API.

    :param payload: request body describing the update, sent as text/plain
    :return: None
    """
    import http.client

    conn = http.client.HTTPSConnection("api.amzn-plus.com")
    try:
        headers = {
            'Content-Type': 'text/plain'
        }
        conn.request("PUT", "/prod/analytics/jobs/updateJobLog", payload, headers)
        res = conn.getresponse()
        data = res.read()
        print(data.decode("utf-8"))
    finally:
        # the original leaked the HTTPS connection; always release it
        conn.close()


def check_sqs_empty(queue_name):
    """
    Check whether an SQS queue is fully drained.

    Inspects ApproximateNumberOfMessages and ApproximateNumberOfMessagesDelayed;
    the queue counts as empty (spider completed) only when both are zero.

    :param queue_name: name of the SQS queue to inspect
    :return: True: empty = completed; False: not empty = not completed
    """
    # Resolve the queue URL from its name
    sqs = boto3.resource('sqs')
    queue = sqs.get_queue_by_name(QueueName=queue_name)

    sqs_client = boto3.client('sqs')
    response = sqs_client.get_queue_attributes(
        QueueUrl=queue.url,
        AttributeNames=['ApproximateNumberOfMessages',
                        'ApproximateNumberOfMessagesDelayed'])

    # The original bound the counters only inside the status check, raising
    # NameError when the request failed. Treat a failed lookup as "not
    # empty" so the 5-minute schedule simply retries on the next cycle.
    if not response or response["ResponseMetadata"]["HTTPStatusCode"] != 200:
        print(f"Failed to read attributes of {queue_name}; assuming not empty.")
        return False

    attributes = response.get("Attributes", {})
    # Default to 0 (not None) so the int() conversions below cannot raise.
    approximate_number_of_messages = attributes.get("ApproximateNumberOfMessages", 0)
    approximate_number_of_messages_delayed = attributes.get("ApproximateNumberOfMessagesDelayed", 0)

    print(f"The approximate_number_of_messages of {queue_name} is {approximate_number_of_messages}.")
    print(f"The approximate_number_of_messages_delayed of {queue_name} is {approximate_number_of_messages_delayed}.")

    available_msgs = int(approximate_number_of_messages) + int(approximate_number_of_messages_delayed)
    return available_msgs == 0


if __name__ == "__main__":
    # Local smoke test: run the monitor once with empty event/context.
    lambda_handler('', '')