import json
import logging
import time
import boto3
import http.client

log = logging.getLogger(__name__)


def send_event_bridge_msg(msg_detail):
    """
    Send an event to EventBridge in order to trigger the lambda function.

    :param msg_detail: JSON string (serialized event detail) passed from the SQS
    :return: the ``put_events`` response dict from EventBridge
    """
    print('start to send message to Event Bridge')
    entries = [
        {
            "DetailType": "Spider State-change Notification",
            "Source": "pt.spider",
            "Detail": msg_detail
        }
    ]
    print('putting success event to EventBus, event detail is {}...'.format(entries))
    event_bridge_client = boto3.client('events')
    response = event_bridge_client.put_events(
        Entries=entries
    )
    # Fix: the original discarded the response, so partially failed puts went
    # unnoticed. put_events reports per-entry failures via FailedEntryCount.
    if response.get('FailedEntryCount', 0) > 0:
        log.error('EventBridge rejected %s entries: %s',
                  response['FailedEntryCount'], response.get('Entries'))
    print('end to send message to Event Bridge')
    return response


def trigger_spiders(type):
    """
    Kick off every spider in the given family by publishing one
    EventBridge message per sub-type.

    :param type: spider family identifier, e.g. "AWS_Core"
    :return: None
    """
    # Guard clause: only the AWS_Core family is supported here.
    if type != "AWS_Core":
        return

    core_sub_types = (
        'CoreProductsBestSellers',
        'CoreProductsMostWishedFor',
        'CoreProductsMoversAndShakers',
        'CoreProductsGiftIdeas',
        'CoreProductsNewReleases',
    )
    for sub_type in core_sub_types:
        detail = {
            "state": "STARTED",
            "metadata": {
                "SpiderName": "CoreProductsSpider",
                "InitiatorName": "AllCoreProducts",
            },
            "payload": {
                "tt": f"{sub_type}",
            },
        }
        send_event_bridge_msg(json.dumps(detail))


def create_job_log(payload):
    """
    Create a job log in MongoDB (collection amzProductAnalyticsJob)
    via the analytics HTTP API.

    :param payload: JSON string body, e.g. {"jobId": ..., "jobCode": ...}
    :return: None (the response body is only printed)
    """
    conn = http.client.HTTPSConnection("api.amzn-plus.com")
    headers = {
        'Content-Type': 'text/plain'
    }
    # Fix: the original never closed the connection; release the socket even
    # if the request or read raises.
    try:
        conn.request("POST", "/prod/analytics/jobs/createJobLog", payload, headers)
        res = conn.getresponse()
        data = res.read()
        print(data.decode("utf-8"))
    finally:
        conn.close()


def register_group_spider_job(job_id):
    """
    Register the group spider job into DynamoDB via the analytics HTTP API.

    :param job_id: current date string used as the job identifier
    :return: None (the response body is only printed)
    """
    conn = http.client.HTTPSConnection("api.amzn-plus.com")
    # Fix: build the body with json.dumps instead of %-interpolation into a
    # raw string literal — a job_id containing quotes or backslashes would
    # have produced invalid JSON.
    payload = json.dumps({
        "jobId": job_id,
        "type": "AMZ_Core"
    })
    headers = {
        'Content-Type': 'text/plain'
    }
    # Fix: always close the connection (the original leaked it).
    try:
        conn.request("POST", "/prod/analytics/jobs/registerSQSJob", payload, headers)
        res = conn.getresponse()
        data = res.read()
        print(data.decode("utf-8"))
    finally:
        conn.close()


def update_spider_job_status(job_id, spider_type):
    """
    Update the spider status in DynamoDB via the analytics HTTP API,
    marking the given spider as "In progress".

    :param job_id: job identifier (date string)
    :param spider_type: spider code, e.g. "BestSeller"
    :return: None (the response body is only printed)
    """
    conn = http.client.HTTPSConnection("api.amzn-plus.com")
    # Fix: serialize with json.dumps instead of hand-escaped %-interpolation,
    # so arguments containing quotes/backslashes cannot break the JSON body.
    payload = json.dumps({
        "jobId": job_id,
        "spiderCode": spider_type,
        "status": "In progress"
    })
    headers = {
        'Content-Type': 'text/plain'
    }
    # Fix: always close the connection (the original leaked it).
    try:
        conn.request("PUT", "/prod/analytics/jobs/updateSQSJob", payload, headers)
        res = conn.getresponse()
        data = res.read()
        print(data.decode("utf-8"))
    finally:
        conn.close()


def lambda_handler(event, context):
    """
    Lambda entry point: create the daily job log, register the group
    spider job, then trigger the Core products spiders.

    :param event: triggering event (only logged)
    :param context: Lambda context object (unused)
    :return: the string "success"
    """
    print("event:", json.dumps(event))

    # Jobs are keyed by the local calendar date (YYYY-MM-DD).
    current_date = time.strftime('%Y-%m-%d', time.localtime(time.time()))
    # Fix: build the body with json.dumps instead of a hand-escaped format
    # string; with default separators this emits the byte-identical payload
    # while guaranteeing valid JSON.
    create_job_log(json.dumps({"jobId": current_date, "jobCode": "AMZ_Core"}))
    print("Job log is created:", current_date)

    register_group_spider_job(current_date)
    print("Group spider job is registered:", current_date)

    trigger_spiders("AWS_Core")
    print("End to trigger Core products spider")

    return "success"


if __name__ == "__main__":
    # Local smoke test: run the handler with dummy event/context values.
    lambda_handler("", "")
    # Manual check of the status-update endpoint:
    # update_spider_job_status("2020-09-24", "BestSeller")