import itertools

import json

from framework.common.pt_logger import logger
from framework.middlewares.proxy_scraperapi_middleware import ProxyScraperApiMiddleware
from framework.middlewares.random_header_middleware import RandomHeaderMiddleware
from framework.middlewares.random_sleep_middleware import RandomSleepMiddleware
from framework.output.s3_output import S3Output
from framework.output.sqs_output import SQSOutput
from framework.request import Request
from framework.retryable_request import RetryableRequest
from framework.core.crawler_task import Task
from framework.core.sqs_task_pool import SQSTaskPool
from framework.serverless_spider import ServerlessSpider
from jobs.amazon.spiders.coreProducts_spider.coreProducts_parser import CoreProductsParser
from jobs.amazon.spiders.coreProducts_spider.departments_map import spider_type_map, departments_keywords_map
from jobs.amazon.util import sqs_util

# Identity of this spider and the shared output targets.
spider_name = "CoreProductsBestSellers"
# batch_id = "20210422"
s3_bucket = "v2-data-production"  # destination bucket for all S3 outputs below
root_url = 'https://www.amazon.com/'  # base URL; presumably used by callers/tasks — not referenced in this chunk

def setup_spider(task_pool, batch_id, task_type):
    """Build and return a ServerlessSpider wired with middlewares, parsers and outputs.

    Args:
        task_pool: SQSTaskPool the spider pulls tasks from.
        batch_id: Crawl batch identifier; partitions the S3 output folders.
        task_type: Task type (metadata key "tt"); selects the monitor queue name.

    Returns:
        A configured ServerlessSpider instance.
    """
    # NOTE(review): per an earlier inline comment, RandomSleepMiddleware may be
    # redundant for serverless spiders (pacing can be delegated to the Lambda
    # runtime) — kept here to preserve existing behavior; confirm before removing.
    request = RetryableRequest(Request()) \
        .middleware(RandomHeaderMiddleware()) \
        .middleware(ProxyScraperApiMiddleware()) \
        .middleware(RandomSleepMiddleware())

    def get_output_prefix(current_task, result):
        # Partition raw page output by task type.
        return current_task.metadata["tt"] + "/file"

    def get_asin_candidates_prefix(current_task, results):
        # Partition ASIN-candidate output by task type.
        return current_task.metadata["tt"] + "/asin_candidates"

    def get_asin_candidates_output(current_task, results):
        # Flatten every result's product list into "asin,detail_url,review_url,nodeId" CSV rows.
        rows = [
            f'{item["asin"]},{item["detail_url"]},{item["review_url"]},{item["nodeId"]}'
            for result in results
            for item in result.items.get("products")
        ]
        return json.dumps(rows)

    def get_leaf_node_notification(current_task, result):
        # Tell the monitor queue which batch just finished a leaf task.
        return json.dumps({"batchId": result.metadata.get("batchId")})

    def is_leaf_node(current_task, result):
        # A result tagged "last_page" marks the end of a pagination chain.
        return result.metadata.get("tp") == "last_page"

    monitor_queue_name = "pt-spider-serverless-%s-monitor-queue" % task_type

    outputs = [
        # Raw page output: one compressed S3 object per task.
        S3Output(s3_bucket=s3_bucket,
                 s3_folder="%s/%s" % ("SpiderOutput/core-products", batch_id),
                 get_prefix_func=get_output_prefix,
                 compress=True),
        # Flattened ASIN candidate rows, uncompressed, for downstream ASIN spiders.
        S3Output(s3_bucket=s3_bucket,
                 s3_folder="%s/%s" % ("SpiderOutput/asin-candidates", batch_id),
                 get_output_content=get_asin_candidates_output,
                 get_prefix_func=get_asin_candidates_prefix,
                 compress=False),
        # Completion notification, sent only for leaf (last-page) results.
        SQSOutput(queue_name=monitor_queue_name,
                  get_queue_message=get_leaf_node_notification,
                  condition=is_leaf_node),
    ]
    parsers_map = {
        "default": CoreProductsParser(),
    }
    return ServerlessSpider(request, task_pool, parsers_map, outputs)


def setup_task_pool(name):
    """Ensure the serverless task queue exists, then return a pool bound to it.

    Args:
        name: Task type; interpolated into the SQS queue name.

    Returns:
        An SQSTaskPool bound to the (now guaranteed to exist) queue.
    """
    pool_name = "pt-spider-serverless-%s-queue" % name
    # Create the queue *before* constructing the pool, so the pool never binds
    # to a queue that does not exist yet (the original order could fail on the
    # very first run if SQSTaskPool resolves the queue in its constructor).
    sqs_util.create_if_not_existed(queue_name=pool_name)
    return SQSTaskPool(name=pool_name)


def start_worker(task_json):
    """Rebuild a Task from its JSON form and run the spider loop on it.

    Args:
        task_json: Dict with the task payload; must carry
            ``metadata.batchId`` and ``metadata.tt``.
    """
    task = Task.from_json(task_json)
    batch_id = task_json["metadata"]["batchId"]
    task_type = task.metadata["tt"]
    pool = setup_task_pool(task_type)
    spider = setup_spider(pool, batch_id=batch_id, task_type=task_type)
    spider.listen_and_start(task)


if __name__ == "__main__":
    # # Test for Spider Initiator section
    metadata = {'time': '2021-10-15T12:39:33Z'}
    payload = {'tt': 'CoreProductsGiftIdeas'}
    # NOTE(review): `start_initiator` is neither defined nor imported in this
    # module — running this script as-is raises NameError. Confirm where the
    # initiator entry point lives and add the missing import.
    start_initiator(metadata, payload)
    # # Test for Spider Initiator section

    # # Test for Spider Worker section
    # ## Basic task sample
    # task = {
    #     "url": f"https://www.amazon.com/gp/most-gifted/appliances",
    #     "parser": "default",
    #     "metadata": {
    #         "batchId": "2021-10-16",
    #         "SpiderName": "CoreProductsSpider",
    #         "tt": "CoreProductsGiftIdeas",
    #         "td": "init",
    #         "tp": "first_page"
    #     }
    # }

    # task = {
    #     "url": "https://www.amazon.com/gp/most-gifted/electronics",
    #     "parser": "default",
    #     "metadata": {
    #         "batchId": "2021-10-20",
    #         "SpiderName": "CoreProductsSpider",
    #         "tt": "CoreProductsGiftIdeas",
    #         "td": "init",
    #         "tp": "first_page"
    #     },
    #     "desc": "",
    #     "tags": {},
    #     "parent_result": {}
    # }
    # ## Basic task sample

    # start_worker(task)
    # # Test for Spider Worker section
