import json

from framework.common.pt_logger import logger
from framework.core.crawler_task import Task
from framework.core.sqs_task_pool import SQSTaskPool
from framework.middlewares.proxy_scraperapi_middleware import ProxyScraperApiMiddleware
from framework.middlewares.random_header_middleware import RandomHeaderMiddleware
from framework.middlewares.random_sleep_middleware import RandomSleepMiddleware
from framework.output.s3_output import S3Output
from framework.request import Request
from framework.retryable_request import RetryableRequest
from framework.serverless_spider import ServerlessSpider
from jobs.amazon.spiders.asin_spider.dp_parser import DpParser
from jobs.amazon.spiders.asin_spider.lazyWidget_parser import LazyWidgetParser
from jobs.amazon.spiders.asin_spider.offerlist_parser import OfferlistParser
from jobs.amazon.spiders.asin_spider.qa_parser import QaParser
from jobs.amazon.spiders.asin_spider.review_parser import ReviewParser
from jobs.amazon.util import sqs_util

# Job-level configuration used when building the S3 output path below.
# NOTE(review): "Unset" is a placeholder (the intended value appears to be
# "AsinDetailPage") and is interpolated verbatim into the S3 folder name in
# setup_spider — confirm this is assigned elsewhere before output is written.
spider_name = "Unset"  #"AsinDetailPage"
# batch_id = "20210422"
# Destination bucket for compressed spider output.
s3_bucket = "v2-data-production"


def setup_spider(task_pool, batch_id, task_type):
    """Wire up and return a ServerlessSpider for one (batch, task-type) run.

    The spider combines a retryable request pipeline, an S3 output sink
    under ``SpiderOutput/<spider_name>_<task_type>/<batch_id>``, and the
    full set of ASIN page parsers keyed by parser name.
    """
    # Retrying request with the standard middleware stack.
    # (Per the original author's note: a serverless spider does not need to
    # manage RandomSleep itself — the lambda platform can provide pacing.)
    request = (
        RetryableRequest(Request())
        .middleware(RandomHeaderMiddleware())
        .middleware(ProxyScraperApiMiddleware())
        .middleware(RandomSleepMiddleware())
    )

    # All results share the same flat object prefix.
    def prefix_for(current_task, results):
        return "file"

    # NOTE(review): spider_name is "Unset" at module level, so this folder
    # becomes "SpiderOutput/Unset_<task_type>/<batch_id>" — verify intended.
    folder = f"SpiderOutput/{spider_name}_{task_type}/{batch_id}"
    outputs = [
        S3Output(
            s3_bucket=s3_bucket,
            s3_folder=folder,
            get_prefix_func=prefix_for,
            compress=True,
        )
    ]

    # Parser registry: the spider dispatches page payloads by these keys.
    parsers_map = {
        "dpParser": DpParser(),
        "reviewParser": ReviewParser(),
        "qaParser": QaParser(),
        "offerlistParser": OfferlistParser(),
        "lazyWidgetParser": LazyWidgetParser(),
    }

    return ServerlessSpider(request, task_pool, parsers_map, outputs)


def setup_task_pool(name):
    """Return an SQSTaskPool backed by the serverless spider queue for *name*.

    Fix: ensure the SQS queue exists *before* constructing the task pool.
    The original built the pool first and only then called
    ``create_if_not_existed``, so the pool could briefly reference a queue
    that did not exist yet.
    """
    pool_name = f"pt-spider-serverless-{name}-queue"
    # Guarantee the backing queue exists before anything points at it.
    sqs_util.create_if_not_existed(queue_name=pool_name)
    return SQSTaskPool(name=pool_name)


def start_worker(task_json):
    """Lambda-style worker entry point: run the spider for one seed task.

    *task_json* must carry ``metadata.batchId`` (batch identifier) and,
    once deserialized into a Task, ``metadata["tt"]`` (the task type that
    selects both the queue and the output folder).
    """
    task = Task.from_json(task_json)
    batch_id = task_json["metadata"]["batchId"]
    task_type = task.metadata["tt"]

    pool = setup_task_pool(task_type)
    spider = setup_spider(pool, batch_id=batch_id, task_type=task_type)
    spider.listen_and_start(task)


if __name__ == "__main__":
    # This module is meant to be invoked as a serverless worker entry point;
    # there is nothing to run directly from the command line.
    pass
    # Debug invocations kept from development. NOTE(review): start_initiator
    # is not defined in this module — presumably it lives in a sibling
    # initiator module; these lines would fail as-is if uncommented.
    # start_initiator(payload={})
    # start_initiator(payload={"StartingToken": 'eyJDb250aW51YXRpb25Ub2tlbiI6ICIxN3dvRCtGUkdkWFQ2SjNzYWFValJhZjhVUnhUZ1pKcTJ0N1pLNEh5Smt4RWVsbGxmYUNodnhYOTRzT202cWQ3MXFiQ1haMGdDMGNISlN2R2tUYmcvTVpCWmlodEdvdlR5QjBJY0dwT2xyVWljMzVpbWhXTy9DQT09In0='})
