# coding=utf-8
from core.m_logging import logger
from manager.fileState import FileState
from spiders import amazonAnsiListingParser, util
from spiders.nodeSpider import NodeSpider

### module configuration -- intended to be overridden by the importer
# NOTE(review): nodeSpider and the two FileState objects below are constructed
# at import time from these defaults, so overriding task_state_dir / task_no /
# batch_no *after* import does not change the spider or the state-file paths.
# Confirm that callers rebuild/patch these objects when they override.
task_state_dir = "."
task_no = "default_task"
batch_no = "default_batch"

### shared spider plus persistent done/undone bookkeeping
nodeSpider = NodeSpider(batch_no, task_no)

# Completed nodes: "list" holds finished node ids, "success" counts them.
done_template = {"status": "0", "spider_no": 0, "success": 0, "list": [], "id": 1}
done_state = FileState("%s/done_node.json" % task_state_dir, done_template)
done_state.load_state()

# Failed nodes: "list" holds node ids parked for retry, "failed" counts them.
undone_template = {"status": "0", "spider_no": 0, "failed": 0, "list": [], "id": 1}
undone_state = FileState("%s/undone_node.json" % task_state_dir, undone_template)
undone_state.load_state()


def _mark_done(node_no):
    # Shared finalizer: flag the node as completed in the persistent done
    # state (previously duplicated verbatim in two places inside scrape()).
    done_state.content["status"] = "10"
    done_state.content["list"].append(node_no)
    done_state.content["success"] = int(done_state.content["success"]) + 1
    done_state.save_state()
    logger.info("node %s put in done state..." % node_no)


def scrape(node_no, page_no_list):
    """Scrape the listing pages of one Amazon browse node.

    :param node_no: Amazon browse-node id to scrape.
    :param page_no_list: explicit page numbers to fetch; a falsy/empty value
        means "all pages from 2 up to the computed max page".

    Side effects: updates the module-level done_state / undone_state files and
    a per-node state file that records pages which failed to parse. Never
    raises -- any error is logged and the node is parked in undone_state so a
    different spider can retry it.
    """
    if node_no in done_state.content["list"]:  # skip done nodes
        logger.info("skipped in done_node %s" % node_no)
        return

    if node_no in undone_state.content["list"]:  # skip failed nodes, it shall be processed under another spider
        logger.info("skipped in undone_node %s" % node_no)
        return

    current_node_no = node_no
    current_page_no = 1

    node_state_template = {"node_no": current_node_no, "failed_pages": []}
    node_state = FileState("%s/node/node_%s.json" % (task_state_dir, current_node_no), node_state_template)
    node_state.load_state()

    max_page = 20
    start_url = 'https://www.amazon.com/b/?node=%s' % current_node_no

    try:
        # First request fetches page 1 and reports how many pages exist.
        item = nodeSpider.request(current_node_no, current_page_no, start_url, amazonAnsiListingParser.parse_index)
        if item.content["totalPage"].isdigit():
            total_page = int(item.content["totalPage"])
        else:
            total_page = 1  # unparsable page count -> assume a single page

        # NOTE(review): py2 integer division -- nodes reporting up to
        # 5 * max_page (104) pages are scraped in full, larger ones are capped
        # at 20. Looks deliberate but worth confirming with the author.
        max_page = total_page if (total_page / 5) <= max_page else max_page
        logger.info("max page is %s..." % max_page)
        if max_page <= 0:
            raise ValueError("max page error, value is %s..." % max_page)
        if max_page <= 1:
            # Single-page node: page 1 was already fetched above, we are done.
            logger.info("completed scraping node %s..." % current_node_no)
            _mark_done(current_node_no)
            return

        # An explicit page list overrides the default "2..max_page" range.
        p_list = page_no_list if page_no_list else range(2, max_page + 1)
        page_url_template = item.content["nextPageUrl"][0]

        # loop pages
        for p_no in p_list:
            current_page_no = p_no
            page_url = 'https://www.amazon.com' + util.generate_page_url(page_url_template, p_no)
            logger.info("processing node no %s, page no %s..." % (current_node_no, current_page_no))
            item = nodeSpider.request(current_node_no, current_page_no, page_url, amazonAnsiListingParser.parse_paging)
            if not item.content["success"]:
                # Remember the failed page so a later run can retry just it.
                node_state.content["failed_pages"].append(current_page_no)
                node_state.save_state()

        # completed this node
        logger.info("completed scraping node %s..." % current_node_no)
        _mark_done(current_node_no)
    except Exception as e:  # "as" form works on py2.6+ and py3 (was: "except Exception, e")
        logger.error("unexpected error(exception) occurred...")
        import traceback

        node_state.content["failed_pages"].append(current_page_no)
        node_state.save_state()

        if current_node_no not in undone_state.content["list"]:
            undone_state.content["list"].append(current_node_no)
            undone_state.content["failed"] = int(undone_state.content["failed"]) + 1
            undone_state.save_state()
            logger.info("node %s put in undone state..." % current_node_no)
        else:
            logger.info("node %s already in undone state, would be skipped this time..." % current_node_no)
        # str(e) instead of e.message: .message is deprecated since py2.6
        # (PEP 352) and missing on many exception classes.
        logger.error(
            "node scrape error occurred, node %s is skipped, last processing page is %s... e.message: %s" % (current_node_no, current_page_no, str(e)))
        logger.error('traceback.format_exc():\n%s' % traceback.format_exc())