#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2017/11/17 10:01
# @Author  : Yunhao.Cao
# @File    : spider_custom.py
from __future__ import unicode_literals
import re
from bs4 import BeautifulSoup
from logger import Logger
from spider import Spider
from queue import RequestTask

__author__ = 'Yunhao.Cao'

__ALL__ = ["CustomSpider"]


class CustomSpider(Spider):
    """Site-specific spider for Bank of Communications credit-card pages.

    Provides static parse callbacks for the activity-list page, the
    activity-detail page, and a proxy-test endpoint; ``_rule_list`` maps
    URL patterns to those callbacks.
    """

    def __init__(self):
        # No instance state; all parsing is done through the static
        # callbacks below.  (Intentionally does not call Spider.__init__ —
        # preserved from the original; confirm against Spider's contract.)
        pass

    @staticmethod
    def func_list(text, **kwargs):
        """Parse the activity-list page.

        Extracts the detail-page URL from each of the first five list
        entries and, when a ``task_queue`` is supplied via ``kwargs``,
        enqueues a ``RequestTask`` for each one.

        :param text: HTML of the activity-list page.
        :param kwargs: may contain ``task_queue`` (an object with ``put``).
        """
        Logger.info("enter func_list")

        task_queue = kwargs.get("task_queue")

        root_url = "https://creditcard.bankcomm.com"
        soup = BeautifulSoup(text, "html.parser")
        # Locate the activity-list container and its entries.
        act_list = soup.find("div", class_="actlist")
        li_list = act_list.find_all("li", class_=re.compile("clearfix"))

        # Resolve each entry's relative href against the site root.
        # Only the first five entries are processed (original behavior).
        for li in li_list[:5]:
            a = li.find("a")
            detail_url = root_url + a.get("href")

            # BUGFIX: was a Python 2 ``print`` statement (SyntaxError on
            # Python 3); the function form is valid on both 2 and 3.
            print(detail_url)

            if task_queue:
                task_queue.put(RequestTask(detail_url))

    @staticmethod
    def func_detail(text, **kwargs):
        """Parse an activity detail page.

        :param text: HTML of the detail page.
        :return: the plain-text content of the ``div.thsh_txt`` element.
        """
        Logger.info("detail")
        soup = BeautifulSoup(text, "html.parser")
        text_element = soup.find("div", class_="thsh_txt")
        # BUGFIX: the extracted text was previously assigned to a local and
        # discarded; return it so callers can actually use the content.
        return text_element.text

    @staticmethod
    def func_proxy_test(text, **kwargs):
        """Proxy-test callback: just log the response body.

        :param text: response body of the IP-echo endpoint.
        """
        Logger.debug(text)

    _rule_list = [
        # Activity-list page.  BUGFIX: dots in all three patterns are now
        # escaped — an unescaped ``.`` matches any character, making the
        # original patterns overly broad.
        Spider.Rule(url=r"^(?:https?://)?creditcard\.bankcomm\.com/content/pccc/discount/activitylist/more\.html$",
                    func=func_list),
        # Activity-detail page.
        Spider.Rule(url=r"^(?:https?://)?creditcard\.bankcomm\.com/content/pccc-biz/discount/data/\d*_\d*\.show\.html$",
                    func=func_detail),
        # Proxy-test endpoint.
        Spider.Rule(url=r"^(?:https?://)?ip\.chinaz\.com/getip\.aspx$",
                    func=func_proxy_test),
    ]


if __name__ == '__main__':
    # Ad-hoc smoke test for the parse callbacks: fetch the live
    # activity-list page and run it through the spider's rule dispatch.
    from downloader import Downloader

    list_url = "https://creditcard.bankcomm.com/content/pccc/discount/activitylist/more.html"
    page_html = Downloader.get(list_url)
    CustomSpider.parse(list_url, page_html)
