#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2017/11/7 9:38
# @Author  : Yunhao.Cao
# @File    : main.py
from __future__ import absolute_import, unicode_literals
from bs4 import BeautifulSoup
from multiprocessing import Pool, Manager
from functools import partial
import os
import requests
import time

__author__ = 'Yunhao.Cao'

# `__all__` (lowercase) is the dunder Python actually honors for
# `from module import *`; the original `__ALL__` spelling had no effect.
__all__ = []

# Target pages to crawl: credit-card discount detail pages on bankcomm.com.
# NOTE(review): there are 18 entries but _main() only enqueues QUEUE_SIZE (10)
# of them, so the tail of this list is never fetched.
URL_LIST = [
    'https://creditcard.bankcomm.com/content/pccc-biz/discount/data/16_1508307563903.show.html',
    'https://creditcard.bankcomm.com/content/pccc-biz/discount/data/17_1508309829445.show.html',
    'https://creditcard.bankcomm.com/content/pccc-biz/discount/data/23_1509419503289.show.html',
    'https://creditcard.bankcomm.com/content/pccc-biz/discount/data/1_1494814066785.show.html',
    'https://creditcard.bankcomm.com/content/pccc-biz/discount/data/24_1509434641621.show.html',
    'https://creditcard.bankcomm.com/content/pccc-biz/discount/data/27_1509439575715.show.html',
    'https://creditcard.bankcomm.com/content/pccc-biz/discount/data/25_1509437373083.show.html',
    'https://creditcard.bankcomm.com/content/pccc-biz/discount/data/26_1509438856655.show.html',
    'https://creditcard.bankcomm.com/content/pccc-biz/discount/data/22_1509356524583.show.html',
    'https://creditcard.bankcomm.com/content/pccc-biz/discount/data/21_1509352507141.show.html',
    'https://creditcard.bankcomm.com/content/pccc-biz/discount/data/19_1508900524123.show.html',
    'https://creditcard.bankcomm.com/content/pccc-biz/discount/data/15_1508210044476.show.html',
    'https://creditcard.bankcomm.com/content/pccc-biz/discount/data/10_1506681726857.show.html',
    'https://creditcard.bankcomm.com/content/pccc-biz/discount/data/2_1505728867804.show.html',
    'https://creditcard.bankcomm.com/content/pccc-biz/discount/data/8_1506653299930.show.html',
    'https://creditcard.bankcomm.com/content/pccc-biz/discount/data/9_1506666927729.show.html',
    'https://creditcard.bankcomm.com/content/pccc-biz/discount/data/7_1506425898644.show.html',
    'https://creditcard.bankcomm.com/content/pccc-biz/discount/data/11_1507702041938.show.html',
]


def spider(url, timeout=10):
    """
    Fetch a single page and return its title.

    :param url: target URL to fetch
    :param timeout: seconds to wait for the HTTP response (new, defaulted,
                    so existing positional callers are unaffected)
    :return: the page's <title> text, or '' if the page has no <title>
    :raises requests.RequestException: on network failure / timeout / HTTP error
    """
    # The original call had no timeout, which can block a worker forever.
    response = requests.get(url, timeout=timeout)
    # Fail fast on 4xx/5xx instead of scraping an error page's title.
    response.raise_for_status()
    detail_soup = BeautifulSoup(response.text, "html.parser")

    # soup.title is None for pages without a <title>; the original would
    # have raised AttributeError here.
    title_tag = detail_soup.title
    return title_tag.text if title_tag is not None else ''


def worker(worker_id, in_queue, out_queue, lock):
    """
    Repeatedly take a URL from the input queue, crawl it, and push the result
    onto the output queue. Exits once the input queue stays empty.

    :param worker_id: small integer identifying this worker (used in logs)
    :param in_queue: queue of URLs to crawl
    :param out_queue: queue receiving (worker_id, pid, url, title) tuples
    :param lock: inter-process lock serializing access to in_queue
    :return: None
    """
    # Function-local stdlib import: queue.Empty is raised by Manager queues
    # when a bounded get() times out.
    from queue import Empty

    def _print(msg):
        print("[{}][{}] {}".format(worker_id, pid, msg))

    pid = os.getpid()
    _print("worker{} starts...".format(worker_id))

    while True:
        # Acquire the lock so only one worker polls the queue at a time.
        lock.acquire()
        _print("get!")
        url = None
        _print("waiting... ####")
        try:
            # Bounded wait: the original blocking get() never returned once
            # the queue was drained, so workers hung and pool.map deadlocked.
            url = in_queue.get(timeout=5)
        except Empty:
            # No more work — leave the loop (finally still releases the lock).
            _print("queue drained, exiting..")
            break
        finally:
            # Always release the lock, even on timeout or error.
            lock.release()
            _print("release!")

        if not url:
            _print("url is empty..")
            continue

        # Crawl the page and publish the result.
        title = spider(url)
        result = (worker_id, pid, url, title)
        _print("put to queue..[{}]".format(result))
        out_queue.put(result)

        # Throttle between requests to be polite to the server.
        _print("sleep.....")
        time.sleep(5)
        _print("finished")


def _main():
    """
    Entry point: fan the first QUEUE_SIZE URLs out to a small process pool
    and print the collected (worker_id, pid, url, title) results.

    :return: None
    """
    QUEUE_SIZE = 10
    PROCESS_POOL_SIZE = 3

    # Manager provides proxies usable across processes.
    manager = Manager()
    # Work queue and result queue shared between the pool workers.
    url_queue = manager.Queue(QUEUE_SIZE)
    result_queue = manager.Queue(QUEUE_SIZE)
    # Lock serializing queue polling inside worker().
    lock = manager.Lock()

    # Slice instead of indexing with range(QUEUE_SIZE): the original raised
    # IndexError whenever URL_LIST had fewer than QUEUE_SIZE entries. Only
    # QUEUE_SIZE urls fit anyway — the workers have not started consuming yet,
    # so putting more would block here forever.
    for url in URL_LIST[:QUEUE_SIZE]:
        url_queue.put(url)

    # Process pool running the workers.
    pool = Pool(PROCESS_POOL_SIZE)

    partial_worker = partial(worker, in_queue=url_queue, out_queue=result_queue, lock=lock)
    pool.map(partial_worker, range(PROCESS_POOL_SIZE))

    print("join !")
    pool.close()
    pool.join()

    # The original filled result_queue but never read it; drain and report.
    while not result_queue.empty():
        print(result_queue.get())


# Run the crawler only when executed as a script, not on import.
if __name__ == '__main__':
    _main()
