# coding: utf-8

# 导入必要的库文件，类似  c的include
import Queue
import socket
import urlparse
from urlparse import urlparse
from urllib2 import Request
from urllib2 import urlopen
from bs4 import BeautifulSoup
from urlparse import urljoin
import threading
import logging
from logging.handlers import TimedRotatingFileHandler
import time
import redis
import thread

# Global default timeout (seconds) for every new socket, so a hung HTTP
# request cannot block a worker thread forever.
socket.setdefaulttimeout(10)


# Logging helper: wires a named logger to a midnight-rotating file plus the
# console. Auxiliary class; callers only use the `.logger` attribute.
class log_helper():

    def __init__(self, name):
        """Configure logger *name* to write DEBUG+ records to ``name + ".log"``
        (rotated daily) and to the console.

        :param name: logger name; also used as the log file base name.
        """
        fm = logging.Formatter('%(asctime)s %(filename)s(%(lineno)d) %(levelname)s %(name)s - %(message)s')
        # Use the directly imported name (see top-of-file import) instead of
        # attribute access through the logging package.
        fh = TimedRotatingFileHandler(name + ".log", when='midnight')
        fh.setFormatter(fm)
        fh.suffix = "%Y-%m-%d"
        console = logging.StreamHandler()
        console.setLevel(logging.DEBUG)
        console.setFormatter(fm)
        self.logger = logging.getLogger(name)
        self.logger.setLevel(logging.DEBUG)
        # BUG FIX: logging.getLogger(name) returns a shared singleton, so
        # constructing log_helper twice with the same name used to stack
        # duplicate handlers and emit every record multiple times. Only attach
        # handlers the first time this logger is configured.
        if not self.logger.handlers:
            self.logger.addHandler(fh)
            self.logger.addHandler(console)


# Build the HTTP request headers shared by every crawl request.
def GetCommonHeader():
    """Return the default header dict (keep-alive, browser-like UA, etc.)."""
    return {
        "Connection": "keep-alive",
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
        "User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.86 Safari/537.36",
        # "Accept-Encoding": "gzip, deflate, sdch",
        "Accept-Language": "zh-CN,zh;q=0.8",
    }


# Declare and initialise all module-level shared state.
def GLobalVal():
    """Set up the crawler's global configuration, queue, lock, logger and
    redis connection. Must be called once before Init()/start()."""
    global threadnum, img_thread            # worker-thread counts
    global lock, queue, log
    global spider_interval                  # delay between requests; 0 = none
    global cur_requestnum, max_request_num  # retry budget for flaky URLs
    global failrequestfilename              # file for failed URLs
    global resultfilename                   # file for results
    global redis_server, redis_port, redis_conn
    global img_queue

    threadnum = 10                  # page-crawler threads
    img_thread = 10                 # image-download threads
    queue = Queue.Queue(-1)         # unbounded in-process URL queue

    lock = threading.Lock()
    log = log_helper("log")

    spider_interval = 0
    cur_requestnum = 0
    max_request_num = 10            # max fetch attempts per URL

    redis_server = "127.0.0.1"
    redis_port = 6379

    redis_conn = redis.Redis(host=redis_server, port=redis_port)


# Parse function — adapt to your own needs.
def parse(html, sourceurl):
    """Extract anchor and image URLs from *html* and enqueue the new ones.

    Absolute http(s) links not already in the redis "complete" set are pushed
    to the "url" list; image sources likewise to the "img" list.

    :param html: raw HTML of the fetched page.
    :param sourceurl: URL the page came from (base for relative links).
    """
    global redis_conn
    try:
        # Parse once and reuse the soup for both anchors and images
        # (the original parsed the same document twice).
        soup = BeautifulSoup(html, "html.parser")
    except Exception as e:
        log.logger.info(str(e))
        return

    for a in soup.find_all("a"):
        try:
            newurl = urljoin(sourceurl, a["href"])
            if newurl.startswith("http"):
                if not redis_conn.sismember("complete", newurl):
                    redis_conn.lpush("url", newurl)
                else:
                    log.logger.info("already spider:%s" % (newurl))
        except Exception as e:
            # e.g. <a> without href — skip this tag, keep scanning
            log.logger.info(str(e))

    for img in soup.find_all("img"):
        try:
            imgurl = urljoin(sourceurl, img["src"])
            # BUG FIX: the original tested/logged the stale `newurl` variable
            # left over from the anchor loop instead of `imgurl`, so the
            # scheme check and the duplicate log message were wrong (and a
            # page with no anchors raised NameError here).
            if imgurl.startswith("http"):
                if not redis_conn.sismember("complete", imgurl):
                    redis_conn.lpush("img", imgurl)
                else:
                    log.logger.info("already spider:%s" % (imgurl))
        except Exception as e:
            # e.g. <img> without src — skip this tag, keep scanning
            log.logger.info(str(e))


def thread_call():
    """Page-crawler worker loop.

    Pulls URLs from the in-process queue, fetches each page with up to
    ``max_request_num`` attempts, marks successful fetches in the redis
    "complete" set and hands the HTML to parse(). Sleeps
    ``spider_interval`` seconds between iterations. Runs forever unless
    queue.get() raises.
    """
    global queue
    global log
    global spider_interval
    global max_request_num
    global redis_conn

    while True:
        try:
            try:
                # NOTE: blocking get — with no timeout the "thread done"
                # branch below is only reached if get() itself raises.
                # url = queue.get(timeout=10)
                url = queue.get()
            except Exception as e:
                log.logger.info(u"线程请求完成,准备退出")
                return
            log.logger.info("Get:" + url)
            html = ''
            for i in range(max_request_num):
                log.logger.info(u"请求第%d次" % (i + 1))
                html_tmp = GetHtml(url)
                if html_tmp is not None:  # idiom fix: was `!= None`
                    html = html_tmp
                    break

            if html != "":
                # Only mark a URL complete after a successful fetch.
                redis_conn.sadd("complete", str(url))
                parse(html, str(url))
        except Exception as e:
            log.logger.info(str(e))
        time.sleep(spider_interval)


def thread_call_img():
    """Image-download worker loop.

    Pops image URLs from the redis "img" list, downloads each one via
    GetHtml() and saves it under ``img/<basename>``. Polls once per second
    when the list is empty. Runs forever.
    """
    global log
    global redis_conn

    while True:
        try:
            url = redis_conn.lpop("img")
            if url is None:  # idiom fix: was `== None`
                time.sleep(1)
                continue
            data = GetHtml(url)
            if data is not None:
                try:
                    # Derive the local file name from the last path segment.
                    filename = urlparse(url).path.split("/")[-1]
                    # BUG FIX: image payloads are binary — the original opened
                    # the file in text mode ("w+"), which corrupts the bytes
                    # on platforms that translate newlines (e.g. Windows).
                    with open("img/" + filename, "wb") as f:
                        f.write(data)
                except Exception as e:
                    print("*" * 50, e)
        except Exception as e:
            log.logger.info(str(e))


# Generic page fetcher.
def GetHtml(url):
    """Fetch *url* with the common headers and return the response body,
    or None if the request fails for any reason (errors are logged)."""
    hdrs = GetCommonHeader()
    hdrs["Host"] = urlparse(url).netloc

    try:
        response = urlopen(Request(url, headers=hdrs))
        return response.read()
    except Exception as e:
        log.logger.info(str(e))
        return None


def Init():
    """Spawn the crawler and image-download worker threads.

    GLobalVal() must have been called first so the thread counts exist.
    """
    global threadnum
    global img_thread
    # Data-driven spawn: (worker function, how many of it to start).
    for target, count in ((thread_call, threadnum), (thread_call_img, img_thread)):
        for _ in range(count):
            thread.start_new_thread(target, ())


def start():
    """Feeder loop: move URLs from the redis "url" list into the local queue.

    Polls once per second when the list is empty; never returns.
    """
    global queue
    global redis_conn
    while True:
        try:
            url = redis_conn.lpop("url")
            if url is None:
                time.sleep(1)
            else:
                queue.put(url)
        except Exception as e:
            log.logger.info(str(e))


if __name__ == "__main__":
    GLobalVal()
    Init()

    start()
