# -*- coding: utf-8 -*-
'''
Created on 2014-04-10

@author: george
'''

from Queue import Queue
from bs4 import BeautifulSoup
from logger import Logger
from threading import Thread
from traceback import print_exc
import json
import re
import time
import urllib2
import urlparse

from receiveMail import Email_Receiver
from sendmail import send_qiushi_email
from dbmgr import DBMgr


#shared module-level logger, configured from log.conf section "server"
log = Logger.instance()
log.init("log.conf","server")

#USER_AGENT= "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:28.0) Gecko/20100101 Firefox/28.0"
#browser User-Agent sent with every crawl request so the site serves normal pages
USER_AGENT= "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.153 Safari/537.36"

def _http_request(url, headers='', data='', method=None):
    """Perform an HTTP request and return the response object.

    url:     request URL, string
    headers: optional HTTP request headers, dict
    data:    optional POST body, dict; JSON-encoded before sending
    method:  optional HTTP verb override, e.g. 'PUT' or 'DELETE'

    Returns the urllib2 response on success, or None when the request
    fails (the failure is logged).
    """
    req = urllib2.Request(url)
    if headers and isinstance(headers, dict):
        for key, val in headers.items():
            req.add_header(key, val)

    if data:
        #the body is JSON, so advertise that unless the caller already
        #set a Content-Type of their own (add_header capitalizes keys)
        if not req.has_header('Content-type'):
            req.add_header('Content-Type', 'application/json')
        req.add_data(json.dumps(data))

    #method may be 'PUT' or 'DELETE'; urllib2 natively knows only GET/POST
    if method:
        req.get_method = lambda: method

    res = None
    #request to url
    try:
        res = urllib2.urlopen(req, timeout=30)
    except (urllib2.HTTPError, urllib2.URLError) as e:
        #HTTPError carries .code, URLError carries .reason; prefer the
        #status code so HTTP failures are not masked by a generic reason
        msg = getattr(e, 'code', None) or getattr(e, 'reason', "unknown reason")
        log.error("access %s failed as [%s]" % (url, msg))

    #None when the request failed, otherwise a file-like response object
    return res


class QiushiSpider(Thread):
    """Crawler thread for www.qiushibaike.com.

    All spider instances share the class-level queues, so several
    threads can cooperate on one crawl: url_factory holds URLs still to
    visit, url_accessed records every URL already dispatched.
    """

    base = "http://www.qiushibaike.com"

    #shared between all spider threads
    url_accessed = Queue()
    url_factory = Queue()
    #seed the crawl with the site root (the queue was just created, so
    #it is necessarily empty here -- no emptiness check needed)
    url_factory.put(base)

    qiushi = []        #NOTE(review): appears unused in this file; kept for compatibility
    qiushi_saved = []  #titles already handed to the manager, used for dedup

    def __init__(self, qiushi_mgr, name='spider'):
        """qiushi_mgr: QiushiMgr instance that stores the scraped records."""
        Thread.__init__(self, name=name)
        self.qiushi_mgr = qiushi_mgr

    def run(self):
        """Main crawl loop: pull a URL, fetch and parse it, repeat forever."""
        self.accessed = 0

        while True:
            #blocks until another thread enqueues a URL
            url = self.url_factory.get()

            try:
                log.info("[%s]access url %s,access num is [%d],total accessed is [%d]"
                         %(self.name,url,self.accessed,self.url_accessed.qsize()))
                self.url_accessed.put(url)
                self.accessed += 1

                self._handler(url)

            except Exception as e:
                log.error("access failed as %s" % e)
                print_exc()

            #throttle so we stay polite to the site
            time.sleep(2)

    def _handler(self, url):
        """Fetch one page, store its qiushi, enqueue any new on-site links."""
        html = self._normal_get(url)
        if not html:
            return

        urls = self._parse_html(html)

        for _file in urls:
            if not _file:
                continue

            #turn relative links into absolute ones
            if "http" not in _file:
                _url = urlparse.urljoin(self.base, _file)
            else:
                _url = _file

            #skip off-site links, javascript pseudo-links and URLs that
            #were already visited (queue scan -- NOTE(review): O(n) and
            #not atomic, but matches the original single-lock usage)
            if urlparse.urlparse(_url).netloc != "www.qiushibaike.com" \
                or 'javascript' in _url \
                or _url in self.url_accessed.queue:
                continue

            #put url to factory
            self.url_factory.put(_url)

    def _normal_get(self, url):
        """GET *url* with a browser User-Agent; return the body or None."""
        headers = {"User-Agent": USER_AGENT}
        response = _http_request(url, headers=headers)
        if response and response.code == 200:
            return response.read()

    def _parse_html(self, html):
        """Collect every <a href> from *html* and save any qiushi found.

        Returns the list of hrefs; records go through self.qiushi_mgr.
        """
        #explicit parser keeps behaviour stable across bs4 installs
        soup = BeautifulSoup(html, "html.parser")

        urls = [link.get('href') for link in soup.find_all('a')]

        for content in soup.find_all('div', class_="content"):
            _title = content.get('title')
            _context = content.get_text(strip=True)
            _img = ''

            #a sibling <div class="thumb"><a><img ...></a></div> holds
            #the picture; .get avoids a KeyError on class-less siblings
            _next = content.find_next_sibling()
            _classes = _next.get('class') if _next else None
            if _classes and _classes[0] == "thumb" and _next.a:
                _img = _next.a.img['src']

            #dedup by title (untitled entries all share the None key --
            #presumably intentional; TODO confirm)
            if _title in self.qiushi_saved:
                continue
            self.qiushi_saved.append(_title)

            self.qiushi_mgr.save(_title, _context, _img)

        return urls


class QiushiMgr(object):
    """Buffers scraped qiushi records and persists them to the database."""

    #database table the records are written to / read from
    table = 'qiushi'

    def __init__(self):
        #producer/consumer queue between spider threads and the DB writer
        self.save_factory = Queue()

    def save(self, title, context, image_path):
        """Queue one scraped record; flag 'n' marks it as new/unread
        (get_qiushi later flips it to 'r')."""
        _record = {'title': title,
                   'context': context,
                   'image': image_path,
                   'flag': 'n'}
        self.save_factory.put(_record)

    def save_to_db(self):
        """Consumer loop: drain the queue, insert each record. Never returns."""
        dbmgr = DBMgr.instance()
        while 1:
            _record = self.save_factory.get()
            try:
                dbmgr.insert(self.table, **_record)
                log.info('save one record to database.')
            except Exception as e:
                #keep consuming on failure, but report through the shared
                #logger instead of printing to stdout
                log.error("save record to db failed as %s" % e)
            time.sleep(2)

    @classmethod
    def get_qiushi(cls, num):
        """Fetch up to *num* unread records and mark each as read ('r').

        Returns a list of (title, context, image) tuples.
        """
        dbmgr = DBMgr.instance()
        qiushi_list = dbmgr.query(cls.table, limit=num, flag='n')

        result_list = []
        for qiushi in qiushi_list:
            _id = qiushi[0]
            dbmgr.update(cls.table, {'flag': 'r'}, **{'id': _id})
            result_list.append((qiushi[1], qiushi[2], qiushi[3]))

        return result_list
        
def get_qiushi(num):
    """Module-level convenience wrapper around QiushiMgr.get_qiushi."""
    return QiushiMgr.get_qiushi(num)

def create_spider(num):
    """Start *num* spider threads sharing one QiushiMgr, plus the DB writer.

    num: number of QiushiSpider threads to launch.
    """
    qiushi_mgr = QiushiMgr()
    for i in range(num):
        spider = QiushiSpider(qiushi_mgr, name="spider%d" % i)
        spider.start()

    #pass the bound method itself -- calling save_to_db() here (as the
    #original did) runs the infinite save loop in this thread and the
    #worker thread is never started
    Thread(target=qiushi_mgr.save_to_db).start()
    


if __name__ == "__main__":
    
    #launch one crawler thread (plus the DB-writer thread it starts)
    create_spider(1)
    
