 #!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import with_statement
from BeautifulSoup import BeautifulSoup
from datetime import datetime
import threading, re, urlparse, urllib
from cStringIO import StringIO
import traceback, string
import copy
from threading import Semaphore, Thread
try:
    import cPickle as pickle
except:
    import pickle




#from local
import os
import sys
sys.path.append("..")
import lib 
from lib.utils import Logger, DummyLogger, transcode5, timeago
from lib.parameter import *
from lib.database_qzone import Database
from lib.network import clean_url, fetch_url3
from lib.fetch_new_aio import fetch_url
# Global count of blog posts committed across all bots (only incremented
# inside a disabled debug section of UpdateBot.processDetailPage).
blogs_counter = 0

def add_new_user_ids(new_user_ids):
    """Append newly discovered user ids to the shared crawl queue.

    Relies on module-level `queue` and `queuelock` — both are commented
    out at the bottom of this file, so this helper is dead/legacy code
    as-is; TODO confirm where the globals are meant to come from.
    Only fills the queue while it holds fewer than 100 ids.
    """
    if new_user_ids:
        with queuelock:
            if len(queue) < 100:
                for newid in new_user_ids:
                    if newid not in queue:
                        queue.append(str(newid))
            else:
                # NOTE(review): rebinding the local parameter has no effect
                # outside this function — overflow ids are simply dropped.
                new_user_ids = []

class UpdateBot(threading.Thread):
    def __init__(self, uid,num_blogids, logger, dal,queue_no,queue,user,queuelist,bots_semaphore,updatePolicy):
        self.uid = uid
        self.crawledblogids=num_blogids
        self.logger = logger
        self.name = "Updatebot" +":"+ str(queue_no)+":"+str(uid)
        self.preUrl = 'http://b.qzone.qq.com/cgi-bin/blognew/simpleqzone_blog_title?hostuin='
        self.status='No_New_post'
        self.last_time = datetime.now()
        self.user_name = ''
        self.articleNum = 1000
        self.blog_counter = 0
        self.blogtimes=[]
        self.new_user_ids = set()
        self.dal = dal
        self.queue_no=queue_no
        self.user=user
        self.queue=queue
        self.queuelist=queuelist
        self.bots_semaphore=bots_semaphore
        self.updatePolicy=updatePolicy
        threading.Thread.__init__(self, name = self.name)

    def __del__(self):
        del self.dal
    def setuid(self,uid):
        self.uid=uid
    def crawl(self):
        uid = str(self.uid)
        if(0):
         #message = self.dal._checkUser(uid)
         #print message
         if message == "haveCrawded":
             self.logger.log('>ALREADY crawled uid ' + uid, self.name)
             return True

         elif message == "haveCrawded_none":
             self.logger.log('>Without blogs last time uid ' + uid, self.name)
             return True

        #self.logger.log('>Update_CRAWL uid ' + uid, self.name)
        baseUrl = self.preUrl  + uid
        content = fetch_url3(baseUrl, logger = self.logger, retries = 2)
        #print content
        self.processIndex(content, uid, baseUrl)
        #temp = fetch_url(baseUrl, [self.processIndex,2, baseUrl, uid])

    def getPage(self, url, soup):
        links = soup.findAll('span', attrs={"class" : "list_tit"})
        for link in links:
            self.processLink(link)
            #print str(link)

    def GetUpdateStatus(self):
         return self.status
    def processLink(self, link):
        # 1. title detailed blogid
        uid = str(self.uid)
        _a = link.findAll('a')[0]
        if not _a.contents:
            title = 'no title'
        else:
            title = _a.find(text = True)
            if not title:
                title = 'no title'
        if hasattr(_a, 'href'):
            url = transcode5(_a['href'])
            blog_id = re.search("(?<=blogid=)\d+",url)
            if blog_id:
                blog_id = blog_id.group(0)

        # 2. get detailed page
        if not self.dal._checkBlog(uid, blog_id):
             
            #temp = fetch_url(url,[self.processDetailPage,3, url, blog_id, title])
            content = fetch_url3("'"+url+"'", logger = self.logger, retries = 2)
            self.processDetailPage(content, url, blog_id, title)
            self.status='Updated'
        else :
           pass
           #self.logger.log(str(uid)+" ," +str(blog_id) +" already crawled", self.name)
           
    def processDetailPage(self,detailpage, url, blog_id, title):
        self.logger.log(">crawl article " + transcode5(title) + " " + transcode5(url), self.name)
        uid = str(self.uid)
        authorid = uid
        available = True
        detailpage = transcode5(detailpage)
        if detailpage:
            detailpageSoup = BeautifulSoup(detailpage, fromEncoding='utf-8')
            _nbadpage = detailpageSoup.body.findAll('div', attrs={"class" : "container"})
            if _nbadpage:
                 # 3.0 get the time
                tempSoup = _nbadpage[0].findAll('p', attrs={"class" : "blog_info"})
                timesoup = tempSoup[0].findAll('span', attrs={"class": "date"})
                if timesoup:
                    timestr = transcode5(timesoup[0].contents[0]) #_oa = _linktags[0].findAll('a')
                    _t2 = re.search(r"(\d+)[^\d]+(\d+)[^\d]+(\d+)", timestr)
                    year = int(_t2.group(1))
                    month = int(_t2.group(2))
                    day = int(_t2.group(3))
                    time = datetime(year, month, day)
                else:
                    time = datetime(1970, 1, 1)
                num_comment = int(tempSoup[0].contents[3].contents[0])
                num_read = int(tempSoup[0].contents[5].contents[0])
                 # 3.2 get blog_text
                tempSoup = _nbadpage[0].findAll('div', attrs={"class" : "blog_cont"})
                content = tempSoup[0].findAll('p')
                plaintext = ''

                for txt in content:
                    if txt.find(text=True):
                        tmp = txt.findAll(text=True)
                        for x in tmp:
                            plaintext += ''.join(string.strip(x))

                 # 4. write into db
                self.blogtimes.append(time)
                self.dal._commitBlog(blog_id, authorid, title, time, num_comment, num_read, str(tempSoup[0]), transcode5(plaintext))
                self.blog_counter += 1
                if self.articleNum < self.blog_counter + 1:
                     #self.status="updated"
                     self.dal._finishUser(uid)
                #self.dal._commitUser2Blog(uid, blog_id)
                
                if(0):
		        import time
		        global blogs_counter
		        blogs_counter += 1
		        if blogs_counter == 1:
		            global time_counter
		            global time_counterb
		            time_counter = datetime.now()
		            time_counterb = datetime.now()
		        if blogs_counter % 200 == 2:
		            n = datetime.now()
		           # self.logger.warn(str(counter_qq) + " " +str(blogs_counter) + " " + str(n-time_counter) + " " +
		            #                 str((blogs_counter)/(0.002 + time.mktime(n.timetuple())-time.mktime(time_counter.timetuple()))) + " "
		              #             + str(50.0/(0.002 + time.mktime(n.timetuple())-time.mktime(time_counterb.timetuple()))), self.name)
		            time_counterb = n
                 # 5. add new user
                newuserSoup = detailpageSoup.findAll('script', attrs={"type" : "text/javascript"})
                newuserList = re.findall('(?<="replyuin":)\d+',str(newuserSoup))
                for newuserid in newuserList:
                     #print "newuserid",newuserid
                     self.new_user_ids.add(newuserid)
                #add_new_user_ids(self.new_user_ids)
            else:
                available = False
    def new_uids(self):
        return self.new_user_ids
    def processIndex(self, content, uid, baseUrl):
        content = transcode5(content)
        if not content:
            self.logger.log('>NO uid ' + str(uid), self.name)
            self.dal._commitUserNoBlog(self.uid) 
            self.status='blank'
            return 'blank'
        # we could safely assume the page from shuqian.qq.com is encoded in utf-8
        soup = BeautifulSoup(content, fromEncoding='utf-8')
        #print soup.prettify()

        badgearea = soup.find('div', attrs={"class": "error_box"})
        if badgearea:
            self.logger.log(">DIDN'T register qzone service yet user uid " + str(uid), self.name)
            self.dal._commitUserNoBlog(self.uid)
            self.status='blank'
            return 'blank'

        # 1 profile
        [uName, qzoneName, qzoneDescription, articleNum] = self.getProfile(soup)
        if uName == False:
           self.status='No_This_user'
           return False
        self.articleNum = articleNum
        #self.logger.log('articleNums    present:'+str(self.articleNum) +"  past:"+str(self.crawledblogids), self.name)
        if(self.articleNum<=self.crawledblogids):
            #self.logger.log('No new article for '+str(self.uid), self.name)
            self.status='No_New_post'
            return "-1"
        self.dal._commitUser(uid, uName, qzoneName, qzoneDescription, articleNum, done = False)   
        self.crawlProfile()
        self.user_name = uName

        # 2 handle the very first page
        self.getPage(baseUrl, soup)

        # 3 handle other pages
        pagenum = soup.findAll('p', attrs={"class": "total_page"})
        pagenum = re.search(".*?(\d+).*(\d+).*(\d+)",str(pagenum))
        maxpageNum = int(pagenum.group(3))
        basepageurl = "http://b.qzone.qq.com/cgi-bin/blognew/simpleqzone_blog_title?hostuin=" + uid + "&pos="
        for i in range(1, maxpageNum):
            
            pageUrl = basepageurl + str(i*20)
            content2 = fetch_url3("'" +pageUrl + "'", logger = self.logger, retries = 2)
            self.processPage(content2, pageUrl)
            #temp = fetch_url(pageUrl, [self.processPage,1,pageUrl])
        
        return True

    def processPage(self, pageContent, pageUrl):
        pageContent = transcode5(pageContent)
        if not pageContent:
            self.logger.log('>FAILED TO crawl the page %s' % pageUrl, self.name)
            self.dal._finishUser(str(self.uid), status=False)
            return False
        pageSoup = BeautifulSoup(pageContent, fromEncoding='utf-8')
        self.getPage(pageUrl, pageSoup)

    def getProfile(self, soup):
        # section I badge
        # 1.1 uname
        headarea = soup.findAll('meta')[3]['content']
        if not headarea:
            uName = "no name"
        else:
            uName = transcode5(headarea[0])
        
        # 1.2 qzoneName
        if soup.head.title.string:
              qzoneName = transcode5(re.search("(.*?) .*?", soup.head.title.string).group(0))
        else:
              qzoneName = "no qzone name"

        # 1.3 qzoneDescription
        badgearea = soup.findAll('meta')[5]['content']
        if not badgearea:
            qzoneDescription = "no qzone description"
        else :
            qzoneDescription = transcode5(badgearea[0])

        # 1.4 article num
        badgearea = soup.find('span', attrs={"class": "em"})
        if not badgearea:
            self.dal._finishUser(str(self.uid), status=False)
            self.status="Not_finished"
            return [False,False,False,False]
        articleNum = int(badgearea.string)
        return [uName, qzoneName, qzoneDescription, articleNum]

    def crawlProfile(self):
        uid = str(self.uid)
        if self.dal._checkUser(uid) == True:
            self.logger.log('>ALREADY crawled uid ' + uid, self.name)
            return True

        #if self.dal._checkNoneUser(uid) == True:
        #    self.logger.log('>ALREADY crawled uid ' + uid + ' and failed', self.name)
        #    return True

        self.logger.log('>CRAWL uid ' + uid, self.name)
        baseUrl = self.preUrl  + uid
        content = fetch_url3(baseUrl, logger = self.logger, retries = 2)
 
        content = transcode5(content)
        content = content.replace("'", "\\'")
        t= content[11:-3]

        info_all = json.read(t)
        
        age = re.findall('(?<="age":)\d+', content)
        if not age :
            self.logger.log("fail to crawl " + uid, self.name)
            return False

        sex=info_all['sex']

        if sex == -1:
           self.logger.log(uid + "  has the sex equals -1, no detail message", self.name)
           return False
        #cr=re.findall('(?<=cr":)\d+', content)[0]
        #if cr == 0 :
            #self.logger.log(uid + "  has the cr equals 0, no detail message", self.name)
            #return False
        #print content

        str_info = re.findall('.*?"(.*?)".*?', content)
        uin=info_all['uin']
        nickname= info_all['nickname']
        age=age[0]
        birthday=info_all['birthday']
        bloodtype=info_all['bloodtype']
        country=info_all['country']
        province=info_all['province']
        city=info_all['city']
        hco=info_all['hco']
        hp=info_all['hp']
        hc=info_all['hc']
        marriage=info_all['marriage']
        spacename=info_all['spacename']
        descri=info_all['desc']
        interest=info_all['interest']
        career=info_all['career']
        company=info_all['company']
        cco=info_all['cco']
        cp=info_all['cp']
        cb=info_all['cb']
        needpage=-1#needpage=re.findall('(?<=needpage":)\d+', content)[0]
        hcr=-1#hcr=re.findall('(?<=hcr":)\d+', content)[0]	
        mb=-1#mb=re.findall('(?<=mb":)\d+', content)[0]	
        cm=-1#cm=re.findall('(?<=cm":)\d+', content)[0]
        cr=-1
    
        birthday_info = re.search("(\d+)[^\d](\d+)", birthday)
        year = info_all['birthyear']
        month = int(birthday_info.group(1))
        day = int(birthday_info.group(2))
        birthday = date(year,month,day)

        if bloodtype:
            if bloodtype == 0 :  bloodtype = "O"
            elif bloodtype == 1 :  bloodtype = "A"
            elif bloodtype == 2 :  bloodtype = "B"
            elif bloodtype == 3 :  bloodtype = "AB"
            else:  bloodtype = "EE"
        else:  bloodtype = "EE"

        if sex == "1" : sex = "male"
        elif sex == "2" : sex = "fema"
        else : sex = "else"
#        self.logger.log("uin" + str(uin) + "\nnickname" +str(nickname)+"\n sex"+str(sex)+"\n age"+str(age)+"\n birthday"+str(birthday)+
#            "\n bloodtype"+str(bloodtype)+"\n country"+str(country)+" province"+str(province)+"\n city"+str(city)+"\n hco"+str(hco)+
#            "\n hp"+str(hp)+"\n hc"+str(hc)+"\n marriage"+str(marriage)+" spacename"+str(spacename)+"\n descri"+str(descri)+
#            "\n interest"+str(interest)+"\n career"+str(career)+"\n company"+str(company)+"\n cco"+str(cco)+"\n cp"+str(cp)+
#            "\n cb"+str(cb)+"\n cr"+str(cr)+"\n needpage"+str(needpage)+"\n hcr"+str(hcr)+"\n mb"+str(mb)+"\n cm"+str(cm), self.name)

        self.dal._commitProfile(str(uin),nickname,sex,int(age),birthday,bloodtype,country,province,city,hco,hp,
                       hc,int(marriage),spacename,descri,interest,career,company,cco,cp,cb,int(cr),int(needpage),int(hcr),int(mb),int(cm))
        self.dal._finishProfile(uin)

    def _printLink(self, link):
        for i, e in enumerate(link):
            if i == 3:
                for t in link[i]:
                    self.logger.log(t, self.name)
            else:
                print e

    def doit(self):
        ret = 'fail'
        self.new_user_ids=set()
        try:
            self.logger.log("START crawling %s" % self.uid)
            ret = self.crawl()
        except Exception, e:
            sio = StringIO()
            traceback.print_exc(file=sio)
            self.logger.warn('Bot throw exception abnormally', self.name)
            self.logger.warn(sio.getvalue(), self.name)
            self.logger.warn(str(e), self.name)
            sio.close()

        if ret == True:
            #print "True:"
            #parameter.size+=1
            pass
            #add_new_user_ids(self.new_user_ids)
        elif ret == 'blank' :
            #print "blank"
          
            self.dal._commitUserNoBlog(self.uid)
        else:
            #print "nothing"
            #parameter.size+=1
            #print "other wise"
            #self.status="Not_finished"
            self.dal._finishUser(str(self.uid), status=False)

    def botdoaftercrawl(self):
	    length=0
            for i in range(0,len(self.queuelist)):
               print "list ",i,": length:",len(self.queuelist[i])
               length+=len(self.queuelist[i])
            print "Total length:",length

	    listsize=len(self.updatePolicy)+2
	    user=copy.deepcopy(self.user)
            #self.logger.log("update_status:"+self.uid+":"+self.status,self.name )
	    #actiontable={"updated":(1/1.2),"No_New_post":1.2}
	    if(self.status =="updated"):
		        self.user[5]=int(float(self.user[5])*1.2)
	    elif(self.status =="No_New_post"):
		        self.user[5]=int(float(self.user[5])/1.2)
	    elif(self.status =="Not_finished"):
		        self.user[3]=False
	    else:
		   try:
                    self.queue.remove(user)
                   except Exception,e:
                    pass
		   return
	    if(self.status== 'blank' or self.status=='No_This_user'):
                       self.queue.remove(self.user)  
                       return
	    if(not self.user[3]):
		      if(not self.queue_no==0):
		       self.logger.log("move "+str(self.user[1])+" from queue:"+str(self.queue_no)+" to queue"+str(0),self.name)
                       print "length:",len(self.queue)
		       self.queuelist[0].appendleft(self.user)
                       print "length:",len(self.queue)
		       self.queue.remove(self.user)
		       return
		     
	    for i in range(0,listsize-2):
		     if (self.user[5]<=self.updatePolicy[i]):
		           if(not i+1==self.queue_no):
				   self.logger.log("move "+str(self.user[1])+" from queue:"+str(self.queue_no)+" to queue"+str(i+1),self.name)
				   #print "move ",self.user[1]," from queue:",self.queue_no," to queue",i+1
                                   print "length:",len(self.queue)
				   self.queuelist[i+1].appendleft(self.user)
                                   print "length:",len(self.queue)
				   self.queue.remove(self.user)
				   return
		          
	    if(not (listsize-1)==self.queue_no):
		     #print "move ",self.user[1]," from queue:",self.queue_no," to queue",listsize-1
                     self.logger.log("move "+str(self.user[1])+" from queue:"+str(self.queue_no)+" to queue"+str(listsize-1),self.name)
				   #print "move ",self.user[1]," from queue:",self.queue_no," to queue",i+1
                     print "length:",len(self.queue)
		     self.queuelist[listsize-1].appendleft(self.user)  
                     print "length:",len(self.queue)
		     self.queue.remove(self.user)
		     return
            
    def run(self):
        #global bots_semaphore
        self.doit()
        try:
         self.botdoaftercrawl()
        except Exception,e:
         pass
        self.bots_semaphore.release()

#bots_semaphore = threading.Semaphore(32)
#queue = []
#queuelock = threading.RLock()
# Per-queue counters, one slot for each of the six scheduling queues.
countlist = [0] * 6
def crawl(logpath = None):
    """Main crawl loop: feed qq ids to worker bots.

    Pulls ids from the shared `queue` when available, otherwise counts up
    from 100000, and starts one `Bot` per id, throttled by
    `bots_semaphore`.

    NOTE(review): `Bot`, `queue` and `bots_semaphore` are not defined in
    this file (the globals are commented out above) — this entry point
    looks like dead/legacy code; confirm before calling it.
    """
    logger = Logger(logpath) if logpath else DummyLogger()
    dal = Database()
    currentid, endid = 100000, 10000000000
    global counter_qq
    counter_qq = 0
    while currentid < endid:
        if queue:
            # prefer ids discovered during earlier crawls
            qqid = str(queue[0])
            del queue[0:1]
        else:
            qqid = str(currentid)
            currentid += 1
        counter_qq += 1
        bot = Bot(qqid, logger, dal)
        bots_semaphore.acquire()
        bot.start()

#crawl('./crawlqzone_new.log')
def testbot(logpath = None):
   """Smoke test: update a single fixed uid (100029) and print the result.

   NOTE(review): broken as written — `queue_no`, `user`, `queuelist`,
   `bots_semaphore` and `updatePolicy` are undefined here, and the
   UpdateBot constructor also expects a `queue` argument that is not
   passed (9 positional args given, 10 required); fix before using.
   """
   logger = Logger(logpath,True) if logpath else DummyLogger()
   dal = Database()
   currentid, endid = 100000, 10000000000
   qqid=100029
   global counter_qq
   counter_qq = 0
   
   bot = UpdateBot(qqid,4, logger, dal,queue_no,user,queuelist,bots_semaphore,updatePolicy)
   bot.doit()
   print bot.GetUpdateStatus()
   print bot.blog_counter  
   print bot.articleNum
   print bot.blogtimes
#testbot('./crawlqzone_new.log')
 
