# -*- coding: utf-8 -*- 

import urllib2
import cookielib
from BeautifulSoup import BeautifulSoup,SoupStrainer
import time
import MySQLdb
import re

import m_main
import m_product



class Spider(object):
    """Base class for all spiders (crawler page parsers).

    A subclass's constructor parses one downloaded page and fills in:
      * ``products`` — parsed item objects (e.g. Customer records);
      * ``tasks``    — follow-up crawl Task objects discovered on the page.
    """

    def __init__(self):
        # Both collections start empty; subclasses append during parsing.
        self.products = []
        self.tasks = []
        
class Spiders_Tmall_Findcustomer(Spider):
    """Spider for Tmall/Taobao 'find customer' pages.

    Parses a user's home page (URLs ending in '.htm') into an updated
    Customer record, extracts every fan listed on the page as a new
    Customer, and queues a follow-up Task for the next result page.

    :param netreturn: project download result; ``.contents`` is the raw
        HTML body and ``.task`` the originating task (``.url``,
        ``.refdata``) — contract per m_main, TODO confirm.
    """

    def __init__(self, netreturn):
        Spider.__init__(self)
        contents = netreturn.contents
        # NOTE: Tmall pages are GBK-encoded while BeautifulSoup defaults
        # to utf-8; parsing works on the raw bytes as downloaded.
        if netreturn.task.url[-4:] == '.htm':
            # A '.htm' URL is a user home page: scrape the profile block
            # and record it as an update of an existing customer.
            customer = m_product.Customer()
            customer._insertupdate = 'update'
            customer.fullname = netreturn.task.refdata['customerName']
            links = SoupStrainer('div', {'class': 'hp-user-info'})
            soup = BeautifulSoup(contents, links)
            time.sleep(1.5)  # crude throttle between profile parses
            # Some users expose no user id in this widget; in that case
            # the lookup below raises and the page is skipped upstream.
            customer.userid = soup.find('span', {'class': 'action sns-widget-friendFollow follow-ui-green '})['data-userid']
            customer.usercode = re.search(r'http://my.taobao.com/(.*?)/', str(soup.find('li', {'class': 'atten-item fans'}).a['href'])).group(1)
            try:
                # Buyer credit level is encoded in the rank image filename.
                customer.buycredit = re.search(r'_(.*?_.)\.', str(soup.find('div', {'class': 'bd'}).find('img')['src'])).group(1)
            except (AttributeError, TypeError, KeyError):
                pass  # best-effort: rank image missing on some profiles
            try:
                customer.sex = str(soup.find('div', {'class': 'line'}).contents[0]).replace('\t', '').replace('\r\n', '')
            except (AttributeError, TypeError, IndexError):
                pass  # best-effort: gender line absent for some users
            # Birthday widget is optional; fill whatever parts are present.
            soup2 = soup.find('span', {'id': 'J_HomePageConstellation'})
            try:
                customer.born_month = soup2['data-month']
                customer.born_day = soup2['data-day']
                try:
                    customer.postcode = soup2.contents[0]
                except (AttributeError, IndexError):
                    pass  # constellation text missing
            except (AttributeError, TypeError, KeyError):
                pass  # no birthday widget at all
            customer.usetag = 'y'
            # Fan / follow counts live in a separate page fragment.
            links3 = SoupStrainer('div', {'class': 'fans-wrap'})
            soup3 = BeautifulSoup(contents, links3)
            customer.fans_count = re.search('\((.*?)\)', str(soup3.find('li', {'class': 'item s-active '}).a.contents[0])).group(1)
            customer.follows_count = re.search('\((.*?)\)', str(soup3.find('li', {'class': 'item '}).a.contents[0])).group(1)
            self.products.append(customer)

        # Every page (home page or fan-list page) may carry a fan list:
        # each entry becomes a brand-new Customer record.
        link_detail = SoupStrainer('div', {'class': 'fans-list clearfix'})
        soup_detail = BeautifulSoup(contents, link_detail)
        if soup_detail is not None:
            for tt in soup_detail.findAll('div', {'class': 'fans-item'}):
                customer2 = m_product.Customer()
                customer2._insertupdate = 'insert'
                # The user id is embedded in the avatar image query string.
                customer2.userid = re.search(r'userId=(.*?)&', str(tt.find('div', {'class': 'bd'}).find('a', {'class': 'pic'}).img['src'])).group(1)
                try:
                    customer2.fullname = tt.find('div', {'class': 'bd'}).find('a', {'class': 'name'}).contents[0]
                except (AttributeError, TypeError, IndexError):
                    pass  # best-effort: display name sometimes missing
                # Drop the fixed 'http://my.taobao.com/' prefix (21 chars).
                customer2.usercode = tt.find('div', {'class': 'bd'}).find('a', {'class': 'pic'})['href'][21:]
                try:
                    customer2.buycredit = re.search(r'_(.*?_.)\.', str(tt.find('div', {'class': 'bd'}).find('img', {'class': 'rank'})['src'])).group(1)
                except (AttributeError, TypeError, KeyError):
                    pass  # best-effort: rank image missing
                self.products.append(customer2)

        # Queue the next result page when the pager has a 'next' link.
        link_next = SoupStrainer('p', {'class': 'pg-list'})
        soup_link = BeautifulSoup(contents, link_next)
        task_next = m_main.Task()
        task_next.ordertag = 1
        task_next.todownloader = 'Downloader_Common'
        task_next.to_spider = 'Spiders_Tmall_Findcustomer'
        try:
            task_next.url = soup_link.find('a', {'class': 'next'})['href']
            self.tasks.append(task_next)
        except (AttributeError, TypeError, KeyError):
            pass  # last page: no 'next' link
        
class Spiders_Findcustomer_Judgerecord(Spider):
    """Stub spider: currently only dumps the downloaded page to stdout.

    Produces no products or tasks yet; presumably a placeholder for a
    record-judging parser — TODO confirm intended behavior.
    """
    def __init__(self,netreturn):
        Spider.__init__(self)
        global RUNDATE# used to record the task's timestamp (declared but never assigned here)
        # netreturn: project download result carrying the raw page body in
        # .contents — assumed contract per m_main, TODO confirm.
        contents=netreturn.contents
        print contents
        


            
   
        
                