#coding:utf-8
import urllib, urllib2
from lxml import etree
from Queue import Queue
import threading
import random
import time
import os


queue = Queue()

class YintaiProduce(threading.Thread):
    query = raw_input('please input keyword:  ')
    query = query.strip().replace(' ', '%20')
    # query = u'丝袜'
    # query = query.encode('gbk')
    start_urls = "http://www.yintai.com/product/search.aspx?keyword=%s" % query.decode('gbk').encode('utf-8')
    count = 0
    errnum = 0
    
    def run(self):
        global queue
        self.getlist()
        
    def getlist(self, url=start_urls):
        YintaiProduce.count += 1
        print 'This is page:', YintaiProduce.count
        try:
            content = urllib2.urlopen(url).read()
            tree = etree.HTML(content)
            pic_urls = tree.xpath('//div[@class="p-listImgBig"]/a/img/@src|//div[@class="p-listImgBig"]/a/img/@img-src')
            if pic_urls:
                for pic in pic_urls:
                    print "Proceduced", pic
                    queue.put(pic)
                    time.sleep(random.random())
                    
            else:
                print 'No pics in %s' %url
        
            next_url = tree.xpath('//div[@class="p-page"]/div/form/a[@class="b-p-down"]/@href')
            if next_url:
                next_url = 'http://www.yintai.com' + next_url[0]
                self.getlist(next_url)
            else:
                print 'All page down!'
                while True:
                    if queue.empty():
                        'queue is empty , exit'
                        time.sleep(60)
                        os._exit(0)
                
        except Exception as e:
            print e, url
            YintaiProduce.errnum += 1
            if YintaiProduce.errnum <5 :
                print 'Retry 5 times'
                os._exit(0)
            else:
                self.getlist(url)

        
class Yintaidownpic(threading.Thread):
    picnum = 0
    
    def run(self):
        global queue
        while True:
            url = queue.get()
            queue.task_done()
            self.downpic(url)
            time.sleep(random.random())
        
    def downpic(self, url):
        Yintaidownpic.picnum += 1
        print '=================downloading %s ==========' % Yintaidownpic.picnum
        try:
            pic_name = url.split('/')[-1].encode('utf-8')
            pic_url = url.replace('bigimage', 'zoomimage').encode('utf-8') 
            urllib.urlretrieve(pic_url, 'd:\\pic\\'+pic_name)
        except:
            pass
    
if __name__ == '__main__':
    # Make sure the download directory exists before any worker writes to it.
    save_dir = 'd:\\pic'
    if not os.path.exists(save_dir):
        os.makedirs(save_dir)

    # One producer crawls the result pages and fills the queue...
    producer = YintaiProduce()
    producer.start()

    # ...while five consumers drain it concurrently.
    for _ in range(5):
        consumer = Yintaidownpic()
        consumer.start()
