#!/usr/bin/python
#coding=utf-8
import sys
import os
reload(sys)
sys.setdefaultencoding("utf8")
import requests
from pyquery import PyQuery as pq
import re
from timeit import Timer
import threading

# --- Crawler configuration and shared state ---
pNum = 100            # number of worker threads started per batch
total = 0             # pages successfully processed (updated by worker threads)
successNum = 0        # files successfully downloaded
listUrls = []         # all tab-page URLs to crawl (duplicate assignment removed)
errorListUrls = []    # URLs that answered with a non-200 status
abandonListUrls = []  # URLs abandoned after repeated network errors
localDir = '/Users/mac/Pictures/zms/'  # download target directory
resultUrls = []       # resolved direct file URLs

def downloadImageFile(imgUrl, index, title, ext):
    """Download imgUrl into localDir as '<index>.<title><ext>'.

    Increments the global successNum counter on success.  Retries at most
    3 times on any error; the original retried forever, which could hang a
    worker thread on a permanently broken URL (requestUrl already caps its
    retries at 3, so this makes the two consistent).
    """
    global successNum
    attempt = 0
    while True:
        try:
            # stream=True so the body is read in chunks rather than all at once
            r = requests.get(imgUrl, stream=True, timeout=10)
            with open('%s%s.%s%s' % (localDir, index, title, ext), 'wb') as f:
                # 64 KiB chunks; the original used chunk_size=1024000000
                # (~1 GB), which defeats the purpose of streaming
                for chunk in r.iter_content(chunk_size=64 * 1024):
                    if chunk:  # filter out keep-alive new chunks
                        f.write(chunk)
            r.close()  # release the connection back to the pool
            successNum = successNum + 1
            print('%s:%s下载完成,成功(%d)' % (index, title, successNum))
            break
        except Exception as e:
            # e.message is deprecated (and gone in Python 3); print str(e)
            print(str(e))
            attempt = attempt + 1
            print('下载%s:%s超时，进行第%s重试' % (index, title, attempt))
            if attempt > 2:
                break  # give up after 3 failed attempts

def requestUrl(url):
    """Fetch one tab page, extract the linked file URL and download it.

    Retries up to 3 times on network errors.  Non-200 responses are
    recorded in errorListUrls; URLs that keep failing are recorded in
    abandonListUrls.  Successfully resolved file URLs are appended to
    resultUrls and handed to downloadImageFile.
    """
    i = 0  # retry counter
    global total
    while True:
        try:
            response = requests.get(url, timeout=10)
            response.encoding = 'utf-8'
            if response.status_code == 200:
                doc = pq(response.text)
                zurl = doc(".notice").next().find('a').attr('href')
                index = re.findall(r'\d+', url)[0]  # page index taken from the URL
                if zurl is None:
                    # .attr() returned None: the entry is plain text, nothing
                    # to download.  (The original detected this by letting
                    # None.rfind raise and catching the exception.)
                    print('是文本忽略下载')
                    break
                ext = zurl[zurl.rfind('.'):]  # file extension, dot included
                title = doc('title').text().split('-')[0].strip().replace(r'/', r'|')
                if zurl:
                    resultUrls.append(zurl)
                    downloadImageFile(zurl, index, title, ext)
                    # NOTE(review): 'total = total + 1' from many threads is
                    # not atomic; the count may drift slightly under load.
                    total = total + 1
                    print('完成%d个,进度%d%%' % (total, int(float(total) / 22964 * 100)))
                    break
                break
            else:
                print('url:%s访问失败,失败原因:%d' % (url, response.status_code))
                errorListUrls.append(url)
                break
        except Exception as e:
            print(str(e))
            if i > 2:
                # three retries exhausted: abandon this URL
                print('遇到错误的url:%s，丢弃' % (url))
                abandonListUrls.append(url)
                print('目前错误url总数:%d' % (len(abandonListUrls)))
                break
            i = i + 1
            print('url:%s访问超时,正在重试第%s' % (url, i))


def crawlerZanMeiShi():
    """Crawl all zanmeishi.com tab pages, pNum threads at a time."""
    # Pages are numbered 1..22964 (the progress report divides by 22964).
    # range() excludes its stop value, so use 22964 + 1 — the original
    # range(1, 22964) silently skipped the last page.
    for i in range(1, 22964 + 1):
        listUrls.append('http://www.zanmeishi.com/tab/%s.html' % (i))
    # Split the URL list into batches of pNum; one thread per URL per batch.
    jobs = [listUrls[i:i + pNum] for i in range(0, len(listUrls), pNum)]
    for job in jobs:
        threads = []
        for url in job:
            thread = threading.Thread(target=requestUrl, args=(url,))
            threads.append(thread)
            thread.daemon = True
            thread.start()
        # Wait for the whole batch to finish before starting the next one.
        for t in threads:
            t.join()






if __name__ == '__main__':
    #crawlerZhaoGePu()
    try:
        # Time a single full crawl run.
        t = Timer(stmt="crawlerZanMeiShi()", setup="from __main__ import crawlerZanMeiShi")
        print('by requests: %s seconds' % t.timeit(number=1))
    except KeyboardInterrupt:
        # Dump progress on Ctrl-C.  'with' guarantees every file is closed —
        # the original opened result.txt but never closed it (resource leak).
        with open('error.txt', 'a') as error_file:
            error_file.writelines([line + '\n' for line in errorListUrls])
        with open('result.txt', 'a') as result_file:
            result_file.writelines([line + '\n' for line in resultUrls])
        with open('abandonHanle.txt', 'a') as abandon_file:
            abandon_file.writelines([line + '\n' for line in abandonListUrls])
        sys.exit(0)
    print('finished!')

    #crawlerZanMeiShi()
