#encoding:utf8
import urllib
import os
import json
import time

def getLogger(uri, maxSize=1000000):
    """Return a DEBUG-level logger that writes to a rotating file.

    uri: path of the log file.
    maxSize: maximum bytes per file before rotation (backupCount=1000).
    """
    import logging
    from logging.handlers import RotatingFileHandler
    logger = logging.getLogger('A')
    logger.setLevel(logging.DEBUG)
    # logging.getLogger('A') always returns the same logger object, so
    # guard against stacking a duplicate handler on every call.
    if not logger.handlers:
        handler = RotatingFileHandler(uri, maxBytes=maxSize, backupCount=1000)
        handler.setFormatter(
            logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
        logger.addHandler(handler)
    return logger

logger=getLogger('mzt.log')

def Schedule(a, b, c):
    """Progress hook for urllib.urlretrieve: log percent downloaded.

    a: number of blocks transferred so far
    b: block size in bytes
    c: total size of the remote file in bytes; urlretrieve passes -1 (or 0)
       when the server sends no Content-Length, so guard before dividing.
    """
    if c <= 0:
        # Total size unknown: a percentage cannot be computed
        # (the original divided by c and crashed here).
        logger.debug('downloaded %d bytes (total size unknown)' % (a * b))
        return
    per = 100.0 * a * b / c
    if per > 100:
        # Last block usually overshoots the total; clamp to 100%.
        per = 100
    logger.debug(per)
def oneDownload(surl, turl, Schedule):
    """Download surl to local path turl, reporting progress via Schedule.

    Returns (0,) on success, (1,) on failure, so the caller can retry.
    """
    try:
        urllib.urlretrieve(surl, turl, Schedule)
        logger.debug('%s,succeed' % surl)
        return (0,)
    except IOError as e:
        # 'as' form is valid on Python 2.6+ AND Python 3; the original
        # 'except IOError,e' comma form is Python-2-only. Also log the
        # exception itself, which was previously captured but unused.
        logger.debug('%s,failed: %s' % (surl, e))
        return (1,)
def download(surl, turl):
    """Download surl to turl, retrying every 2 seconds until it succeeds."""
    # oneDownload returns (1,) on failure; wait only before a retry.
    # (The original also slept for 2 seconds after a successful download.)
    while oneDownload(surl, turl, Schedule)[0] == 1:
        time.sleep(2)
def consume():
    """Worker: pop url records from the redis queue and download each image.

    Exits when the queue has stayed empty for 3 seconds (producer finished).
    """
    import redis
    import re
    r = redis.Redis(host='localhost', port=6379, db=1)
    while True:
        rec = r.blpop('list:meizitu:urlInfo', timeout=3)
        if rec is None:
            logger.debug('all finished')
            break

        # rec is (list_name, value); the value is one json record.
        urlDict = json.loads(rec[1])
        imgUrl = urlDict['imgUrl'][0]
        title = urlDict['title'][0]

        # Replace characters that are problematic in file names.
        specialChar = '[|]'
        title = re.sub(specialChar, ' ', title)
        # Directory where images for this title are saved.
        directory = 'mzt/%s' % title
        try:
            if not os.path.exists(directory):
                os.makedirs(directory)

            # Number images within the same directory. INCR alone is
            # atomic, so concurrent workers never receive the same index
            # (the original GET-then-INCR raced between the 50 threads).
            r.setnx(title, 0)
            index = int(r.incr(title)) - 1
            turl = '%s/%s.jpg' % (directory, index)
            download(imgUrl, turl)
        except Exception:
            # Narrowed from a bare except so KeyboardInterrupt/SystemExit
            # still propagate; log the traceback and keep consuming.
            logger.debug('error', exc_info=True)
        
def product(fileUrl='D:/Python/desktop/meizitu/meizitu.json'):
    """Producer: read json records (one per line) from fileUrl and push
    each previously-unseen record onto the redis work queue.

    fileUrl: path to the input file; the default preserves the original
             hard-coded location for existing callers.
    """
    import redis
    r = redis.Redis(host='localhost', port=6379, db=1)
    rsetName = 'set:meizitu:urlInfo'    # de-duplication set
    rlistName = 'list:meizitu:urlInfo'  # actual work queue
    with open(fileUrl, 'r') as urlfile:
        for rec in urlfile:
            # SADD returns 1 only when the member is new, so the set
            # guards the list against duplicate entries.
            if r.sadd(rsetName, rec) == 1:
                r.rpush(rlistName, rec)

def togeter():
    """Start one producer thread and WORKER_COUNT consumer threads, then
    wait for every consumer to drain the queue and exit.

    (Name kept as-is — it is referenced by the __main__ guard.)
    """
    import threading
    WORKER_COUNT = 50  # number of concurrent download workers

    producer = threading.Thread(target=product)
    # .daemon works on Python 2.6+ and 3; setDaemon() is deprecated.
    producer.daemon = True
    producer.start()

    workers = []
    for _ in range(WORKER_COUNT):
        thr = threading.Thread(target=consume)
        thr.daemon = True
        thr.start()
        workers.append(thr)
    for thr in workers:
        thr.join()
    
def test():
    """Smoke test: BLPOP on an empty list should time out and yield None."""
    import redis
    r = redis.Redis(host='localhost', port=6379, db=0)
    # print() call form is valid on both Python 2 and 3 (the original
    # print statement is Python-2-only); 'is None' per PEP 8.
    print(r.blpop('testlist', timeout=3) is None)
if __name__=='__main__':
    # Entry point: run the producer and consumer threads together.
    togeter()