#!/usr/bin/python
# -*- coding: utf-8 -*-
# Description: wget
# Create at 2008-11-4 下午07:13:03
# Author: MK2[fengmk2@gmail.com]
"""Batch url downloader: polls directories of url list files, fetches each
url with a bundled wget.exe, and uploads successful downloads to an FTP
server.
"""

import os
import re
import ftplib

def wget(url=None, savedir=None, savepath=None, tries=10, timeout=60, wait=20, 
         resume=False, quiet=True, referer=None, options=None,
         debug=False, 
         command=None):
    """
    url:目标url
    savedir: 保存目录
    savepath: 保存路径, 保存路径和目录不能两个都为None
    tries: 重试次数， 默认10次
    timeout: 超时时间， 默认60秒
    wait: 等到时间， 默认20秒
    resume: 是否继续上一次的下载， 默认False
    quiet: 是否安静模式， 默认True
    command: 自定义wget参数, 如 -P c:/test http://www.test.com/a.rar
    
    return:
    0: success
    1: fail
    2: command invalid
    """
    curdir = os.path.dirname(__file__)
    cmd = os.path.join(curdir, u'wget.exe')
    if command is not None:
        return os.system(u'%s %s' % (cmd, command))
    
    assert url is not None
    assert savedir is not None or savepath is not None
    
    if savepath is None:
        cmd += u' -P %s -t %d -T %d -w %d ' % (savedir, tries, timeout, wait)
    else:
        cmd += u' -O %s -t %d -T %d -w %d ' % (savepath, tries, timeout, wait)
    if resume:
        cmd += u'-c '
    if quiet:
        cmd += u'-q '
    if debug:
        cmd += u'-d '
    if referer:
        cmd += u'--referer="%s" ' % referer
    if options:
        cmd += u'%s ' % options
    cmd += '"%s"' % url
    print cmd
    r = os.system(cmd)
    # todo: 创建时间 
    return r

# Cached FTP connection shared by upload_to_ftp(); None until first use,
# and reset to None after a failed transfer so the next call reconnects.
__g_ftp_cache = None

def upload_to_ftp(localfile, ftpdir):
    """Upload *localfile* into *ftpdir* on the FTP server, retrying until
    the STOR succeeds (the loop only exits on success).

    Reuses one cached connection via the module global __g_ftp_cache; the
    connection is dropped and rebuilt after any non-permission error.
    """
    global __g_ftp_cache
    # Must make sure the file eventually lands on the ftp server.
    serverpath = ftpdir + os.path.basename(localfile)
    print 'saving %s to ftp %s' % (localfile, serverpath)
    while True:
        fp = open(localfile, 'rb')
        if __g_ftp_cache is None:
            # NOTE(review): host and credentials are hard-coded here.
            __g_ftp_cache = ftplib.FTP('192.168.49.74', 'pe_terminator', 'pe_terminator123')
        try:
            # Transfer in 2 MiB blocks.
            __g_ftp_cache.storbinary("STOR " + serverpath, fp, 2*1024**2)
            break # success: leave the retry loop
        except ftplib.error_perm, e:
            if e.args[0][:3] in ("553", "550"): # target directory missing
                # Create it, then retry the STOR on the next iteration.
                __g_ftp_cache.mkd(ftpdir)
        except Exception, e:
            # Any other failure: drop the cached connection so the next
            # iteration reconnects from scratch.
            print e
            try:
                __g_ftp_cache.quit()
            except:
                pass
            __g_ftp_cache = None
        finally:
            fp.close()
    print 'save %s to ftp %s success' % (localfile, serverpath)

# Matches "<1-2 word chars>tp://host/" style urls (covers ftp:// and
# http://) and captures the dotted host name in group 1.
# NOTE(review): the prefix is loose - e.g. "xtp://" also matches; confirm
# only ftp/http urls are ever fed in.
FTP_RE = re.compile(r'[\w]{1,2}tp://([a-zA-Z0-9][-a-zA-Z0-9]{0,62}(?:\.[a-zA-Z0-9][-a-zA-Z0-9]{0,62})+\.?)/', re.I)

def download_urls(urls, savedir, errorfile):
    """Download every url in *urls* with wget, then push each successful
    download to the FTP server.

    urls: iterable of url strings (surrounding whitespace is stripped)
    savedir: local directory prefix; the url's host name is appended to it
    errorfile: open writable file; failed urls are appended one per line
        and flushed immediately
    """
    for url in urls:
        url = url.strip()
        # Parse the host exactly once.  The original matched the regex
        # twice per url and crashed the whole batch with an AttributeError
        # on any url that did not match.
        m = FTP_RE.match(url)
        if m is None:
            print('cannot parse host from url: %s' % url)
            errorfile.write(url + '\r\n')
            errorfile.flush()
            continue
        host = m.groups()[0]
        savedir_t = savedir + host
        ftp_savedir = '/files/' + host + '/'
        if not os.path.exists(savedir_t):
            os.makedirs(savedir_t)
        saveto = savedir_t + '/' + os.path.basename(url)
        repeat = 0
        # Never overwrite an existing file: append ".N<ext>" until a free
        # name is found (yes, the extension ends up duplicated - this
        # preserves the original naming scheme).
        while os.path.exists(saveto):
            repeat += 1
            saveto = (savedir_t + '/' + os.path.basename(url)
                      + '.%s' % repeat + os.path.splitext(url)[1])
        print('save to %s' % saveto)
        try:
            r = wget(url, savepath=saveto, resume=True, tries=4, quiet=False, timeout=60)
        except Exception as e:
            print(e)
            r = -1
        if r != 0:
            print('down %s error.' % url)
            errorfile.write(url + '\r\n')
            errorfile.flush()
        else:
            print('down %s success.' % url)
            upload_to_ftp(saveto, ftp_savedir)
        
if __name__ == '__main__':
    import sys, shutil, time
    # Batch id from the command line: selects the working directories,
    # e.g. "1" -> d:/ftp_url_down/urls_1/ and f:/down_1/.
    t = sys.argv[1]
    urls_dir = 'd:/ftp_url_down/urls_%s/' % t
    f_urls_dir = 'd:/ftp_url_down/f_urls_%s/' % t
    if not os.path.exists(urls_dir):
        os.makedirs(urls_dir)
    if not os.path.exists(f_urls_dir):
        os.makedirs(f_urls_dir)
    errorfile = open('d:/ftp_url_down/urls_%s/error.txt' % t, 'ab')
    save_dir = 'f:/down_%s/' % t
    # Poll urls_dir forever for *.lst files of urls.  Each list file is
    # renamed to .doing while it is processed, then to .finish and moved
    # away, so a crash leaves a visible .doing marker behind.
    while True:
        for urlfile in [os.path.join(urls_dir, name) for name in os.listdir(urls_dir)]:
            if not os.path.exists(urlfile):
                continue
            print(urlfile)
            if not urlfile.endswith('.lst'):
                continue
            runfilename = urlfile + '.doing'
            os.rename(urlfile, runfilename)
            time.sleep(1)
            f = open(runfilename, 'rb')
            try:
                # try/finally guarantees the close that the original did
                # by hand on both the success and the exception path.
                download_urls(f.readlines(), save_dir, errorfile)
            finally:
                f.close()
            os.rename(runfilename, urlfile + '.finish')
            time.sleep(1)
            shutil.move(urlfile + '.finish', f_urls_dir)
        time.sleep(10)
    # NOTE(review): unreachable - the while loop above never exits.
    errorfile.close()