#!/usr/bin/python
# -*- coding: utf-8 -*-
# Description: FTP Spider
# Created at: 2008-11-01 02:04:11 PM
# Author: MK2[fengmk2@gmail.com]

"""A FTP Spider
"""

import os
import sys
import re
import ftplib
import urllib
import urllib2
import optparse
import time
import socket
from web.spider import Spider
import MySQLdb

# Global socket timeout (seconds) for urllib-based connections.
urllib.socket.setdefaulttimeout(10)
# Matches an ftp:// domain-style host name followed by a '/'.
DEFAULT_FTPIP_PATTERN = r'(ftp://[a-zA-Z0-9][-a-zA-Z0-9]{0,62}(?:\.[a-zA-Z0-9][-a-zA-Z0-9]{0,62})+\.?)/'
# File extensions worth grabbing (PE binaries and common archives).
DEFAULT_FILE_EXT_PATTERN = r'^.*?\.((exe)|(dll)|(msi)|(sys)|(rar)|(zip)|(cab))$'#r'^.*?\.((exe)|(dll)|(msi)|(rar)|(zip)|(cab))$'

# Silence ftplib's protocol debug output for every FTP instance.
ftplib.FTP.debugging = 0

class FTPSpider(object):
    """An FTP spider client.

    Logs into one FTP server and walks its directory tree, yielding
    (url-encoded path, size) pairs for every file that passes the
    configured size and file-name filters.
    """
    
    # Parses one line of a unix-style LIST reply into four groups:
    # entry type character, size in bytes, date text, and entry name.
    LIST_PATTERN = re.compile(r"""^([\w\-]) #type
        [\w\-]+
        \s+
        \d+
        \s+
        \w+
        \s+
        \w+
        \s+
        (\d+) #size
        \s+
        (\w+\s+\d+\s+[\d\:]+) #date
        \s+
        (.*) #name
        """, 
        re.IGNORECASE | re.VERBOSE)
    
    def __init__(self, ftpip, username='anonymous', pwd='anonymous@gmail.com', 
                 start_path='/', maxsize=None, 
                 re_pattern=None, ignorecase=True, grab_dir=False):
        """Connect to the server and store the filter settings.
        
        ftpip      -- host name or ip, with or without a leading 'ftp://'
        username   -- ftp login name
        pwd        -- ftp login password
        start_path -- remote directory where walk() begins
        maxsize    -- skip files larger than this many bytes (None: no limit)
        re_pattern -- regex (string or compiled) that file names must match
        ignorecase -- when re_pattern is a string, compile it case-insensitively
        grab_dir   -- when True, walk() also yields directories with size 0
        """
        if ftpip.startswith('ftp://'):
            ftpip = ftpip.replace('ftp://', '')
        self.ftpip = ftpip
        self.start_path = start_path
        self.re_pattern = re_pattern
        self.maxsize = maxsize
        if isinstance(re_pattern, basestring):
            flags = 0
            if ignorecase:
                flags = re.IGNORECASE
            self.re_pattern = re.compile(re_pattern, flags)
        self.username = username
        self.pwd = pwd
        self.grab_dir = grab_dir
        # Connects and logs in immediately; raises if the server is unreachable.
        self.ftp = ftplib.FTP(self.ftpip, self.username, self.pwd)
        print self.ftp.pwd()
        self.error_count = 0          # total listing errors seen so far
        self.timeout_error_count = 0  # total timeouts seen so far
        self._last_list = ''  # accumulated listing text after the previous list() call
        self._cur_list = ''   # accumulated listing text including the current call
        self._last_path = None
        self._last_path_repeat_count = 0 # guard against revisiting the same path forever
    
    def _dir_callback(self, line):
        # Called by ftplib for each LIST line; keep only lines the
        # LIST_PATTERN understands.
        m = self.LIST_PATTERN.match(line)
        if m:
            self._cur_list += line
            self._dirs.append(m.groups())
    
    def list(self, ftp, path):
        """List *path*, returning parsed (type, size, date, name) tuples.

        Tolerates up to 10 listing errors (but only 2 timeouts) before
        re-raising.  Since _cur_list accumulates every matched line ever
        seen, it equals _last_list exactly when this call matched nothing
        new -- in that case [] is returned, which breaks listing loops.
        """
        self._dirs = []
        try:
            ftp.dir(path, self._dir_callback)
        except Exception, e:
            self.error_count += 1
            if self.error_count > 10:
                raise
            if 'timed out' in str(e):
                self.timeout_error_count += 1
                if self.timeout_error_count == 2: # give up after the 2nd timeout
                    raise
            print e
        if self._cur_list == self._last_list: # nothing new: avoid an infinite loop
            return []
        self._last_list = self._cur_list
        return self._dirs
    
    def urlencode(self, url):
        """Percent-encode a path, keeping characters that are legal in a url."""
        return urllib.quote(url, ':/=;@&?+$,#')
    
    def walk(self, path=None):
        """Recursively yield (encoded path, size) for matching files under *path*.

        Starts at self.start_path when *path* is None.  Directories are
        yielded too (with size 0) when grab_dir is set.
        """
        if path is None:
            path = self.start_path
        for d, size, date, name in self.list(self.ftp, path):
            if d == 'd':
                # Skip pseudo entries and names that would escape the tree.
                if name.strip() and not name.startswith('.') and not name.startswith('/') \
                        and name.strip() not in ('.', '..', '/', '...', '\\', '-'):
                    if self._last_path == name:
                        self._last_path_repeat_count += 1
                    else:
                        self._last_path_repeat_count = 0
                    if self._last_path_repeat_count > 10: # break repeated-directory loops
                        self._last_path_repeat_count = 0
                        continue
                    self._last_path = name
                    nextpath = path + name + '/'
                    if self.grab_dir:
                        yield self.urlencode(nextpath), 0
                    print 'ftp://%s%s --> %s' % (self.ftpip, path, nextpath)
                    for r in self.walk(path + name + '/'):
                        yield r
            else:
                # Plain file: apply the size filter, then the name filter.
                if self.maxsize is not None and int(size) > self.maxsize:
                    continue
                if self.re_pattern is not None and not self.re_pattern.match(name):
                    continue
                yield self.urlencode(path + name), int(size)

"""
CREATE TABLE  `ftp_address` (
  `id` int(11) NOT NULL auto_increment,
  `address` varchar(100) NOT NULL,
  PRIMARY KEY  (`id`),
  UNIQUE KEY `address` (`address`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8

CREATE TABLE  `ftp_address_new` (
  `id` int(11) NOT NULL auto_increment,
  `address` varchar(100) NOT NULL,
  PRIMARY KEY  (`id`),
  UNIQUE KEY `address` (`address`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8

CREATE TABLE  `downurl` (
  `id` int(11) NOT NULL auto_increment,
  `url` varchar(255) NOT NULL,
  `size` bigint(20),
  PRIMARY KEY  (`id`),
  UNIQUE KEY `url` (`url`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8

"""

# Lazily created, module-wide MySQL cursor (see get_cursor()).
__g_cursor = None

def get_cursor():
    """Return the module-wide MySQL cursor, opening the connection lazily."""
    global __g_cursor
    if __g_cursor is not None:
        return __g_cursor
    connection = MySQLdb.connect('192.168.48.249', 'root', 'sonic', 'peurl')
    __g_cursor = connection.cursor()
    return __g_cursor

def save_ftpip(ftpip):
    """Record an ftp address in the ftp_address table.

    Returns True when the address was new and has been inserted,
    False when it is already in the table.
    """
    if not ftpip.startswith('ftp://'):
        ftpip = 'ftp://' + ftpip
    cursor = get_cursor()
    sql = "select * from ftp_address where address=%s"
    # DB-API 2.0 expects query parameters as a sequence, not a bare string.
    cursor.execute(sql, (ftpip,))
    if cursor.fetchone():
        return False
    cursor.execute("""insert into ftp_address(address) values(%s)""", (ftpip,))
    return True

def save_downurl_to_db(url, size):
    cursor = get_cursor()
#    sql = "select * from downurl where url=%s"
#    cursor.execute(sql, url)
#    if cursor.fetchone():
#        return False
    try:
        cursor.execute("""insert into downurl(url, size) values(%s, %s)""", (url, size))
    except Exception, e:
        print e
        return False
    return True
    

def check_ftpip_is_repeat(ftpip, savepath=None):
    """Return True when this ftp address (or its resolved ip) was grabbed before.

    As a side effect the address -- and, for domain names, the resolved
    ip as well -- is recorded in the ftp_address table so it will not be
    grabbed again.  *savepath* is currently unused (see the commented-out
    local-directory check at the bottom).
    """
    ftpip = ftpip.strip()
    if ftpip.startswith('ftp://'):
        ip = ftpip.replace('ftp://', '')
    else:
        ip = ftpip
    try:
        real_ip = socket.gethostbyname(ip)
    except Exception,e: # DNS resolution may fail; fall back to the raw address
        print e
        real_ip = ip
    print '!!!%s -> %s!!!' % (ip, real_ip)
    #check if in database
    if not save_ftpip(ftpip):
        print '!!!%s -> %s is exist!!!' % (ip, real_ip)
        # also mark the resolved ip so both forms count as seen
        save_ftpip('ftp://%s' % real_ip)
        return True
    # When ftpip is a domain name, also check whether its ip is already recorded.
    if real_ip != ip:
        if not save_ftpip('ftp://%s' % real_ip):
            print '!!!%s -> %s is exist!!!' % (ip, real_ip)
            return True
    print '!!!%s -> %s is new!!!' % (ip, real_ip)
#    #check local dir
#    if os.path.exists(os.path.join(savepath, ip)):
#        return True
    return False

# Cached ftp connection used by upload_to_ftp(); reset to None after a failure.
__g_ftp_cache = None

def save_result(filename, downurl):
    if not downurl:
        print 'nothing to save'
        return
    f = open(filename, 'ab')
    f.write(downurl + '\r\n')
    f.close()
        
def upload_to_ftp(filename):
    """Upload a result file to the collection ftp server, retrying until it works.

    The ftp connection is cached in __g_ftp_cache and thrown away after
    any failure.  On success the local file is renamed to *.uploaded.
    """
    global __g_ftp_cache
    # Must make sure the file eventually reaches the ftp server.
    serverpath = '/url/%s_%s.lst' % (os.path.basename(filename), time.time())
    while True:
        fp = open(filename, 'rb')
        if __g_ftp_cache is None:
            __g_ftp_cache = ftplib.FTP('192.168.49.74', 'pe_terminator', 'pe_terminator123')
        try:
            # 10MB transfer block size.
            __g_ftp_cache.storbinary("STOR " + serverpath, fp, 10*1024**2)
            break # success: stop retrying
        except Exception, e:
            print e
            # Drop the broken connection; a fresh one is made next round.
            try:
                __g_ftp_cache.quit()
            except:
                pass
            __g_ftp_cache = None
        finally:
            fp.close()
    os.rename(filename, filename + '.uploaded')
    print 'save %s to ftp %s' % (filename, serverpath)

def formatsize(size):
    """Return *size* (in bytes) as a human readable string, capped at MB.

    Uses floor division so the displayed value stays a whole number of
    units even under true-division (Python 3 style) semantics; for the
    int inputs this file passes, behavior is unchanged.
    """
    unit = 'Bytes'
    if size >= 1024:
        size = size // 1024
        unit = 'KB'
    if size >= 1024:
        size = size // 1024
        unit = 'MB'
    return '%s %s' % (size, unit)

def grab(ftpip, path='/', maxsize=100*1024**2, re_pattern=DEFAULT_FILE_EXT_PATTERN, 
         savepath='d:/ftp_url', upload=True, rs_count=100):
    if ftpip.startswith('ftp://'):
        ftpip = ftpip.replace('ftp://', '')
    if check_ftpip_is_repeat(ftpip, savepath):
        print '%s had grab, no need to grab again.' % ftpip
        return
    if upload:
        savepath = os.path.join(savepath, ftpip)
    if not os.path.exists(savepath):
        os.makedirs(savepath)
    spider = FTPSpider(ftpip, start_path=path, maxsize=maxsize, re_pattern=re_pattern)
    count = 0
    filepaths = []
    filecount = 1
    try:
        for filepath, size in spider.walk():
            downurl = 'ftp://%s%s' % (ftpip, filepath)
            if not save_downurl_to_db(downurl, size): #已存在
                continue
            filepaths.append(downurl)
            count += 1
            result_path = os.path.join(savepath, '%s_urls_%d.lst' % (ftpip, filecount))
            save_result(result_path, downurl)
            if count == rs_count:
                count = 0
                filecount += 1
                filepaths = []
                if upload:
                    upload_to_ftp(result_path)
            print 'grab -->', filepath, formatsize(size)
        if filepaths: #上传剩余数据
            result_path = os.path.join(savepath, '%s_urls_%d.lst' % (ftpip, filecount))
            filepaths = []
            if upload:
                upload_to_ftp(result_path)
    except Exception, e:
        print 'grab %s error: %s' % (ftpip, e)
        if filepaths:
            result_path = os.path.join(savepath, '%s_urls_%d.lst' % (ftpip, filecount))
            if os.path.exists(path): #异常退出，结果未保存
                os.remove(result_path) #删除原来的记录，避免重复
            for downurl in filepaths:
                save_result(result_path, downurl)
            filepaths = []
            if upload:
                upload_to_ftp(result_path)

def save_grab_url(url, savepath):
    """Persist the web url currently being crawled so a later run can resume."""
    if not os.path.exists(savepath):
        os.makedirs(savepath)
    record_file = open(os.path.join(savepath, 'last_grab_url.url'), 'wb')
    try:
        record_file.write(url)
    finally:
        record_file.close()
        
def get_last_grab_url(savepath):
    """Return the previously saved crawl url, or None when no record exists."""
    record = os.path.join(savepath, 'last_grab_url.url')
    if not os.path.exists(record):
        return None
    record_file = open(record, 'rb')
    try:
        return record_file.read()
    finally:
        record_file.close()

def start_trip(templates, start_url, savepath='d:/ftp_url', upload=True, rs_count=100):
    """Crawl a web ftp-search site and grab every ftp address it yields.

    templates -- page templates understood by web.spider.Spider
    start_url -- page to begin from (overridden by a saved resume point)
    """
    # Resume from the last crawled web url when one was saved.
    url = get_last_grab_url(savepath)
    if url is None:
        url = start_url
    ftpweb_spider = Spider(url, templates)
    for level, url, parenturl, response, \
                    content, downurls, softinfo in ftpweb_spider.walk():
        save_grab_url(url, savepath) # remember progress for the next run
        for ftpurl in downurls:
            print ftpurl
            try:
                grab(ftpurl, savepath=savepath, upload=upload, rs_count=rs_count)
            except Exception, e:
                print e # one bad server must not stop the whole trip
                
def start_trip_ext(savepath='d:/ftp_url', upload=True, rs_count=100):
    """开始抓取旅程，自动获取ftpip"""
    while True:
        ftpip = get_new_ftpip()
        if ftpip:
            print ftpip
            try:
                grab(ftpip, savepath=savepath, upload=upload, rs_count=rs_count)
            except Exception, e:
                print e
        else:
            print 'no new ftpip, waiting 5 seconds to restart...'
            time.sleep(5)
                
def get_new_ftpip():
    try:
        ftpip = urllib2.build_opener().open('http://192.168.49.16:8004/getftpip/').read()
        if not ftpip.strip():
            return None
        return ftpip
    except Exception, e:
        print e
        return None
    
#def upload_local_results(savepath):
#    """上传本地抓取结果"""
#    for path in [os.path.join(savepath, name) for name in os.listdir(savepath)]:
#        if os.path.isdir(path):
#            if len(os.listdir(path)) == 0: #空目录
#                continue
#            if not save_ftpip(os.path.basename(path)): #确保ftpip不会被重新抓取
#                print os.path.basename(path), ' not need to save'
#                continue
#            print os.path.basename(path), '--> db'
#            for filename in [os.path.join(path, name) for name in os.listdir(path)]:
#                f = open(filename, 'rb')
#                rs = []
#                try:
#                    for r in f:
#                        downurl = r.strip()
#                        if save_downurl_to_db(downurl, 0): #保证上传的url都不是重复的
#                            rs.append(downurl)
#                finally:
#                    f.close()
#                print '%d urls --> db' % len(rs)
#                save_results(filename, rs, True) #上传去重的url

def remove_laji_child(ftpspider, path):
    """Recursively clean junk ("laji") under *path* on the ftp server.

    Deletes files larger than 100MB or whose names do not match the
    spider's re_pattern, and removes directories whose listing comes
    back empty.  All errors are printed and swallowed.
    """
    try:
        for d, size, date, name in ftpspider.list(ftpspider.ftp, path):
            if d == 'd' and name.strip() not in ('.', '..', '/', '...', '\\', '-'):
                nextpath = path + name + '/'
                print 'ftp://%s%s --> %s' % (ftpspider.ftpip, path, nextpath)
                childs = ftpspider.list(ftpspider.ftp, nextpath)
                if childs:
                    remove_laji_child(ftpspider, nextpath)
                else:
                    # Listing returned nothing: treat as empty and remove.
                    ftpspider.ftp.rmd(nextpath)
                    print 'remove dir %s' % nextpath
            else:
                if int(size) > 100 * 1024 ** 2: # larger than 100MB
                    ftpspider.ftp.delete(path + name)
                    print 'delete file %s' % (path + name), formatsize(int(size))
                    # Re-scan this directory after deleting from it.
                    remove_laji_child(ftpspider, path)
                elif not ftpspider.re_pattern.match(name):
                    ftpspider.ftp.delete(path + name)
                    print 'delete file %s' % (path + name), formatsize(int(size))
#                        remove_laji_child(ftpspider, path)
                else:
                    print path + name, formatsize(int(size))
    except Exception, e:
        print e, path
            
def remove_laji():
    """One-shot cleanup of /files/ on the collection ftp server."""
#    spider = FTPSpider('192.168.49.16', username='anonymous', pwd='anonymous',
#                       grab_dir=True, re_pattern=DEFAULT_FILE_EXT_PATTERN)
#    while True:
    try:
        spider = FTPSpider('192.168.49.74', username='pe_terminator', pwd='pe_terminator123',
                           grab_dir=True, re_pattern=r'^.*?\.((exe)|(dll)|(msi)|(sys)|(rar)|(zip)|(cab)|(bin))')
        remove_laji_child(spider, '/files/')
        spider.ftp.quit()
    except Exception, e:
        print e
        

def locate32(paths=[r'c:\\', r'd:\\', r'e:\\', r'f:\\'], re_pattern=r'^.*\.exe$'):
    pattern = re.compile(re_pattern, re.IGNORECASE)
    for path in paths:
        for root, subdirs, files in os.walk(path):
            for file in files:
                if pattern.match(file):
                    print os.path.join(root, file)

def main():
    usage = """Usage: %prog type
    
type: 0: only grab, 1: just for luo.
"""
    parser = optparse.OptionParser(usage)
    (options, args) = parser.parse_args()
    if len(args) == 0:
        parser.print_help()
        sys.exit(1)
    if len(args) != 1:
        parser.error("need to specify type")
    t = int(args[0])
    if t == 0: #只爬不下载
        print 'only grab, no down.'
        start_trip_ext()
    elif t == 1: #肥罗专用
        print 'luo ' * 20
        start_trip_ext(upload=False, rs_count=1000)
    elif t == 2:
        print 'down ' * 20
        start_trip_ext(savepath='d:/ftp_url_down', upload=False, rs_count=1000)

# Script entry point; the alternative tasks below are kept for reference.
if __name__ == '__main__':
#    remove_laji()
    main()      
#    spider = FTPSpider('ftp://kingsoft.com', start_path='/pub/OS/incoming/',
#                        maxsize=100*1024**2, re_pattern=DEFAULT_FILE_EXT_PATTERN)
#    for filepath, size in spider.walk():
#        downurl = 'ftp://%s%s' % (spider.ftpip, filepath)
#        print downurl
#    locate32()
#    upload_local_results(r'D:\ftpspider')
#    pku_templates = [{
#            'same_level': ur"""
#            <a[^<>]*?href=['"](search[^<>]*?word=\.exe[^<>]*?)['"]>[^<>]*?</a>""",
#            'downurl': DEFAULT_FTPIP_PATTERN,
#        }, {}]
#    pku_url = 'http://bingle.pku.edu.cn/cgi-bin/search?BeginWith=740&FType=255&fltByDldable=y&sort=size&submit.x=19&submit.y=6&word=.exe'
#    pku_savepath = 'd:/ftp_url/ftpspider_mmnt'
#    
#    mmnt_templates = [{
#            'same_level': ur"""
#            <A[^<>]*?HREF="(/int/get[^<>]*?st=exe[^<>]*?)"[^<>]*?>[^<>]*?</A>""",
#            'downurl': DEFAULT_FTPIP_PATTERN,
#        }, {}]
#    mmnt_url = 'http://www.mmnt.ru/int/get?in=f&st=exe&ot=440941&sk=9922'
#    mmnt_savepath = 'd:/ftp_url/ftpspider_mmnt'
#    start_trip(mmnt_templates,
#               mmnt_url,
#               savepath=mmnt_savepath,
#               upload=True)

#    fdigg_templates = [{
#            'same_level': ur"""
#            <a[^<>]*?href="(\?word=.exe[^<>]*?)"[^<>]*?>[\s\S]*?</a>
#            """,
#            'same_level_callback': """
#def same_level_callback(cur_url, catcth_rs):
#    return u'http://www.fdigg.net/list.aspx' + catcth_rs
#""",
#            'downurl': DEFAULT_FTPIP_PATTERN,
#        }, {}]
#    fdigg_url = 'http://www.fdigg.net/list.aspx?word=.exe&level=6&page=4'
#    fdigg_savepath = 'd:/ftp_url/ftpspider_fdigg'
#    start_trip(fdigg_templates,
#               fdigg_url,
#               savepath=fdigg_savepath,
#               upload=False)
    
#    metaftp_templates = [{
#            'same_level': ur"""
#            <a[^<>]*?href="(http://www.metaftp.com/ftpserverlist/\d*/)">[^<>]*?</a>
#            """,
#            'downurl': ur"""
#            <td[^<>]*?><font[^<>]*?size="2">([a-zA-Z0-9][-a-zA-Z0-9]{0,62}(?:\.[a-zA-Z0-9][-a-zA-Z0-9]{0,62})+\.?)</font>
#            """,
#            'downurl_callback': """
#def downurl_callback(cur_url, rs):
#    return u'ftp://' + rs
#""",
#        }, {}]
#    metaftp_url = 'http://www.metaftp.com/ftpserverlist/4/'
#    metaftp_savepath = 'd:/ftp_url/ftpspider_metaftp'
#    start_trip(metaftp_templates,
#               metaftp_url,
#               savepath=metaftp_savepath,
#               upload=True)
    
#    globalfilesearch_templates = [{
#            'same_level': ur"""
#            <a[^<>]*?href="(\./index\.aspx\?q=\.exe[^<>]*?)">[^<>]*?</a>
#            """,
#            'downurl': DEFAULT_FTPIP_PATTERN,
#        }, {}]
#    globalfilesearch_url = 'http://globalfilesearch.com/index.aspx?q=.exe&t=Files&s=all&e=&a=0&f=20&m=ALL&c=20'
#    globalfilesearch_savepath = 'd:/ftp_url/ftpspider_globalfilesearch'
#    start_trip(globalfilesearch_templates,
#               globalfilesearch_url,
#               savepath=globalfilesearch_savepath,
#               upload=True)
#    # Dedicated run for "luo"
#    filewatcher_templates = [{
#            'same_level': ur"""
#            <a[^<>]*?href="(http://www\.filewatcher\.com/[^<>]*?q=\.exe)">[^<>]*?</a>
#            """,
#            'downurl': DEFAULT_FTPIP_PATTERN,
#        }, {}]
#    filewatcher_url = 'http://www.filewatcher.com/_/?p=2&pt=3&q=.exe'
#    filewatcher_savepath = 'd:/ftp_url/ftpspider_filewatcher'
#    start_trip(filewatcher_templates,
#               filewatcher_url,
#               savepath=filewatcher_savepath,
#               upload=False,
#               rs_count=1000)
    
#    filewatcher_templates = [{
#            'same_level': ur"""
#            <a[^<>]*?href="(http://www\.filewatcher\.com/[^<>]*?q=\.exe)">[^<>]*?</a>
#            """,
#            'downurl': DEFAULT_FTPIP_PATTERN,
#        }, {}]
#    filewatcher_url = 'http://www.filewatcher.com/_/?p=2&pt=3&q=.exe'
#    filewatcher_savepath = 'd:/ftp_url/ftpspider_filewatcher'
#    start_trip(filewatcher_templates,
#               filewatcher_url,
#               savepath=filewatcher_savepath,
#               upload=True,
#               rs_count=100)
    
#    sf_templates = [{
#            'same_level': ur"""
#            <a[^<>]*?href="(search\.php\?String=\.exe[^<>]*?)">[^<>]*?</a>
#            """,
#            'downurl': DEFAULT_FTPIP_PATTERN,
#        }, {}]
#    sf_url = 'http://sf.hit.edu.cn/search.php?String=.exe&Site=&UseVip=&Type=&Dl=&SortType=&Page=2'
#    sf_savepath = 'd:/ftp_url/ftpspider_sf'
#    start_trip(sf_templates,
#               sf_url,
#               savepath=sf_savepath,
#               upload=True)