#coding=utf-8
import urllib,redis,ConfigParser,re,os
import threading
import Queue
import inspect


# -- Shared module-level state --
c=inspect.currentframe()
# Directory containing this source file (derived from the current frame).
namefilepath = os.path.dirname(c.f_code.co_filename)
# Work queue: putProxy feeds page URLs in, getProxyIP workers consume them.
queue = Queue.Queue()
# Configuration, read from main.ini in the current working directory.
config = ConfigParser.ConfigParser()
config.read('./main.ini')

cache = redis.StrictRedis(host=config.get('redis','host'),port=config.getint('redis','port'),db=config.getint('redis','db')) # redis handle used as the shared cache/result store

class ThreadStop(threading.Thread):
    """Thread base class with a cooperative stop flag.

    Subclasses poll ``self._while`` inside their run() loop and exit once it
    becomes False.  ``TYPE`` lists the proxy protocol tags the scraper
    accepts.
    """
    # Class-level default; stop() shadows it with a per-instance False.
    _while = True
    TYPE = ['HTTP','SOCKS4','SOCKS5']

    def __init__(self, group=None, target=None, name=None,
                 args=(), kwargs=None, verbose=None):
        threading.Thread.__init__(self, group=group, target=target,
                                  name=name, args=args, kwargs=kwargs,
                                  verbose=verbose)

    def stop(self):
        # Request shutdown; the worker finishes its current iteration first.
        self._while = False

class getProxyIP(ThreadStop):
    """Worker thread: fetch proxy-list pages and cache the proxies found.

    Pulls page URLs from *queue*, scrapes each page for ``ip:port`` entries
    (optionally tagged ``@HTTP``/``@SOCKS4``/``@SOCKS5``) and pushes the
    normalized ``ip:port@TYPE`` strings onto the redis list ``proxy``.
    """
    # Matches "a.b.c.d:port" optionally followed by an "@TYPE" tag.
    # Compiled once here instead of on every ipPortRe() call.
    IP_RE = re.compile(r'(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\:\d{1,5})(\@[HTTPSOCK45]{1,5}){0,1}')

    def __init__(self, queue, cache):
        ThreadStop.__init__(self)
        self.queue = queue  # queue of proxy-list page URLs to scrape
        self.cache = cache  # redis connection used to store results

    def getProxyHtml(self, URL):
        """Download *URL* and return the raw page body."""
        page = urllib.urlopen(URL)
        try:
            return page.read()
        finally:
            page.close()  # release the connection even if read() fails

    def ipPortRe(self, URL):
        """Return the list of (ip:port, @TYPE-or-'') tuples found at *URL*."""
        html = self.getProxyHtml(URL)
        return self.IP_RE.findall(html)

    def run(self):
        while self._while:
            url = self.queue.get()
            try:
                try:
                    # strip(): putProxy queues raw file lines that may still
                    # carry a trailing newline.
                    matches = self.ipPortRe(url.strip())
                except IOError:
                    # Network/URL failure: skip this page, keep the worker alive.
                    matches = []
                for addr, tag in matches:
                    # Entries without an explicit tag default to HTTP.
                    ip = '%s@HTTP' % addr if tag == '' else addr + tag
                    # Re-validate the assembled string before caching it.
                    if ip.count('.') == 3 and ip.count('@') == 1 and ip.count(':') == 1:
                        if ip.split('@')[1] in self.TYPE:
                            self.cache.lpush('proxy', ip)
            finally:
                # Always mark the item done, otherwise a scrape error would
                # leave queue.join() blocked forever.
                self.queue.task_done()

class putProxy(ThreadStop):
    """One-shot thread: load proxy-list page URLs from ./proxy.txt into the queue."""

    def __init__(self, queue):
        ThreadStop.__init__(self)
        self.queue = queue  # shared work queue consumed by getProxyIP workers

    def geturl(self):
        """Return the non-empty, whitespace-stripped lines of ./proxy.txt.

        The original kept the trailing newline on every line (making the
        later emptiness check always true) and never closed the file.
        """
        with open('./proxy.txt') as f:
            return [line.strip() for line in f if line.strip()]

    def run(self):
        # Feed every URL to the consumer threads, then exit.
        for url in self.geturl():
            self.queue.put(url)
class Spider_job():
    spider_thread_list = []
    t = None
    def __init__(self):
        cache.set('proxy_spider_thread_count',config.getint('thread','proxy_spider_thread_count'))
        cache.set('proxy_spider_thread_wait_time',config.getint('thread','proxy_spider_thread_wait_time'))
        cache.set('spider_thread',config.getint('thread','spider_thread'))
    def spider_start(self):
        '''
        爬虫的线程设置
        '''
        if len(self.spider_thread_list) == 0:
            print 'spider thread is startting...'
            for i in range(int(cache.get('proxy_spider_thread_count'))):
                t = getProxyIP(queue,cache)
                t.setName('spider_%d'%i)
                t.setDaemon(True)
                t.start()
                self.spider_thread_list.append(t)
    def spider_stop(self):
        '''
        控制爬虫停止
        '''
        for i in self.spider_thread_list:
            i.stop()
        self.spider_thread_list = [] #清空线程列表
    def spider_thread_timer(self):
        '''
            不能通过网页更改线程数 bug在queue.get那里被阻止了
        '''
        spider_thread = cache.get('spider_thread')
        if spider_thread == '1':
            self.spider_start()
        elif spider_thread == '0':
            self.spider_stop()
        threading.Timer(1.0,self.spider_thread_timer).start()

    def job_start(self):
        print 'spider is doing...'
        self.t = None
        self.t = putProxy(queue)
        self.t.setName('putProxy_name')
        #self.t.setDaemon(True)
        self.t.start()
        threading.Timer(float(cache.get('proxy_spider_thread_wait_time')),self.job_start).start()

    def run(self):
        threading.Timer(1,self.job_start).start()
        t = threading.Timer(3,self.spider_thread_timer).start()
        queue.join()
if __name__ == '__main__':
    # Entry point: build the job controller and run it until the queue drains.
    job = Spider_job()
    job.run()



