#coding:utf8
"""Multi-threaded web-novel scraper (Python 2)."""

import urllib2,time,re,random,pprint
import sys,json,os,threading,Queue
from lxml.etree import HTML
# NOTE(review): pprint and lxml's HTML look unused in this file — verify
# before removing, the file may be partial.
# Python 2-only hack: setdefaultencoding() is deleted from sys at startup
# and only becomes reachable again via reload(sys). Forcing the default
# encoding to UTF-8 keeps implicit str<->unicode conversions of the
# scraped (gbk->utf8 re-encoded) text from raising UnicodeDecodeError.
reload(sys)
sys.setdefaultencoding('UTF-8')


class spider(threading.Thread):
    def __init__(self,inque=Queue.Queue(),outque=Queue.Queue(),prefix = None):
        threading.Thread.__init__(self)
        self.inque = inque
        self.outque = outque
        self.prefix = prefix
        self.time_record = {}
        self.savedir = 'data'
        self.count = 0
        self.url = None
        self.response = None
        self.data = None
        self.items = {}

    def run(self):
        while True:
            if self.inque.empty():
                print self.name,'the input queue is empty, task done'
                break
            else:
                tmp = self.inque.get(block=True,timeout=5)
                print '\nactive',{self.name:self.count}
                print 'for now the task',{'input':self.inque.qsize(), 'output':self.outque.qsize()}
                if tmp is not None:
                    self.count += 1
                    self.url = self.prefix + tmp[1]
                    self.txt_name = tmp[0]
                    self.dowload()
                    self.reg_parse()
                    self.itemstojson()
                    time.sleep(random.random()*4)
                else:
                    break

    def dowload(self):
        t0 = time.time()
        self.response = urllib2.urlopen(self.url).read()
        try:
            self.data = self.response.decode('gbk').encode('utf8')
        except Exception as e:
            print e.message
            self.data = self.response

        t1 = time.time()-t0
        self.time_record['download'] = t1

    @property
    def regex_items(self):
        return {
            'source': r'<div class="con_top">(.*?)</div>',
            'title': r'<h1>(.*?)</h1>',
            'content': r'<div id="content">(.*?)</div>'
            }

    def reg_parse(self):
        t0 = time.time()
        items = {}
        data = re.sub(r'\n','',self.data)
        for key,value in self.regex_items.items():
            tmp = re.search(value,data)
            if tmp:
                items[key] = tmp.group(1)
                if key == 'href':
                    items[key] = '\n'.join(re.findall(value,data))
                    with open('href_link.txt','w') as f:
                        f.write(items[key])
        for key in items.keys():
            items[key] = items[key].strip().replace('&nbsp;&nbsp;&nbsp;&nbsp;','\n\n')
            items[key] = re.sub('</?[^>]*?>','',items[key])

        if 'content' in items:
            if 'title' in items:
                dirs = self.savedir+'/'
                if not os.path.exists(dirs):
                    os.makedirs(dirs)
                with open(dirs+self.txt_name+'.txt','w') as f:
                    f.write(items['content'])
                    items['content'] = items['title']
        self.items = items
        self.time_record['parse'] = time.time() - t0

    def itemstojson(self):
        tmp = {'thread_name':self.name,'task_order':self.count}
        self.outque.put(self.time_record.update(tmp))

def extract_href(url):
    data = urllib2.urlopen(url).read()
    reg_pattern = '<a.*?href="(\d{7}\.html)">(.*?)</a>'
    nodes = re.findall(reg_pattern,data)
    urls = {i:j for j,i in nodes}
    print 'num of url',len(urls)
    return urls

if __name__ == '__main__':
    t_start = time.time()
    starturl = 'http://www.xxbiquge.com/17_17232/'
    urls = extract_href(starturl)

    inque = Queue.Queue()
    outque = Queue.Queue()

    for item in urls.items():
        inque.put(item)
    max_thread = 30
    threads = [spider(inque=inque,outque=outque,prefix=starturl) for i in range(max_thread)]
    for task in threads:
        task.start()
    for task in threads:
        task.join()
    record = []
    sizeout = outque.qsize()
    for i in range(sizeout):
        record.append(outque.get())
    with open('record_parse.json','w') as f:
        f.write(json.dumps(record))
    print 'task all done within %.2f seconds'%(time.time()-t_start)






