import re

import eventlet.semaphore as semaphore
import eventlet

import crawler


class Spider(crawler.Crawler):
    """Crawler subclass that scrapes ``.jpg`` image URLs from fetched pages.

    Worker greenlets pull parsed nodes off ``self.work_queue`` (supplied by
    ``crawler.Crawler``), regex-match image ``src`` attributes, and append
    each URL to ``test.txt``. A semaphore serializes writes to the shared
    file handle across concurrent workers.
    """

    def __init__(self, url, max_dep=1, timeout=10, charset='utf-8'):
        """Initialize the base crawler and open the shared output file.

        :param url: root URL to start crawling from
        :param max_dep: maximum crawl depth
        :param timeout: queue-get timeout in seconds (also used by the base)
        :param charset: charset passed through to crawler.Crawler
        """
        crawler.Crawler.__init__(self, url, max_dep, timeout, charset)
        # Guards self.fp: multiple work() greenlets write to the same file.
        self.s = semaphore.Semaphore()
        self.fp = open('test.txt', 'w')

    def work(self):
        """Consume parsed nodes and append matched .jpg URLs to the file.

        Exits when the queue stays empty for ``self.timeout`` seconds.
        """
        # Compile once, outside the loop; captures the src="...jpg" value
        # of <img ...> fragments in the raw HTML.
        pattern = re.compile(r'img.*?src="(.*?\.jpg)"')

        while True:
            try:
                node = self.work_queue.get(timeout=self.timeout)
            except Exception:
                # Queue timed out -> assume the crawl is drained and quit.
                # NOTE(review): was a bare `except:` which also swallowed
                # KeyboardInterrupt/SystemExit; narrowing further to the
                # queue's Empty exception needs work_queue's type from
                # crawler — confirm before tightening.
                break

            urls = pattern.findall(node.html)
            self.work_queue.task_done()
            for url in urls:
                self.s.acquire()
                try:
                    self.fp.write(url + '\n')
                finally:
                    # Release even if the write fails, or every other
                    # worker deadlocks on the semaphore.
                    self.s.release()

    def run(self):
        """Spawn fetch/parse/work greenlets, wait, then close the file."""
        pool = eventlet.GreenPool()
        for _ in range(5):
            pool.spawn_n(self.fetch)
        for _ in range(2):
            pool.spawn_n(self.parse)
        for _ in range(2):
            pool.spawn_n(self.work)

        pool.waitall()
        # BUG FIX: original called the nonexistent builtin `close(self.fp)`
        # (NameError at shutdown, file left open).
        self.fp.close()


if __name__ == '__main__':
    # Entry point: crawl the site three levels deep, writing image URLs
    # found along the way to test.txt (see Spider.work).
    start_url = 'http://www.mzitu.com'
    ua_headers = [("User-Agent", "Mozilla/5.0 (Linux; Android 4.4.2; SAMSUNG-SM-T537A Build/KOT49H) AppleWebKit/537.36 (KHTML like Gecko) Chrome/35.0.1916.141 Safari/537.36")]

    spider = Spider(start_url, max_dep=3, timeout=10)
    spider.build_opener(headers=ua_headers)
    spider.run()
