#!/usr/bin/python
#coding=utf-8
import sys
import cPickle as pickle
import gevent
from gevent import monkey
import urllib2
import requests
from pyquery import PyQuery as pq
import multiprocessing
import time
import logging

import gevent.monkey
gevent.monkey.patch_socket()
from gevent.pool import Pool
import requests

result = []  # NOTE(review): never read or written anywhere in this script — dead variable, candidate for removal

def check_urls(urls):
    """Fetch every URL concurrently and record discovered links.

    For each page, extracts the ``href`` of the first ``<a>`` inside the
    element following ``.notice`` and, when non-empty, appends it (one per
    line) to the module-level ``file`` handle.  Decrements the module-level
    ``count`` as pages finish and prints the remaining total.

    :param urls: iterable of page URLs to fetch.
    """

    def fetch(url):
        # The original decremented ``count`` without a ``global`` statement,
        # raising UnboundLocalError that the blanket except swallowed --
        # every greenlet retried forever and the script never finished.
        global count
        # Bounded retries instead of ``while True``: a permanently dead URL
        # must not hang the pool indefinitely.
        for _ in range(3):
            try:
                response = requests.request('GET', url, timeout=10.0)
                response.encoding = 'utf-8'
                doc = pq(response.text)
                zurl = doc(".notice").next().find('a').attr('href')
                if zurl:
                    file.write(zurl + '\n')
                break
            except Exception:
                continue
        # Count down even on failure so the progress figure stays honest.
        count = count - 1
        print('还剩下%s个' % (count))

    pool = Pool(1024)
    for url in urls:
        pool.spawn(fetch, url)
    pool.join()

# Build the full list of tab pages to scrape (pages 1..22963).
urls = ['http://www.zanmeishi.com/tab/%s.html' % i for i in range(1, 22964)]

# NOTE(review): ``file`` shadows the builtin, but check_urls() reads this
# exact global name, so it cannot be renamed in this block alone.
file = open('zUrls.txt', 'a')

# Remaining-page counter, decremented by the worker greenlets.
count = len(urls)

try:
    check_urls(urls)
finally:
    # Ensure buffered hrefs are flushed to disk even if the run aborts;
    # the original never closed the handle.
    file.close()