#!/usr/bin/python
#coding=utf-8
import sys
import cPickle as pickle
import gevent
from gevent import monkey
import urllib2
import requests
from pyquery import PyQuery as pq
import multiprocessing
import time
import logging
# Log INFO and above to myapp.log; filemode='w' truncates the log on every run.
logging.basicConfig(level=logging.INFO,
                format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
                datefmt='%a, %d %b %Y %H:%M:%S',
                filename='myapp.log',
                filemode='w')
# NOTE(review): gevent recommends calling monkey.patch_all() *before* importing
# network modules (urllib2/requests are imported above, L7-L8); patching this
# late may leave those modules bound to unpatched sockets -- confirm.
monkey.patch_all()
response = requests.get('http://www.chinachristianbooks.org/Home/ChildrenList.aspx?CategoryId=837feda8-6a64-4b0c-b815-874913a7f7b7&SubCategoryId=00000000-0000-0000-0000-000000000000&ContentId=62e51fc8-8b69-48b8-af6c-1a15c449e8b8')
response.encoding = 'utf-8'
doc = pq(response.text)
urls = [];
titles= [];
for e in doc('td a') :
    urls.append('http://www.chinachristianbooks.org/Home/'+pq(e).attr('href'))
    titles.append(pq(e).find('.Menu_Link').text().encode('utf-8'))
result = {}
finalUrls = []

dict = pickle.load(open('result.txt','r'))

f=open('data.txt','a')
for url in urls[1:] :
    print type(dict[url])
    f.write(dict[url].encode('utf-8')+'\n')
f.close()
sys.exit(0)



def downloadImageFile(imgUrl):
    local_filename = imgUrl.split('/')[-1]
    print "Download Image File=", local_filename
    r = requests.get(imgUrl, stream=True) # here we need to set stream = True parameter
    with open("/home/pandy/"+local_filename, 'wb') as f:
        for chunk in r.iter_content(chunk_size=1024):
            if chunk: # filter out keep-alive new chunks
                f.write(chunk)
                f.flush()
        f.close()
    return local_filename
def worker(url, use_urllib2=False):
    if use_urllib2:
        content = urllib2.urlopen(url).read().lower()
        print content[:40],"\n"
    else:
        try:
            res = requests.get(url,timeout=10)
            res.encoding = 'utf-8'
            threaddoc = pq(res.text)
            #logging.info(threaddoc('#labelHtmlContent').html())
            result[url] = threaddoc('#labelHtmlContent').html()
        except Exception,e:
            logging.info(url)

def by_requests():
    """Crawl every target URL (skipping urls[0]) concurrently with gevent,
    using the requests-based path of worker()."""
    greenlets = []
    for target in urls[1:]:
        greenlets.append(gevent.spawn(worker, target, False))
    gevent.joinall(greenlets)
def by_urllib2():
    """Crawl every URL concurrently with gevent, using the urllib2-based
    path of worker() (prints page prefixes instead of storing results)."""
    greenlets = []
    for target in urls:
        greenlets.append(gevent.spawn(worker, target, True))
    gevent.joinall(greenlets)

if __name__=='__main__':
    from timeit import Timer
    t = Timer(stmt="by_requests()", setup="from __main__ import by_requests")
    print 'by requests: %s seconds'%t.timeit(number=1)
    f = open('result.txt','wb')
    pickle.dump(result,f,True)
    f.close()
    # t = Timer(stmt="by_urllib2()", setup="from __main__ import by_urllib2")
    # print 'by urllib2: %s seconds'%t.timeit(number=3)
    sys.exit(0)