#coding:utf-8
import urllib2
from crawlerSettings import *

# Default User-Agent presented to servers (an old Firefox string), plus
# the header dict handed to urllib2.Request when the caller supplies none.
user_agent = 'Mozilla/5.0 (Windows NT 5.1; rv:2.0) Gecko/20100101 Firefox/4.0'
http_headers = {'User-agent': user_agent}

def initHTTPRequest():
    print 'init http request'
    handlers=[]
    if with_cookie:
        import cookielib
        cj=cookielib.CookieJar()
        handlers.append(urllib2.HTTPCookieProcessor(cj))
    if with_dns_cache:
        from dnscache import DNSCacheHTTPHandler,DNSCacheHTTPSHandler,DNSCacheHTTPConnection,DNSCacheHTTPSConnection
        from crawlerObject import DictCache
        cache=DictCache(size=dns_cache_size)
        DNSCacheHTTPConnection.setdnscache(cache)
        DNSCacheHTTPSConnection.setdnscache(cache)
        handlers.append(DNSCacheHTTPHandler)
        handlers.append(DNSCacheHTTPSHandler)
    
    opener=urllib2.build_opener(*handlers)
    opener.addheaders=[('User-agent',user_agent)]
    urllib2.install_opener(opener)

def getpage(url, data=None, headers=None):
    """Fetch *url* through the installed urllib2 opener.

    url     -- the URL to request.
    data    -- optional POST body; None (the default) issues a GET.
    headers -- optional header dict; defaults to the module-level
               http_headers (User-Agent only).

    Returns the response object on success.
    Raises urllib2.URLError (original traceback preserved) on failure.
    """
    # 'is None' is the correct identity test for the None singleton
    # (the original 'headers==None' compared by equality).
    if headers is None:
        headers = http_headers
    req = urllib2.Request(url, data=data, headers=headers)
    try:
        response = urllib2.urlopen(req)
    except urllib2.URLError:
        # Bare 'raise' re-raises with the original traceback; the old
        # 'raise e' started a new one at this line, hiding the real
        # failure site from callers.
        raise
    else:
        return response
