#!/usr/bin/env python
# -*- coding:utf-8 -*-
import pycurl
import StringIO
import math
import socket
from urlparse import urlparse
from urlparse import urlunparse
from logger import logger

class getter:
    """Minimal HTTP fetcher built on pycurl.

    Resolves the target host to an IP itself and requests the URL by IP
    while sending the original name in the Host header, bypassing
    libcurl's own DNS lookup.
    """

    def __init__(self):
        pass

    def get(self, url):
        """Fetch *url* and return the response body as a str.

        Returns None when the transfer fails (timeout, DNS/connect
        error, too many redirects, ...).
        """
        curl = self.installCurl(url)
        body = StringIO.StringIO()
        curl.setopt(pycurl.WRITEFUNCTION, body.write)
        try:
            curl.perform()
        except pycurl.error as e:
            # e carries (curl_errno, message); the old log labeled every
            # failure a timeout, which was misleading for DNS/connect errors.
            logger.getInstance('simple').info('crawl %s failed: %s' % (url, e))
            return None
        finally:
            # The Curl handle holds an open connection; release it
            # explicitly instead of leaking it (original never closed it).
            curl.close()
        return body.getvalue()

    def installCurl(self, url):
        """Build and configure a pycurl handle for *url*.

        The hostname is resolved here and the request is issued against
        the resolved IP, with the original host (incl. port, if any)
        passed via the Host header.
        """
        curl = pycurl.Curl()
        u = urlparse(url)
        # Host header must carry the name (and port) the server expects,
        # not the IP we actually connect to.
        host = "Host:%s" % (u.netloc)
        # BUG FIX: u.netloc includes ":port" when the URL carries one,
        # and gethostbyname("example.com:8080") raises; resolve the bare
        # hostname instead.
        ip = socket.gethostbyname(u.hostname)
        requestHeader = ['Accept:', host]
        if u.port is not None:
            netloc = "%s:%s" % (ip, u.port)
        else:
            netloc = ip
        # NOTE(review): scheme is pinned to plain http; connecting by IP
        # would break TLS certificate checks for https anyway — confirm
        # https is out of scope before extending.
        requestUrl = urlunparse(['http', netloc, u.path, u.params, u.query, u.fragment])
        curl.setopt(pycurl.URL, requestUrl)
        curl.setopt(pycurl.TIMEOUT, 5)          # whole-transfer cap, seconds
        curl.setopt(pycurl.HTTPHEADER, requestHeader)
        curl.setopt(pycurl.USERAGENT, "some spider")
        curl.setopt(pycurl.MAXREDIRS, 5)
        curl.setopt(pycurl.FOLLOWLOCATION, 1)
        return curl

if __name__ == "__main__":
    print getter().get('http://www.something.com')
    pass
