#!/usr/bin/env python
#coding=utf-8

__author__ = "xlty.0512@gmail.com"

import urllib, urllib2
import cookielib
import re, time
import gzip
from StringIO import StringIO

def get_content(res):
    """Return the response body, transparently gunzipping it when the
    server sent a gzip/zip Content-Encoding."""
    encoding = res.headers.get("Content-Encoding")
    body = res.read()
    if encoding and 'zip' in encoding:
        # body is a gzip stream; decompress it before returning
        return gzip.GzipFile(fileobj=StringIO(body)).read()
    return body

class FetchedData(object):
    """Value object holding one HTTP response: url, status, headers, body.

    ``content`` is the (already gzip-decoded) body string, or None when
    the response was not a 200 (see :meth:`create`).
    """

    def __init__(self, url, code, msg, headers, content):
        self.url = url          # final URL of the response
        self.code = code        # HTTP status code, e.g. 200
        self.msg = msg          # HTTP reason phrase, e.g. "OK"
        self.content = content  # decoded body, or None for non-200
        self.headers = headers  # response headers mapping/object

    def __str__(self):
        # content may legitimately be None (non-200 response) -- report 0
        # instead of raising TypeError on len(None).
        clen = len(self.content) if self.content is not None else 0
        return "url: %s, code: %s, content-len: %s" % (self.url, self.code, clen)

    @classmethod
    def create(clz, response):
        """Build an instance from a urllib2-style response object.

        The body is only read and decoded for 200 responses; for any
        other status ``content`` is left as None.
        """
        c = None
        if response.code == 200:
            c = get_content(response)
        # Use clz, not a hard-coded class name, so subclasses of
        # FetchedData get instances of themselves from create().
        return clz(response.url, response.code, response.msg,
                   response.headers, c)

class Fetcher(object):
    """Thin urllib2 wrapper: browser-like headers, cookie persistence,
    and gzip-aware responses (decoding happens in FetchedData.create)."""

    # Request headers mimicking a desktop Firefox so servers return
    # the pages a normal browser would see.  Class-level: shared by all
    # Fetcher instances by design.
    headers = {
        'User-Agent': ('Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US; '
                       'rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3 GTB6'),
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
        'Accept-Encoding': 'gzip,deflate',
        'Accept-Language': "zh,en-us;q=0.7,en;q=0.3",
        'Accept-Charset': "ISO-8859-1,utf-8;q=0.7,*;q=0.7",
        'Connection': "keep-alive",
        'Keep-Alive': "115",
        'Cache-Control': "no-cache",
    }

    def __init__(self):
        # One opener per Fetcher so cookies persist across fetch() calls.
        self.opener = self._opener()

    def _opener(self):
        """Build an opener that transparently stores and replays
        HTTP cookies across requests."""
        cj = cookielib.CookieJar()
        return urllib2.build_opener(
            urllib2.HTTPCookieProcessor(cj), urllib2.HTTPHandler())

    def _wrap(self, url, param=None):
        """Wrap *url* (plus optional urlencoded POST body *param*) in a
        Request that carries the browser-like headers."""
        return urllib2.Request(url, param, self.headers)

    def fetch(self, url, param=None):
        """Fetch the given page and return a FetchedData.

        :param url: page URL to fetch.
        :param param: optional urlencoded POST data; None means GET.
        :raises urllib2.URLError: on connection/HTTP errors.
        """
        ret = self.opener.open(self._wrap(url, param))
        try:
            return FetchedData.create(ret)
        finally:
            # Close even when create() raises, so the underlying
            # socket is never leaked.
            ret.close()

fetcher = Fetcher()
print fetcher.fetch("http://www.1688.com/")
