#!/usr/bin/env python
# coding=utf-8
#-------------------------------------------------------------------------------
# Name:        http_client.py
# Purpose:     a simple client of http,for spider coding
# Author:      vavava
# Created:     04/11/2012
# Copyright:   (c) vavava 2012
# Licence:     <your licence>
#-------------------------------------------------------------------------------

if __name__ == '__main__':
    # Running this file directly (debugging): make the parent directory
    # importable so `from vavava...` below resolves.
    import os
    import sys
    sys.path.insert(0, os.path.join(os.getcwd(), '..'))

import http.cookiejar
import urllib
import urllib.error
import urllib.parse
import urllib.request
from gzip import GzipFile
from io import BytesIO

from vavava.base import LogAdapter

class HttpClient(object):
    """A small urllib.request-based HTTP client for spider code.

    Builds an opener with transparent gzip/deflate decoding
    (ContentEncodingProcessor) plus optional cookie and proxy handlers.
    ``Get``/``Post`` return the raw response body as bytes, or ``None``
    when the request fails (the error is logged, not raised).
    """

    def __init__(self, log=None, debug_level=0, req_timeout=10):
        # Logger adapter; falls back to a default logger when log is None.
        self.log = LogAdapter(log, name="LLTHttpClient")
        self.req_ = urllib.request.Request("http://www.google.com/")
        self.resp_ = None       # last response object
        self.content_ = None    # last response body (bytes)
        self.httpDebugLevel = debug_level
        self.SetDebugLevel(debug_level)

        self.cookie_ = None     # http.cookiejar.CookieJar when cookies enabled
        self.proxy_dic_ = None  # e.g. {"http": "http://127.0.0.1:8087"}
        self.cookie_enable_ = False
        self.proxy_enable_ = False

        self.req_timeout = req_timeout
        self.handler_list = []
        self.opener_ = None
        self.header_refer_ = "http://www.google.com/"
        self.header_user_agent_ = 'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6'
        self.cookie_str = ""    # raw Cookie header value sent with each request

    def Get(self, url):
        """Fetch *url* with GET; return the body as bytes, or None on error."""
        if self.opener_ is None:
            self._install_opener()
        headers_dic = self._init_header(url)
        try:
            self.req_ = urllib.request.Request(url, headers=headers_dic)
            self.resp_ = self.opener_.open(self.req_, timeout=self.req_timeout)
            self.content_ = self.resp_.read()
            return self.content_
        except urllib.error.URLError as e:
            # Best-effort spider client: log and return None, don't raise.
            self.log.exception(e)

    def Post(self, url, post_dic):
        """POST *post_dic* (form-encoded) to *url*; return bytes or None.

        NOTE: the form data is encoded as gb2312 — presumably for the
        Chinese target sites this spider was written for; confirm before
        reusing elsewhere.
        """
        if self.opener_ is None:
            self._install_opener()
        postdata = urllib.parse.urlencode(post_dic).encode('gb2312')
        headers_dic = self._init_header(url)
        try:
            self.req_ = urllib.request.Request(url, data=postdata, headers=headers_dic)
            # Fix: honour req_timeout here too, for consistency with Get().
            self.resp_ = self.opener_.open(self.req_, timeout=self.req_timeout)
            self.content_ = self.resp_.read()
            return self.content_
        except urllib.error.URLError as e:
            self.log.exception(e)

    def EnableCookieSupport(self, coockie="", enable=True):
        """Turn automatic cookie handling on or off.

        *coockie* (misspelling kept for existing callers) is an optional raw
        Cookie header string sent with every request in addition to the jar.
        """
        if enable:
            # Fix: CookieJar() takes a *policy* object, not a cookie string —
            # the old code passed the string and broke later lookups.  Also
            # keep an existing jar instead of discarding it on re-enable.
            if self.cookie_ is None:
                self.cookie_ = http.cookiejar.CookieJar()
            if coockie:
                self.cookie_str = coockie
        else:
            self.cookie_ = None
        self.cookie_enable_ = enable
        self._install_opener()

    def AddProxy(self, proxy_pair):
        """Route requests through *proxy_pair*, e.g. {"http": "http://host:port"}."""
        self.proxy_dic_ = proxy_pair
        self.proxy_enable_ = True
        self._install_opener()

    def SetDebugLevel(self, level=0):
        """Set http.client connection debug output level (0 = off)."""
        from http.client import HTTPConnection
        # NOTE: debuglevel is a class attribute — this affects the whole
        # process, not just this client instance.
        HTTPConnection.debuglevel = level
        self.httpDebugLevel = level

    def _install_opener(self):
        # Fix: rebuild the opener from scratch on every call.  The old code
        # kept the first opener and appended handlers to it, so repeated
        # Enable/AddProxy calls stacked duplicate handlers.
        self.opener_ = urllib.request.build_opener(
            ContentEncodingProcessor())  # always support gzip/deflate
        if self.cookie_enable_ and self.cookie_ is not None:
            self.opener_.add_handler(
                urllib.request.HTTPCookieProcessor(self.cookie_))
        if self.proxy_enable_:
            self.opener_.add_handler(
                urllib.request.ProxyHandler(self.proxy_dic_))

        urllib.request.install_opener(self.opener_)

    def _init_header(self, url):
        """Build the per-request header dict (Referer, User-Agent, Cookie)."""
        headers_dic = {'Referer': url}
        # Fix: this condition mistakenly tested header_user_agent_, so a
        # None referer was sent whenever a user agent was configured.
        if self.header_refer_ is not None:
            headers_dic['Referer'] = self.header_refer_
        if self.header_user_agent_ is not None:
            headers_dic['User-Agent'] = self.header_user_agent_
        if self.cookie_str != "":
            # Fix: 'Set-Cookie' is a *response* header; requests send 'Cookie'.
            headers_dic['Cookie'] = self.cookie_str
        return headers_dic

#######################ContentEncodingProcessor#################################
# copy from http://www.pythonclub.org/python-network-application/observer-spider
class ContentEncodingProcessor(urllib.request.BaseHandler):
    """A handler adding gzip/deflate support to urllib.request openers.

    Advertises ``Accept-Encoding: gzip, deflate`` on outgoing requests and
    transparently decompresses responses whose Content-Encoding header
    declares gzip or deflate compression.
    (Adapted from http://www.pythonclub.org/python-network-application/observer-spider)
    """

    def http_request(self, req):
        # Tell the server we can handle compressed bodies.
        req.add_header("Accept-Encoding", "gzip, deflate")
        return req

    def http_response(self, req, resp):
        old_resp = resp
        encoding = resp.headers.get("content-encoding")
        if encoding == "gzip":
            # Wrap the raw body so callers read plain, decompressed bytes.
            gz = GzipFile(fileobj=BytesIO(resp.read()), mode="r")
            resp = urllib.request.addinfourl(gz, old_resp.headers, old_resp.url, old_resp.code)
            resp.msg = old_resp.msg
        elif encoding == "deflate":
            body = BytesIO(deflate(resp.read()))
            resp = urllib.request.addinfourl(body, old_resp.headers, old_resp.url, old_resp.code)
            resp.msg = old_resp.msg
        return resp

    # Fix: also register the hooks for TLS connections — the original only
    # handled http://, so https:// responses silently skipped decompression.
    https_request = http_request
    https_response = http_response

# deflate support
import zlib
def deflate(data):
    """Decompress *data*, accepting both raw-deflate and zlib-wrapped streams.

    Servers labelled "deflate" may send either a bare DEFLATE stream or a
    zlib-framed one; try the raw form first (negative wbits disables the
    zlib header/checksum), then fall back to the standard format.
    """
    raw_window = -zlib.MAX_WBITS
    try:
        plain = zlib.decompress(data, raw_window)
    except zlib.error:
        plain = zlib.decompress(data)
    return plain
#######################ContentEncodingProcessor#################################




# test code  ########################################################################
def test_get():
    """Manual smoke test: GET a page through a local proxy and print it."""
    url = r'http://www.cqent.net/index.php'
    client = HttpClient()
    client.AddProxy({"http":"http://127.0.0.1:8087"})
    content = client.Get(url)
    # Fix: Get() returns None on failure; guard before decoding so a dead
    # proxy/site doesn't turn into an AttributeError.
    if content is not None:
        print(content.decode('utf8'))

def test_post():
    """Manual smoke test: POST a search form and print the response."""
    url = r'http://www.cqent.net/index.php?s=vod-search'
    postdata = {
        '__ppvod__' :'3133627a8f406588b41c90d5d15c7fad',
        'id'	  :'abc',
        'submit'  :'asdfasdf',
        'x'	  :'name'
    }
    client = HttpClient()
    content = client.Post(url, postdata)
    # Fix: Post() returns None on failure; guard before decoding.
    if content is not None:
        print(content.decode('utf8'))

if __name__ == '__main__':
    # Run the GET smoke test by default; the POST variant is kept for
    # manual use.
    test_get()
    # test_post()
