#!/usr/bin/python
# -*- coding: utf8 -*-

import os, codecs
from Web_parser import Web_parser

import urllib2
from urllib import urlencode, urlopen

class Cookies(object):
   u'''
Cookie container. Used in Humanizer.
__init__(cookie_file)   -- Constructor. Only argument is path to cookie file for Load.
Parse(response)         -- Parses cookies from response and changes self state.
Add_to_request(request) -- Adds cookies to request just before sending.
Save()                  -- Saves self state to file Cookies.cookie_file.
Load()                  -- Loads self state from file Cookies.cookie_file.
'''
   def __init__(self, cookie_file=u'cache/cookies.txt'):
      self.cookie_file = cookie_file
      self.cookies = {}          # name -> value, both stripped unicode
      self.Load()

   def Parse(self, response):
      u'''Harvest Set-Cookie headers from <response> into self.cookies.
      A value of "deleted" removes the cookie. Persists via Save() when
      anything changed.'''
      do_save = False
      for line in unicode(response.headers).splitlines():
         if not line.lower().startswith(u'set-cookie'):
            continue
         # "Set-Cookie: name=value; attr=...": keep only the name=value pair.
         setcookie, data = line.split(u':', 1)
         name, value = data.strip().split(u'; ', 1)[0].split(u'=', 1)
         name, value = name.strip(), value.strip()
         if value == u'deleted':
            # Server asked us to drop this cookie.
            # Bug fix: dict.has_key() is deprecated; use the "in" operator.
            if name in self.cookies:
               del self.cookies[name]
         else:
            self.cookies[name] = value
         do_save = True
      if do_save:
         self.Save()

   def Add_to_request(self, request):
      u'''Attach the whole cookie jar as a single Cookie header.'''
      adding = u'; '.join(u'%s=%s' % (name, value)
                          for name, value in self.cookies.items())
      if adding:
         request.add_header(u'Cookie', adding)

   def Save(self):
      u'''Write cookies to self.cookie_file, one "name=value" per line.'''
      if self.cookie_file:
         # Bug fix: the file handle was never closed; "with" guarantees it.
         with codecs.open(self.cookie_file, 'w', 'utf8') as f:
            f.write(u'\n'.join(u'%s=%s' % (k, v)
                               for k, v in self.cookies.items()))

   def Load(self):
      u'''Read cookies back from self.cookie_file, if it exists.'''
      if os.path.isfile(self.cookie_file):
         # Bug fix: close the handle, and skip blank/malformed lines that
         # previously raised ValueError on the 2-way unpack.
         with codecs.open(self.cookie_file, 'r', 'utf8') as f:
            for line in f:
               if u'=' not in line:
                  continue
               k, v = line.split(u'=', 1)
               self.cookies[k.strip()] = v.strip()

   def Flush(self):
      u'''Drop all cookies held in memory (the cookie file is untouched).'''
      self.cookies = {}

class Humanizer(urllib2.BaseHandler):
   u'''
Base handler for urllib2. Does all work with cookies as browser.
Usage:
   opener  = urllib2.build_opener(Humanizer)
   urllib2.install_opener(opener)
   request = urllib2.Request(url, data, headers)
   handle  = opener.open(request)
'''
   def __init__(self, cookie_file=u'cache/cookies.txt'):
      self.cookie_file = cookie_file
      self.cookies = Cookies(cookie_file)
      self.referer = u''    # url of the last response, sent as Referer

   def http_request(self, request):
      u'''Pre-process an outgoing request: add stored cookies and Referer.'''
      self.cookies.Add_to_request(request)
      if self.referer:
         request.add_header(u'Referer', self.referer)
      return request

   def http_response(self, request, response):
      u'''Post-process a response: harvest cookies, remember url as Referer.'''
      self.cookies.Parse(response)
      self.referer = response.url
      return response

   # Bug fix: urllib2 dispatches https:// traffic to https_request /
   # https_response, so without these aliases cookies and Referer were
   # silently dropped for every HTTPS url.
   https_request  = http_request
   https_response = http_response

   def Flush(self):
      u'''Forget all cookies held by the underlying Cookies jar.'''
      self.cookies.Flush()


class Http_query(Web_parser):
   u'''Base class for Searcher, Messanger and Checker.
For using Auth, override getOpener().
'''
   def __init__(self):
      self.headers = {
               u'User-Agent':
                u'Opera/9.63 (Windows NT 5.1; U; en-GB) Presto/2.1.1',
               u'Accept': u'*/*'
                     }

   def getOpener(self):
      if not self.opener:
         self.opener = urllib2.build_opener(Humanizer)
         urllib2.install_opener(self.opener)
      return self.opener

   def Retrieve(self, url, binary=False):
      u'''Retrieves url, sending human-like headers
      and current cookies'''
      handle = self.Open_url(url)
      if handle:
         print u'Downloading', url
         content = self.Download(handle)
      return content

   def Post_form(self, url, form, save_response = None):
      u'''
      1) POSTs <form>, represented as dict to <url>.
      2) Retrieves response.
      3) Saves response, if <save_response> contains filename.
         (Pass None, or nothing if don't want save response.)
      '''
      assert type(url) == unicode
      assert type(form) == dict
      data    = urlencode(form)
      request = urllib2.Request(url, data, self.headers)
      handle  = self.Open_url(url, request)
      if handle:
         print u'Downloading response:', handle.url
         content = self.Download(handle)
         if save_response: open(save_response, u'wb').write(content)
         if not content: print u'Error. No content'
         return content
      else: print u'Error. No handle'

   def Open_url(self, url, request=None, opener=None):
      if not request: request = urllib2.Request(url, None, self.headers)
      if not opener:  opener  = self.getOpener()
      for i in range(15):
         try: handle = opener.open(request)
         except urllib2.URLError, e:
            if i == 14: print e
         else: return handle
      else:
         print "URLError opening", url

   def Download(self, handle):
      try:
         content = handle.read()
         return content
      except urllib2.HTTPError, e:
         print "HTTPError opening", url
         print e


def main():
   u'''Module entry point. This file is a library; running it directly
   does nothing.'''
   return None

if __name__ == '__main__':
   main()