# from: http://www.voidspace.org.uk/python/articles/cookielib.shtml

import os.path
import cookielib
import urllib2
import urllib, urlparse
import httplib2 # my custom message only version
from StringIO import StringIO

txheaders =  {'User-agent' :
              'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.8) Gecko/2009032711 Ubuntu/8.04 (hardy) Firefox/3.0.8'}
COOKIEFILE = '' #'cookies.lwp' # no write access on GAE, 
# could save this in db.. or cache.. but its already in memory here..

import logging
import time

# GAE doesn't handle cookies on redirection so might have to use urlfetch & 
# get cookies at each redirect..
# plus urlfetch is limited to 1 MB request & response size -- a hard constraint here..
# but it could also be one of the other headers..
#        like: Referer, Host, X-Forwarded-For, Via, Vary, ..

from google.appengine.api import urlfetch

COOKIE_JAR = cookielib.LWPCookieJar()

def add_cookies(url, data, headers):
    """Populate the 'Cookie' entry of *headers* from the module COOKIE_JAR.

    Builds a throwaway urllib2.Request so the cookie jar can decide which
    cookies apply to *url*, then joins their attribute strings into a single
    'Cookie' header value.  Mutates *headers* in place and returns None.
    """
    # The jar won't overwrite an existing Cookie header, so drop any stale one.
    # (Replaces a bare try/del/except that swallowed every exception.)
    headers.pop('Cookie', None)

    ureq = urllib2.Request(url, data=data, headers=headers)

    # NOTE(review): relies on cookielib private helpers because
    # CookieJar.add_cookie_header() did not populate the header reliably
    # under GAE (per the original author's comments); the dead fallback
    # code that used add_cookie_header() has been removed -- it sat after
    # an unconditional `return` and could never run.
    cookies = COOKIE_JAR._cookies_for_request(ureq)
    attrs = COOKIE_JAR._cookie_attrs(cookies)
    if attrs:
        headers['Cookie'] = '; '.join(attrs)
import re
# Combined Set-Cookie parsing: GAE's urlfetch folds repeated Set-Cookie
# headers into one comma-joined string, which cookielib cannot split
# correctly (notably when one cookie lacks a `path` attribute).  These
# patterns pull individual "name=value; expires=...; [path=...]" cookies
# back out.  They will NOT match cookies carrying other attributes
# (secure, httponly, domain, ...) -- a known limitation of the original.
_SET_COOKIE_WITH_PATH = (r'[^;,= ]+=[^;, ]+; '
                         r'expires=\w+, \d\d-\w+-\d\d\d\d \d\d:\d\d:\d\d \w+; '
                         r'path=[^,]+')
_SET_COOKIE_NO_PATH = (r'[^;,= ]+=[^;, ]+; '
                       r'expires=\w+, \d\d-\w+-\d\d\d\d \d\d:\d\d:\d\d \w+')


def _extract_all(pattern, text):
    """Repeatedly pull *pattern* matches out of *text*.

    Returns (matches, remainder): matches in search order, remainder is
    *text* with each matched substring removed.
    """
    found = []
    match = re.search(pattern, text)
    while match:
        cstr = match.group()
        found.append(cstr)
        text = text.replace(cstr, '')
        match = re.search(pattern, text)
    return found, text


def _split_set_cookie(value):
    """Split a comma-folded Set-Cookie header string into a list of
    individual cookie strings (cookies with a path first, then those
    without, matching the original extraction order)."""
    with_path, rest = _extract_all(_SET_COOKIE_WITH_PATH, value)
    rest = rest.strip(', ')
    no_path, _ = _extract_all(_SET_COOKIE_NO_PATH, rest)
    # Anything still left in `rest` is an unparseable cookie and is dropped.
    return with_path + no_path


def update_cookies(url, data, headers, resp):
    """Extract cookies from a urlfetch response *resp* into COOKIE_JAR.

    Rebuilds a urllib2-style request/response pair (cookielib's expected
    interface) from the urlfetch objects, un-folding combined Set-Cookie
    headers first so every cookie is seen on its own line.
    """
    # Recreate the request in urllib2 style for extract_cookies().
    ureq = urllib2.Request(url, data=data, headers=headers)

    # Recreate a raw header string to be parsed by HTTPMessage.
    hdr_lines = []
    for k, v in resp.headers.items():
        if k == 'set-cookie':
            v = _split_set_cookie(v)
        if isinstance(v, list):
            for item in v:
                hdr_lines.append('%s: %s\n' % (k, item))
        else:
            hdr_lines.append('%s: %s\n' % (k, v))
    hdr_content = ''.join(hdr_lines) + '\n'

    uheaders = httplib2.HTTPMessage(StringIO(hdr_content))
    uresp = urllib.addinfourl(StringIO(resp.content), uheaders, url)
    COOKIE_JAR.extract_cookies(uresp, ureq)


def urlfetch_w_cookies(url, data=None, max_redirects=10):
    """Fetch *url* via GAE urlfetch, managing cookies manually.

    urlfetch drops cookies across redirects, so redirects are followed by
    hand: cookies are attached before each hop (add_cookies) and harvested
    from each response (update_cookies).

    data -- dict or None; when given the request is a POST.
    max_redirects -- safety cap; the original looped forever on a
        redirect cycle.  When exceeded, the last response is returned.

    Returns (content, final_url).
    """
    logging.debug('urlfetch w cookies called')

    headers = {'User-agent': txheaders['User-agent']}
    method = urlfetch.POST if data else urlfetch.GET

    for _ in range(max_redirects + 1):
        add_cookies(url, data, headers)

        resp = urlfetch.fetch(url,
                              payload=data,
                              method=method,
                              headers=headers,
                              follow_redirects=False)

        if resp.status_code == 200:
            update_cookies(url, data, headers, resp)
            return resp.content, url
        elif resp.status_code in (301, 302, 303, 307):
            # 307 is only valid for GET & HEAD requests.
            update_cookies(url, data, headers, resp)
            if 'location' in resp.headers:  # headers is a CaselessDict
                newurl = resp.headers['location']
            elif 'uri' in resp.headers:
                newurl = resp.headers['uri']
            else:
                # Redirect with no target -- nothing sensible to follow.
                return resp.content, url
            url = urlparse.urljoin(url, newurl).replace(' ', '%20')
        else:
            logging.debug('unhandled status code %s' % resp.status_code)
            return resp.content, url

    # Redirect cycle (or very long chain): give up with the last response.
    logging.debug('redirect limit (%s) exceeded for %s' % (max_redirects, url))
    return resp.content, url
            
            
    
    


def get_cjhdrs(req):
    """Return the Cookie header value COOKIE_JAR would send for *req*,
    or '' when no stored cookies apply."""
    applicable = COOKIE_JAR._cookies_for_request(req)
    attr_list = COOKIE_JAR._cookie_attrs(applicable)
    return "; ".join(attr_list) if attr_list else ''

class SmartRedirectHandler(urllib2.HTTPRedirectHandler):
    """Redirect handler that re-applies COOKIE_JAR cookies on each hop.

    The stock handler re-issues the redirected request without giving the
    cookie jar another chance to update the Cookie header, so cookies set
    by the redirect response were lost.
    """

    def _recookie(self, req, result, code):
        # Shared 301/302 logic so the two handlers can't drift apart.
        logging.debug('got %s redirect' % code)
        COOKIE_JAR.extract_cookies(result, req)
        # BUG FIX: these used to be hard-coded debug strings
        # ('weird=shit!' / 'crazy=shit!'); send the jar's real cookies,
        # as the commented-out call in the original intended.
        req.headers['Cookie'] = get_cjhdrs(req)
        return result

    def http_error_301(self, req, fp, code, msg, headers):
        result = urllib2.HTTPRedirectHandler.http_error_301(
            self, req, fp, code, msg, headers)
        return self._recookie(req, result, code)

    def http_error_302(self, req, fp, code, msg, headers):
        result = urllib2.HTTPRedirectHandler.http_error_302(
            self, req, fp, code, msg, headers)
        return self._recookie(req, result, code)



# works outside GAE
def get_w_cookies(url, data=None, close=False):
    logging.debug('get w cookies fetching url: %s' % url)
    
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(COOKIE_JAR),
                                  SmartRedirectHandler())
    urllib2.install_opener(opener)

    req = urllib2.Request(url, data) # fake request to get cookies
    txheaders['Cookie'] = get_cjhdrs(req)
    if not txheaders['Cookie']:
        del txheaders['Cookie']
    req = urllib2.Request(url, data, txheaders) # cookie request
    
    req.headers['Cookie'] = 'craziest=poop!'
    
    handle = urllib2.urlopen(req)

    if close:
        handle.close()
    return handle

def get_w_cookiesOLD(url, data=None, close=False):
    """Older fetch variant: keeps its own LWPCookieJar per call,
    persisted as a string in the module-level COOKIEFILE (GAE gives us
    no writable filesystem, so the 'file' lives in memory).

    close -- when True, the response handle is closed before returning.
    Returns the urllib2 response handle.
    """
    logging.debug('get w cookies fetching url: %s' % url)
    global COOKIEFILE

    jar = cookielib.LWPCookieJar()
    if COOKIEFILE:
        # _really_load accepts a file-like object, letting us bypass the
        # filename-only handling in jar.load().
        jar._really_load(StringIO(COOKIEFILE), '[StringIO file]', False, False)

    # Install an opener carrying this jar.  NOTE(review): install_opener
    # is process-global, so this affects all later urllib2.urlopen calls.
    urllib2.install_opener(
        urllib2.build_opener(urllib2.HTTPCookieProcessor(jar)))

    handle = urllib2.urlopen(urllib2.Request(url, data, txheaders))

    # jar.save() insists on opening a real path, so serialize by hand
    # using the same format save() would have produced.
    COOKIEFILE = "#LWP-Cookies-2.0\n" + jar.as_lwp_str()

    if close:
        handle.close()
    return handle


def post_w_cookies(url, data, close=False):
    """POST *data* to *url* with cookie handling; thin wrapper around
    get_w_cookies (supplying data makes it a POST)."""
    logging.debug('POST w cookies fetching url: %s' % url)
    return get_w_cookies(url, data=data, close=close)

