#! /usr/bin/env python
# coding=utf-8
#############################################################################
#                                                                           #
#   File: fetch.py                                                          #
#                                                                           #
#   Copyright (C) 2008-2009 Du XiaoGang <dugang@188.com>                    #
#                                                                           #
#   Home: http://gappproxy.googlecode.com                                   #
#                                                                           #
#   This file is part of GAppProxy.                                         #
#                                                                           #
#   GAppProxy is free software: you can redistribute it and/or modify       #
#   it under the terms of the GNU General Public License as                 #
#   published by the Free Software Foundation, either version 3 of the      #
#   License, or (at your option) any later version.                         #
#                                                                           #
#   GAppProxy is distributed in the hope that it will be useful,            #
#   but WITHOUT ANY WARRANTY; without even the implied warranty of          #
#   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the           #
#   GNU General Public License for more details.                            #
#                                                                           #
#   You should have received a copy of the GNU General Public License       #
#   along with GAppProxy.  If not, see <http://www.gnu.org/licenses/>.      #
#                                                                           #
#############################################################################

import wsgiref.handlers, urlparse, StringIO, logging, base64, zlib, re, hashlib,string
from google.appengine.ext import webapp
from google.appengine.api import urlfetch
from google.appengine.api import urlfetch_errors, memcache
from google.appengine.api.urlfetch import fetch, ResponseTooLargeError, DownloadError

class MainHandler(webapp.RequestHandler):
    Software = "0.0.1"
    # hop to hop header should not be forwarded
    ### No header check needed. It's been done, we should still do it but...
    H2H_Headers = frozenset(["connection", "keep-alive", "proxy-authenticate", "proxy-authorization",
                   "te", "trailers", "transfer-encoding", "upgrade"])
    METHODS = {"GET": urlfetch.GET, "HEAD": urlfetch.HEAD, "DELETE": urlfetch.DELETE,
					"PUT": urlfetch.PUT, "POST": urlfetch.POST}
    Forbid_Headers = ["if-range"]
    Fetch_Max = 3
    
    CACHE_DISALLOW_HEADERS_CACHE_CONTROL = ["private", "no-cache"]
    END2END_SPECIFIC_REVALIDATION_ITEMS = ["etag","if-modified-since", "if-unmodified-since", "last-modifed"]
    cacheAllowFlag = True
    end2endRevalidation = -1 #1 = specific e2e 2 = unspecific e2e 0 = e2e reload -1 = fine with cache
    maxageLen = 36000 #10h
    cacheHit = False
    
    
    def sendErrorPage(self, status, description):
        self.response.headers["Content-Type"] = "application/octet-stream"
        # http over http
        # header
        self.response.out.write("HTTP/1.1 %d %s\r\n" % (status, description))
        self.response.out.write("Server: %s\r\n" % self.Software)
        self.response.out.write("Content-Type: text/html\r\n")
        self.response.out.write("\r\n")
        # body
        content = "<h1>Fetch Server Error</h1><p>Error Code: %d<p>Message: %s" % (status, description)
        self.response.out.write(zlib.compress(content))

    def tryWithRangeHeader(self, path, postdata, method, headers):
        if method == urlfetch.GET:
            headers["Range"] = "bytes=0-65535"
        else:
            self.sendErrorPage(591,
                                    "Fetch server error")
            return
        try:
            resp = urlfetch.fetch(path, postdata, "HEAD", headers)

        except:
            self.sendErrorPage(591,
                               "Fetch server error, tring with range headers, yet the server is not responding")
            return
        range_supported = False
        for h in resp.headers:
            if h.lower() == "accept-ranges":
                if resp.headers[h].strip().lower() == "bytes":
                    range_supported = True
                    break
            elif h.lower() == "content-range":
                range_supported = True
                break
        if range_supported:
            self.sendErrorPage(592,
                                    "Fetch server error, Retry with range header.")
        else:
            self.sendErrorPage(591,
                                    "Fetch server error, Sorry, file size up to Google's limit and the target server doesn't accept Range request.")
        return

    def checkIfCacheAllow(self, headers, method = urlfetch.GET, range_quest = False):#header dict
        end2endTemp = False
        
        if range_quest and method != urlfetch.GET:
            return
        
        for name, value in headers.iteritems():
            nl = name.lower()
            if nl == "cache-control":
                for item in self.CACHE_DISALLOW_HEADERS_CACHE_CONTROL:
                    if value.lower().find(item) != -1:
                        self.cacheAllowFlag = False
                if value.lower().find("maxage=0") != -1:
                    end2endTemp = True
            elif nl == "pragma":
                if value.lower() == "no-store":
                    self.cacheAllowFlag = False
                elif value.lower() == "no-cache":#e2e reload
                    self.end2endRevalidation = 0
                    self.end2endTemp = True
            elif nl == "vary":
                self.cacheAllowFlag = False
            elif nl in self.END2END_SPECIFIC_REVALIDATION_ITEMS:
                self.end2endRevalidation = 1
        if end2endTemp == True:#revalidation chk
            if self.end2endRevalidation == -1:
                self.end2endRevalidation = 2
        if end2endTemp == False:
            self.end2endRevalidation = -1    
             
    def phraseOrignalPost(self):
        method      = self.request.str_POST.get('method')
        path        = self.request.str_POST.get('path')
        headers     = self.request.str_POST.get('headers')
        post_data   = self.request.str_POST.get('postdata')
        range_flag  = self.request.str_POST.get('range')

        new_method      = self.phraseMethod(method)
        new_headers     = self.phraseNewHeaders(headers)
        range_request   = self.phraseRange(range_flag)
        
        self.checkIfCacheAllow(new_headers, range_request)
        
        return (new_method, path, new_headers, post_data, range_request)

    def phraseNewHeaders(self,orig_headers):
        headers = {}
        si = StringIO.StringIO(orig_headers)
        while True:
            line = si.readline()
            line = line.strip()
            if line == "":
                break
             # parse line
            (name, _, value) = line.partition(":")
            name = name.strip()
            value = value.strip()
            headers[name] = value
           
        headers["Connection"] = "close" # predined header
        return headers
    
    def phraseMethod(self,orig_method):
        if orig_method != "GET" and orig_method != "HEAD" and orig_method != "POST":
            raise Exception("Method not allowed")
        else:
            return self.METHODS.get(orig_method)
        
    def phraseRange(self, orig_range_flag):
        if orig_range_flag == "on":
            return True
        elif orig_range_flag == "off":
            return False

    def fetch(self, path, orig_post_data, method, new_headers, range_request):
        if range_request:
            deadline = 10
            redirect = True
        else:
            deadline = 5
            redirect = False
            
        try:
        # fetch
            resp = urlfetch.fetch(path, orig_post_data, method, new_headers, False, redirect, deadline)
            return resp
        
        except (ResponseTooLargeError, DownloadError):
                #to do:retry with range
            logging.info(path)
            logging.info("ResponseTooLargeError or DownloadError, %s" %range_request)
            if range_request == False:
                self.tryWithRangeHeader(path, orig_post_data, method, new_headers)
                raise
            else:
                self.sendErrorPage(591,"ResponseTooLargeError or DownloadError")
                raise 
        except Exception, e:
                logging.warning("urlfetch.fetch(%s) error: %s." % (range_request and "Range" or "", str(e)))
                self.sendErrorPage(591,"urlfetch.fetch(%s) error: %s." % (range_request and "Range" or ""+ str(e)))
                raise
        except:
                self.sendErrorPage(591, "Fetch server error, The target server may be down or not exist. Another possibility: try to request the URL directly.")
                logging.error("fetched url: %s" %path)
                if range_request == True:
                    logging.info("rangeOn")
                else:
                    logging.info("rangeOff")
                raise
            
    def forward(self, resp):
        pattern = re.compile("maxage=(\d*)")
        self.response.headers["Content-Type"] = "application/octet-stream"
        # status line
        self.response.out.write("HTTP/1.1 %d %s\r\n" % (resp.status_code, self.response.http_status_message(resp.status_code)))
        # headers
        # default Content-Type is text
        text_content = True
        try:
            if string.atoi(resp.headers["content-length"]) > 1000000:
                self.cacheAllowFlag = False
        except:
            pass
        for header in resp.headers:
            if header.strip().lower() in self.H2H_Headers:
                # don"t forward
                continue
            if header.lower() == "cache-control":
                try:
                    maxageLen = pattern.search(resp.headers[header]).group(1)
                except:
                    pass#cant find maxAgeLen
            
            # there may have some problems on multi-cookie process in urlfetch.
            # Set-Cookie: "wordpress=lovelywcm%7C1248344625%7C26c45bab991dcd0b1f3bce6ae6c78c92; expires=Thu, 23-Jul-2009 10:23:45 GMT; path=/wp-content/plugins; domain=.wordpress.com; httponly, wordpress=lovelywcm%7C1248344625%7C26c45bab991dcd0b1f3bce6ae6c78c92; expires=Thu, 23-Jul-2009 10:23:45 GMT; path=/wp-content/plugins; domain=.wordpress.com; httponly,wordpress=lovelywcm%7C1248344625%7C26c45bab991dcd0b1f3bce6ae6c78c92; expires=Thu, 23-Jul-2009 10:23:45 GMT; path=/wp-content/plugins; domain=.wordpress.com; httponly
            if header.lower() == "set-cookie":
                self.cacheAllowFlag = False
                scs = resp.headers[header].split(",")
                nsc = ""
                for sc in scs:
                    if nsc == "":
                        nsc = sc
                    elif re.match(r"[ \t]*[0-9]", sc):
                        # expires 2nd part
                        nsc += "," + sc
                    else:
                        # new one
                        self.response.out.write("%s: %s\r\n" % (header, nsc.strip()))
                        nsc = sc
                self.response.out.write("%s: %s\r\n" % (header, nsc.strip()))
                continue
            # other
            self.response.out.write("%s: %s\r\n" % (header, resp.headers[header]))
            # check Content-Type
            if header.lower() == "content-type":
                if resp.headers[header].lower().find("video/x-flv") != -1:
                    self.cacheAllowFlag = False
                elif resp.headers[header].lower().find("video/mp4") != -1:
                    self.cacheAllowFlag = False
                if resp.headers[header].lower().find("text") == -1:
                    # not text
                    text_content = False
        self.response.out.write("\r\n")
        # only compress when Content-Type is text/xxx
        if text_content:
            self.response.out.write(zlib.compress(resp.content))
        else:
            self.response.out.write(resp.content)
            
    def saveToCache(self, url, resp):
        CACHE_NAMESPACE = "gap"
        if resp.status_code not in [200, 203, 300, 301, 410]:
            return #cache not allowed
        self.checkIfCacheAllow(resp.headers)
        try:
	    # memcache is able to do pickle itself.
            h_url = self.hash_url(url)
            #logging.info(str(h_url))
            memcache.add(h_url, resp, time=self.maxageLen, namespace=CACHE_NAMESPACE)
        except Exception,e:
            logging.info("fetch.saveToCache " + url + " " +e.message)
	
    def hash_url(self, url): # namespace now used.
        return hashlib.sha1(url).hexdigest()
        #return url
    def search_cache(self, path):
        CACHE_NAMESPACE = "gap"
        resp = memcache.get(self.hash_url(path), namespace=CACHE_NAMESPACE)

    def phraseCache(self,path, headers):
        if self.end2endRevalidation == -1:# ok for cache
            resp = self.search_cache(path)
            if resp is not None:
                logging.info("hit")
                return resp, True
        if self.end2endRevalidation == 0 or 1 or 2:
            return None, False
    
    def post(self):
        try:
            (method,
             path,new_headers,
             post_data,
             range_request) = self.phraseOrignalPost()  
        except RuntimeError, e:
            self.sendErrorPage(591, "Fetch server error, %s." % str(e))
            return
        except Exception, e:
            self.sendErrorPage(590, "%s." % str(e))
            return
        # fetch, try * times
                 
        (resp,fromCache) = self.phraseCache(path, new_headers)
        if resp is None:
            for i in range(self.Fetch_Max):
                try:
                    resp = self.fetch(path, post_data,
                                      method, new_headers, range_request)
                    break
                except Exception,e:
                    logging.info("fetch.post, %s" %e)
                    return
        # forward
        self.forward(resp)
        if self.cacheAllowFlag and not fromCache:
            self.saveToCache(path, resp)


    def get(self):
        self.response.headers["Content-Type"] = "text/html; charset=utf-8"
        self.response.out.write( \
"""
<html>
    <head>
        <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
        <title>GAppProxy已经在工作了</title>
    </head>
    <body>
        <table width="800" border="0" align="center">
            <tr><td align="center"><hr></td></tr>
            <tr><td align="center">
                <b><h1>%s 已经在工作了</h1></b>
            </td></tr>
            <tr><td align="center"><hr></td></tr>

            <tr><td align="center">
                是一个开源的HTTP Proxy软件,使用Python编写,运行于Google App Engine平台上. 
            </td></tr>
            <tr><td align="center"><hr></td></tr>

            <tr><td align="center">
                更多相关介绍,请参考<a href="http://www.google.com/">项目主页</a>. 
            </td></tr>
            <tr><td align="center"><hr></td></tr>

            <tr><td align="center">
                <img src="http://code.google.com/appengine/images/appengine-silver-120x30.gif" alt="Powered by Google App Engine" />
            </td></tr>
            <tr><td align="center"><hr></td></tr>
        </table>
    </body>
</html>
""" % self.Software)

# WSGI entry point: route POST/GET on /fetch.py to the proxy handler.
application = webapp.WSGIApplication([("/fetch.py", MainHandler)])
 
