#! /usr/bin/env python
# coding=utf-8
#############################################################################
#                                                                           #
#   File: proxy.py                                                          #
#                                                                           #
#   Copyright (C) 2008-2009 Du XiaoGang <dugang@188.com>                    #
#                                                                           #
#   Home: http://gappproxy.googlecode.com                                   #
#                                                                           #
#   This file is part of GAppProxy.                                         #
#                                                                           #
#   GAppProxy is free software: you can redistribute it and/or modify       #
#   it under the terms of the GNU General Public License as                 #
#   published by the Free Software Foundation, either version 3 of the      #
#   License, or (at your option) any later version.                         #
#                                                                           #
#   GAppProxy is distributed in the hope that it will be useful,            #
#   but WITHOUT ANY WARRANTY; without even the implied warranty of          #
#   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the           #
#   GNU General Public License for more details.                            #
#                                                                           #
#   You should have received a copy of the GNU General Public License       #
#   along with GAppProxy.  If not, see <http://www.gnu.org/licenses/>.      #
#                                                                           #
#############################################################################


import BaseHTTPServer, SocketServer, urllib, urllib2, urlparse, zlib
import socket, os, common, sys, errno, re, threadedprocess
import time, httplib,ConnectionService
import Queue, time,fetchcore,subprocess
import unittest

try:
    # HTTPS interception requires the ssl module (Python >= 2.6).
    import ssl
    ssl_enabled = True
except ImportError:
    # Narrowed from a bare "except:": only a missing module should
    # disable HTTPS; anything else (SystemExit, etc.) must propagate.
    ssl_enabled = False

# global variables
globalVars = common.getGlobalVars()
httpd = None                # ThreadingHTTPServer instance, set by proxyStart()
UIPresenter = None          # UI presenter object, set by proxyStart()
listen_port = globalVars.LISTEN_PORT
local_proxy = globalVars.LOCAL_PROXY
fetch_server = globalVars.FETCH_SERVER
fetch_core = None           # fetchcore.FetchCore instance, set by proxyStart()
DAEMON_THREAD_COUNT = 1
daemonThreads = []          # large-response worker threads, filled by daemonStart()
queue_give_job = None       # shared job queue for the worker threads
# Headers never forwarded to the fetch server: hop-by-hop headers
# (RFC 2616 13.5.1) plus ones the fetch server reconstructs itself.
# ("referer" was considered but is NOT filtered here.)
FORBID_HEADERS = frozenset(["connection", "keep-alive", "proxy-authenticate",
                            "proxy-authorization", "te", "trailers",
                            "transfer-encoding", "upgrade",
                            "proxy-connection", "host", "content-length",
                            "via"])


 

class LocalProxyHandler(BaseHTTPServer.BaseHTTPRequestHandler):
    """Request handler for the local proxy.

    Plain HTTP requests are packed into a form POST and forwarded to the
    remote fetch server via the module-global fetch_core.  CONNECT
    requests are handled by terminating SSL locally with a generated
    per-host certificate and replaying the decrypted request through
    this same proxy port.  Responses flagged as "large" (status 592 from
    the fetch server) are re-downloaded in ranged parts by the worker
    threads started in daemonStart().
    """
    # Google App Engine request-size limit: refuse POST bodies over 1MB.
    PostDataLimit = 0x100000
    # host -> bool, class-level cache of "certificate already on disk".
    # Shared across handler instances (one per request thread).
    cached_cert = {}

    def fetch(self, params, error_handeling = True,retry = 0):
        """Forward the url-encoded *params* to the fetch server.

        Returns the response object from fetch_core.fetch(), or None on
        failure.  When error_handeling is True an appropriate HTTP error
        is also sent back to the client.  A connection error triggers
        one extra in-place retry when retry == 0; *retry* itself is
        passed through to fetch_core.
        """
        try:
            resp = fetch_core.fetch(params,retry)
            return resp
        except fetchcore.FetchCoreHttpError, e:
            common.logger(1,"LocalProxyHandler.fetch.HttpError",e)
            if error_handeling == False:
                return
            else:
                if e.code == 404:
                    self.send_error(404,
                                 "Local proxy error, Fetchserver not found at the URL you specified, please check it.")
                elif e.code == 502:
                    self.send_error(502,
                                        "Local proxy error, Transmission error, or the fetchserver is too busy.")
                else:
                    self.send_error(e.code)
                return None
        except fetchcore.FetchCoreConnectionError,e:
            #in this case we retry just once
            if retry == 0:
                try:#one more try
                    resp = fetch_core.fetch(params,retry)
                    return resp
                except Exception,error:
                    common.logger(1,"LocalProxyHandler.fetch.ConnectionError.Error",e)
            common.logger(1,"LocalProxyHandler.fetch.ConnectionError",e)
            if error_handeling == False:
                return None
            else:
                self.send_error(504,
                            "Fail to connect to Fetchserver."
                             + "Fetchserver's url error, or Maybe you have some problem with your connection. "
                             + "Requested Path:" + self.path)
                return None
        except fetchcore.FetchCoreEmptyResponse,e:
            #this should not happen, yet it happens when appengine have
            #a run time error
            common.logger(1,"LocalProxyHandler.fetch.EmptyResponse",e)
            if error_handeling == False:
                return None
            else:
                self.send_error(502,"fetch server runtime error, sorry.")

        except Exception as e:
            common.logger(1,"LocalProxyHandler.fetch.Exception",e)
            if error_handeling == False:
                return None
            else:
                self.send_error(502, "unknown proxy error")

    def do_CONNECT(self):
        """Handle HTTPS via local SSL termination.

        Answers the CONNECT with 200, wraps the client socket in SSL
        using a certificate generated for the target host, reads the
        decrypted request line, rewrites its path to an absolute
        https:// URL, and replays the request through this proxy's own
        listening port so do_METHOD handles it.
        """
        if not ssl_enabled:
            self.send_error(501,
                            "Local proxy error, HTTPS needs Python2.6 or later.")
            self.connection.close()
            return

        # for ssl proxy
        (https_host, _, https_port) = self.path.partition(":")
        if https_port != "" and https_port != "443":
            self.send_error(501,
                            "Local proxy error, Only port 443 is allowed for\
                            https.")
            self.connection.close()
            return

        # Make sure a certificate for this host exists on disk; the
        # cached_cert dict avoids re-checking the filesystem per request.
        cert_loc = os.path.join(globalVars._dir,"ssl2/" + https_host +".cert")
        if https_host not in self.cached_cert:
            if os.path.isfile(cert_loc):
                self.cached_cert[https_host] = True
            else:
                # cg is the module-level CertGenerator instance.
                cg.gen_sslcert(https_host)
                self.cached_cert[https_host] = False

        # continue
        self.wfile.write("HTTP/1.1 200 OK\r\n")
        self.wfile.write("\r\n")
        ssl_sock = ssl.wrap_socket(self.connection, server_side=True,
            certfile=cert_loc, keyfile=cg.serverKeyDir)#, ciphers="LOW")


        # rewrite request line, url to abs
        try:
            first_line = ""
            while True:
                # NOTE(review): "chr" shadows the builtin; reads the
                # request line one byte at a time until \r\n or \n.
                chr = ssl_sock.read(1)
                # EOF?
                if chr == "":
                    # bad request
                    ssl_sock.close()
                    self.connection.close()
                    return
                # newline(\r\n)?
                if chr == "\r":
                    chr = ssl_sock.read(1)
                    if chr == "\n":
                        # got
                        break
                    else:
                        # bad request
                        ssl_sock.close()
                        self.connection.close()
                        return
                # newline(\n)?
                if chr == "\n":
                    # got
                    break
                first_line += chr

        except socket.error,e:
            common.logger(1,"LocalProxyHandler.do_connect.socketError",e)
            ssl_sock.close()
            return

        try:
            # got path, rewrite
            (method, path, ver) = first_line.split()
            if path.startswith("/"):
                path = "https://%s" % https_host + path

            # connect to local proxy server (loop back into ourselves)
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock.connect(("127.0.0.1", listen_port))
            sock.send("%s %s %s\r\n" % (method, path, ver))

            # forward https request; 1s timeout is used to detect the
            # client pausing (end of request data) rather than EOF.
            ssl_sock.settimeout(1)
            while True:
                try:
                    data = ssl_sock.read(8192)
                except ssl.SSLError, e:
                    if str(e).lower().find("timed out") == -1:
                        # error
                        sock.close()
                        ssl_sock.close()
                        self.connection.close()
                        return
                    # timeout
                    break
                if data != "":
                    sock.send(data)
                else:
                    # EOF
                    break
            ssl_sock.setblocking(True)

            # simply forward response
            while True:
                data = sock.recv(8192)
                if data != "":
                    ssl_sock.write(data)
                else:
                    # EOF
                    break
            # clean
            sock.close()
            ssl_sock.shutdown(socket.SHUT_WR)
            ssl_sock.close()
            self.connection.close()

        except socket.error,e:
            common.logger(1,"LocalProxyHandler.do_connect.socketError",e)
        finally:
            # Sockets are close()d twice on the success path; close() on
            # an already-closed socket object is harmless here.
            sock.close()
            ssl_sock.close()



    def do_METHOD(self):
        """Shared handler for GET/HEAD/POST.

        Validates the request (method, POST size, scheme), strips
        forbidden headers, forwards everything to the fetch server via
        fetch(), then relays the response; text bodies arrive
        zlib-compressed from the fetch server and are inflated here.
        Status 592 on a GET switches to the ranged large-response path.
        """
        self.protocol_version = 'HTTP/1.1' # try keep alive
        # check http method and post data
        method = self.command
        if method == "GET" or method == "HEAD":
            # no post data
            post_data_len = 0
        elif method == "POST":
            # get length of post data
            post_data_len = 0
            for header in self.headers:
                if header.lower() == "content-length":
                    post_data_len = int(self.headers[header])
                    break
            # exceed limit?
            if post_data_len > self.PostDataLimit:
                self.send_error(413,
                                "Local proxy error, Sorry, Google's limit,\
                                file size up to 1MB.")
                self.connection.close()
                return
        else:
            # unsupported method
            self.send_error(501, "Local proxy error, Method not allowed.")
            self.connection.close()
            return

        # get post data
        post_data = ""
        if post_data_len > 0:
            post_data = self.rfile.read(post_data_len)
            if len(post_data) != post_data_len:
                # bad request
                self.send_error(400, \
                                "Local proxy error, Post data length error.")
                self.connection.close()
                return

        # do path check; transparent-proxy style requests carry a bare
        # path, so fall back to the Host header for the netloc.
        (scm, netloc, path, params, query, _) = urlparse.urlparse(self.path)
        if not netloc:
            try:
                netloc = self.headers['Host']
                scm = "http"
            except Exception,e:
                common.logger(1,"LocalProxyHandler.do_method.netlocError",e)

        if (scm.lower() != "http" and scm.lower() != "https") or not netloc:
            self.send_error(501, \
                            "Local proxy error,\
                            Unsupported scheme(ftp for example).")
            self.connection.close()
            return
        # create new path
        path = urlparse.urlunparse((scm, netloc, path, params, query, ""))

        # remove disallowed header
        dhs = []

        for header in self.headers:
            hl = header.lower()            
            if hl in FORBID_HEADERS:
               dhs.append(header)

        for dh in dhs:
            del self.headers[dh]

        # create request for GAppProxy
        params = urllib.urlencode({"method": method,
                                   "path": path,
                                   "headers": self.headers,
                                   "postdata": post_data,
                                   "version": common.globalVars.VERSION,
                                   "range": "off"})
        resp = None
        # Only idempotent GETs are retried.
        if method == "GET":
            retry = 3
        else:
            retry = 0
        try:
            resp = self.fetch(params,retry = retry)
        except socket.error,e:
            common.logger(1,"LocalProxyHandler.do_method.socketError",e)
            resp = None
            pass

        if resp == None:
            self.connection.close()
            return


        # parse resp
        # for status line
        status = resp.status
        reason = resp.reason

        # for large response
        if status == 592 and method == "GET":
            self.processLargeResponse(path)
            self.connection.close()
            return

        # normal response
        try:
            self.send_response(status, reason)

        # for headers
            text_content = True
            try:
               if resp.headers["content-type"].lower().find("text") == -1:
                   text_content = False
            except KeyError:
                pass

            # Ranged requests are handled by the proxy itself, so tell
            # the client not to attempt them.
            resp.headers["accept-ranges"]="none"
            for key in resp.headers:
                self.send_header(key,resp.headers[key])
            for value in resp.setcookie:
                self.send_header("set-cookie",value)
            self.end_headers()

            # for page; text payloads are zlib-deflated by the fetch server
            data = self.getData(resp)
            if text_content:
                if len(data) > 0:
                    self.wfile.write(zlib.decompress(data))
            else:
                self.wfile.write(data)
        except socket.error, e:
            common.logger(1,"LocalProxyHandler.fetch.send_response.socketError",e)
            # Connection/Webpage closed before proxy return
            #if err == errno.EPIPE or err == 10053 or err == 10054: # *nix, Windows
             #   print e
            #    pass
           # else:
            #if e == 'Errno 10053'or e == 'Errno 10054':
             #   pass
           # else:
            #   raise
            return #already closed connection
        self.connection.close()


    do_GET = do_METHOD
    do_HEAD = do_METHOD
    do_POST = do_METHOD

    def getData(self,resp):
        """Return the raw payload of a fetch-server response object."""
        return resp.payload

    def getPos(self, value):
        """Parse a Content-Range header value.

        Returns (start, end + 1, total).  NOTE(review): raises
        AttributeError if the value does not match the pattern, since
        re.match() would return None.
        """
        m = re.match(r"bytes[ \t]+([0-9]+)-([0-9]+)/([0-9]+)", value)
        common.logger(3,"LocalProxyHandler.getPos",\
                                      "get position:%s" %(value,))
        return (int(m.group(1)), int(m.group(2)) + 1, int(m.group(3)))

    def chkPos(self, value, cur_pos):
        """Check that a Content-Range starts exactly at *cur_pos*.

        Returns (next_pos, total_length); raises on a mismatched start.
        """
        m = self.getPos(value)
        if not m or m[0] != cur_pos:
            # Content-Range error, fatal error
            raise Exception('Content-Range error, fatal error, cur_pos = %d' %cur_pos)
        return (m[1], m[2])

    def processOtherParts(self, resp, cur_pos):
        """Validate a non-first ranged part; return the next position."""
        try:
            (next_pos, _) = self.chkPos(resp.headers["content-range"],
                                        cur_pos)
        except Exception, e:
            common.logger(2,"LocalProxyHandler.processOtherParts",\
                                      e)
            raise
        return next_pos

    def phraseRequestCount(self, isFirstPart, cur_pos, content_length, appendCount):#see how many request we are sending this time
        """Decide how many range jobs to enqueue this round.

        First round: exactly 1 (and record the starting position).
        Second round: 5 minus the current backlog of the global job
        queue.  Later rounds: 1 plus the number of out-of-order parts
        buffered last round, so the pipeline refills itself.
        """
        sendRequest = 0
        if isFirstPart:
            self.processCommonParts_cur_pos = cur_pos
            sendRequest = 1
        elif self.isSecPart:
            if self.processCommonParts_cur_pos > content_length:
                sendRequest = 0
            # NOTE(review): the qsize() result overwrites the 0 assigned
            # just above -- presumably intentional pipelining, verify.
            sendRequest = 5 - queue_give_job.qsize()
            self.isSecPart = False
        else:
            if self.processCommonParts_cur_pos > content_length:
                sendRequest = 0
            else:
                sendRequest = 1 + appendCount
        return sendRequest

    def assignJobs(self, sendRequest,part_length, path, queue_handin_work):
        """Enqueue *sendRequest* consecutive range jobs of *part_length*
        bytes each onto the global worker queue.

        Workers deliver results back on *queue_handin_work*.  Advances
        self.processCommonParts_cur_pos past the last byte requested.
        """
        rangeCount = 1
        job_done = self.processCommonParts_cur_pos
        range_range = (job_done , job_done + rangeCount - 1)
        while(sendRequest > 0):#put work
            range_range = (job_done + (rangeCount - 1) * part_length, job_done + rangeCount * part_length - 1)
            rangeCount += 1
            sendRequest -= 1
            # create request for GAppProxy

            params = {"method": "GET",\
                    "path": path,
                    "postdata": "",
                    "version": common.globalVars.VERSION,
                    "range": "on",
                      "headers": str(self.headers)}

            common.logger(3,"LocalProxyHandler.assignJobs",\
                            "give work range %d %d" %(range_range))
            queue_give_job.put((queue_handin_work, params, range_range))

        self.processCommonParts_cur_pos = range_range[1] + 1#add one because eg 1~9 | 10 = 9 + 1 we start from 10

    def processCommonParts(self, cur_pos, partDataStore, queue_handin_work):
        """Obtain the part that starts at *cur_pos*.

        Looks in the out-of-order buffer (*partDataStore*) first, then
        blocks on the workers' result queue, stashing any parts that
        arrive out of order.  Returns (resp, status, appendCount) where
        appendCount is the number of parts buffered while waiting;
        status is 206 on success, None when resp is None.
        """
        respFind = False
        appendCount = 0
        try:
            resp = partDataStore.pop(cur_pos)#search datastore
            respFind = True

            common.logger(3,"LocalProxyHandler.processCommonParts",\
                            "cur_pos = %d find in datastore" %cur_pos)
        except KeyError:
            pass #not found



        while(not respFind):
            (resp, range_get) = queue_handin_work.get()#search queue
            queue_handin_work.task_done()
            if int(range_get[0]) == cur_pos:
                common.logger(3,"LocalProxyHandler.processCommonParts",\
                             "cur_pos = %d one hit" %cur_pos)
                break
            else:
                common.logger(3,"LocalProxyHandler.processCommonParts",\
                            "cur_pos = %d append range %d ,%d append count %d" \
                      % (cur_pos,range_get[0], range_get[1], appendCount))
                partDataStore[int(range_get[0])]=resp
                appendCount += 1

            # parse resp
            # for status line
        if resp == None:
            return (None, None, appendCount)
        else:
            status = 206#checked
            return (resp, status, appendCount)

    def processFirstPart(self, resp, cur_pos):
        """Send the client the status line and headers for a ranged
        download, presented as one plain 200 response with the full
        Content-Length.

        Returns (next_pos, content_length, text_content), or None after
        sending a 588 error when the total length is 0.
        """
        # Range bookkeeping headers are ours, not the client's.
        headersDenied = ["content-range","accept-ranges","content-length"]

        (next_pos, content_length) = self.chkPos(resp.headers["content-range"],
                                                cur_pos)
        for headers in headersDenied:
            if resp.headers.has_key(headers):
                del resp.headers[headers]

        if resp.headers["content-type"].lower().find("text")== -1:
            text_content = False #not text
        else:
            text_content = True

        if content_length == 0:
            # no Content-Length, fatal error
            self.send_error(588,"Large Response fatal error")
            return None

        self.send_response(200, "OK")
        for header in resp.headers:
            self.send_header(header, resp.headers[header])
        for value in resp.setcookie:
            self.send_header("set-cookie",value)


        self.send_header("Content-Length", content_length)
        self.send_header("Accept-Ranges", "none")
        self.end_headers()
        return(next_pos, content_length, text_content)


    def processLargeResponse(self, path):
        """Download *path* in ranged parts via the worker threads and
        stream the parts to the client in order.

        Throttles to part_limit_per_minute bytes per minute, shrinks
        part_length after failures (down to 64k) and grows it by 1.3x
        after 4 consecutive successes; gives up after 10 failures.
        """
        cur_pos = 0
        part_length = 0x100000 # 1m initial, at least 64k  1048576 == 1m
        part_limit_per_minute = 0x1000000
        first_part = True
        content_length = 0
        text_content = True
        allowed_failed = 10
        continued_success_count = 0
        partsDataStore ={}
        queue_handin_work = Queue.Queue()
        self.isSecPart = True
        appendCount = 0
        startTime = time.time()
        currentQuotaLeft = part_limit_per_minute
        oneMinute = 60
        while allowed_failed > 0:
            next_pos = 0

            sendRequest = self.phraseRequestCount(first_part,cur_pos, content_length, appendCount)

            # Per-minute bandwidth accounting: spend quota up front,
            # reset it once a minute, sleep out the remainder if broke.
            currentQuotaLeft -= part_length * sendRequest
            common.logger(3,"LocalProxyHandler.processLargeResponse",\
                            'current Quota left %d' %currentQuotaLeft)
            currentTime = time.time()
            if currentTime - startTime > oneMinute:
                common.logger(3,"LocalProxyHandler.processLargeResponse",\
                            "cleaning up")
                currentQuotaLeft = part_limit_per_minute
                startTime = time.time()
            if currentQuotaLeft < 0:
                common.logger(3,"LocalProxyHandler.processCommonParts",\
                            "stop downloading")
                time.sleep(oneMinute - (currentTime - startTime)+1)

            self.assignJobs(sendRequest,part_length, path,queue_handin_work)

            (resp, status, appendCount) = self.processCommonParts(cur_pos, partsDataStore, queue_handin_work)
            #content length will not be used here first time
            if not resp or status != 206:# reduce part_length and try again
                if part_length > 65536:
                    part_length /= 2
                allowed_failed -= 1
                continued_success_count =0
                common.logger(2,"LocalProxyHandler.processCommonParts",\
                            "Exception happened reduce part_length and try again")
                continue

            else:# status == 206
                continued_success_count += 1
                if continued_success_count == 4:
                    # NOTE(review): part_length becomes a float here and
                    # is never reset to int -- confirm downstream math.
                    part_length *= 1.3

            # for headers
            if first_part:
                try:
                    (next_pos, content_length, text_content) = self.processFirstPart(resp, cur_pos)
                except Exception, e:
                    common.logger(2,"LocalProxyHandler.processfirstPart",\
                            e)
                    raise
                first_part = False
            else:
                try:
                    next_pos = self.processOtherParts(resp, cur_pos)
                except Exception, e:
                    common.logger(2,"LocalProxyHandler.processProcessOtherParts",\
                            e)
                    raise

            # for body
            try:
                data = self.getData(resp)
                if text_content:
                    if len(data) > 0:
                        self.wfile.write(zlib.decompress(data))
                else:
                    self.wfile.write(data)
            except socket.error, (err,_):
            # Connection/Webpage closed before proxy return
                #if e == 'Errno 10053'or e == 'Errno 10054':
               return
                #else:
                #    raise

            # next part?
            if next_pos == content_length:
                return
            cur_pos = next_pos

    def log_message(self, format, *args):#dont log here
        """Suppress BaseHTTPRequestHandler's per-request stderr logging."""
        return

class ThreadingHTTPServer(SocketServer.ThreadingMixIn, BaseHTTPServer.HTTPServer):
    """HTTP server that dispatches each request on its own thread."""
    pass

def daemonStart():
    """Create the shared job queue and launch the background worker
    threads that service large-response range downloads."""
    global queue_give_job
    queue_give_job = Queue.Queue()
    workers = [threadedprocess.ThreadedLargeResponseDaemonProcess(queue_give_job)
               for _ in xrange(DAEMON_THREAD_COUNT)]
    daemonThreads.extend(workers)
    for worker in daemonThreads:
        # Daemon threads die with the process; no join needed on exit.
        worker.setDaemon(True)
        worker.start()

def proxyStart(presenter):
    """Bring up the whole local proxy and serve until interrupted.

    Starts the connection manager, creates the global FetchCore, spawns
    the worker daemons, then runs a ThreadingHTTPServer on
    127.0.0.1:listen_port.  *presenter* is the UI object; it also
    replaces sys.stderr so errors surface in the UI, and receives the
    initial status dict via presenter.init().
    """
    global httpd, UIPresenter,fetch_core
    cm = ConnectionService.getCM()
    cm.start()
    UIPresenter = presenter
    sys.stderr = presenter
    globalVars.FETCH_CORE = fetchcore.FetchCore()
    fetch_core = globalVars.FETCH_CORE
    daemonStart()
    status = "OK"
    if fetch_server == "":
        # Misconfiguration is reported via the status dict, not raised.
        status = "Fail"
        #raise common.GAppProxyError("GAE part is not ready or your proxy.conf is not set.")
        #raise common.GAppProxyError("Invalid response from load balance server.")


    httpd = ThreadingHTTPServer(("127.0.0.1", listen_port), LocalProxyHandler)
    status = {
        "Status": status,
        "HTTPS Enabled": "%s" %(ssl_enabled and "YES" or "NO"),
        "Listen Addr": "127.0.0.1:%d" % listen_port,
        "Local Proxy":  local_proxy,
        "Fetch Server": fetch_server,
        # NOTE(review): hard-coded True -- these always report "OK".
        "Ipv4 State": "%s" % (True and "OK" or "No Connection"),
        "Ipv6 State": "%s" % (True and "OK" or "No Connection"),
        "Proxy": ""}
    UIPresenter.init(status)
    try:
        httpd.serve_forever()
    except KeyboardInterrupt:
        fetch_core.shutdown()
        common.logger(3,"proxy.proxyStart","shutdown")

class CertGenerator():
    """Generates per-host SSL certificates by shelling out to openssl.

    do_CONNECT uses gen_sslcert() to create a certificate for each https
    host on first use, signed with the bundled CA key under ssl2/.
    """

    def __init__(self):
        # Paths under the application directory; {CN} is substituted
        # with the target host at generation time.
        self.serverKeyDir = os.path.join(globalVars._dir, 'ssl2/_server.key')
        commonNameCsrDir = os.path.join(globalVars._dir, 'ssl2/csr/{CN}.csr')
        commonNameCertDir = os.path.join(globalVars._dir, 'ssl2/{CN}.cert')
        caKeyDir = os.path.join(globalVars._dir, 'ssl2/_ca.key')
        caCertDir = os.path.join(globalVars._dir, 'ssl2/_ca.cert')
        self.csr_cmd = "openssl req -new -key %s -out %s -days 3650" \
                       %(self.serverKeyDir,commonNameCsrDir)
        self.sign_cmd = "openssl x509 -req -in %s -out %s -signkey %s \
-CA %s -CAkey %s -CAcreateserial -days 3650" %(commonNameCsrDir,commonNameCertDir,self.serverKeyDir,caCertDir,caKeyDir)
        # Interactive answers fed to "openssl req" on stdin.
        self.csr_in = "CN\ntianya\nhaijiao\nLymtics@Enigma\nlocalproxy\n{CN}\nme@localhost.test\n\n\n\n"

    def gen_sslcert(self, commonname):#space in dir problem
        """Create a CSR for *commonname* and sign it with the CA key.

        Returns True when the signing process exits successfully.
        Known limitation (see trailing comment on the def line):
        splitting the command on spaces breaks if any path contains one.
        """
        csr_argv = self.csr_cmd.replace("{CN}",commonname).split(" ")
        sign_argv = self.sign_cmd.replace("{CN}",commonname).split(" ")
        csr_answers = self.csr_in.replace("{CN}",commonname)

        common.logger(3,"gen_sslcert",\
                                "Generating cert for %s" %(commonname,))
        # BUG FIX: Popen.communicate() returns (stdout, stderr) -- the
        # original unpacked them in reverse order and then tested
        # "stderr" (actually stdout) for emptiness.  openssl writes
        # progress to stderr even on success, so check the exit status
        # instead, which is the reliable success signal.
        csr_proc = subprocess.Popen(csr_argv, stdin=subprocess.PIPE,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE)
        outcsr, errcsr = csr_proc.communicate(csr_answers)

        sign_proc = subprocess.Popen(sign_argv,
                                     stdin=subprocess.PIPE,
                                     stdout=subprocess.PIPE,
                                     stderr=subprocess.PIPE)
        outsign, errsign = sign_proc.communicate("\n\n")
        return sign_proc.returncode == 0

def proxyShutdown():
    """Stop the HTTP server loop and shut down the fetch core."""
    httpd.shutdown()
    fetch_core.shutdown()

class fetchcoreUnitTest(unittest.TestCase):
    """Ad-hoc smoke test run via "python proxy.py"; exercises
    certificate generation through the module-level CertGenerator
    (requires openssl on PATH and the ssl2/ key files)."""

    def test_cert_generator(self):
        cg.gen_sslcert("www.firefox.com")


# Module-level certificate generator shared by all request handlers.
cg = CertGenerator()

if __name__ == "__main__":
    # Running the module directly executes the smoke tests above.
    unittest.main()



