#!/usr/local/bin/python
#-*- coding: utf-8 -*-
#############################################################################
#                                                                           #
#   File: local2.py                                                         #
#                                                                           #
#   Copyright (C) 2008-2011 lyricconch <lyricconch@gmail.com>               #
#                                                                           #
#   This file is part of WallProxyMod.                                      #
#                                                                           #
#   WallProxyMod is free software: you can redistribute it and/or modify    #
#   it under the terms of the GNU General Public License as                 #
#   published by the Free Software Foundation, either version 3 of the      #
#   License, or (at your option) any later version.                         #
#                                                                           #
#   WallProxyMod is distributed in the hope that it will be useful,         #
#   but WITHOUT ANY WARRANTY; without even the implied warranty of          #
#   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the           #
#   GNU General Public License for more details.                            #
#                                                                           #
#   You should have received a copy of the GNU General Public License       #
#   along with WallProxyMod.  If not, see <http://www.gnu.org/licenses/>.   #
#                                                                           #
#############################################################################
import urllib2, httplib, re, logging, time, SocketServer
import sys, os, socket, select, ssl, threading, traceback
from shared import trans
__author__ = "lyricconch@gmail.com"
__version__ = "1.0.13a"

def timeit(fn):
    def dummy(*args, **kw):
        t= time.time()
        ret= fn(*args, **kw)
        print "func=%s, time=%.3f\n"%(fn.func_name, time.time()-t),;
        return ret
    return dummy

def printex(fn):
    def dummy(*args, **kw):
        try: return fn(*args,**kw);
        except Exception,ex:
            print "func=%s, args=%s, kw=%s\n"%(fn.func_name, args, kw),
            print "extype=%s value=%s \n"%(type(ex), ex),
            raise
    return dummy

class ActionError(RuntimeError):
    """Base action exception.

    Records where it happened (first positional arg) plus any extra
    args/kwargs, and logs itself at the subclass's `level` as soon as
    the instance is created.
    """
    def __new__(cls, *args, **kw):
        inst = RuntimeError.__new__(cls, *args, **kw)
        if args:
            inst.where = args[0]
        else:
            inst.where = "Unknown"
        inst.args = args[1:]
        inst.kw = kw
        # every ActionError announces itself at its class's severity
        logger.log(cls.level, "%s"% inst)
        return inst
    def __str__(self):
        return "%s at %s"%(self.__class__.__name__, self.where)
class Crash(ActionError):
    """Fatal failure: print the current traceback and forcibly terminate
    this process.

    Logged at CRITICAL by ActionError.__new__; raised when a daemon
    thread dies under __debug__ -- there is no recovery path.
    """
    level= getattr(logging, "CRITICAL")
    def __init__(self, *args):
        traceback.print_exc()
        sys.stderr.flush()
        # BUG FIX: "kill -l %s" only *lists* signal names and leaves the
        # process running; "kill -9 %s" actually terminates it.
        # Windows keeps using taskkill /F.
        os.system(("taskkill /F /PID %s" if os.name=="nt" else "kill -9 %s")% os.getpid())
class Abort(ActionError):
    """Unrecoverable request failure; logged at ERROR."""
    level= logging.ERROR
class Retry(ActionError):
    """Transient failure worth retrying; logged at WARN."""
    level= logging.WARN
class Igorn(ActionError):
    """Ignorable condition; logged at INFO."""
    level= logging.INFO
class Trace(ActionError):
    """Diagnostic marker / context-manager base; logged at DEBUG."""
    level= logging.DEBUG

class nothing(Trace):
    """Inert context manager, used as the fallback in `lock or nothing()`.

    Enters and exits without doing anything and never swallows
    exceptions (__exit__ returns None).
    """
    def __new__(cls):
        # bypass ActionError.__new__ so constructing one neither logs
        # nor requires a `where` argument
        return RuntimeError.__new__(cls)
    def __init__(self, *ignored):
        pass
    def __enter__(self, *ignored):
        return None
    def __exit__(self, *exc_info):
        return None
class timeof(Trace):
    def __init__(self, id, t=0): self.i=id; self.t=time.clock if t else time.time 
    def __enter__(self): self.p=[self.t()]; return lambda:self.p.append(self.t())
    def __exit__(self, *exi): t=self.t(); print self.i+",".join(str(t-p) for p in self.p)
class showex(Trace):
    """Context manager that prints any exception raised inside its block.

    __exit__ returns None, so the exception still propagates after
    being printed.
    """
    def __exit__(self, exc_type, exc_value, tb):
        traceback.print_exception(exc_type, exc_value, tb)
class static(Trace):
    """Context manager that swaps a shared SCOPE dict against per-`where`
    saved state (INSTS), collecting first-seen keys into STATIC.

    NOTE(review): SCOPE defaults to None, so it must be rebound to a real
    dict elsewhere before this class is usable -- TODO confirm.
    """
    INSTS={}; STATIC={}; SCOPE=None; first=False;
    def __enter__(self):
        # first use of this `where`: allocate its saved-state dict
        if self.where not in self.INSTS: self.INSTS[self.where]={}; self.first=True; 
        self.INSTS[self.where].update(**self.SCOPE)            
        # repeat visit: wipe the live scope and hand back the snapshot.
        # NOTE(review): on the *first* visit nothing is returned, so
        # `with static(...) as s` yields None then -- verify intended.
        if not self.first: self.SCOPE.clear(); return self.INSTS[self.where];
    def __exit__(self, *exi):
        if not self.first: 
            # restore the live scope from the snapshot and swallow any
            # exception raised in the body (returns True)
            self.SCOPE.update(**self.INSTS[self.where]); 
            sys.exc_clear(); return True
        # first visit: keys that were not already in the snapshot are
        # recorded as static, then the snapshot absorbs the current scope
        self.STATIC.update((var,self.SCOPE[var]) for var in self.SCOPE if var not in self.INSTS[self.where])
        self.INSTS[self.where].update(**self.SCOPE);   
                
class PassErrorProcesser(urllib2.HTTPErrorProcessor):
    def http_response(self, request, response): return response
    def https_response(self, request, response): return response

class Reusable(object):
    """Mixin giving each subclass a per-argument instance cache
    (flyweight) and auto-starting the subclass's `*_daemon`
    classmethods the first time it is constructed.

    Fresh instances receive an `_init_lock` RLock; subclass __init__
    methods use it to run one-time setup exactly once.
    """
    _instances= None        # per-class: constructor-args -> instance
    _construct_lock= None   # per-class lock guarding instance creation
    _root_lock= threading.Lock()    # global lock guarding first-time class setup
    def __new__(cls, *args, **kw):
        if cls._instances is None:
            # BUG FIX: the original chained
            # `... and _root_lock.acquire() and cls._instances is None`
            # left _root_lock held forever whenever the post-acquire
            # re-check failed, deadlocking every later class setup.
            with Reusable._root_lock:
                if cls._instances is None:
                    cls._setup()
        # kw dicts are unhashable; fold them into a hashable key
        coargs= args if not kw else (args, tuple(kw.keys()), tuple(kw.values()))  
        try: inst= cls._instances[coargs]; # avoid acquiring lock in most common case
        except KeyError: # now double check required !
            with cls._construct_lock:
                inst= cls._instances.get(coargs) 
                if inst is None: 
                    inst= cls._instances[coargs]= object.__new__(cls);
                    inst._init_lock= threading.RLock()
            logger.debug("New %s instance Created: %s", cls.__name__, coargs)     
        return inst

    @classmethod
    def _setup(cls):
        """One-time class init: create caches and launch `*_daemon` threads.

        Caller must hold Reusable._root_lock.
        """
        cls._instances= {}
        if cls._construct_lock is None:
            cls._construct_lock= threading.Lock()
        for thread in filter(lambda x:x[-7:]=="_daemon", cls.__dict__):
            # BUG FIX: bind `thread` as a default argument -- the original
            # closure read the loop variable late, so a daemon thread
            # scheduled after the loop advanced ran the wrong classmethod.
            def daemon_wrapper(thread=thread):
                logger.info("Daemon thread %s.%s Started", cls.__name__, thread)
                try: getattr(cls, thread)();
                except: 
                    if __debug__:  raise Crash("Daemon thread %s.%s Terminated"% (cls.__name__, thread))
                    elif thread=="connpool_daemon": pass
                    elif thread=="dnscache_daemon": LocalHost.CACHE.clear()
                    elif thread=="gaetrans_daemon": GAEServer.DEFAULT_ACTION= GAEServer.PULL                   
            t= threading.Thread(target=daemon_wrapper)
            t.setDaemon(True)
            t.setName(thread[:-7])
            t.start()
    
class Adapter():
    """Common base for proxy adapters: a fail-safe slot plus a display
    alias used by __str__."""
    FAIL_SAFE= None
    alias= "Adapter"
    def __str__(self):
        return self.alias

class EndPoint(Adapter, Reusable): 
    """Terminal adapter that answers a request locally: either via a
    handler callback or with a canned static HTTP response.

    Instances are cached per argument tuple by Reusable.
    """
    KEEP_ALIVE= 115     # Keep-Alive hint (seconds) sent when the connection stays open
    MAX_THREAD= 16      # above this many live threads, force Connection: close
    SERVER_ID= "Local HTTPd/1.0"    # Server response-header value
    _conn_lock= threading.Lock()
    
    def __init__(self, *args):                
        # Reusable may return an already-initialized instance; then
        # _init_lock is None and the nothing() context is a no-op.
        with self._init_lock or nothing():
            if self._init_lock is None: return
            if callable(args[0]):
                # handler form: EndPoint(func, extra...) -> func(req, extra...)
                self.handle= lambda req: args[0](req, *args[1:])
                self.alias= args[0].func_name
                self.type= "handler"             
            else: 
                # static form: EndPoint(code[, msgs[, head[, body[, alias]]]])
                code,msgs,head,body,alias= args+("",)*(5-len(args))
                code=str(code); assert re.match("^[1-5]\d\d$", code);
                msgs=str(msgs); assert re.match(r"^[\t\f\x20-\x7E]*$", msgs);
                # normalize headers to "Name: value\r\n" lines
                head=trans.hdr2str(head) if isinstance(head, dict) else str(head).strip()
                if head: head="\r\n".join(h.strip() for h in head.split("\n") if ":" in h)+"\r\n";
                if hasattr(body, "read"): body=body.read();
                self.handle= lambda req, *args, **kw: (code, msgs, head, body)
                self.alias= alias or "Endp"
                self.type= "static"            
            self._init_lock= None   # one-time init done
        # NOTE(review): uses logging.debug while the rest of the module
        # logs via `logger` -- confirm which root logger is intended.
        logging.debug("Create EndPoint<%s> %s: %s", self.type, self.alias,
            ("callback=%r" if self.type=="handler" else "code=%s")% args[0])
            
    def process(self, request, data):
        """Invoke the handler and write a full HTTP response to `request`.

        Returns the status code.  Keep-alive is decided from the request
        HTTP version and (Proxy-)Connection headers, and disabled when
        the live thread count exceeds MAX_THREAD.
        """
        code, msgs, head, body= self.handle(request, data); result=[];
        if isinstance(head, dict): head= trans.hdr2str(head);
        result.append("%s %s %s"% (request.ver, code, msgs))  
        # NOTE(review): strftime formats *local* time but labels it GMT --
        # confirm whether time.gmtime() should be passed here.
        result.append("Date: %s"% time.strftime("%a, %d %b %Y %H:%M:%S GMT"))
        result.append("Server: %s"% self.SERVER_ID)
        connctrl= (request.headers.get("connection", "")+
            request.headers.get("proxy-connection", "")).lower()
        # HTTP/1.1 defaults to keep-alive unless "close"; 1.0 the reverse
        request.conn_alive= \
            request.ver=="HTTP/1.1" and "close" not in connctrl or\
            request.ver=="HTTP/1.0" and "keep-alive" in connctrl
        if threading.active_count()>self.MAX_THREAD: 
            request.conn_alive= False        
        value= "keep-alive" if request.conn_alive else "close"
        result.append("Connection: %s"% value)
        if request.conn_alive and self.KEEP_ALIVE:
            # request.connection.settimeout(self.KEEP_ALIVE)
            result.append("Keep-Alive: %s"% self.KEEP_ALIVE)
        result.append("Content-Length: %s"% len(body))
        # `head` either ends with \r\n or is empty, so the final join
        # yields the blank line separating headers from the body
        result.append(head)
        result.append(body)
        result= "\r\n".join(result)
        request.write(result, 0)
        return code
           
class DirectProxy(Adapter, Reusable):
    """Adapter that fetches through a plain HTTP proxy at `address`,
    optionally keeping a pool of pre-opened sockets that a daemon
    thread replenishes in the background.
    """
    RETRY_CODES= (10054,)   # winsock WSAECONNRESET
    FETCH_TIMEOUT= 10.0     # urlfetch timeout, seconds
    POOL_FBACK= 2.2, 0.4    # pool-size feedback: grow step, shrink step
    POOL_LIMIT= 1, 6        # min / max pool size
    PING_BASE= 0            # added to the `ping` threshold before enabling pooling
    __pool_insts= []        # instances that opted into connection pooling
    address= None
    
    @classmethod
    def connpool_daemon(self):
        """Background thread: every 6s adjust each pooled instance's
        target size by feedback and top the pool up with fresh sockets;
        every 10th pass log peak/allocation statistics."""
        sequeue= -1; insts= self.__pool_insts
        while True:  
            sequeue+=1; start=time.clock()
            if sequeue%10==0: peak={}; alloc={}; cost=0;   
            for inst in insts:
                pool= inst._pool; socks= [];            
                # shrink target when the pool is full, grow when empty
                inst._pool_size+= -self.POOL_FBACK[1] if len(pool)>=inst._pool_size\
                    else self.POOL_FBACK[0] if len(pool)==0 else 0;
                inst._pool_size= min(max(inst._pool_size, inst.POOL_LIMIT[0]), inst.POOL_LIMIT[1])                
                if inst._pool_size<len(pool)+1: continue
                try: socks=[inst._connect() for _ in xrange(int(inst._pool_size)-len(pool))];
                except Exception,ex: logger.warn("Conn Pool: %s(%r)", ex, ex);
                with inst._pool_lock: 
                    # prepend the new sockets so older ones are popped first
                    if socks: pool.reverse(); pool.extend(socks); pool.reverse();
                peak[inst]= max(peak.get(inst,0), len(pool))
                alloc[inst]= alloc.get(inst,0)+ len(socks)                
            cost+= time.clock()-start; time.sleep(6);
            if sequeue%10==0:
                if logger.isEnabledFor(logging.DEBUG):
                    print "pool: %s\n"% "\t".join("%s,%d/%d: "%(inst.alias,len(inst._pool),inst._pool_size)+
                        ",".join(str(sock.getsockname()[1]) for sock in inst._pool) for inst in insts),
                    # print "\n".join(", ".join("%s="% sock.getsockname()[1]+ "%s%s%s"%
                    #    tuple(map(len, select.select([sock], [sock], [sock], 0))) 
                    #    for sock in inst._pool) for inst in self.__pool_insts)        
                stat= " ".join("%s/%s"%(peak.get(inst,len(inst._pool)),alloc.get(inst,0)) for inst in insts)                    
                logger.debug("Conn Pool: insts/tcost %d/%.3f, peak/alloc %s", len(insts), cost, stat)
                
    def __init__(self, address, ping=50, alias=""):
        """address: "host:port" string or (host, port) tuple.
        ping: latency threshold (ms); pooling is enabled when the
        measured average connect time exceeds PING_BASE+ping.  A
        negative ping skips probing (measured time ~0, so pooling is
        then enabled whenever 0 >= PING_BASE+ping)."""
        with self._init_lock or nothing():
            if self._init_lock is None: return
            if isinstance(address,str):
                addr,port= address.rsplit(":",1)
                address= addr, int(port)
            if not isinstance(address,tuple):
                raise Abort("DirectProxy Address Error")
            self.alias= alias or "%s:%s"%address
            self.host,self.port= address                
            addrinfo= socket.getaddrinfo(*address)[0]  
            self.sockinfo= addrinfo[:3]     # (family, socktype, proto)
            self.address= addrinfo[-1]      # resolved sockaddr
            self._pool= None
            # probe: average wall time of 5 connects, in milliseconds
            t0= time.clock()
            for _ in xrange(5 if ping>=0 else 0):
                self.connect().shutdown(socket.SHUT_RDWR)
            self._ping= (time.clock()-t0)/5.0*1000
            if self._ping>=self.PING_BASE+ping: 
                logger.info("Enable Connection pool for %s, size=%s-%s, fback=%s-%s", 
                    self.alias, *(self.POOL_LIMIT+self.POOL_FBACK))
                self._pool= []        
                self._pool_size= 0  
                self._pool_lock= threading.Lock()
                self.__pool_insts.append(self)
            self._opener= urllib2.build_opener(
                urllib2.HTTPHandler(),
                PassErrorProcesser(),
                urllib2.ProxyHandler({'http':"%s:%s"%self.address}))
            self._init_lock= None
        logging.debug("Create DirectProxy %s: address=%s ping=%.3fms",
            self.alias, "%s:%s"%self.address, self._ping)  
        
    def connect(self):
        """Return a socket to the proxy: a pooled one when available and
        still alive, otherwise a fresh connection.  A busy pool lock or
        a dead pooled socket nudges the pool-size feedback."""
        if self._pool is not None:
            if self._pool_lock.acquire(0):
                sock= self._pool.pop() if self._pool else None
                self._pool_lock.release()
                if sock is not None:              
                    if self._socktest(sock): return sock 
                    else: self._pool_size-=1;
            else: self._pool_size+=1;
        return self._connect()
    
    @classmethod
    def _socktest(self, sock):
        """Return True when `sock` still looks usable: writable, not
        readable (no pending data/EOF), and no error condition."""
        try: test=[sock]; ra, wa, ea= select.select(test, test, test, 0);
        except: return False
        # NOTE(review): unconditional debug print on every pooled-socket
        # checkout (the trailing #""" suggests it was meant to be
        # toggled off) -- confirm it should be disabled in production.
        print "test: sock=%s:%s"%sock.getsockname()+\
            " peer=%s:%s"% sock.getpeername()+\
            " so_error=%s,"% sock.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR)+\
            " select=%s%s%s"%(len(ra), len(wa), len(ea)); #"""
        return not ra and wa and not ea  
    
    def _connect(self):
        """Open a brand-new socket to the proxy address."""
        sock= socket.socket(*self.sockinfo)
        sock.connect(self.address)            
        return sock
    
    def urlfetch(self, method="GET", path="/", header="", content="", *args):
        """Fetch `path` through the proxy; returns
        (status code, reason, header string, body)."""
        if not isinstance(header,dict):
            header= trans.str2hdr(str(header))
        req= urllib2.Request(path,content,header)
        req.get_method= lambda:method
        ret= self._opener.open(req,None,self.FETCH_TIMEOUT)
        return ret.code, ret.msg, str(ret.headers), ret.read()

class LocalHost(DirectProxy):
    """Singleton adapter for direct (proxy-less) connections, with a DNS
    cache and pre-opened sockets for frequently-hit hosts.

    CACHE maps "host[:port]" -> [sockaddr, hitcount, sock, sock, ...];
    a failed lookup is cached as [None, -1].  EXPIRE holds generations
    of netlocs that dnscache_daemon ages out.
    """
    DNS_TIMEOUT= 180    # expiry in generations (one per ~60s sweep); 0=never expire, <0 disables caching
    PSOCK_GATE= 3       # hits per sweep before sockets are pre-opened for a host
    PSOCK_CONN= 2       # number of sockets kept warm per hot host
    CACHE= {}; EXPIRE= [[]];    
    _CACHE_LOCK= threading.Lock()
    
    @classmethod
    def dnscache_daemon(self):
        """Background thread: every 6s reset hit counters and pre-open
        sockets for hot hosts; every 10th pass rotate the expiry
        generations, drop expired entries, prune dead pre-opened
        sockets, and log statistics."""
        cache, expire= self.CACHE, self.EXPIRE
        records=seq=expired= connects=hosts=expires= 0
        while True:              
            seq, timeout= seq+1, self.DNS_TIMEOUT 
            if timeout<0: cache.clear();
            elif timeout>0 and seq%10==0:
                expired=expires=errors=hosts= 0
                expire.insert(0, [])    # start a new generation
                if timeout<=len(expire):
                    # oldest generation has aged out: forget its hosts
                    for host in expire.pop():
                        try: del cache[host]; expired+=1;
                        except: pass
                record0, records= records, len(cache.keys())
                connect0, connects= connects, 0
                with self._CACHE_LOCK:
                    for cached in self.CACHE.values():
                        if not cached[0]: errors+=1; 
                        if len(cached)<=2: continue
                        # a readable idle socket means EOF/data -> dead
                        socks= select.select(cached[2:], [], [], 0)[0]
                        expires+= len(socks)
                        for sock in socks: cached.remove(sock);
                        if len(cached)>2: hosts+=1; connects+=len(cached)-2;
                logger.debug("DNS Cache: host/null %d-%d -/+ %d/%d, sock/link %d-%d -/+ %d/%d",
                    records, errors, expired, records+expired-record0,
                    connects, hosts, expired, connects+expired-connect0)
                if logger.isEnabledFor(logging.DEBUG):   
                    output= " ".join("%s%s%s"%(k if len(v)>2 or not v[0] else "", v[1], 
                        len(v)-2 if v[0] else "-") for k,v in cache.items() if v[1] or len(v)>2)
                    if output: print "sock: %s\n"% output,
            # pre-open sockets for hosts hit often since the last sweep
            for cached in cache.values():
                if cached[1]>self.PSOCK_GATE and len(cached)-2<self.PSOCK_CONN and cached[0]:
                    socks=[]; address=cached[0];
                    try: socks=[self._connect(address) for _ in xrange(self.PSOCK_CONN)];
                    except: logger.warn("DNS Cache: connect %s:%s failed"% address)
                    with self._CACHE_LOCK: cached.extend(socks);
                cached[1]= 0    # reset the per-sweep hit counter
            time.sleep(6)
               
    def __init__(self):
        # opener goes direct (empty ProxyHandler disables env proxies)
        with self._init_lock or nothing():
            if self._init_lock is None: return
            self.alias=self.address= "localhost"       
            self._opener= urllib2.build_opener(
                urllib2.HTTPHandler(),
                urllib2.ProxyHandler({}))
            self._init_lock= None
        logging.debug("Create LocalHost Singleton: DnsTimeout=%s", self.DNS_TIMEOUT)
    
    @classmethod
    def connect(self, netloc, xchsock=None):
        """Return a connected socket for "host[:port]" `netloc`.

        xchsock=(netloc, sock) stores a still-usable socket back into
        the cache for reuse; xchsock=False forces re-resolution.
        Raises Retry on resolution/connect failure (caching the failure)
        and Abort when a previously-failed host fails again.
        """
        if xchsock:
            logger.debug("Connect: store: %s", xchsock[0])
            #self._socktest(xchsock[1])
            #with self._CACHE_LOCK: 
            # NOTE(review): append happens without _CACHE_LOCK (the
            # acquisition above is commented out) -- confirm intended.
            self.CACHE[xchsock[0]].append(xchsock[1]);
        if not netloc: return
        cached= self.CACHE.get(netloc)        
        if cached is None or cached[0] is None or xchsock is False:
            # cache miss (or cached failure / forced refresh): resolve
            address= netloc.rsplit(":", 1)
            port= address[1] if len(address)==2 else 80
            try: address= socket.getaddrinfo(
                address[0], port, socket.AF_INET, socket.SOCK_STREAM)[0][-1]
            except socket.gaierror,ex: 
                if cached: raise Abort("Connect: failagain: %s"% netloc)      
                self.CACHE[netloc]= [None,-1]   # remember the failure
                raise Retry("Connect: gaifail: %s %s"% (netloc, ex))    
            else: logger.debug("Connect: callgai: %s %s", netloc, address)
            if self.DNS_TIMEOUT>=0: self.CACHE[netloc]= [address,0];
            if self.DNS_TIMEOUT>0: self.EXPIRE[0].append(netloc);            
        elif len(cached)>2 and self._CACHE_LOCK.acquire(0):
            # pre-opened socket available: take it if still alive
            sock= cached.pop() if len(cached)>2 else None
            self._CACHE_LOCK.release()
            logger.debug("Connect: hitsock:  netloc=%s, addr=%s", netloc, cached[0])
            if self._socktest(sock): return sock
            address=cached[0]; logger.warn("Connect: bad socket");
        else: address=cached[0]; cached[1]+=1; #logger.debug("Connect: hitdns: %s", netloc);
        try: return self._connect(address)
        except socket.error:
            self.CACHE[netloc]= [None,-1]
            raise Retry("Connect: connfail: %s %s"% (netloc, address))        
    
    @classmethod 
    def _connect(self, address):
        """Open a fresh TCP socket to the resolved `address`."""
        sock= socket.socket()
        sock.connect(address)
        return sock
    
class GAEServer(DirectProxy,Reusable):
    """Adapter that tunnels batched requests through a GAE relay.

    Requests are packed (trans.packit) into one POST; responses are
    unpacked in place.  `action` selects batching: PUSH buffers until
    the pipeline/size limit, PULL flushes the buffer along with the new
    request, IDLE flushes whatever is pending.
    """
    POOL_FBACK= 2.4, 2.0 
    POOL_LIMIT= 1, 3
    IDLE,PULL,PUSH= 1, 2, 3     # _fetch action codes
    GOOGLE_PROXY= "www.google.cn:80"    # fallback wrap proxy
    DEFAULT_ACTION= 3
    CONTENT_LIMIT= 1048576*8    # max bytes buffered per batch
    IDLE_INTERVAL= 0.5          # seconds between idle flushes
    FETCH_PIPELINE= 8           # max requests per batch
    TRANS_MAXRETRY= 2
    REQUEST_HEADER= {
        'Content-Type': "application/octet-stream",
        'User-Agent': "Mozilla/5.0 (WinNT 6.1; rv:2.0) Gecko/20110101 Firefox/20101212"
    }    
    @classmethod
    def gaeidled_daemon(self):
        """Background thread: periodically flush each initialized
        server's pending buffer with an IDLE fetch."""
        while True:
            time.sleep(self.IDLE_INTERVAL)
            for server in self._instances.values():
                # skip servers still inside one-time init
                try: server._init_lock or server._fetch(1, None);
                except Exception,ex:
                    logger.error("GAE Idled: %s(%r)", ex, ex)
    
    def __init__(self, url, key=None, wrap=None, alias=None):
        """url: the GAE relay endpoint; key: packing key; wrap: optional
        upstream proxy "host:port" (falls back to GOOGLE_PROXY when the
        direct connection fails)."""
        with self._init_lock or nothing():
            if self._init_lock is None: return
            if not re.match(r"^https?://[^/\s]+/\S*$", url): 
                raise Abort("bad GAEServer url")
            self.secure= url[:6]=="https:"
            host= url.split("/",3)[2]     
            if re.match("^.*:\d+$", host):
                host, port= host.rsplit(":",1)
            else: port= 443 if self.secure else 80; 
            # connect directly to the GAE host; on failure fall back to
            # tunnelling through the Google proxy
            try: DirectProxy.__init__(self, wrap or (host,port))
            except: wrap= self.GOOGLE_PROXY; DirectProxy.__init__(self, wrap);
            self.url, self.key, self.wrap= url, key, wrap
            self.alias= alias if alias else "GAE#%d"%len(self._instances.values())        
            self.default= GAEServer.PUSH                
            self._opener= self._opener if wrap else \
                urllib2.build_opener(urllib2.ProxyHandler({}))
            self._netloc= "%s:%s"% (host, port)   
            # absolute URI when going through a proxy, origin-form otherwise
            req= self.url if wrap else "/"+self.url.split("/",3)[-1]
            header= trans.hdr2str(self.REQUEST_HEADER) if isinstance(
                self.REQUEST_HEADER, dict) else self.REQUEST_HEADER
            header+= "Host: %s\r\n"% self.host
            # raw request templates; _post leaves Content-Length as %s
            self._post= "POST %s HTTP/1.0\r\n%sContent-Length: %%s\r\n\r\n"% (req, header)
            self._get= "GET %s HTTP/1.0\r\n%s\r\n"% (req, header)
            self._buffer= None
            self._buffer_lock= threading.Lock()
            self._buf_space= self.CONTENT_LIMIT 
            self._init_lock= None
        logging.debug("Create GAEServer %s: url=%s wrap=%s", self.alias, self.url, self.wrap)

    def getcached(self, prefix, sequeue):
        """Fetch a cached chunk from the relay, retrying transient
        httplib errors up to TRANS_MAXRETRY times; other exceptions
        propagate immediately."""
        request= urllib2.Request(
            self.url+"?prefix=%s&sequeue=%d"%(prefix,sequeue),
            None, self.REQUEST_HEADER)
        for _ in xrange(self.TRANS_MAXRETRY):
            try: return self._opener.open(request).read()
            except httplib.HTTPException,ex: pass
        else: logger.error("too many retrys in getcached: %s",ex); raise ex

    def urlfetch(self, method="GET", path="/", headers="", content="", action=0):
        """Queue one request, block until its batch completes, and
        return the (mutated-in-place) [method, path, headers, content]
        result fields."""
        if not isinstance(headers,str): headers=trans.hdr2str(headers)
        struct= [method, path, headers, content, threading.Event()]
        self._fetch(action or self.DEFAULT_ACTION, struct).wait()
        return struct[:4]

    def _fetch(self, action, entity=None):
        """Batch `entity` per `action`; when a batch is due, transport it
        and signal every entity's Event.  Returns the caller's Event (or
        None for idle flushes)."""
        dispatch= event= None
        if entity is not None:
            space, event= sum(map(len, entity[:4])), entity[4]            
        elif self._buffer is None: return
        with self._buffer_lock:
            if self._buffer is None:
                dispatch= [entity]
                self._buffer= []
            elif action==3:     # PUSH: buffer until size/pipeline limit
                if self._buf_space<=space or \
                len(self._buffer)==GAEServer.FETCH_PIPELINE:
                    dispatch= self._buffer
                    self._buffer= [entity]
                    self._buf_space= GAEServer.CONTENT_LIMIT-space
                else: self._buffer.append(entity); self._buf_space-=space;
            elif action==2:     # PULL: flush pending along with this one
                if len(self._buffer)>=1 and self._buf_space>=space:
                    dispatch=self._buffer
                    dispatch.append(entity)
                    self._buffer=[]
                    self._buf_space= self.CONTENT_LIMIT
                else: dispatch= [entity];
            elif action==1:     # IDLE: flush whatever is pending
                if len(self._buffer)>=1:
                    dispatch= self._buffer
                    self._buffer= []
                    self._buf_space= self.CONTENT_LIMIT
                else: self._buffer= None;            
            else: pass;
        if dispatch is None: return event
        # transport the batch: raw POST over a (possibly pooled) socket
        id= threading.currentThread().name; recv=ex=None; stop=0;  
        begin= time.clock()
        session= os.urandom(32) # TODO: is Crypto disabled?
        buffer= trans.strio.StringIO()
        trans.packit(buffer, dispatch, self.key, session)
        send= buffer.getvalue()
        for fails in xrange(self.TRANS_MAXRETRY):   
            try:
                start= time.clock() 
                # after a failure, bypass the pool with a fresh socket
                sock= self._connect() if fails else self.connect()                         
                sock.sendall(self._post%len(send))
                sock.sendall(send)
                buffer= sock.makefile("rb")
                line= buffer.readline()
                if "HTTP/1.0 200" not in line: 
                    raise Abort("Trans:"+line.strip());  
                # skim headers, remembering Content-Length for the log
                while line and line!="\r\n": 
                    line= buffer.readline()
                    if recv is None and line[:15]=="Content-Length:": 
                        recv= line.split(":")[1].strip()
                if not line: raise EOFError()                 
                stop= time.clock() 
                # unpack responses into the dispatched structs in place
                trans.unpack(buffer, dispatch, self.key, None)
                ex= None; break
            except Exception,ex: continue            
        # wake every waiter, success or not (pops each struct's Event)
        for space in dispatch: space.pop().set();           
        finish= time.clock()
        logger.debug("tran: %s pipe/fail=%d/%d, fetch/tport=%.3f/%.3f, send/recv=%s/%s",
            id, len(dispatch), fails, finish-begin, stop-start, len(send), recv);           
        if not ex: return event 
        logger.error("Transfer %s raise %s", id, ex); raise ex           
        
    # TODO: trans.getlength, Event is better than Condition ?!        
    def _fetch2(self, action, entity):
        """Experimental Condition-based variant of _fetch.

        NOTE(review): appears unused/incomplete (trans.getlength calls
        stubbed to 0, logger.warn never invoked, mixes _buf_space and
        _pkg_space) -- confirm before relying on it.
        """
        if entity is not None: space=sum(map(len,entity));
        elif self._buffer is None: return
        with self._buffer_lock:
            if self._buffer is None:
                self._buffer=[]; dispatch,cond= [entity],None;
            elif entity is None:
                if len(self._buffer)>=1:                                
                    dispatch,cond= self._buffer,self._pkg_cond
                    self._pkg_space= self.CONTENT_LIMIT
                    self._pkg_cond= threading.Condition()
                    self._buffer= []                    
                else: self._buffer= None; return
            elif action==self.PUSH:
                if len(self._buffer)>=self.FETCH_PIPELINE or self._pkg_space<=space:
                    dispatch,cond= self._buffer,self._pkg_cond
                    self._pkg_space= self.CONTENT_LIMIT-space
                    self._pkg_cond= threading.Condition()
                    self._buffer= [entity]                    
                else: self._buffer.append(entity); self._buf_space-=space;
            elif action==self.PULL:
                if len(self._buffer)>=1 and self._buf_space>=space:                
                    dispatch,cond= self._buffer+[entity],self._pkg_cond
                    self._pkg_space= self.CONTENT_LIMIT
                    self._pkg_cond= threading.Condition()                    
                    self._buffer=[]
                else: dispatch,cond= [entity],None;        
        with cond or nothing():
            if dispatch is not None:
                for _ in xrange(self.TRANS_MAXRETRY):
                    try: 
                        session= os.urandom(32)
                        send= 0#trans.getlength(dispatch)
                        sock= self.connect()
                        buffer= sock.makefile("wb")
                        buffer.write(self._post%send)
                        buffer.flush()
                        trans.packit(buffer,dispatch,self.key,session)
                        buffer= sock.makefile("rb")
                        assert "HTTP/1.0 200" in buffer.readline()
                        # reuses `space` (a number, truthy) as the line variable
                        while space and space!="\r\n": space=buffer.readline();
                        trans.unpack(buffer,dispatch,self.key,session)
                        recv= 0#trans.getlength(dispatch)
                        cond.notifyAll(); break
                    except Exception,ex: 
                        logger.warn
                else: cond.notifyAll(); raise ex;    
                logger.debug("", send, recv)
            if entity: cond.wait(); return entity
            
    def _connect(self):
        """Open a socket to the relay; when wrapped, tunnel via CONNECT
        and SSL-wrap for https endpoints.

        NOTE(review): self._header is never assigned anywhere in this
        class (only _post/_get exist), so the secure+wrap path would
        raise AttributeError -- confirm intended template.
        """
        sock= DirectProxy._connect(self)
        if not self.secure: return sock
        if self.wrap: 
            sock.sendall("CONNECT %s HTTP/1.0\r\n%s\r\n"%(self._netloc,self._header))
            line= sock.recv(8192)
            if not re.match(r"^HTTP/1\.\d 200.*\r\n\r\n$", line): raise Abort("Tu");
        return ssl.wrap_socket(sock)

class Matcher():
    HASH_OPTIMIZE= r'[a-z0-9%*]{2,}'
    HASH_BADKEY= "http,https,www,com,net,org,html,htm,jpg,png,css,js,xml"
    CACHE_LIMIT= 1000, 200 # MAXENTIYS_YOUNG, PURGEWHEN_OLDER
    CACHE_FBACK= 3.6, 1.6  # FEEDBACK_REMAIN, FEEDBACK_EXPIRE
    young= older= SEQUEUE= 0
    YOUNG, OLDER= {}, {}
    gate= 3000
    _cache_lock= threading.Lock()
     
    @classmethod
    def PATH_RULE(self, path):
        if "/" not in path: return path.rsplit(":")[0]
        if "?" in path: return path.split("?")[0]
        path= "/".join(path.split("/",4)[2:-1])
        part= 4 if re.search(r"\.\w\w[/:]|^[\d.]+[/:]", path) else 3
        return ".".join(path.rsplit(".",4)[-part:])

    def __init__(self, alias="matcher", file=None, hashkey=None,
            cache=True, query=False, case=False):
        if cache: # start at 1
            Matcher.SEQUEUE+= 1
            self.id= Matcher.SEQUEUE 
        else: self.id=0;
        self.alias= alias
        self.hashkey= hashkey or self.HASH_OPTIMIZE
        self.flags= "".join(f if v else "" for v,f in 
            zip((not query, not case, cache),"pic"))
        self.BLACK={"*":[]}; self.BP=[]; self.BH=[]; self.BS=[];
        self.WHITE={"*":[]}; self.WP=[]; self.WH=[]; self.WS=[];
        self.EXIST={}; self.hashed=0;
        self._prepare= ((lambda p:p) if query and case 
            else (lambda p:p.lower()) if query and not case 
            else (lambda p:p.split("?",1)[0]) if not query and case 
            else (lambda p:p.split("?",1)[0].lower()))
        self._part= r"^\*?%s\*?$"% re.sub(r"(\[.*)\*(.*\])",r"\1\2",self.hashkey)
        self._update_lock= threading.Lock()
        if isinstance(file,tuple):
            self._file,self._skip= file
        else: self._file,self._skip= file,[]
        if file and os.path.isfile(file):
            data= open(file, "r").read()
            for seg in self._skip:
                seg= r"! BEGINDATA %s\r\n.*! ENDDATA %s\r\n"%(seg,seg)
                data= re.sub(seg, "", data, re.S)
            open(file, "w").write("! Updated\n")
            self.init(data, "Default")
        elif file: open(file, "a").write("! Created\n")
    def __str__(self): 
        return self.alias

    def _compile(self, rawrule):
        rule= rawrule.strip(); sub=re.sub;
        if rule[:2]=="!!": self.EXIST[rule[2:]]="#"; return "","",rule,type+"!"
        elif not rule or "##" in rule or rule[0]=="!": return "","",rule,"#"
        elif rule in self.EXIST: self.EXIST[rule]+="!"; type="-";
        elif rule[:2]!="@@": type="b"; 
        else: rule=rule[2:]; type="w"; 
        if not ("\\" in rule or rule[0]=="/" and rule[-1]=="/"): type+="s"
        else: rule= sub(r"^/(.*)/$", r"\1", rule); type+="r";
        regexp=rule; hashes=rule.lower(); 
        if len(rule)<=4 or type[0] not in "wb": pass
        elif type[1]=="r": # regexp rule
            if re.search(r"[([{]", hashes):
                hashes= sub(r'\[[^]]+\]',   r'.',   hashes)    # [...] => .
                hashes= sub(r'\{[^{}]*\}',          r'*',   hashes)  # {a,b} => *
                hashes= sub(r'\((\?:)?([^?][^()]*)\)(?![*+?])',   r'\2', hashes) # (...) => ...
                hashes= sub(r'\(([^()]+)\)[?*+]*',  r'.*',  hashes)  # (?=...) =>
                hashes= sub(r'\\[1-9]',             r'.*',  hashes)  # \3 => .*
            hashes= sub(r'\\([wdSWD]|u....|x..)',   r'.',   hashes)  # \w \x.. => .
            hashes= sub(r'\\?[^\\][*?+]+',  r'.',   hashes)  # a* .? => .
            hashes= sub(r'(?<!\\)\.',       r'*',   hashes)  # . => *
            hashes= sub(r'\\(\W)',          r'\1',  hashes)  # \. => .
            hashes= sub(r'\\\w',            r'^',   hashes)  # \b \s => ^
            try: regexp= re.compile(regexp.lower() if "i" in self.flags else regexp);
            except re.error: 
                logger.warn("cannot compile %s as %s",rule,regexp); 
                return hashes,regexp,rule,"?"  
        elif type[1]=="s": # simple rule
            if re.search(r"(?=[_\W]*[*^])[_\W]{2,}",    regexp):
                regexp= sub(r'(?=[|^]*\*)[|*^]+',   r'*',   regexp) 
                regexp= sub(r"(?=[\W_]*\^)[\W_^]+", r"^",   regexp)
                regexp= sub(r"^\*+|\*+$",           r"",    regexp) 
            regexp= sub(r'(\W)',    r'\\\1',    regexp)  # / . => \/ \.
            regexp= sub(r'\\\*',    r'.*',      regexp)  # * => .*
            regexp= sub(r'\\\^',    r'[\W_]+',  regexp)  # ^ => \W+
            if "\\|" in regexp:
                regexp= sub(r'^\\\|\\\|',       r'^https?:/+[^/]*[./]', regexp) # ||...
                regexp= sub(r'^\\\|',           r'^',       regexp)  # |... => ^...
                regexp= sub(r'^(.*)\\\|\\\|$',  r"^[^?]*\1",regexp)  # ...||
                regexp= sub(r'\\\|$',           r'$',       regexp)  # ...| => ...$
        if "r" in type: logger.debug("rule=%s, hashes=%s, regexp=%s", rule, hashes, regexp);
        return hashes,regexp,rule,type
    
    def append(self, rawrule):
        hashes,regexp,rule,type= self._compile(rawrule)
        if type[0] not in "wb": return type
        TARGET,PART,HASH,SKIP= (self.BLACK,self.BP,self.BH,self.BS)\
            if type[0]=="b" else (self.WHITE,self.WP,self.WH,self.WS);
        keys= [key for key in re.findall(self.hashkey, hashes) if "*" not in key]
        index= [k for k in PART if k in hashes]+[k for k in keys if k in HASH]
        if index: logger.warn("rule %s skipped (duplicate of %s)",
            rule, str(index).strip("[]")); return "-"
        key= keys[0] if keys else "*"; skip=[];     
        if key=="*": logger.warn("rule %s causes slow (compile as %s)",
            rawrule, "r'%s'"%regexp if type[1]=="s" else "s'%s'"%hashes)
        if len(keys) > 1:
            keys.sort(lambda x,y:len(y)-len(x))
            normal= filter(lambda x:x not in self.HASH_BADKEY, keys)
            unique= filter(lambda x:x not in TARGET, normal)  
            key= (unique or normal or keys)[0]
        elif not isinstance(regexp,str): pass;
        elif key=="*" and re.match(self._part, hashes):
            logger.debug("rule %s is %sPART checker", rawrule, type[0])
            index=hashes.strip("*"); PART.append(index);
            for hash,entity in TARGET.iteritems(): 
                if index not in hash: continue
                if not isinstance(entity,list): entity=[entity];
                skip.extend(entity); del TARGET[hash];
        elif key!="*" and re.match(r"^\^?%s\^?$"%key, hashes):            
            logger.debug("rule %s is %sHASH checker", rawrule, type[0])
            HASH.append(key);
            if key in TARGET: 
                skip= TARGET[key]; del TARGET[key];
                if not isinstance(skip,list): skip=[skip];
        if skip: SKIP.extend(skip);
        for index in skip: logger.info("rule %s removed (replace by '%s')", index, rule)
        if key in TARGET:
            hashed= TARGET[key]
            if isinstance(hashed, list):
                index= len(hashed)
                hashed.append(regexp)
            else: TARGET[key]=[hashed,regexp]; index=1;
        else: TARGET[key]=regexp; index=0;
        self.EXIST[rawrule]= "%s %s %s"%(key,type,index)
        self.hashed+=1; 
        return hashes,regexp,rule,type,key,index

    # TODO: not test yet
    def remove(self, rawrule):
        _,_,rule,type= self._compile(rawrule)
        if type != "-": return
        elif rule[:2]=="@@" and rule in self.WS: return 
        elif rule[:2]!="@@" and rule in self.BS: return
        try: key,type,index= re.match(
            r"^(\S+) ([bw]) (\d+)",self.EXIST[rawrule]).groups()
        except: return
        target= self.BLACK if type=="b" else self.WHITE
        if index != "0":
            hashed= target[key]
            hashed[int(index)]= None
            for index in xrange(len(hashed)-1, -1, -1):
                if hashed[index]!=None: break
                del hashed[index]
            else: del target[key];
            if len(hashed)==1: target[key]=hashed[0];            
        elif not isinstance(target[key],list): del target[key];            
        else: target[key][0]= None; 
        del self.EXIST[rawrule]; self.hashed-=1;
        return key       
    
    def match(self, path):
        if TIMELINE%100==20 and logger.isEnabledFor(logging.DEBUG):
            print self.state(0, 0) 
        path= self._prepare(path)
        if self.id==0: return self._match(path) 
        return self._cached(self, path)
   
    @classmethod
    def _cached(self, this, path):
        id= this.id
        # TIMELINE, HITCNT, entity(ID1), entity(ID2), ...            
        result= self.OLDER.get(path)
        if result is not None\
        and len(result)>1+id\
        and result[1+id] is not None: 
            #logger.debug("Matcher %s: %s, Cache Hit by %s", this.alias, result[1+id], path)
            result[0]= TIMELINE
            result[1]+= 1
            return result[1+id]
        # ID, entity                      
        result= self.YOUNG.get(path)
        if result is not None\
        and result[0]==id:
            if  len(self.OLDER.get(path,""))<=1+id\
            and self._cache_lock.acquire(0)\
            and len(self.OLDER.get(path,""))<=1+id:
                logger.debug("Matcher %s: %s, Cache Capture: %s", this.alias, result[1], path)
                cached= self.OLDER.get(path)
                if cached is None:
                    cached=self.OLDER[path]= [TIMELINE, 1]
                if len(cached)<=1+id:
                    cached.extend((None,)*(1+id-len(cached))+(result[1],))
                self.older+= 1
                self._cache_lock.release()
            return result[1]
        result= this._match(path)
        # if not result: logger.debug("Matcher %s: no rules match %s", this.alias, path) 
        # else: logger.debug("Matcher %s: %s '%s' matches %s", 
        # this.alias, "black" if result[0] else "white", result[2], path) #"""
        result= False if result is None else result[0]
        if self.young>=self.CACHE_LIMIT[0]:            
            logger.debug("Matcher: YOUNG cache LIMIT exceeded %s", self.CACHE_LIMIT[0])
            print this.state()
            self.young, self.YOUNG= 0, {}
            with self._cache_lock:
                if self.older<self.CACHE_LIMIT[1]: return result                   
                time=TIMELINE; gate=self.gate;
                logger.debug("Matcher: Cleaning OLDER, time=%d, gate=%.3f", time, gate)
                older=self.OLDER; expire=remain=0;
                for key, cached in older.items():
                    if cached[1]*gate>=time-cached[0]:
                        remain+=1; continue
                    expire+= 1 
                    del older[key]
                self.older= remain
                self.gate+= expire*self.CACHE_FBACK[1]-remain*self.CACHE_FBACK[0]  
                logger.debug("Matcher: Finish Clean, total=%s, gate/delta=%..f/.3f, remain/expire= %d/%d",
                     remain+expire, self.gate, self.gate-gate, remain, expire)
                print this.state()
        self.young+= 1
        self.YOUNG[path]= [id, result]
        return result

    def _match(self, path):
        tokens= re.findall(self.hashkey, path)
        tokens.append("*")
        search= re.search; result= None; 
        BLACK=self.BLACK; WHITE=self.WHITE;
        for token in tokens:
            if token in WHITE:
                hashed= WHITE[token]
                if isinstance(hashed, list):
                    for regexp in hashed:
                        if regexp and search(regexp, path):
                            return False, token, regexp
                elif search(hashed, path):  
                    return False, token, hashed
            if not result and token in BLACK:
                hashed= BLACK[token]
                if isinstance(hashed, list):
                    for regexp in hashed:
                        if regexp and search(regexp, path): 
                            result= True, token, regexp                              
                elif search(hashed, path): 
                    result= True, token, hashed
        return result    
    
    def state(self, short=4, heavy=4):
        worst="*"; maxvl=-1; report="";
        if short or heavy:            
            report+= "Matcher(%s): store=%s, hash=%s, flags=%s\n"%(
                self.alias, self._file, self.hashkey, self.flags)
            report+= "       rules= slow+ fast+ zero;  bad%d- hvy%d+ keylen\n"%(short,heavy)
            for target in [self.BLACK,self.WHITE]:
                report+= "Black: " if target is self.BLACK else "White: "
                slow=fast=zero=kcnt=rcnt=ksum=ksht=khvy=0;
                for k,v in target.iteritems():
                    if isinstance(v,list):
                        vl=len(v); kl=len(k)
                        if k=="*": slow+=vl; kl=vl=0;
                    else: vl=1; kl=len(k);
                    kcnt+=1; rcnt+=vl; ksum+=kl/(vl+0.001);
                    if vl!=1: fast+=vl
                    else: zero+=1
                    if kl<short and vl>1: ksht+=1
                    if vl>heavy: khvy+=1
                    if vl>maxvl: maxvl=vl; worst=k;
                report+= "% 5d % 5d % 5d % 5d  % 5d % 5d %  6.3f\n"%(
                    slow+fast+zero,slow,fast,zero,ksht,khvy,ksum/kcnt)
        else: report+= "stat: %s "% self.alias;
        report+= "worst=%s/%d known=%d gate=%.3f cache=%d/%d  hits=%d/%d"%(
            worst, maxvl, self.hashed, Matcher.gate, Matcher.young, Matcher.older,
            sum(i[1] for i in self.OLDER.values()), TIMELINE)
        return report
       
    @timeit
    def init(self, rules, seg=""):
        import gzip, base64, StringIO
        if re.search(r"[\x80-\xFF]", rules):
            try: rules= gzip.GzipFile(fileobj=StringIO.StringIO(rules),mode="r").read()
            except: pass
        if not re.search(r'[^+/=\w\s]',rules):
            try: rules= base64.b64decode(rules)
            except: pass
        records= ""
        for rule in rules.splitlines()[1:]:
            info= self.append(rule.rsplit("$",1)[0]);
            if seg and isinstance(info,tuple):
                _,_,rule,type,_,_= info
                records+= "%s%s\n"%("" if "b" in type else "@@", rule)
        if seg and self._file: open(self._file,"a").write(
            "\n! BEGINDATA %s\r\n%s\r\n! ENDDATA %s\r\n"%(seg,records,seg))
        self.YOUNG.clear(); self.young=0;
        self.OLDER.clear(); self.older=0;
        del gzip, base64, StringIO
    
    def update(self, path, append=True):
        rule= self.PATH_RULE(path);
        with self._update_lock:
            if append:
                info= self.append(rule)
                if not isinstance(info,tuple): return
                _,regexp,rule,type,_,index= info                    
                if self._file: open(self._file,"a+").write(rule+"\n")
                logger.debug("%s: %s +", self.alias, rule)
                for path in self.OLDER.keys():
                    if re.search(regexp, path): del self.OLDER[path];
            else: # TODO: unfinished
                index= self.remove(rule) 
                if index:
                    if self._file: open(self._file,"w+").write(open(self._file,"r")
                        .read().replace("\n"+rule+"\n","\n"))
                    logger.debug("%s: %s -", self.alias, rule)  
            self.YOUNG.clear(); self.young=0;            

try: import ssl
except ImportError: ssl=None
class Handler(SocketServer.BaseRequestHandler):  
    ASSOC_ROUTE= {}
    DEFAULT_ROUTE= None
    RETRIES_ROUTE= 0
    def FIND_ROUTE(self): 
        return self.find_route(self.client_address[0], self.method, self.path, self.headers)
    def BIND_ROUTE(self, new, old, final): 
        result= self.bind_route(self.path, new, old, final, time.time())
        path= format_path(self.path)
        if final and result: logger.info("Route change: from %s to %s for %s", old, new, path);
        if not final and result: logger.warn("Retry Cancel: %s(%s): %s", new, old, path); 
    
    def read(self, cond=-1, bufsize=1024):
        temp= "\r\n"        
        if isinstance(cond, int):
            data= [self._rbuf]
            left= cond- len(self._rbuf) 
            if cond<0: left= sys.maxint
            while left>0 and temp:
                temp= self.connection.recv(bufsize)
                data.append(temp)
                left-= len(temp)
            data= "".join(data)
            if cond<0: left=0
        elif isinstance(cond, str):
            data= self._rbuf
            while cond not in data and temp:
                temp= self.connection.recv(bufsize)
                data+= temp
            left= (data.find(cond)+1 or len(data)+1)+len(cond)-len(data)-1
        elif cond is None:
            data, self._rbuf= self._rbuf, ""
            return data
        if left>0: 
            self._rbuf= data 
            raise Abort("Unfinished Data")  if data else EOFError()
        elif left==0: self._rbuf= ""
        else: data, self._rbuf= data[:left], data[left:]
        return data
    
    def write(self, data, bufsize=8192):
        temp= self._wbuf
        temp.append(data)        
        temp[0]+= len(data)
        if "\n" in data or temp[0]>bufsize:
            data= "".join(temp[1:])
            if data: self.connection.sendall(data)            
            del temp[:]; temp.append(0); 
        return temp[0]  
            
    def setup(self):
        self.connection= self.request
        self.conn_alive= False
        self.nth_request= 1
        self._rbuf= ""
        self._wbuf= [0]

    def finish(self):
        if self._wbuf[0]: self.write("", 0);           
        self.connection.shutdown(socket.SHUT_WR)
        self.connection.close()
        
    def handle(self):
        try:        
            self.handle_once()
            while self.conn_alive:
                self.nth_request+= 1   
                self.handle_once()
            if self.nth_request>1: 
                logger.debug("Handle: %s:%s serves %%d requests"% 
                self.client_address, self.nth_request)
        except Abort,ex: 
            logger.error("Handle: %s:%s got %%r"%self.client_address, ex)

    def handle_once(self):        
        try: prefix= self.read("\r\n\r\n")
        except EOFError: self.conn_alive=False; return
        prefix, self.header= prefix[:-2].split("\r\n", 1)
        self.headers= trans.str2hdr(self.header)    
        if "HTTP/1." not in prefix[prefix.rfind(" "):]:
            raise Abort("VER: %s %r %r"% (prefix, self.header, self._rbuf))
        self.method, self.path, self.ver= prefix.split()
                
        serve= self.DEFAULT_ROUTE
        try: serve= self.ASSOC_ROUTE.get(self.client_address) or self.FIND_ROUTE();
        except Exception,ex:
            logger.warn("Exception raise in FIND_ROUTE: %s", ex)
            traceback.print_exc()          
        self.curr_tries= 0
        while self.curr_tries<=self.RETRIES_ROUTE:
            if isinstance(serve, list): 
                serve= serve[ord(os.urandom(1))%len(serve)]
            if isinstance(serve, Adapter): pass  
            elif hasattr(serve, "__bases__"):
                serve= Reusable._instances[serve].values()
                serve= serve[ord(os.urandom(1))%len(serve)]
            else: serve= (localhost if serve is None 
                else EndPoint(*serve) if isinstance(serve, tuple)
                else DirectProxy(serve) if isinstance(serve, str) 
                else GAEServer(**serve) if isinstance(serve, dict) 
                else EndPoint(serve) if callable(serve) 
                else self.DEFAULT_ROUTE);
            if self.curr_tries==0: origin, failed= serve, []; 
            try:                
                if isinstance(serve, LocalHost):
                    if self.method == "CONNECT": 
                        self.do_TUNNEL(serve)
                    else: self.do_DIRECT(serve)
                elif isinstance(serve, GAEServer):
                    if self.method == "CONNECT":
                        self.do_CONNECT(serve)    
                    elif self.method in ("GET","POST","HEAD","PUT","DELETE"):
                        self.do_METHOD(serve)       
                    else: raise Abort("METHOD check: %s"% self.method)                    
                elif isinstance(serve, DirectProxy):
                    self.do_PROXY(serve)
                elif isinstance(serve, EndPoint):
                    self.do_INPLACE(serve)                
            except Igorn: pass
            except Retry, ex:          
                failed.append(serve); serve=serve.FAIL_SAFE;  
                if serve is None: raise Abort("FAIL_SAFE is not set when %s: %s fail"% (ex, failed))
                self.BIND_ROUTE(serve, failed[-1], False)                                      
                logger.info("Retry: with %s (%s failed): %s"%(serve, failed[-1], ex.where))
                self.curr_tries+=1; continue 
            if not isinstance(serve, EndPoint): self.conn_alive=False;
            if self.curr_tries!=0: self.BIND_ROUTE(serve, origin, True); 
            return
        if self.RETRIES_ROUTE: raise Abort("Too many %r: %s fail"% (ex, failed));
        else: raise Abort("FAIL_SAFE is not enabled during %s"% ex);
             
    def do_TUNNEL(self, serve):
        logger.info("Tunnel: %s:%s to %%s"%self.client_address, self.path)
        self.connection.sendall("HTTP/1.0 200 OK\r\n\r\n")
        stat=[None,None,None,None]
        assert self.read(None)==""
        try: 
            sock= serve.connect(self.path)
            trans_sockio(self.connection, sock, stat)                
        except socket.error,ex:
            if stat[1]==self.connection: raise Igorn("Local Socket: %s"% ex)
            if ex.errno not in serve.RETRY_CODES or stat[3]!=None: raise
            if serve.FAIL_SAFE is None: 
                raise Abort("Not FAILSAFE for Retry during SSL reset")
            self.BIND_ROUTE(serve.FAIL_SAFE, serve, False)
            logger.info("Retry: with %s(%s fails): Reset in SSL Handshake"%(
                serve.FAIL_SAFE, serve))
            sock= self.server.mirror.connect()            
            sockname= sock.getsockname()
            self.ASSOC_ROUTE[sockname]= serve.FAIL_SAFE
            sock.sendall("CONNECT %s %s\r\n%s\r\n"% (
                self.path, self.ver, self.header ))   
            assert sock.recv(8192)=="HTTP/1.0 200 OK\r\n\r\n"
            sock.sendall(stat[2])
            try: trans_sockio(self.connection, sock)
            finally: del self.ASSOC_ROUTE[sockname]
            self.BIND_ROUTE(serve.FAIL_SAFE, serve, True)
            
    def do_DIRECT(self, serve):
        logger.info("Direct %.4s: \t%s", self.client_address[1], format_path(self.path))             
        netloc= self.headers['host'] if self.path[0]=="/" else self.path.split("/",3)[2]
        path= re.sub(r"^http://[^/]+", r"", self.path)
        header= re.sub(r"\r\n(Proxy-)?Connection:\s+\S+", r"", self.header)
        data0= "%s %s %s\r\n%s\r\n%s" % (self.method, path, self.ver, header, self.read(None))
        try:
            sock= serve.connect(netloc)
            sock.sendall(data0)
        except socket.error,ex:
            if ex.errno not in serve.RETRY_CODES: raise
            raise Retry("Domain or Keyword blocked")
        # current_data, current_netloc, lastest_socket, requests, responses, switch_wait 
        # python 2.x doesn't support the nonlocal keyword... :(
        stat= [data0, netloc, None, 1, 0, False]; 
        def callback(source, object):
            if stat[5]:
                if object is self.connection: stat[5][2]=0                            
                elif source is None or stat[3]==stat[4] and stat[5]>=3:
                    stat[1], sock2, _=stat[5]; stat[5]= False; 
                    trans_sockio(self.connection,sock2,callback) 
                    raise Igorn("delayed Stream Switch")
                elif stat[3]==stat[4]: stat[5][2]+=1; return True;
                else: time.sleep(0.5 if source is None else 0.1); return True;              
            if object is None: return                   
            stat[2]=source; data=source.recv(8192); 
            if not data: return 
            if object is self.connection:
                if re.match("^HTTP/1\.[01] (\d\d\d)", data):
                    prefix, path= "Pipe%d"%stat[3], stat[0].split("\n", 1)[0].strip(); stat[4]+=1;
                    #if not stat[3]: prefix, path= "Direct", re.sub("/", "http://%s/"%stat[1], path, 1);
                    if stat[4]: logger.info("%s %.4s: %s\t%s", prefix, self.client_address[1],
                         data.split(" ", 2)[1], format_path(path));   
                    # Location: http://61.131.89.148/req.php?str1=...&t=...&str2=(...)
                    if "http://61.131.89.148/req.php" in data.split("\r\n\r\n",1)[0]:                            
                        source.close(); stat[4]-=1;
                        logger.warn("Remove ISP 302: %s"%
                            re.findall("^Location: \S*",data,10)[0].split("&str2=",1)[1])                        
                        sock2= serve.connect(stat[1])
                        sock2.sendall(stat[0])
                        trans_sockio(self.connection, sock2, callback)
                        raise Igorn("Transport Changes") 
            else:
                # GET|POST|HEAD|PUT|DELETE|OPTION|TRACE|CONNECT
                if re.match(r"^[A-Z]{3,7} \S+ HTTP/1\.[01]\r\n", data):
                    prefix, data= data.split("\r\n\r\n", 1)
                    try: netloc2= re.findall(r"\nHost:.*", prefix)[0].split(":", 1)[1].strip();
                    except: netloc2= prefix.split("\n", 1)[0].split("/", 3)[2];
                    prefix= re.sub(r"^(\S+ )http://[^/]+", r"\1", prefix)
                    prefix= re.sub(r"\r\n(Proxy-)?Connection:\s+\S+", "", prefix);
                    stat[0]=data= prefix+"\r\n\r\n"+data; stat[3]+=1; 
                    if netloc2 != stat[1]:  
                        sock2= serve.connect(netloc2, (stat[1], object))
                        logger.debug("Pipe %.4s Stream Switch from %s to %s", self.client_address[1], stat[1], netloc2)
                        sock2.sendall(data)                         
                        if stat[3]!=stat[4]: stat[5]=[netloc2,sock2,-1]; return True; 
                        stat[1]= netloc2   
                        trans_sockio(self.connection, sock2, callback)                             
                        raise Igorn("Pipeline Switch")                                    
                elif data: stat[0]+=data;
            stat[2]=object; return data and not object.sendall(data)                
        try: sock= trans_sockio(self.connection, sock, callback)
        except Igorn: pass
        except socket.error,ex:
            if stat[2]==self.connection: raise Igorn("Local Socket: %s"% ex);
            if ex.errno not in serve.RETRY_CODES: raise;
            prefix, self._rbuf= stat[0].split("\r\n\r\n", 1)
            prefix, self.header= (prefix+"\r\n").split("\r\n", 1)
            self.headers= trans.str2hdr(self.header)
            self.method, self.path, self.ver= prefix.split()
            self.path= "http://%s%s"% (stat[1], self.path)
            raise Retry("Socket error in Pipeline") 
        if sock is not None and serve._socktest(sock):
            serve.connect(None, (stat[1], sock))
    
    def do_INPLACE(self, serve):
        data= self.read(int(self.headers.get("content-length", 0)))
        b4= self.conn_alive
        result= serve.process(self, data)
        if self.conn_alive: conn= "=" if b4 else "+";
        else: conn= "-" if b4 else "~";
        logger.info("%s%s %.4s: %s\t%s %s", serve, conn, 
            self.client_address[1], result, self.method, format_path(self.path))
        
    def do_PROXY(self, serve):
        logger.info("Proxy:\t%s %s", self.method, format_path(self.path))
        sock= serve.connect()
        sock.sendall("%s %s %s\r\n"% (self.method, self.path, self.ver))
        sock.sendall("%s\r\n%s"% (self.header, self.read(None)))
        trans_sockio(self.connection, sock)

    def do_CONNECT(self, serve):
        if ssl is None: raise Abort("SSL unsupported or disabled")
        logging.debug("GAE_SSL: %s:%s to %%s"%self.client_address, self.path)
        if not self.curr_tries: 
            self.connection.sendall("HTTP/1.0 200 OK\r\n\r\n");
        keyf, crtf= make_cert(self.path.split(":")[0])
        try: ssl_sock= ssl.wrap_socket(self.connection, keyf, crtf, True)
        except socket.error,ex: raise Abort("SSL %r"% ex)
        apt_sock= self.server.mirror.connect()
        sockname= apt_sock.getsockname()
        self.ASSOC_ROUTE[sockname]= serve
        assert self.read(None)==""
        
        """line= r"\1https://%s/"% self.path.replace(":443", "")
        line= re.sub(r"^(\S+\s+)/", line, ssl_sock.read())
        apt_sock.sendall(line)        
        try: trans_sockio(ssl_sock, apt_sock)  ;"""            
        stat= [self.connection]
        def callback(source, object, first=[]):
            if source is None: return
            stat[0]=source; data= source.recv(8192);
            if not first and source is ssl_sock: 
                fullpath= r"\1https://%s/"% self.path.replace(":443", "")
                data= re.sub(r"^(\S+\s+)/", fullpath, data)
                first.append(0)               
            stat[0]=object; return len(data) and not object.sendall(data)
        try: trans_sockio(ssl_sock, apt_sock, callback) 
        except socket.error, ex:
            if stat[0]==self.connection:
                raise Igorn("Local Socket: %s"% ex) 
            raise Abort("GAE_SSL: got self in trouble...") #"""
        finally: del self.ASSOC_ROUTE[sockname]
        ssl_sock.shutdown(socket.SHUT_WR)
        
    def do_METHOD(self, serve):        
        if not re.match(r"^https?://[^/]+\.[^/]+/", self.path):
            raise Abort("URL check: %s"% self.path)
        if self.method not in ("POST", "PUT"): pdata=""
        else: pdata= self.read(int(self.headers['content-length']))

        code, logs, head, body= serve.urlfetch(
             self.method, self.path, self.header, pdata)
        if code in ("300","301","410","404","203") :
            print "trace 300 301 410 404 203 head for test: \n %s"%head

        if logs.startswith("Succeed"):
            # common, server process succeed.
            self.write("%s %s %s\r\n"% (self.ver, code, logs))
            self.write(head)
            self.write("\r\n")
            self.write(body)
            if "\t.A:" in logs:
                prefix, seq= re.search(r"\t.A:([\d.]+),(\d+);", logs).groups()
                for i in xrange(1,int(seq)+1):
                    self.write(serve.getcached(prefix,i))
            if "\t.T:" in logs:
                logger.error(".T command haven't implemented yet.")
            #p=self.path; a=max(20, p.find("/",8)*50/(p.find("/",8)+len(p)-p.rfind("/")));
            logger.info("%s: %s %s %s\t%s %s",
                serve, code, logs.replace("\t", "").replace("Succeed", "S"),
                len(body), self.method, format_path(self.path));

        elif logs.startswith("Continue"):
            # error occurs, but server pass the self-resolve
            logger.info("large response detected, trying range fetch.")
            headers= trans.str2hdr(self.header)
            try: begin, finish, size= map(int, re.search(
                r"^content-range:\s*bytes\s*(\d+)-(\d+)/(\d+)", head, 10).groups())
            except: begin, finish, size= -1, -1, int(re.search(
                r"^content-length:\s*(\d+)",head,10).group(1))
            head= re.sub(re.compile("^Content-(Length|Range):.*\n", 10), "", head)
            code, start, stop= "200", 0, size-1
            if "range" in headers:
                start, stop= re.search("bytes=(\d*)-(\d*)", headers['range']).groups()
                if start and stop: start, stop= int(start), int(stop);
                elif not start: start, stop= size-int(stop), size-1;
                elif not stop:  start, stop= int(start), size-1;
                assert -1<start<=stop<size
                head+="Content-Range: bytes %d-%d/%d\r\n"%(start, stop, size); code="206";
            head+="Content-Length: %d\r\n"% (stop-start+1)
            self.write("%s %s %s\r\n"% (self.ver, code, logs))
            self.write(head)
            self.write("\r\n")
            if start == begin:
                assert finish-begin+1==len(body)
                start= finish+1
                self.write(body)

            begin=finish=0; step=500000;
            logger.debug("headers calced, starting range fetch...");
            while start<=stop:
                if finish*2<=begin-3:
                    raise Abort('too many retrys in Range')
                t=time.clock(); begin+=1;
                psize= min(step, stop-start+1);
                range= "%d-%d"% (start, start+psize-1);
                headers['range']= "bytes="+ range
                code, logs, head, body= serve.urlfetch(
                    "GET", self.path, trans.hdr2str(headers), "", GAEServer.PULL)
                if code != "206":
                    begin+=1; time.sleep(2); continue
                assert "bytes %s/%d"%(range,size) in head
                if len(body)==psize:
                    self.write(body)
                elif len(body)==0 and "\t.A:" in logs:
                    prefix, seq= re.search(r"\t\.A:(\S+),(\d+);", logs).groups()
                    for i in xrange(1, int(seq)+1):
                        part= serve.getcached(prefix, i)
                        psize-= len(part)
                        self.write(part)
                    else: assert psize==0
                else: continue
                logger.debug("range %s/%d:%d in %.3fs",range,size,psize,time.clock()-t)
                start+=step; finish+=1;
            logger.info("INFO: range fetch finish successfully")
        else:
            logger.error("%s returns %s when %s %s"%(
                serve, logs, self.method, self.path))
            raise Abort("GAE cannot fetch given url")

import traceback            
class Server(SocketServer.ThreadingTCPServer):
    """Threaded local proxy server with a hot-reloading configure file.

    The configure file is re-executed whenever its mtime changes (see
    handle_idle), so routes can be tuned without restarting the proxy.
    """
    request_queue_size= 16

    def __init__(self, address, handler, configure):
        self.address= address
        self.handler= handler
        self.configure= configure
        # Load the configure once before any socket is bound so a broken
        # configuration aborts startup early.
        self.handle_idle()
        self.mirror= DirectProxy(address, -1, "Self")
        if not DirectProxy.PING_BASE:
            DirectProxy.PING_BASE= self.mirror._ping
        SocketServer.ThreadingTCPServer.__init__(
            self, self.mirror.address, handler)

    def handle_error(self, request, client_address):
        # Called by SocketServer when a handler thread raises.
        etype, value, tb= sys.exc_info()
        # errno 10053 (WSAECONNABORTED): the client simply dropped the
        # connection -- not worth more than a debug line.
        if etype is socket.error and value.errno==10053:
            logger.debug("Connection close by client %s:%s"%client_address)
        else:
            logger.error("[%s] %s when process %s", etype, value, client_address)
            traceback.print_tb(tb)

    def handle_idle(self, STATIC=[0]):
        """(Re)load the configure file when its mtime changes.

        Returns None to keep serving, or the configure's SHUTDOWN_NOW
        value (truthy = restart hint, False = plain shutdown) when it
        requests termination.  STATIC is an intentional mutable default
        that keeps the last seen mtime across calls.  Raises Crash when
        the configure fails to load.
        """
        stamp= os.path.getmtime(self.configure)
        if stamp==STATIC[0]: return
        logger.info("-"* 60)
        logger.info("%s: Configure %s", time.ctime(), "Reloading" if STATIC[0] else "Initializing")
        STATIC[0]= stamp;
        # Names exported into the configure's execution scope.
        scope= {"Static": static, "Logger": logger, "Matcher": Matcher,
            "LocalHost": LocalHost, "GAEServer": GAEServer,
            "DirectProxy": DirectProxy, "EndPoint": EndPoint }
        try:
            static.SCOPE= scope
            execfile(self.configure, scope)
            static.SCOPE= None
            if "DEFAULT_ROUTE" in scope:
                default= scope["DEFAULT_ROUTE"]
                self.handler.DEFAULT_ROUTE= default
                logger.info("Default Route is %s(%r)", default, default)
            if "RETRIES_ROUTE" in scope:
                retries= scope['RETRIES_ROUTE']
                self.handler.RETRIES_ROUTE= max(0, int(retries))
                logger.info("Retry using FAIL_SAFE %s",
                    "%s times"%retries if retries>2 else "twice" if retries==2
                    else "just once" if retries==1 else "never")
            if "FIND_ROUTE" in scope:
                find_route= scope['FIND_ROUTE']
                self.handler.find_route= staticmethod(find_route)
                # FIX: __doc__ may be None, which would crash re.sub.
                logger.info("FIND_ROUTE found: %r\n%s", find_route,
                    re.sub(re.compile("^\s*", re.M),"INFO:\t", find_route.__doc__ or ""))
            if "BIND_ROUTE" in scope:
                bind_route= scope['BIND_ROUTE']
                self.handler.bind_route= staticmethod(bind_route)
                # FIX: same None-docstring guard as FIND_ROUTE above.
                logger.info("BIND_ROUTE found: %r\n%s", bind_route,
                    re.sub(re.compile("^\s*", re.M),"INFO:\t", bind_route.__doc__ or ""))
            if CONFIG['Static']:
                logger.info("Static Objects:")
                for key,value in CONFIG['Static'].iteritems():
                    logger.info("\t%s - %s(%.80r%s)", key, re.findall(".*", str(value))[0],
                        value, "..." if len(str(value))>80 else "")
            logger.info("-"* 60)
            if "SHUTDOWN_NOW" in scope:
                shutdown= scope["SHUTDOWN_NOW"]
                if shutdown is not False:
                    logger.critical("%s: Configure file requires %s",
                        time.ctime(), "Restart" if scope["SHUTDOWN_NOW"] else "Shutdown")
                    return shutdown or False
        except Exception as ex:
            logger.critical("Fail in loading Configure")
            traceback.print_exc()
            raise Crash("[%s] %s raise when loading Configure"%(type(ex),ex))

    def serve_forever(self, poll_interval=10):
        """Accept connections until handle_idle requests termination.

        NOTE(review): handle_idle's non-None return value is discarded by
        the while-condition, so this method always returns None and the
        __main__ restart/shutdown distinction never triggers -- confirm
        the intended contract before relying on it.
        """
        global TIMELINE
        iw=[self]; ow=ew=[];
        logger.info("Local Proxy is Running now.")
        while self.handle_idle() is None:
            if select.select(iw, ow, ew, poll_interval)[0]:
                self._handle_request_noblock()
                TIMELINE+= 1     # coarse tick consumed by Matcher caching

    def process_request(self, request, client_address):
        # Same as ThreadingMixIn.process_request, but names the worker
        # thread after the client so log lines are attributable.
        t= threading.Thread(
            target= self.process_request_thread,
            args= (request, client_address))
        t.setName("Thread-%s:%s"% client_address)
        t.start()

    def serve_forever2(self):
        """Experimental single-select relay loop.

        Relays every socket pair registered via trans_sockio through one
        select loop instead of one blocking thread per pair.  __main__
        currently uses serve_forever, not this loop.  Expects self.EVENT,
        self.BUFF, self.BUSY, self._waits and self._wait_lock to have
        been initialized elsewhere -- TODO confirm where.
        """
        global TIMELINE
        TIMEOUT= 60
        event= self.EVENT
        buff= self.BUFF
        busy= self.BUSY
        iw=[]; ow=[]; ew=[];
        while self.handle_idle() is None:
            # Accept newly arriving client connections.
            if select.select([self], [], [], 0.1)[0]:
                self._handle_request_noblock()
                TIMELINE+= 1

            ins, ops, exs= select.select(iw, ow, ew, 0.1)
            # NOTE(review): time.clock() is CPU time on POSIX; the idle
            # timeout below assumes Windows wall-clock semantics.
            now= time.clock()
            for sock in ins:
                data= sock.recv(8192)
                if not data: continue
                pair= ow[iw.index(sock)]
                if buff[sock]:
                    data= buff[sock]+data
                if pair in ops:
                    # FIX: keep the UNSENT tail after a partial send; the
                    # original kept data[:sent] (the bytes already sent),
                    # resending them and dropping the remainder.
                    data= data[pair.send(data):]
                # FIX: store unconditionally so a fully-drained buffer is
                # cleared instead of left stale for the next read.
                buff[sock]= data
                busy[sock]= now

            # Expire pairs that have been silent longer than TIMEOUT.
            timeout= 0
            for sock,last in busy.iteritems():
                if now-last<=TIMEOUT: continue
                index= iw.index(sock)
                event[(sock,ow[index])].set()
                iw[index]=ow[index]=ew[index]= None
                timeout+= 1

            # Tear down both directions of any pair with an exceptional
            # condition; the waiting trans_sockio caller is woken via its
            # two events.  NOTE(review): event/busy entries of expired
            # sockets are cleaned up by trans_sockio concurrently --
            # confirm this cannot race with the busy iteration above.
            error= set([iw.index(s) for s in exs]+[ow.index(s) for s in exs])
            for index in error:
                sock1, sock2= iw[index], ow[index]
                key1, key2= (sock1,sock2), (sock2,sock1)
                evt1, evt2= event[key1], event[key2]
                if sock1 in exs: event[key1]=None;
                if sock2 in exs: event[key2]=None;
                evt1.set(); evt2.set();
                iw[index]=ow[index]=ew[index]= None

            if timeout or error:
                iw= filter(None, iw)
                ow= filter(None, ow)
                ew= filter(None, ew)

            # Register pairs queued by trans_sockio (non-blocking lock
            # grab: skip this pass if a producer holds it).
            if self._waits and self._wait_lock.acquire(0):
                for pair in self._waits:
                    iw.extend(pair)
                    ow.extend(pair[::-1])
                    ew.extend(pair)
                    busy[pair[0]]=busy[pair[1]]= now
                del self._waits[:]
                self._wait_lock.release()

    def trans_sockio(self, sockA, sockB):
        """Queue the pair for serve_forever2's relay loop and block until
        both directions finish or time out."""
        evt1= threading.Event()
        evt2= threading.Event()
        self.EVENT[(sockA,sockB)]= evt1
        self.EVENT[(sockB,sockA)]= evt2
        # FIX: the original wrote self.buff/self.busy (lowercase), which
        # do not exist -- serve_forever2 reads self.BUFF/self.BUSY, so
        # every call raised AttributeError.
        self.BUFF[sockA]=self.BUFF[sockB]= ""
        self.BUSY[sockA]=self.BUSY[sockB]= 0

        with self._wait_lock:
            self._waits.append((sockA,sockB))
        evt1.wait(); evt2.wait();

        # FIX: lowercase attribute typo here as well.
        # NOTE(review): the two self.EVENT entries are never removed.
        del self.BUFF[sockA],self.BUFF[sockB];
        del self.BUSY[sockA],self.BUSY[sockB];
    
# Load pyOpenSSL's crypto module, falling back to a bundled _OpenSSL copy,
# then to None (certificate generation disabled -- see make_cert).
# FIX: the original chained two "except ImportError" clauses on a single
# try, but a second identical except clause is unreachable and an
# ImportError raised *inside* the first handler propagates uncaught.
# Nested try blocks implement the intended fallback order.
try:
    from OpenSSL import crypto
except ImportError:
    try:
        from _OpenSSL import crypto
    except ImportError:
        crypto= None
def make_cert(host, CACHE={}, CA=[None, None, 0]):
    """Return (key_path, crt_path) for *host*, generating a fake cert
    signed by the local CA on first use.

    CACHE maps host -> (key_path, crt_path) across calls and CA holds
    [ca_private_key, ca_certificate, next_serial]; both are intentional
    mutable defaults acting as process-wide state.  When no crypto
    backend is available CA is emptied and every host falls back to the
    CA certificate itself.  Hosts of 64+ characters also fall back (the
    X.509 commonName field is limited to 64 bytes).
    """
    if host in CACHE: return CACHE[host]
    if not CA or len(host)>=64:
        return ("certs/__CA__.key", "certs/__CA__.crt")
    fkey,fcrt=CACHE[host]= "certs/%s.key"%host,"certs/%s.crt"%host
    if not os.path.isfile(fkey) or not os.path.isfile(fcrt):
        if not crypto:
            # FIX: drop the cache entry created above before falling back,
            # otherwise later calls for this host would return paths to
            # files that were never created.  (Also removed a leftover
            # "print CACHE" debug statement.)
            del CACHE[host]
            del CA[:]
            return make_cert("")
        logger.warn("Creating Fake Cert for %s", host)
        CPEM= crypto.FILETYPE_PEM
        pkey= crypto.PKey()
        cert= crypto.X509()
        cert.set_notBefore("20080101120000Z")
        cert.set_notAfter("20180101120000Z")
        subj= cert.get_subject()
        subj.countryName= "CN"
        subj.stateOrProvinceName= "Fujian"
        subj.localityName= "Lyricconch"
        if host != "__CA__":  # create cert for any host
            if not CA[2]:  # lazily load CA key/cert/serial on first use
                fcakey,fcacrt= make_cert("__CA__")
                CA[0]= crypto.load_privatekey (CPEM, open(fcakey, 'rb').read())
                CA[1]= crypto.load_certificate(CPEM, open(fcacrt, 'rb').read())
                CA[2]= int(open("certs/__CA__.seq", 'rb').read())
            pkey.generate_key(crypto.TYPE_RSA, 1024)
            cert.set_pubkey(pkey);
            subj.commonName= host
            subj.organizationName= host
            subj.organizationalUnitName= "WallProxy Branch"
            cert.set_serial_number(CA[2])
            cert.set_issuer(CA[1].get_subject())
            cert.sign(CA[0], "sha1")
        else: # initialize the self-signed CA certificate
            pkey.generate_key(crypto.TYPE_RSA, 2048)
            cert.set_pubkey(pkey);
            subj.commonName = "WallProxy CA"
            subj.organizationName = "NeverTrust"
            subj.organizationalUnitName = "WallProxy Root"
            cert.set_serial_number(ord(os.urandom(1)))
            cert.set_issuer(crypto.X509Name(subj))
            cert.sign(pkey, "sha1")
        open(fkey,'wb').write(crypto.dump_privatekey (CPEM, pkey))
        open(fcrt,'wb').write(crypto.dump_certificate(CPEM, cert))
        # Persist the next serial number for future runs.
        CA[2]+=1; open("certs/__CA__.seq","wb").write(str(CA[2]));
    return fkey, fcrt

def format_path(path, length=60):
    """Shorten *path* to roughly *length* characters for log output.

    The query string is truncated first; if the path proper is still too
    long, middle segments are collapsed and each collapsed run is shown
    as "...", working outwards from the center.
    """
    if len(path)<=length: return path
    if "?" in path:
        path, query= path.split("?",1)
        if len(path)<= length*0.8:
            return path+"?"+query[:length-len(path)]+"..."
        path+= "?"+query[:int(length*0.2)]+"..."
    # FIX: goal is the number of characters that must be REMOVED to get
    # under 80% of *length*; the original computed length*0.8-len(path),
    # which is negative wherever this point is reached, so the loop below
    # never ran and long paths were returned unshortened.
    goal= len(path)-length*0.8
    parts= path.split("/")
    base= len(parts)//2
    offset= 0
    # Visit segments outwards from the middle: 0, -1, 1, -2, 2, ...
    # FIX: the original stepped on offset parity (offset%2), which
    # oscillates between -1 and 1 forever; stepping on sign produces the
    # intended spiral.  The bounds check stops cleanly when all segments
    # are exhausted (the original could raise IndexError).
    while goal>0 and 0 <= base+offset < len(parts):
        goal-= len(parts[base+offset])
        parts[base+offset]= "%%"
        offset= -offset if offset<0 else -offset-1
    # Collapse each run of removed segments into a single ellipsis.
    return re.sub("%(%/%)*%","...","/".join(parts))
  
def trans_sockio(sockA, sockB, handle=None, idle=60):
    """Relay data between sockA and sockB until *idle* quiet iterations.

    handle selects the per-chunk callback(source, dest):
      None     - plain 8KB copy in both directions;
      callable - used directly (called with (None, None) on idle ticks);
      list     - like None, but records [sending?, sock, dataA, dataB];
      True     - like None, but wraps socket errors in Trace.
    Returns sockB when sockB stayed silent for at least half of *idle*
    (letting the caller tell which side went quiet), else None.
    """
    if handle is None:
        def callback(source, dest):
            if source is None: return  # idle tick, nothing to move
            data= source.recv(8192)
            return data and not dest.sendall(data)
    elif callable(handle):
        callback= handle
    elif isinstance(handle, list):
        # Pad the state list to [sending?, sock, dataA, dataB].
        if len(handle)<4: handle.extend(("",)*(4-len(handle)))
        def callback(source, dest):
            if source is None: return
            handle[:2]= False, source
            data= source.recv(8192)
            handle[2 if source is sockA else 3]= data
            if not data: return
            handle[:2]= True, dest
            return not dest.sendall(data)
    elif handle is True:
        def callback(source, dest):
            # FIX: guard idle ticks -- the original called None.recv when
            # select timed out with no readable socket (AttributeError).
            if source is None: return
            try: data= source.recv(8192)
            except socket.error as ex: raise Trace("sockio", False, source, ex)
            try: return len(data) and not dest.sendall(data)
            except socket.error as ex: raise Trace("sockio", True, dest, ex)
    else:
        # FIX: reject unsupported handle values up front instead of
        # failing later with NameError on the undefined callback.
        raise TypeError("unsupported handle: %r" % (handle,))
    iw=[sockA,sockB]; ow=[]
    loop= timeout= 0
    while loop<=idle:
        # 1-second select ticks; loop counts ticks since last transfer.
        ins,_,exs= select.select(iw, ow, iw, 1); loop+=1;
        if exs: logger.error("trans_sockio: socket error"); break
        # timeout counts consecutive ticks during which sockB was silent.
        timeout= 0 if sockB in ins else timeout+1
        if not ins: callback(None, None); continue
        for sock in ins:
            if sock is sockA and callback(sockA,sockB): loop=0;
            if sock is sockB and callback(sockB,sockA): loop=0;
    if timeout>=idle/2: return sockB
            
def testMatcher():
    data= LocalHost().urlopen("http://autoproxy-gfwlist.googlecode.com/svn/trunk/gfwlist.txt")[-1]
    gfwed= Matcher()
    gfwed.init(data)
    gfwed.append("@@www.google.com")
    gfwed.remove("@@www.google.com")
    gfwed.state()
    print gfwed.match("http://www.chinagfw.org/")
    print gfwed.match("http://www.chinagfw.org/a")

def testGAEPipe():
    # Stress-test GAEServer.urlfetch with many concurrent worker threads
    # and report the total wall time.
    # STAT = [lock serializing the (disabled) debug prints,
    #         time of the most recently completed fetch].
    STAT= [threading.Lock(), 0]
    def fetch(server, *args):
        # Worker: perform one urlfetch; the triple-quoted strings below
        # are disabled debug prints (toggled via the trailing #""").
        try:
            STAT[0].acquire()
            """print "\n#%s \nmethod:%s \tpath:%s \tcontent:%s \n%s\n"%(
                threading.currentThread(),
                args[0],
                args[1],
                "<None>" if len(args)<=3 else args[3][:80] or "<empty>" if len(args[3])<1000 else "<len:%d>"%len(args[3]),
                "<empty>" if len(args)<=2 else args[2]) #"""
            STAT[0].release()
            server.urlfetch(*args)
            STAT[0].acquire()
            STAT[1]= time.clock();
            # NOTE(review): the disabled debug block below references
            # `rets`, but urlfetch's result is discarded above -- assign
            # it (rets= server.urlfetch(*args)) before re-enabling.
            """print "\n#%s \nstatus:%s \tlogs:%s \tcontent:%s \n%s\n"%(
                threading.currentThread(),
                rets[0],
                rets[1],
                rets[3][:80] or "<empty>" if len(rets[3])<1000 else "<len:%d>"%len(rets[3]),
                rets[2]) #"""
            STAT[0].release()
        except: print "error"; raise

    # Test endpoint plus its key material.
    proxy= GAEServer("http://lyricconch-test.appspot.com/fetch2.py",(0x6904401923F92CB14D9479C9CC4B2E2644964998102B8E6BCAEF2FD6EBBF31A3,
               0,0x1A1CF75A30171BA6969DA6B70C330E19F8A40BD))
    timestamp= time.clock()
    threadcount= threading.activeCount()
    # 6 rounds of 8 concurrent fetches, plus one PULL-mode fetch.
    for _ in xrange(6):
        threading.Thread(target=fetch,args=(proxy,"GET","http://developer.mozilla.org","","")).start()
        threading.Thread(target=fetch,args=(proxy,"GET","https://developer.mozilla.org/media/css/common-min.css?build=9087f7f")).start()
        threading.Thread(target=fetch,args=(proxy,"GET","https://developer.mozilla.org/media/css/print.css")).start()
        threading.Thread(target=fetch,args=(proxy,"GET","https://developer.mozilla.org/media/img/mdn-logo.png")).start()
        threading.Thread(target=fetch,args=(proxy,"GET","https://developer.mozilla.org/media/img/features/feature-mobilecup.png")).start()
        threading.Thread(target=fetch,args=(proxy,"GET","https://developer.mozilla.org/media/img/features/feature-p2pu.jpg")).start()
        threading.Thread(target=fetch,args=(proxy,"GET","https://developer.mozilla.org/media/css/print.css")).start()
        threading.Thread(target=fetch,args=(proxy,"GET","https://developer.mozilla.org/media/img/mdn-logo.png")).start()
    threading.Thread(target=fetch,args=(proxy,"GET","http://www.google.com","","",GAEServer.PULL)).start()
    # Wait for every worker thread spawned above to finish.
    while threading.activeCount()>threadcount:
        time.sleep(1);
    print "finish: %.3fs"%(STAT[1]-timestamp)
    # client logging off, server logging on, pipeline=16: 800/50s 10/2s 50/3s

# Incremented by Server.serve_forever for every accepted request.
# Used by Matcher.match as a cheap cache timestamp (per the original
# author's note).  A lock around this counter would cause a performance
# problem, and unlike time.time() a plain integer tick is sufficient.
# Admittedly ugly module-level state.
TIMELINE= 0

if __name__ == "__main__":
    # PyCrypto is optional; its absence merely disables the CRYPT feature.
    try: from Crypto import Cipher;
    except: Cipher=None;
    import platform
    scope={}; CONFIG={};

    CONFIG['Static']= static.STATIC
    # When frozen (py2exe etc.) __file__ is unreliable; use the exe path.
    CONFIG['File']= sys.executable if hasattr(sys,'frozen') else __file__
    CONFIG['Conf']= "proxy2.conf"
    os.chdir(os.path.dirname(CONFIG['File']))
    # Best-effort preload of the configure, only to pick up values such as
    # SERVER_ADDRESS below; real loading happens in Server.handle_idle.
    try: execfile(CONFIG['Conf'], scope)
    except: pass
    print "="* 75
    print "Welcome... current version is still under developing."
    print "please report issues when you got BUGGed."
    print

    CONFIG['App']= "WallProxyMod", __version__, __author__
    CONFIG['Env']= platform.python_version(), platform.platform()
    CONFIG['Loc']= os.path.abspath(os.curdir)
    CONFIG["Addr"]= scope.get('SERVER_ADDRESS', "127.0.0.1:8088")
    print "Application: %s/%s by %s"% CONFIG['App']
    print "  Runtime Env: Python %s on %s"% CONFIG['Env']
    print "  Program Loc: %s"% CONFIG['Loc']
    print "  Config File: %s"% CONFIG['Conf']
    print "  Listen Addr: %s"% str(CONFIG["Addr"])

    # Feature status per optional module; FORBIDDEN_* configure keys
    # force-disable a feature even when the module is present.
    # NOTE(review): "Enable" vs "Disabled" wording is inconsistent, and
    # `ssl` is not imported in this file's visible header -- confirm it
    # is imported elsewhere in the module.
    feature= lambda m,c: "noModule" if not m else "Disabled" if scope.get(c) else "Enable"
    CONFIG['HTTPS']= feature(ssl,"FORBIDDEN_HTTPS")
    CONFIG["CERTS"]= feature(crypto,"FORBIDDEN_CERTS")
    CONFIG["CRYPT"]= feature(Cipher, "FORBIDDEN_CRYPT")
    print "Features: %8s %8s %8s"% ("HTTPS","CERTS","CRYPT")
    print "  Require:%8s %8s %8s"% ("ssl","OpenSSL","Crypto")
    print "  Status: %(HTTPS)8s %(CERTS)8s %(CRYPT)8s"% CONFIG

    # Logging setup: optional file handler, plus a stderr handler unless
    # the output filename contains "_" (used here as an opt-out marker).
    level=  scope.get("LOGGING_LEVEL")  or "DEBUG"
    format= scope.get("LOGGING_FORMAT") or "%(levelname)s: %(message)s"
    output= scope.get("LOGGING_OUTPUT") or ""
    logger= logging.getLogger()
    logger.setLevel(getattr(logging,level))
    if output:
        channel= logging.FileHandler(output.strip())
        channel.setLevel(logging.DEBUG)
        channel.setFormatter(logging.Formatter(format))
        logger.addHandler(channel)
    if output is not None and "_" not in output:
        output= output+ " & STDERR" if output else "STDERR"
        channel= logging.StreamHandler()
        channel.setLevel(logging.DEBUG)
        channel.setFormatter(logging.Formatter(format))
        logger.addHandler(channel)
    CONFIG["Logd"]= level, format, output
    print "Logging(level=%s):"% level
    print "  Format: %s"% format
    print "  Output: %s"% output

    sys.stdout.flush()
    # Drop names that the serving loop no longer needs.
    del platform, scope, feature, channel, level, format, output
    localhost= LocalHost()
    httpd= Server(CONFIG["Addr"],Handler,CONFIG['Conf'])
    # NOTE(review): serve_forever's while-condition discards handle_idle's
    # return value and the method itself returns None, so this branch
    # always restarts the process -- confirm the intended
    # restart-vs-shutdown contract.
    if not httpd.serve_forever():
        httpd.server_close()
        os.system(CONFIG['File'])
    raise Crash("User Shutdown.")

            
