#-*- coding:UTF-8 -*-
"""
"""
from google.appengine.ext import webapp
from google.appengine.ext.webapp import template
from google.appengine.ext.webapp.util import run_wsgi_app
from google.appengine.api import urlfetch
import os
import urllib
import urlparse
import logging
import re
from BeautifulSoup import BeautifulSoup

class Proxy(webapp.RequestHandler):
    """Landing page of the proxy: serves the URL-entry form and
    redirects submitted URLs to the fetch handler."""

    def post(self):
        # Remember the submitted URL in a cookie so the fetch handler
        # can replay it as the Referer header, then bounce over to it.
        target = self.request.get("url")
        self.response.headers.add_header('Set-Cookie', 'refer=%s;' % (target))
        self.redirect("/proxy/get/%s" % target)

    def get(self):
        # Render the form template with an empty context.
        tpl = os.path.join(os.path.dirname(__file__), 'templates/proxy.html')
        self.response.out.write(template.render(tpl, {}))

class ProxyGet(webapp.RequestHandler):
    """Fetches the requested URL and serves it back.

    Non-HTML payloads are passed through untouched; HTML is parsed with
    BeautifulSoup and every a/img/script/link URL is rewritten to point
    back at /proxy/get/ so subsequent navigation stays proxied.
    """

    # Compiled once at class-definition time instead of on every request.
    CHARSET_RE = re.compile(r"((^|;)\s*charset=)([^;\"]*)", re.M)
    META_RE = re.compile(r"(<meta.*?Content-Type.*?)>", re.I)
    # (tag, attribute) pairs whose values are routed through the proxy.
    REWRITE_ATTRS = (
        ('a', 'href'),
        ('img', 'src'),
        ('script', 'src'),
        ('link', 'href'),
    )

    def post(self, url):
        # POST requests are handled identically to GET.
        self.get(url)

    def get(self, url):
        """Proxy one request.

        url -- percent-encoded target captured from the request path;
               the incoming query string (if any) is re-appended after
               decoding.
        """
        headers = {}
        # Replay the referer that Proxy.post stashed in the cookie.
        headers["Referer"] = self.request.cookies.get("refer", "")
        url = urllib.unquote_plus(url)
        if self.request.query_string:
            url = "%s?%s" % (url, self.request.query_string)
        logging.info(url)
        resp = urlfetch.fetch(url, headers=headers)
        content = resp.content
        # NOTE: 'type' renamed to avoid shadowing the builtin.
        content_type = resp.headers.get("Content-Type", "")
        if content_type.find("htm") == -1:
            # Non-HTML (images, scripts, css, ...): forward verbatim
            # with the upstream Content-Type.
            self.response.headers['Content-Type'] = resp.headers['Content-Type']
            self.response.out.write(content)
        else:
            charset = self._detect_charset(content)
            logging.info(charset)
            if charset:
                bs = BeautifulSoup(content, fromEncoding=charset)
            else:
                # Let BeautifulSoup sniff the encoding itself.
                bs = BeautifulSoup(content)
            logging.info(bs.originalEncoding)
            self._rewrite_urls(bs, url)
            self.response.out.write(str(bs))

    def _detect_charset(self, content):
        """Best-effort charset sniff from the page's <meta ... Content-Type> tag.

        Returns the declared charset name, or None when none is found.
        gb2312 (any case) is upgraded to its gbk superset, which decodes
        more real-world pages.
        """
        charset = None
        try:
            meta = self.META_RE.findall(content)[0]
            charset = self.CHARSET_RE.findall(meta)[0][2]
        except Exception as e:
            # Best-effort: a missing/malformed meta tag is not fatal.
            logging.warning(e)
        if charset and charset.strip().lower() == "gb2312":
            charset = "gbk"
        return charset

    def _rewrite_urls(self, bs, base_url):
        """Strip <base> tags and point every rewritable URL at the proxy.

        base_url -- the absolute URL of the fetched page, used to
                    resolve relative references.
        """
        # <base> would defeat our relative-URL rewriting; drop it.
        for b in bs.findAll('base'):
            b.extract()
        for tag, attr in self.REWRITE_ATTRS:
            for node in bs.findAll(tag):
                value = node.get(attr, "")
                if value:
                    absolute = urlparse.urljoin(base_url, value)
                    node[attr] = "/proxy/get/%s" % absolute

def main():
    """Wire up the URL routes and start the WSGI application."""
    routes = [
        ('/proxy/?', Proxy),
        ('/proxy/get/(.*)', ProxyGet),
    ]
    run_wsgi_app(webapp.WSGIApplication(routes, debug=True))

if __name__ == "__main__":
    main()