'''
- build the fetch URL based on:
  - refer url
  	- http://proxy-host/0/?url=http://another-host.com/page/1
	- http://another-host/page/1
	- ''
  - request_url
  	- http://proxy-host/a/b/c?k=v
	- http://proxy-host/0/a?k=v
	- http://proxy-host/0/?url=http://another-host.com/page/1

>>> extract_fetch_url('http://proxy-host/0/?url=http://another-host.com/page/1')
'http://another-host.com/page/1'

>>> extract_fetch_url('http://proxy-host/0/?url=http%3a//another-host.com/page/1%3fa=b%26c=d%23k')
'http://another-host.com/page/1?a=b&c=d#k'

>>> extract_fetch_url('http://proxy-host/0/?url=another-host')
'http://another-host'

>>> extract_fetch_url('http://proxy-host/1/?url=http%3a//another-host.com/page/1')

>>> extract_fetch_url('http://proxy-host/0/?URL=http%3a//another-host.com/page/1')

>>> build_fetch_url('', 'http://proxy-host/0/?url=http://another-host.com/page/1')
'http://another-host.com/page/1'

>>> build_fetch_url('http://proxy-host/0/?url=http://another-host.com/page/1', 'http://proxy-host/0/a/b/c?a=b#c')
'http://another-host.com/page/a/b/c?a=b#c'

>>> build_fetch_url('http://proxy-host/0/?url=https://another-host.com/page/1', 'http://proxy-host/a/b/c?a=b#c')
'https://another-host.com/a/b/c?a=b#c'

>>> build_fetch_url('http://proxy-host/0/?url=another-host/1/', 'http://proxy-host/a/b/c?a=b#c')
'http://another-host/a/b/c?a=b#c'

>>> build_fetch_url('http://proxy-host/0/?url=another-host/1/', 'http://proxy-host/0/a/b/c?a=b#c')
'http://another-host/1/a/b/c?a=b#c'

>>> build_fetch_url('http://proxy-host/0/?url=', 'http://proxy-host/a/b/c?a=b#c')

>>> build_fetch_url('http://another-host/0/?url=http://another-host.com/page/1', 'http://proxy-host/a/b/c?a=b#c')

>>> build_fetch_url('http://proxy-host/1/?url=http://another-host.com/page/1', 'http://proxy-host/a/b/c?a=b#c')

>>> build_proxy_url('http://another-host/page')
'/0/?url=http%3A//another-host/page'

>>> replace_links('http://host1/page1/?k=v#c', 'link: <a href="link1">click</a>')
'link: <a href="/0/?url=http%3A//host1/page1/link1">click</a>'

>>> replace_links('http://host1/page1/?k=v#c', 'link: <a href="/link1">click</a>')
'link: <a href="/0/?url=http%3A//host1/link1">click</a>'

>>> replace_links('http://host1/page1/?k=v#c', 'link: <a href="http://host2/link1">click</a>')
'link: <a href="/0/?url=http%3A//host2/link1">click</a>'

>>> replace_links('http://host1/page1/?k=v#c', 'link: <img src="link1" />')
'link: <img src="/0/?url=http%3A//host1/page1/link1" />'

>>> replace_links('http://host1/page1/?k=v#c', 'transparent url(http://t.douban.com/pics/headnavbot.gif) no-repeat scroll left 33px')
'transparent url(/0/?url=http%3A//t.douban.com/pics/headnavbot.gif) no-repeat scroll left 33px'

>>> replace_links('http://host1/page1/?k=v#c', 'transparent url(/pics/headnavbot.gif) no-repeat scroll left 33px')
'transparent url(/0/?url=http%3A//host1/pics/headnavbot.gif) no-repeat scroll left 33px'

>>> replace_links('http://host1/page1/?k=v#c', 'var ONE_PX = "https://mail.google.com/mail/images/c.gif?t=" +')
'var ONE_PX = "/0/?url=https%3A//mail.google.com/mail/images/c.gif%3Ft%3D" +'

'''

import urllib
import urlparse
import cgi
import re

PROXY_URL_PATH = '/0/'
FETCH_URL_PARAM = 'url'
# Each pattern captures four groups: (prefix)(opening delimiter)(url)(closing
# delimiter), so a rewrite can splice a proxied URL in place of group 3.
# Raw strings are used: '\(' is an invalid escape in a plain string literal
# (a SyntaxWarning in modern Python) and raw strings are the regex convention.
LINK_PATTERNS = [
	# HTML attributes: href="...", src='...', action="..."
	re.compile(r'(href=|src=|action=)([\'"])(.+?)(\2)', re.I),
	# CSS references: url(...)
	re.compile(r'(url)(\()(.+?)(\))', re.I),
	# Bare quoted absolute http(s) URLs, e.g. inside JavaScript strings.
	re.compile(r'()([\'"])(https?://.+?)(\2)', re.I),
	]

def extract_fetch_url(origin_url):
	'''Return the target URL embedded in a proxy request, or None.

	Only requests whose path is exactly PROXY_URL_PATH and whose query
	string carries FETCH_URL_PARAM (case-sensitive key) are recognized.
	A scheme-less value is normalized by prefixing 'http://'.
	'''
	url_split = urlparse.urlsplit(origin_url)
	# urlparse.parse_qs supersedes the deprecated cgi.parse_qs (Python 2.6+);
	# behavior is identical: blank values are dropped, so '?url=' yields None.
	params = urlparse.parse_qs(url_split.query)
	if PROXY_URL_PATH == url_split.path and FETCH_URL_PARAM in params:
		fetch_url = params[FETCH_URL_PARAM][0]
		if not fetch_url.lower().startswith('http'):
			fetch_url = 'http://' + fetch_url
		return fetch_url

def build_fetch_url(refer_url, request_url):
	'''Work out which upstream URL a proxied request should fetch.

	A request that itself embeds a fetch url wins outright. Otherwise,
	when the referrer also came through this proxy host, the request's
	path (minus the proxy prefix) is resolved against the referrer's
	embedded fetch url, and the request's query/fragment are re-attached.
	Returns None when neither rule applies.
	'''
	embedded = extract_fetch_url(request_url)
	if embedded:
		return embedded
	refer_fetch_url = extract_fetch_url(refer_url)
	if not refer_fetch_url:
		return None
	refer_parts = urlparse.urlsplit(refer_url)
	request_parts = urlparse.urlsplit(request_url)
	if refer_parts.hostname != request_parts.hostname:
		return None
	path = request_parts.path
	# Drop the proxy prefix so the remainder resolves as a relative path.
	if path.startswith(PROXY_URL_PATH):
		path = path[len(PROXY_URL_PATH):]
	result = urlparse.urljoin(refer_fetch_url, path)
	if request_parts.query:
		result += '?' + request_parts.query
	if request_parts.fragment:
		result += '#' + request_parts.fragment
	return result

def build_proxy_url(fetch_url):
	'''Wrap *fetch_url* into a proxy-relative URL, percent-encoding it.'''
	quoted = urllib.quote(fetch_url)
	return '%s?%s=%s' % (PROXY_URL_PATH, FETCH_URL_PARAM, quoted)

def replace_links(url, content):
	'''Rewrite every link-like token in *content* to go through the proxy.

	Applies each pattern in LINK_PATTERNS in turn, threading the result
	of one pass into the next; *url* is the page the content came from
	and is used to resolve relative links.
	'''
	rewritten = content
	for pattern in LINK_PATTERNS:
		rewritten = replace_links_by(url, rewritten, pattern)
	return rewritten

def replace_links_by(url, content, link_pattern):
	'''Rewrite every match of *link_pattern* in *content* through the proxy.

	Group 3 of each match is the link target: it is resolved against the
	page *url* and replaced with a proxy URL, while the surrounding
	prefix (group 1) and delimiters (groups 2 and 4) are kept as-is.
	'''
	def proxify(match):
		target = urlparse.urljoin(url, match.group(3))
		proxied = build_proxy_url(target)
		return match.group(1) + match.group(2) + proxied + match.group(4)
	# re.sub walks the same non-overlapping, left-to-right matches that
	# finditer would, so this is equivalent to manual span stitching.
	return link_pattern.sub(proxify, content)

if __name__ == '__main__':
	# Run the module docstring's doctests as the test suite.
	import doctest
	doctest.testmod()
