#!/usr/bin/python
# -*- coding: utf-8 -*-
import urllib2
import urllib
import cookielib
import re, sys

# page = "http://www.budejie.com/index.php?page=3&maxid=1379494081"
# Optional default page URL from the command line.  main() iterates over
# sys.argv[1:] itself, so this module-level value is only a convenience;
# guarded so running/importing without arguments no longer raises IndexError.
page = sys.argv[1] if len(sys.argv) > 1 else None

# Directory where downloaded pictures are written (assumed to exist).
cache_dir = "./cache/"

# Picture file extension searched for in the page markup.
ext = "jpg"

# rel_link = False
# True: pictures appear as relative links ("images/123.jpg") on the page;
# False: absolute "http://..." links are expected instead.
rel_link = True

# enable_proxy = False
# Route HTTP requests through a local proxy at 127.0.0.1:8087.
enable_proxy = True

def get_pic(opener, cache_dir, pic):
	print "Requesting "+ pic
	pic_file_name = re.findall(r'\/([\w\d]+\.' + ext + ')', pic)[0]
	cache = file(cache_dir + pic_file_name, "wb")
	
	html = opener.open(pic)
	content = html.read()
	cache.write(content)
	cache.close()
	print pic_file_name + " saved completed."
	print ""

def fetch_page(page):
	proxy_handler = urllib2.ProxyHandler({"http" : 'http://127.0.0.1:8087'})
	
	cj = cookielib.CookieJar()
	if enable_proxy:
		opener = urllib2.build_opener(proxy_handler, urllib2.HTTPCookieProcessor(cj))
	else:
		opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))

	urllib2.install_opener(opener)

	request = ''


	
	html = opener.open(page, urllib.urlencode(request))
	content = html.read()
	
	if rel_link:
		pic_list = re.findall(r'images\/\d+.' + ext, content)
		print pic_list
		p = page.split('/')
		del p[-1]
		print p
		page = '/'.join(p)
	else:
		pic_list = re.findall(r'http\:\/\/[\w\d\_\-\.\/]+\/[a-z0-9]+\.' + ext, content)
	
	pic_list = {}.fromkeys(pic_list).keys()
	
	for pic in pic_list:
		if rel_link:
			get_pic(opener, cache_dir, page + '/' + pic)
		else:
			get_pic(opener, cache_dir, pic)

def main():
	"""Treat every command-line argument as a page URL and scrape each one."""
	pages = sys.argv[1:]
	for url in pages:
		fetch_page(url)

# Only scrape when executed as a script, not when imported as a module.
if __name__ == "__main__":
	main()