#!/usr/bin/python

import sys
import os
import urllib2
import urllib

def crawlPage(url, path):
	"""Download *url* and save the raw response body to *path*.

	The response is read fully into memory, written byte-for-byte to the
	output file, and a confirmation line is printed.  Binary mode ("wb")
	preserves the payload exactly, and try/finally plus a with-block
	guarantee both the HTTP response and the file handle are closed even
	if the read or write fails (the original leaked both on error and
	shadowed the builtin `file`).
	"""
	resp = urllib2.urlopen(url)
	try:
		page = resp.read()
	finally:
		resp.close()
	with open(path, "wb") as out:
		out.write(page)
	print("Write over in " + path + " for url " + url)

def main():
	"""CLI entry point: expects exactly two arguments, <url> <outputfile>.

	Prints a usage message and returns when the argument count is wrong;
	otherwise fetches the URL into the output file via crawlPage().
	"""
	if len(sys.argv) != 3:
		# BUG FIX: the original did `"invalid argv : " + sys.argv`,
		# which concatenates str + list and raises TypeError instead
		# of printing the usage text.  Join argv into a string first.
		print("invalid argv : " + " ".join(sys.argv))
		print("<url> <outputfile> : tear url page into output file")
		return

	url = sys.argv[1]
	filefullpath = sys.argv[2]
	crawlPage(url, filefullpath)

# Run the crawler only when executed as a script, not when imported.
if __name__ == "__main__":
	main()