import httplib,urllib,random,sys,re,os
from urlparse import urlparse


class PyScraper:
	"""Minimal cookie-aware HTTP scraper built on httplib (Python 2).

	Remembers the last Set-Cookie header seen and replays it as the
	Cookie header on every subsequent request, follows one level of
	301/302 redirects, and records every URL visited in ``urlhist``.
	"""
	def __init__(self):
		# Raw Cookie header value replayed on every request.
		self.cookie = ""
		self.currenturl = ""
		# Ordered history of every URL passed to get/post/download.
		self.urlhist = []

	def __str__(self):
		# Render the visit history as "url1->url2->" (trailing arrow
		# kept for backward compatibility). join avoids quadratic +=.
		return ''.join(item + '->' for item in self.urlhist)

	def download(self, url, localfolder):
		"""GET ``url`` and stream the body to localfolder/<basename>.

		Draws a text progress bar on stdout while downloading.  On a
		301/302 the redirect target is downloaded instead (the old code
		saved the redirect body and then merely GET the target).
		Returns the final (empty) chunk read, preserving the historical
		return value.
		"""
		bufsize = 1024
		self.urlhist.append(url)
		scheme, hostname, path, q, query, position = urlparse(url)
		head, fname = os.path.split(path)
		if query != '':
			path = path + "?" + query

		conn = httplib.HTTPConnection(hostname)
		conn.request('GET', path, None, {'Cookie': self.cookie})
		resp = conn.getresponse()
		if resp.getheader('set-cookie') is not None:
			self.cookie = resp.getheader('set-cookie')
		# Follow redirects BEFORE opening the local file, so the
		# redirect body is never written to disk and the real target is
		# downloaded (not just fetched) into the same folder.
		if resp.status == 302 or resp.status == 301:
			location = resp.getheader('location')
			conn.close()
			return self.download(location, localfolder)

		# Content-Length may be absent (e.g. chunked responses); fall
		# back to 0 so neither int(None) nor division can crash.
		clen = resp.getheader('content-length')
		total = int(clen) if clen is not None else 0
		f = open(localfolder + "/" + fname, 'wb')
		sofar = 0
		try:
			while 1:
				data = resp.read(bufsize)
				f.write(data)
				sofar += len(data)
				# With an unknown total the bar simply stays at 0%.
				perc = (float(sofar) / float(total)) if total > 0 else 0.0
				count = int(perc * 20)
				sys.stdout.write("\r%-30s|%-20s|%3d percent" % (fname, '#' * count, perc * 100))
				sys.stdout.flush()
				if len(data) == 0:
					break
		finally:
			# Always release the file handle and socket, even if the
			# connection dies mid-transfer.
			f.close()
			conn.close()
		return data

	def get(self, url):
		"""GET ``url`` and return the response body.

		Updates the stored cookie from Set-Cookie and follows 301/302
		redirects recursively.
		"""
		self.urlhist.append(url)
		scheme, hostname, path, q, query, position = urlparse(url)
		if query != '':
			path = path + "?" + query

		conn = httplib.HTTPConnection(hostname)
		conn.request('GET', path, None, {'Cookie': self.cookie})
		resp = conn.getresponse()
		data = resp.read()
		if resp.getheader('set-cookie') is not None:
			self.cookie = resp.getheader('set-cookie')
		conn.close()
		if resp.status == 302 or resp.status == 301:
			return self.get(resp.getheader('location'))
		return data

	def post(self, url, data):
		"""POST form-encoded ``data`` to ``url`` and return the body.

		Updates the stored cookie from Set-Cookie.  NOTE(review): on a
		301/302 this re-POSTs the same payload to the Location target —
		kept for compatibility, although an RFC-conforming client would
		usually switch to GET on 302.
		"""
		self.urlhist.append(url)
		scheme, hostname, path, q, query, position = urlparse(url)
		conn = httplib.HTTPConnection(hostname)
		conn.request('POST', path, data,
			{'Content-Type': 'application/x-www-form-urlencoded', 'Cookie': self.cookie})
		resp = conn.getresponse()
		data = resp.read()
		if resp.getheader('set-cookie') is not None:
			self.cookie = resp.getheader('set-cookie')
		conn.close()
		if resp.status == 302 or resp.status == 301:
			return self.post(resp.getheader('location'), data)
		return data
	
def downloadrsfile(filename, userid, password, localfolder='f:\\films'):
	"""Download one rapidshare.com file using a premium account.

	filename    -- full URL of the form http://rapidshare.com/files/<id>/<name>
	userid      -- premium account id
	password    -- premium account password
	localfolder -- destination directory (default keeps the previously
	               hard-coded path for backward compatibility)

	Raises ValueError when ``filename`` is not a rapidshare file URL or
	when the download page cannot be parsed.
	"""
	# The dot before "com" is escaped; the original pattern matched any
	# character there.
	fparser = re.compile(r'^.*?rapidshare\.com/files/(.*?)/(.*?)$')
	fmatch = fparser.match(filename)
	# Bare `raise` with no active exception is itself an error in
	# Python; raise something callers can actually catch and read.
	if fmatch is None:
		raise ValueError('not a rapidshare file URL: %r' % (filename,))

	fileid = fmatch.group(1)
	fname = fmatch.group(2)

	s = PyScraper()
	data = s.get(filename)
	robj = re.compile('.*<form action="(.*?)".*?>.*', re.DOTALL)
	m = robj.match(data)
	if m is None:
		raise ValueError('could not locate the download form on the page')
	realurl = m.group(1)
	data = s.post(realurl, 'dl.start=PREMIUM')
	scheme, hostname, path, q, query, position = urlparse(realurl)
	# The mirror id is embedded in the hostname, e.g. rs42.rapidshare.com.
	serverid = hostname.replace('rs', '').replace('.rapidshare.com', '')
	loginurl = scheme + "://" + hostname + "/cgi-bin/premium.cgi"

	postdata = 'premiumlogin=1&fileid=%s&filename=%s&serverid=%s&accountid=%s&password=%s' % (
		fileid, fname, serverid, userid, password)
	data = s.post(loginurl, postdata)

	# Re-request the file page now that the premium session cookie is set.
	data = s.post(realurl, 'dl.start=PREMIUM')
	pat = re.compile('<a href="(http://rs.*?)">Download')
	mirrors = pat.findall(data)
	if not mirrors:
		raise ValueError('no download mirrors found for %s' % fname)
	# Pick a random mirror to spread load across servers.
	s.download(random.choice(mirrors), localfolder)

# Script entry point: every command-line argument after the script name
# is treated as a rapidshare URL to download.  The __main__ guard keeps
# the module importable without side effects; slicing argv replaces the
# old manual `count` flag that skipped sys.argv[0].
if __name__ == "__main__":
	for filename in sys.argv[1:]:
		downloadrsfile(filename, '<your username>', '<your password>')


