import os,sys,pickle,urllib2,time,signal
import subprocess as sub
sys.path.append("./")
import threading  as td
from threadpool import Worker,ThreadPool
from scan import open_port
import socket

class Log(object):
	"""Stub logger -- an unused placeholder kept as a future extension point."""

	def __init__(self):
		pass

class ProcessData(object):
	"""Split an input file into per-worker chunks and merge the results.

	Chunks are written to in/in_00, in/in_01, ... and per-chunk results
	are read back from out/out_00, out/out_01, ...; both directories
	must already exist in the working directory (as in the original
	shell-based version).
	"""

	def __init__(self, finput, work_no):
		self.finput = finput    # path of the file to split
		self.work_no = work_no  # number of chunks / workers
		self.input = {}         # shared per-chunk input buffer (ip -> 1)
		self.output = {}        # shared per-chunk output buffer (record -> 1)

	def init(self):
		"""Split self.finput into at most self.work_no chunk files.

		Replaces the old ``cat | wc -l`` + ``split`` shell pipeline with
		pure Python (no shell injection via the filename), and uses
		self.work_no instead of the previously hard-coded 25 so the
		chunk count matches the worker loop in Run.start().
		"""
		with open(self.finput, "r") as f:
			lines = f.readlines()
		# Floor division keeps integer semantics on both py2 and py3.
		per_chunk = (len(lines) // self.work_no) + 1
		for idx in range(self.work_no):
			chunk = lines[idx * per_chunk:(idx + 1) * per_chunk]
			if not chunk:
				# Fewer lines than workers: nothing left to write.
				break
			with open("in/in_%02d" % idx, "w") as out:
				out.writelines(chunk)

	def save(self):
		"""Concatenate out/out_* (in name order, like the shell glob) into <finput>.res."""
		names = sorted(n for n in os.listdir("out") if n.startswith("out_"))
		with open(self.finput + ".res", "w") as res:
			for name in names:
				with open(os.path.join("out", name), "r") as part:
					res.write(part.read())

	def clean(self):
		"""Delete the intermediate in/in_* and out/out_* chunk files."""
		for dirname, prefix in (("in", "in_"), ("out", "out_")):
			for name in os.listdir(dirname):
				if name.startswith(prefix):
					os.remove(os.path.join(dirname, name))


class CmdTimer(td.Thread):
	"""Run an external command, killing it if it exceeds a timeout.

	Usage: CmdTimer(argv_list, timeout_seconds).exec_cmd()
	"""

	def __init__(self, cmd, timeout):
		td.Thread.__init__(self)
		self.cmd = cmd          # argv list handed to subprocess.Popen
		self.timeout = timeout  # seconds to wait before killing the child
		self.p = None           # Popen handle, set by run()

	def run(self):
		# Runs in the worker thread: launch the child and wait for it.
		self.p = sub.Popen(self.cmd)
		self.p.wait()

	def exec_cmd(self):
		"""Start the command and block for at most self.timeout seconds.

		If the command is still running after the timeout, kill it and
		wait for the worker thread to finish reaping it.
		"""
		self.start()
		self.join(self.timeout)
		# is_alive() replaces isAlive(), which was removed in Python 3.9.
		if self.is_alive():
			# Guard against Popen having failed before self.p was set;
			# Popen.kill() sends SIGKILL portably (was os.kill + SIGKILL).
			if self.p is not None:
				self.p.kill()
			self.join()


class Run(object):
	"""Drive the pipeline: split the input, crawl every chunk, merge results."""

	def __init__(self, finput, no):
		self.no = no          # number of chunks and thread-pool workers
		self.finput = finput  # path of the input file (one IP per line)
		self.pool = ThreadPool(self.no)
		self.procdata = ProcessData(finput, self.no)

	def start(self):
		"""Process each chunk in/in_NN and write its records to out/out_NN."""
		self.procdata.init()
		crawler = Crawl()
		for i in range(self.no):
			# Two-digit suffix matching the chunk files created by init().
			suffix = "%02d" % i
			finput = "in/in_" + suffix
			foutput = "out/out_" + suffix
			crawler.pre_process(finput, self.procdata.input)
			for ip in self.procdata.input.keys():
				self.pool.add_task(crawler.task, ip, self.procdata.output)
			self.pool.wait_completion()
			# task() accumulates newline-terminated records as dict keys.
			with open(foutput, "w") as f:
				f.writelines(self.procdata.output.keys())
			self.procdata.output.clear()
			self.procdata.input.clear()
			print("%s finish" % finput)

		self.procdata.save()
		self.procdata.clean()

"""
port 1=80:0 or 1
port 2=443:0 or 1
dns_reverse:  1,domain or 0,0
80_redirect: 1,url or 0,0
80_reverse: 1,url or 0,0
443_redirect: 1,url or 0,0
443_reverse: 1,url or 0,0


"""
# Global socket timeout (seconds) so urllib2 fetches cannot hang forever.
socket.setdefaulttimeout(5)
class Crawl(object):
	"""Probe a single IP for open web ports, reverse DNS and HTTP(S) redirects.

	reverse_domain() is the entry point; it returns one '#'-joined record:
	ip # port80 # port443 # dns fields # redirect fields (flags are "0"/"1").
	"""
	def __init__(self):
		super(Crawl, self).__init__()
	
		# Browser-like request headers so probed sites answer as they
		# would for a normal user agent.
		self.headers={
        "User-Agent":"Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN; rv:1.9.2.13) Gecko/20101203 Firefox/3.6.13",
        #"User-Agent" = "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.13) Gecko/20101206 Ubuntu/10.10 (maverick) Firefox/3.6.13",
        "Accept":"text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
        "Accept-Language":"zh-cn,zh;q=0.5",
        #"Accept-Encoding":"gzip,deflate",
        "Accept-Charset":"GB2312,utf-8;q=0.7,*;q=0.7",
        #"Keep-Alive":"115",
        "Connection":"close"
        #"Connection":"keep-alive"
    	}

	def pre_process(self,finput,obuffer):
		"""Load every line of `finput` (one IP per line) as a key of `obuffer`."""
		for line in open(finput,"r"):
			line=line.strip("\n")
			#print line
			obuffer[line]=1
		pass


	def task(self,ip,obuffer):
		"""Thread-pool task: store the probe record for `ip` as an `obuffer` key."""
		obuffer[self.reverse_domain(ip)+"\n"]=1
		pass
		
	def reverse_dns(self,ip):
		"""Reverse-resolve `ip` via `dig -x`; return ["1", name] or ["0","0"]."""
		try:
			dm=os.popen("dig -x "+ip+" +short").readlines()
			if not dm:
				#print "SOA"
				return ["0","0"]
			else:
				#print "GET"
				# NOTE(review): len(dm[0]) still counts the stripped "\n",
				# so this slice keeps the whole string and the trailing "."
				# that dig appends is NOT removed -- [:-1] was probably
				# intended. Confirm with downstream consumers before fixing.
				return ["1",dm[0].strip("\n")[:len(dm[0])-1]]

		except:
			return ["0","0"]

	
	def redirect_url(self,turl):
		"""Fetch `turl`; return ["1", final_url] or ["0","0"] on any failure."""
		try:
		#if 1:
			#print turl
			req = urllib2.Request(url = turl,headers = self.headers)
			conn=urllib2.urlopen(req)
			url=conn.geturl()
			conn.close()
			return ["1",url]
		#else:
		except:
			return ["0","0"]

	
	def redirect_url_td(self,turl,obuffer):
		"""Thread-target variant of redirect_url(): appends flag then URL to `obuffer`."""
		try:
		#if 1:
			#print turl
			req = urllib2.Request(url = turl,headers = self.headers)
			conn=urllib2.urlopen(req)
			url=conn.geturl()
			conn.close()
			obuffer.append("1")
			obuffer.append(url)
		#else:
		except:
			obuffer.append("0")
			obuffer.append("0")


	def download_page(self,ip):
		"""Mirror http://<ip> to index/<ip>.html via wget, hard-killed after 30s."""
		try_time=1
		log_name="log/"+str(ip)+".log"
		page_name="index/"+str(ip)+".html"
		#cmd="wget --connect-timeout 15 -t "+str(try_time)+" "+ip+" -o "+log_name+" -O "+page_name
		#print cmd
		#os.popen(cmd)
		cmd=["wget","--connect-timeout","15","-t",str(try_time),str(ip),"-o",log_name,"-O",page_name]
		CmdTimer(cmd, 30).exec_cmd()
		#c.run()


	def reverse_domain(self,ip):
		"""Probe `ip` and return the '#'-joined result record (see class docstring)."""
		print ip
		res=[ip]
		# Defaults: every field starts as a failure/closed marker so the
		# final record always has the same number of '#'-joined fields.
		port_80=["0"]
		port_443=["0"]
		rev_dns=["0","0"]
		red_80=["0","0"]
		rev_80=["0","0"]
		red_443=["0","0"]
		rev_443=["0","0"]
		ports=open_port(ip,80,443)

		if not ports:
			# Neither web port open: only reverse DNS is attempted.
			res=res+port_80+port_443
			rev_dns=self.reverse_dns(ip)
			res=res+rev_dns+red_80+rev_80+red_443+rev_443
			#print len(res)
			return "#".join(res)
		else:
			self.download_page(ip)
			rev_flag=0
			rev_dns=self.reverse_dns(ip)
			if rev_dns != ["0","0"]:
				rev_flag=1

			#print "80"
			if 80 in ports:
				port_80=["1"]
				turl="http://"+ip
				red_80=self.redirect_url(turl)
				if rev_flag==1:
					turl="http://"+rev_dns[1]
					rev_80=self.redirect_url(turl)
			#print "443"
			if 443 in ports:
				port_443=["1"]
				turl="https://"+ip
				red_443=[]
				# HTTPS fetches can stall past the socket timeout, so run
				# the request in a daemon thread and poll red_443 with a
				# 15-second hard cutoff.
				t=td.Thread(target=self.redirect_url_td,args=(turl, red_443))
				t.setDaemon(True)
				t.start()
				
				st=time.time()
				while True:
					now=time.time()
					ex=now-st
					#print ex
					
					if red_443:
						if rev_flag==1:
							turl="https://"+rev_dns[1]
							rev_443=self.redirect_url(turl)
						break
					if ex>15:
						# Timed out: report both 443 fields as failures.
						red_443=["0","0"]
						rev_443=["0","0"]
						break

			res=res+port_80+port_443+rev_dns+red_80+rev_80+red_443+rev_443
			#print len(res)
			return "#".join(res)
		pass










if __name__ == '__main__':
	# Usage: <script> <input_file> <worker_count>
	# worker_count sets both the thread-pool size and the chunk count.
	if len(sys.argv) < 3:
		sys.stderr.write("usage: %s <input_file> <worker_count>\n" % sys.argv[0])
		sys.exit(1)
	finput = sys.argv[1]
	workno = sys.argv[2]
	r = Run(finput, int(workno))
	r.start()