# -*- coding: utf-8 -*-
import commands
import socket
import time
import json
from WebBrowser import *
from spider import *
from socket import *
from comm.packer import * 
from comm.jsonparser import *
from comm.config import *
from comm.wangwangUtil import wwMessage
from comm.log import *
#from processing import Process,Queue  
from twisted.internet import task
from twisted.internet.defer import Deferred
from twisted.internet.protocol import ClientFactory
from twisted.protocols.basic import LineReceiver 
# Global Qt application object; presumably required by the WebBrowser-based
# Spider to render pages (QtWebKit) — TODO confirm against WebBrowser/spider.
application = QApplication([])

# Module-wide logger and configuration, loaded once at import time.
log = Log("./logs/client.log")
configfile = "./conf/config.txt"
conf = config(configfile)
confdict = conf.getConfig()
task_type = confdict['task']['global']['task_type']# 0: periodic task, 1: instant task
# Task-server endpoint this client connects to.
HOST = confdict['network']['global']['host']
PORT = int(confdict['network']['global']['port'])
def requestTaskPack():
    """Build and return the serialized "crawl_request" packet.

    Wraps an (almost empty) task descriptor — only the configured
    task_type is filled in, the remaining 12 fields are blank — inside a
    request packet, and returns it as a str ready for sendLine().
    """
    taskRequest = requestPacker()
    taskPack = taskPacker()
    # taskPacker.packer takes 13 positional fields; all but task_type are
    # left empty for a plain "give me work" request.
    taskPack.packer(task_type, "", "", "", "", "", "", "", "", "", "", "", "")
    taskRequest.packer("crawl_request", str(taskPack.getStrData()))
    return str(taskRequest.getStrData())
class SpiderClient(LineReceiver):
    def __init__(self):
    self.spider =""
	self.task_id =""
	self.domainid =""
    def connectionMade(self):
        self.sendCrawlRequest()
    def crawl(self,data):
	#print "[+] data:",data
	url = data['urls'][0]['url']
	host = urlparse.urlparse(url)[1]
	crawl_depth = data['crawl_depth']
	crawl_maxlink = data['crawl_maxlink']
	cookie = data['cookie'] 
	post = data['urls'][0]['post']
	self.domainid =data['domainid']
	self.spider = Spider(url,crawl_depth,crawl_maxlink,post,cookie,host,application,"\w+://.*logout")	
	self.spider.crawl()
    def dataReceived(self, data):
	log.logDebug("RCV DATA:"+ str(data))
	cmdparser = requestParser()
        try:
         	cmdparser.parse(data)
        except Exception as e:
        	self.sendCrawlRequest()
                return
	try:
      		cmd = cmdparser.getCmd().encode("utf-8")
        	task = cmdparser.getData()
	except Exception as e:
		log.logError("Exception:"+ str(e))
		self.sendCrawlRequest()
		return
	if  cmd =="task_response":
		if task == "":
			time.sleep(10)
			self.sendCrawlRequest()
			return 
		else:
			self.task_id = task[0]['task_id']
			if len(task[0]['urls'])>0:
	    			log.logInfo("START CRAWL TASKID:" + str(self.task_id))
				try:
           				self.crawl(task[0])
				except Exception as e:
					log.logError("Exception:" + str(e))
					self.sendCrawlResult()
		#	print "[+] Crawl Done!"
			self.sendCrawlResult()
            		if self.task_id !='':
				pass
        		else:
            			pass
	if cmd =="status_response":
		if self.spider.result.length()>0:
			self.sendCrawlResult()
		else:
			self.sendCrawlRequest()
    def sendCrawlRequest(self):
	taskRequestData = requestTaskPack()
	print "[+] Send Task Request:",taskRequestData
        self.sendLine(str(taskRequestData))
    def sendCrawlResult(self):
	log.logDebug("Send Crawl Result!")
	resultPack = requestPacker()
	urllist = urlComposer()
	index = 0
	log.logInfo("CRAWL RESULT:"+ str(self.spider.result) + "  TASKID:" + str(self.task_id))
	while self.spider.result.length()>0 and index <= 10:
		var = self.spider.result.pop()
		log.logDebug("Pop :"+ str(var))
		if var['url'].find('https') >=0:
			port = 443
		else:
			try:
				port = urlparse.urlparse(var['url'])[1].split(':')[1]
			except Exception as e:
				port = 80
		try:
			#print "[+] URL:",var['url']," POST:",var['post']," PORT:",port	
			urllist.urlCompose(var['url'],str(var['post']),"",var['referer'],self.task_id,str(port),var['tag'],self.domainid)
		except Exception as e:
			log.logError("COMPOSE ERROR :" + str(e))
		index = index + 1	
	try:
		#print "[+] URLLIST:",urllist.getUrls()
		resultPack.packer("crawl_result",urllist.getUrls())
		log.logDebug("Send result:"+ str(resultPack.getStrData()))
		#print "[+] ",str(resultPack.getStrData())
		self.sendLine(str(resultPack.getStrData()))
	except Exception as e:
		log.logError("SEND CRAWL RESULT ERROR:" + str(e))
    def sendStatus(self):
	data = scanDemo.taskStatus
	self.sendLine(data)
	#print "[+] Send data:",data 
	scanDemo.taskStatus =""

class SpiderClientFactory(ClientFactory):
    """Factory producing SpiderClient protocols.

    Exposes a ``done`` Deferred that fires when the connection ends
    (errback on failure, callback on loss) so task.react() can exit.
    """
    protocol = SpiderClient

    def __init__(self):
        self.done = Deferred()

    def clientConnectionFailed(self, connector, reason):
        log.logError('connection failed:' + str(reason.getErrorMessage()))
        self.done.errback(reason)

    def clientConnectionLost(self, connector, reason):
        # BUG FIX: previously logged 'connection failed' here too
        # (copy-paste from clientConnectionFailed).
        log.logError('connection lost:' + str(reason.getErrorMessage()))
        self.done.callback(None)

def main(reactor):
    """Entry point for task.react: connect to the configured task server.

    Returns the factory's ``done`` Deferred; when it fires (connection
    lost or failed), task.react stops the reactor and the process exits.
    """
    # Dropped unused local `ADDR = (HOST, PORT)` — connectTCP takes the
    # host and port separately.
    factory = SpiderClientFactory()
    reactor.connectTCP(HOST, PORT, factory)
    return factory.done
if __name__ == '__main__':
    # task.react runs the reactor, calls main(reactor), and exits once the
    # returned Deferred fires.
    task.react(main)

