'''
Created on Apr 21, 2011

@author: Jialai_Zhu
'''
import mechanize
import cookielib

def FILE_PATH_EX(path,sub=''):
    """Return the absolute path of *path* resolved relative to this
    module's directory, optionally inside sub-directory *sub*.

    path -- file name (or relative path) to resolve
    sub  -- optional sub-directory under the module directory
    """
    import os
    # Anchor on the directory containing this source file, not the CWD,
    # so lookups work no matter where the script is launched from.
    base=os.path.dirname(__file__)
    return os.path.abspath(os.path.join(base,sub,path))
# Character encoding used when submitting form data to the search engine.
CODE_PAGE='gb2312'
# Running count of page opens.  NOTE: the original had a module-level
# ``global OPEN_COUNT`` statement, which is a no-op outside a function.
OPEN_COUNT=0
# Pool of User-Agent strings; one is chosen at random per browser
# instance so the scraper looks like a regular desktop browser.
BROWSERS = [
            'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; InfoPath.3; .NET4.0C)',#IE9
            'Mozilla/5.0 (Windows NT 6.1; rv:2.0.1) Gecko/20100101 Firefox/4.0.1',#'FF4'
            # BUG FIX: a comma was missing after the Chrome entry, so
            # implicit string concatenation fused it with the Firefox 3
            # entry into one garbage User-Agent.
            'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.0 Safari/534.24',#CHROME
            'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.1) Gecko/2008071615 Fedora/3.0.1-1.fc9 Firefox/3.0.1'#FF3
            ]
# Sentinel passed to browser() meaning "pick a random proxy from the pool".
PROXY_ON="PROXY_ON"
# Per-proxy status codes stored in PROXY_LIST_RANDOM entries.
PROXY_STATUS_GOOD=0
PROXY_STATUS_BAD=1
# Shared pool of proxy dicts, populated by init_PROXY_STATUS().
PROXY_LIST_RANDOM=[]
# Number of proxies currently believed good.
PROXY_GOOD=0
def init_PROXY_STATUS():
    """(Re)load the proxy pool and mark every entry as good.

    Populates the module globals PROXY_LIST_RANDOM (list of proxy dicts,
    each given ``status``/``ref`` bookkeeping keys) and PROXY_GOOD (the
    count of usable proxies).
    """
    global PROXY_LIST_RANDOM
    global PROXY_GOOD
    # NOTE: the original also imported get_proxy here but never used it.
    from proxy import load_proxy_list
    PROXY_LIST_RANDOM=load_proxy_list()
    for entry in PROXY_LIST_RANDOM:
        entry['status']=PROXY_STATUS_GOOD
        entry['ref']=0      # times this proxy has been handed out
    print (PROXY_LIST_RANDOM)
    PROXY_GOOD=len(PROXY_LIST_RANDOM)
def set_PROXY_STATUS(PROXY_LIST_RANDOM,PRXY,STATUS):
    """Set the status of proxy *PRXY* inside *PROXY_LIST_RANDOM*.

    PRXY   -- the 'http' address identifying the proxy
    STATUS -- PROXY_STATUS_GOOD or PROXY_STATUS_BAD

    Decrements the global good-proxy counter when a proxy goes bad, and
    refreshes the whole pool once no good proxies remain.
    """
    global PROXY_GOOD
    if PROXY_GOOD<1:
        # Pool exhausted: fetch a fresh proxy list and re-initialise it.
        import proxy
        proxy.get_proxy()
        init_PROXY_STATUS()
    for entry in PROXY_LIST_RANDOM:
        if entry['http']==PRXY:
            entry['status']=STATUS
            print ("---proxy %s bad"%(PRXY))
            # BUG FIX: the original compared STATUS against PROXY_GOOD
            # (the good-proxy *count*) instead of PROXY_STATUS_GOOD, so
            # the counter was decremented on the wrong condition.
            if STATUS!=PROXY_STATUS_GOOD:
                PROXY_GOOD=PROXY_GOOD-1
            return
def random_PROXY_STATUS(PROXY_LIST_RANDOM):
    """Pick a random good proxy from the pool.

    Returns a ``{'http': address}`` dict suitable for
    mechanize.Browser.set_proxies(), or None when no good proxy was
    found within ten random probes (or the pool is empty/broken).
    """
    import random
    def get_server(pool):
        # One random probe into the pool; None when it misses.
        if not pool:
            return None
        index=random.randint(0,len(pool)-1)
        proxy_server=pool[index]
        if proxy_server['status']==PROXY_STATUS_GOOD:
            # BUG FIX: the original bumped pool[i] using the *outer*
            # probe counter; the entry actually chosen is pool[index].
            pool[index]['ref']=pool[index]['ref']+1
            ret={'http':proxy_server['http']}
            print (ret)
            return ret
        return None
    for _ in range(10):
        try:
            s=get_server(PROXY_LIST_RANDOM)
            if s!=None:
                return s
        except KeyError:
            # Malformed pool entry (missing 'status'/'http'): give up.
            return None
    return None
# Lazily-opened append handle used by browser.print_resp() to dump every
# fetched page into dump.html (debug aid).
PAGE_DUMP=None
def tb(msg=''):
    """Print *msg* followed by the current exception's traceback.

    Intended to be called from inside an ``except`` block; outside one,
    traceback.print_exc() just reports that no exception is active.
    (Indentation normalised from tabs to the file's 4-space style.)
    """
    import traceback
    print (msg)
    traceback.print_exc()
def RandomAgent():
    """Return a randomly chosen User-Agent string from BROWSERS."""
    import random
    # random.choice replaces the manual randint indexing (whose local
    # variable shadowed the ``range`` builtin).
    return random.choice(BROWSERS)
class BrowserError(Exception):
    """Raised by browser.retry_with_error() when a fetch finally fails.

    url   -- the URL being fetched
    error -- human-readable error description
    code  -- numeric error/status code.  BUG FIX: now defaults to 0,
             because one call site constructs BrowserError(url, msg)
             with only two arguments, which used to raise TypeError at
             exactly the moment an error should have been reported.
    """
    def __init__(self, url, error, code=0):
        self.url = url
        self.error = error
        self.code = code
    def detail(self):
        """Return a short 'code[..] error[..]' summary string."""
        return ('code[%d] error[%s]')%(self.code,self.error)
class retry_count():
    """Exponential back-off counter.

    sleep() pauses the caller for a geometrically growing delay, up to
    max_tries times, and reports whether another retry is allowed.
    """
    def __init__(self,sleep_time,max_tries=5):
        self.default_sleep=sleep_time   # base delay; set() may override it
        self.retry_sleep=sleep_time     # delay used by the next sleep()
        self.max_tries=max_tries
        self.retry_time=1               # 1-based attempt counter
    def set(self,timeout):
        """Adopt *timeout* as the base delay, but only before the
        first retry has run."""
        if timeout is None:
            return
        if self.retry_time==1:
            self.default_sleep=timeout
            self.retry_sleep=timeout
    def reset(self):
        """Restart the attempt counter and restore the base delay."""
        self.retry_time=1
        self.retry_sleep=self.default_sleep
    def sleep(self,timeout=None):
        """Sleep before the next retry.

        timeout==0    -- wait a flat 30s and report "no retry" (False)
        timeout==None -- no wait, no retry (False)
        otherwise     -- seed the base delay via set(), sleep for the
                         current back-off, double it, and return True
                         while attempts remain.
        """
        import time as _time
        if timeout==0:
            _time.sleep(30)
            return False
        if timeout is None:
            return False
        self.set(timeout)
        if self.retry_time>self.max_tries:
            return False
        _time.sleep(self.retry_sleep)
        self.retry_time=self.retry_time+1
        self.retry_sleep=self.retry_sleep*2
        return True
class browser():
    """mechanize.Browser wrapper: cookie jar, random User-Agent,
    optional (pooled) proxy selection and retried page opens with
    exponential back-off via retry_count."""
    # NOTE(review): shadows the module-level OPEN_COUNT and is never used.
    OPEN_COUNT=0
    
    def reset_retry(self):
        """Restart the back-off counter (called after a successful fetch)."""
        self.retry_count.reset()
    def submit(self,timeout=50):
        """Submit the currently selected form with a timeout."""
#        return self.br.submit()
        # NOTE(review): 'sumbit_timeout' is not a stock mechanize API --
        # presumably a locally patched Browser method; confirm spelling.
        return self.br.sumbit_timeout(timeout=timeout)
    def __init__(self,debug=True,agent=None,cb=[],proxy=None):
        """Create and configure the underlying mechanize browser.

        debug -- enable mechanize's verbose HTTP logging
        agent -- unused; a random User-Agent is always installed
        cb    -- callables invoked with the mechanize browser after each
                 successful response (NOTE(review): mutable default
                 argument, shared between instances)
        proxy -- None for a direct connection, PROXY_ON to draw a random
                 proxy from the module pool, or a one-element list
                 holding a proxy dict
        """
        self.retry_count=retry_count(10,max_tries=2)
        self.reset_retry()
        self.cb=cb
        # NOTE(review): hard-coded True -- the `debug` parameter does not
        # control page dumping in print_resp().
        self.debug=True
        self.dump=None
        # self.browser
        self.br = mechanize.Browser()
        
        # Cookie Jar
        self.cj = cookielib.LWPCookieJar()
        self.br.set_cookiejar(self.cj)
        
        # self.browser options
        self.br.set_handle_equiv(True)
        self.br.set_handle_gzip(True)
        self.br.set_handle_redirect(True)
        self.br.set_handle_referer(True)
        self.br.set_handle_robots(False)
        
        # Follows refresh 0 but not hangs on refresh > 0
        self.br.set_handle_refresh(mechanize._http.HTTPRefreshProcessor(), max_time=1)
        
        if debug:
            # Want debugging messages?
            self.br.set_debug_http(True)
            self.br.set_debug_redirects(True)
            self.br.set_debug_responses(True)
        self.set_random_agent()
        self.set_random_proxy(proxy)
    def check_proxy(self,proxy):
        """Activate SOCKS5 when *proxy* contains a 'socks' entry.

        Returns True when a socks proxy was switched on (or on any
        error); False when the dict holds a plain http proxy the caller
        should hand to mechanize instead.
        """
        try:
            for type in proxy:
                if type=="socks":
                    ip=proxy[type].split(":")[0]
                    port=int(proxy[type].split(":")[1])
                    return self.sock5_on(ip, port)
                else:
                    self.sock5_off()
                return False
        except:
            return True
    def sock5_on(self,ip,port):
        """Monkey-patch the socket module to route through SOCKS5 ip:port.

        The original socket class is stashed in socket.old_socket so
        sock5_off() can restore it.
        """
        import socks
        import socket
        try:
            if socket.old_socket!=None:
                # Already patched.  NOTE(review): returns None, not True.
                return
            else:
                socket.old_socket=socket.socket
        except:
            socket.old_socket=socket.socket
            pass
        socket.socket = socks.socksocket
        socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, ip, port)
        return True
    def  sock5_off(self):
        """Undo sock5_on(): restore the original socket class."""
        import socks
        import socket
        try:
            if socket.old_socket==None:
                return
            socket.socket=socket.old_socket
            socket.old_socket=None
        except:pass           
    def set_random_proxy(self,proxy=PROXY_ON):
        """Install a proxy on the mechanize browser.

        proxy==None: stay direct.  proxy==PROXY_ON: draw a random good
        proxy from the module pool (self.proxy_radom marks it so
        remove_proxy() can blacklist it later).  Otherwise: a
        one-element list whose single dict is used as-is.
        """
        from proxy import get_proxy
#        if proxy==None:
#            self.sock5_off()
#            return
        self.proxy_radom=False
        if proxy==None:
            return
        if proxy!=PROXY_ON:
            try:
                if len(proxy)==1:
                    proxy=proxy[0]
                    if self.check_proxy(proxy)==False:
                        self.proxy_server=proxy['http']
                        self.br.set_proxies(proxy)
                return
            except:
                return         
        global PROXY_LIST_RANDOM
        proxy_server=random_PROXY_STATUS(PROXY_LIST_RANDOM)
        if proxy_server!=None:
#            print proxy_server
            if self.check_proxy(proxy_server)==False:
                self.proxy_server=proxy_server['http']
                self.br.set_proxies(proxy_server)
                self.proxy_radom=True
        pass
    def remove_proxy(self):
        """Blacklist the currently installed pooled proxy (if any)."""
        if self.proxy_radom==False:return
        try:
            global PROXY_LIST_RANDOM
            set_PROXY_STATUS(PROXY_LIST_RANDOM,self.proxy_server,PROXY_STATUS_BAD)
        except:
            pass
    def set_random_agent(self):
        """Install a randomly chosen User-Agent header."""
        # User-Agent (this is cheating, ok?)
        self.br.addheaders = [('User-agent', RandomAgent())]     
        pass
    def html(self):
        """Return the body of the last successful response."""
        return self.resp.read()
#    def timeout(self):
#        self.resp=None
#        self.retry_with_error(self.br.submit,timeout=0)
    def retry_with_error(self,func,sleeptime,**arglist):
        """Call func(**arglist), retrying recoverable network errors.

        sleeptime seeds the retry_count back-off (None disables retry).
        On final failure the pooled proxy is blacklisted, the exception
        is stored in self.BrowserError and re-raised.  Returns the
        response object on success.
        """
        from httplib import HTTPException
        try:       
            import urllib2
            import socket
            try:            
                self.resp=func(**arglist)
            except (urllib2.HTTPError, urllib2.URLError), e:
                try:
                    code=e.reason.args[0]
                except:
                    code=e.code
                print e
                if self.error_timeout(sleeptime)==True:
                    return self.retry_with_error(func,sleeptime,**arglist)
#                    return func(**arglist)
                raise BrowserError(self.url, str(e),code)
            except (socket.error, socket.sslerror), msg:
                print msg
                # NOTE(review): only two arguments passed here -- this
                # relies on BrowserError's code parameter having a default.
                raise BrowserError(self.url, msg)
            except socket.timeout, e:
                print e
                raise BrowserError(self.url, "timeout",30000)
            except KeyboardInterrupt:
                raise
            except HTTPException,ex:
                from httplib import BadStatusLine
                print 'HTTPException'
                print BrowserError(self.url, "HTTPException",30000)
                if self.error_timeout(sleeptime)==True:
                    return self.retry_with_error(func,None,**arglist)
                else:
                    raise BrowserError(self.url, "HTTPException",30000)
            except Exception,ex:

                print BrowserError(self.url, "unknown error",20000)
                if self.error_timeout(sleeptime)==True:
                    return self.retry_with_error(func,None,**arglist)
                else:
                    raise BrowserError(self.url, "unknown error",20000)                
            self.reset_retry()
            self.print_resp()
        except Exception,ex:
            # Final failure: drop the proxy, remember the error, re-raise.
            self.remove_proxy()
            self.BrowserError=ex
            tb(ex)
            raise ex
        return self.resp
    def error(self):
        """Return the last failure's detail string ('' when none)."""
        ret=''
        try:
            if self.BrowserError:
                ret=self.BrowserError.detail()                
                pass
        except:
            pass
        return ret
    #retry_count==None not try
    #==0 30
    #
    def open(self,url,retry_timeout=0,timeout=50):    
        """Fetch *url* through the retry machinery.

        NOTE(review): the `timeout` parameter is ignored -- the call
        below hard-codes timeout=50.
        """
        global OPEN_COUNT
        OPEN_COUNT=OPEN_COUNT+1
        print '---------------------',OPEN_COUNT
        self.url=url
        self.retry_with_error(self.br.open,retry_timeout,url=url,timeout=50)
    def error_timeout(self,timeout=None):
        """Back off before a retry; True while retries remain."""
        return self.retry_count.sleep(timeout)
    def click(self,name,timeout=50):
        """Follow the first <a> link whose text equals *name*.

        Returns True when a link was followed, False otherwise.
        NOTE(review): self.open() returns None, so self.resp is cleared
        here rather than set to the new response.
        """
#        for l in self.br.links(tag='a'):
#            print l.text
        for l in self.br.links(tag='a',text=name):
                self.resp=self.open(self.br.click_link(l))
                self.print_resp()
                return True
        return False
    def print_resp(self):
        """Run registered callbacks, then append the page to dump.html."""
        for cb in self.cb:
            cb(self.br)
        if self.debug==True:
#            print self.br.geturl()
#            print self.br.response().read()
            # Show the html title
#            print self.br.title()        
#            print self.br.response().info()
            global PAGE_DUMP
            if PAGE_DUMP==None:
                PAGE_DUMP=open('dump.html','ab')
            PAGE_DUMP.write(self.br.response().read())


import re
from xml.etree.ElementTree import ElementTree
from xml.etree.ElementTree import Element
from xml.etree.ElementTree import SubElement
from xml.etree.ElementTree import dump
from xml.etree.ElementTree import Comment
from xml.etree.ElementTree import tostring


class Result(object):
    """One search hit (title, link, snippet) plus helpers to extract and
    clean text from a BeautifulSoup element tree."""
    def __init__(self):
        self.title=''
        self.href=''
        self.text=''
    def Print(self):
        """Best-effort console dump; encoding errors are swallowed."""
        try:
            print (u'\n----------------\nTitle[%s]\nHref[%s]\nText[%s]'%(self.title,self.href,self.text))
        except Exception:
            pass

    def get_text(self,element):
        """Concatenate all text nodes under *element* (a BeautifulSoup
        node) and normalize HTML entities."""
        names=element.findAll(text=True)
        text=''
        for x in names:
            text+=x
        return self.normalize_content(text)

    def get_element_text(self,element,end):
        """Collect the strings of *element*'s children up to (excluding)
        the first child whose tag name equals *end*.

        Returns the entity-normalized concatenation, or None when
        *element* is falsy.
        """
        desc_strs = []
        if not element: return
        for t in element:
            try:
                if t.name == end: break
            except AttributeError:
                pass    # plain strings have no .name
            try:
                desc_strs.append(t.string)
            except Exception:
                desc_strs.append(t)
        desc_strs =''.join(s for s in desc_strs if s)
        return self.normalize_content(desc_strs)

    def replace_with(self,text,old,new):
        """re.sub wrapper that returns *text* unchanged on any error."""
        try:
            return re.sub(old, new, text)
        except Exception:
            return text
    def normalize_content(self,text):
        """Replace a handful of common HTML entities with literal chars.

        (The mapping was previously named ``set``, shadowing the
        builtin.)  NOTE(review): replacement order follows dict
        iteration order, so nested entities like '&amp;lt;' may
        normalize differently across interpreters.
        """
        entities={'&lt;':'<','&gt;':'>','&rt;':'>','&nbsp;':' ','nbsp':' ','&amp;':'&','&quot;':'"'}
        for old, new in entities.items():
            text=self.replace_with(text,old,new)
        return text
    def toXML(self):
        """Return an ElementTree <result> element carrying
        title/text/href as attributes."""
        result={}
        result['title']=self.title
        result['text']=self.text
        result['href']=self.href
        return  Element("result", result)
        

        
     
         
class GoogleResult(Result):
    """A Result parsed from one Google SERP entry.

    Class attributes ``tag``/``attrs`` describe the BeautifulSoup query
    that locates result nodes: <li class="g..."> elements.
    """
    tag='li'
    attrs={'class':re.compile('^g')}
    def __init__(self,soup):
        # *soup* is the BeautifulSoup node for a single result <li>.
        try:
            Result.__init__(self)
            # Accept only plain organic-result class values.
            if soup['class']=='g w0':
                pass
            elif soup['class']=='g':
                pass
            else:
                # NOTE(review): bare raise with no active exception --
                # this surfaces as a TypeError, which the caller's
                # blanket except treats as "skip this node".
                raise
            self.href   =   soup.a['href']
            self.title  =   self.get_text(soup.a)
            self.text   =   self.get_element_text(soup.div.div,None)
#            self.text   = get_text(soup.div.div)
        except Exception,ex:     
            raise  
from BeautifulSoup import *
import time
import string
class search_key_convert():
    """Translate a search key between its user form (embedded double
    quotes) and its XML-safe form (quotes replaced by '+').

    Exactly one of *key* / *key_in_xml* should be given; convert()
    returns the other representation (or None when neither was set).
    """
    def __init__(self,key=None,key_in_xml=None):
        self.key=key
        self.key_in_xml=key_in_xml
    def convert(self):
        """Return the converted form and cache it on the instance."""
        if self.key!=None:
            # str.replace supersedes the long-deprecated
            # string.replace() module function; behavior is identical.
            self.key_in_xml=self.key.replace('"', '+')
            return self.key_in_xml
        if self.key_in_xml!=None:
            self.key=self.key_in_xml.replace('+','"')
            return self.key
class search_engine():
    @staticmethod
    def factory(setting):
        type=setting['type']
        map={'google':search_engine_google,
             'baidu':None}
        try:
            return map[type](setting)
        except:
            return None
    def __init__(self,setting):
        self.key=search_key_convert(key_in_xml=setting['key']).convert()
        import db_result
        if db_result.query_keyword_status(self.key)==True:
            raise
        self.date=False
        try:
            if  len(setting['date']):
                self.date=setting['date']
        except:
            pass
        self.options=''
        pass
    def error(self):
        nav_error=self.nav.error()
        return nav_error
class search_engine_google(search_engine):
    """Google implementation: drives the advanced-search form, walks
    result pages and collects GoogleResult entries in self._report."""
    # NOTE(review): never read; search() below checks self.run_set_100,
    # which is never assigned anywhere -- presumably meant to be this.
    set100=False
    def __init__(self,setting):
        search_engine.__init__(self, setting)
        self.nr=0
        self._report=[]     # accumulated GoogleResult objects
    def search_advance(self):
        """Public wrapper for the advanced-search flow."""
        self.__search_advance()
    def __search_advance(self):
        """Open Google's advanced-search form, fill keyword / results
        per page / date restriction, and submit."""
        url='http://www.google.com.hk/advanced_search?hl=zh-CN'
        self.nav.open(url=url,retry_timeout=0,timeout=10)
        time.sleep(2)
        self.nav.br.select_form(name='f')
        self.nav.br.form['as_q']=self.key.encode(CODE_PAGE)
        self.nav.br.form['num']=["100"]     # 100 results per page
        self.options=self.options+'num:(100)'
        if self.date!=False:
            self.nav.br.form['as_qdr']=[self.date.encode(CODE_PAGE)]
        self.options=self.options+('date:%s')%(str(self.date))
        self.__submit_notry()
        
    def __submit_notry(self):
        """Submit via the browser's retry machinery (no local retry)."""
        self.nav.retry_with_error(self.nav.submit,None)
        
    def __submit_with_try(self):
        """Submit with a local retry loop that re-runs the whole
        advanced-search flow on failure."""
        import urllib2
        try:
            self.nav.submit()    
        except urllib2.HTTPError,ex:
            print ex
            pass   
        except Exception,ex:
            try:
                if self.submit_fail_retry.sleep()==False:
                    raise ex
            except:
                # First failure: the retry counter doesn't exist yet.
                self.submit_fail_retry=retry_count(30,2)
                self.submit_fail_retry.sleep()
            self.__search_advance()
            pass
        
    def set_period(self):
        """Click the date-restriction link when one was configured."""
        if self.date!=None:
            self.nav.click(self.date.encode('utf8'))
    def search(self):
        """Plain (non-advanced) search via the Google home page.

        NOTE(review): self.run_set_100 is never defined, so this path
        raises AttributeError; only search_advance() is used by run().
        """
        if self.run_set_100==False:
            self.nav.open('http://www.google.com.hk/')
        time.sleep(5)
        # Select the first (index zero) form
        self.nav.br.select_form(nr=0)        
        # Let's search 
        print self.key,len(self.key)
        self.nav.br.form['q']=self.key.encode(CODE_PAGE)
        self.nav.submit()
           

                          
    def run(self,nav):
        """Entry point used by nav_engine: keep the browser and search."""
        self.nav=nav
        self.search_advance()

    def __parser(self,page):
        """Extract GoogleResult items from a result page's HTML and
        append them to self._report."""
        # Sanity check that this really is a Google results page.
        found=page.find('window.google=')
        if found<0:
            # NOTE(review): bare raise with no active exception.
            raise
        soup=BeautifulSoup(page,fromEncoding=CODE_PAGE)
#        result=soup.find(name='script',text='window.google=')
        tags=soup.findAll(name=GoogleResult.tag,attrs=GoogleResult.attrs)
        print 'Search result %d'%(len(tags))
        
        for i in tags:
            try:
                r=GoogleResult(i)                    
                self._report.append(r)
            except Exception,ex:     
                pass    # unparsable node: skip it
        return self._report
    def report(self,report_handle):
        """Persist results via *report_handle* and mark the keyword done."""
        import db_result
        sucessed=len(self._report)>0
        if sucessed==False:
            # No results: count as success only if no error occurred.
            sucessed=len(self.error())==0        
        # NOTE(review): 'hanlde' matches the (typo'd) method name on the
        # report class; kept for compatibility.
        ret=report_handle.hanlde(self)
        db_result.set_key_searched(self.key,ret,sucessed)
    def parser(self): 
        """Parse the current browser response into self._report."""
        result=self.__parser(self.nav.br.response().read())
        print len(result)
  
        
            
class nav_script_parser():
    """Parse a navigation-script XML file into a list of
    [action, argument] pairs consumed by nav_engine.run()."""
    def __init__(self,script_xml):
        self.script_xml=script_xml      # path to the script XML file
        self.code=[]                    # parsed [call, args] pairs
    def parser_xml(self):
        """Read the script file and return the instruction list.

        <search .../>   -> args is a search_engine instance (factory)
        <page page=X/>  -> args is the page-range string
        <click a=X/>    -> args is the link text to click
        """
        root=ElementTree().parse(self.script_xml)
        # Iterate children directly: getchildren() was deprecated and is
        # removed in Python 3.9; plain iteration is equivalent.
        for c in root:
            action=c.tag
            item=c.attrib
            call=action
            if action=='search':
                args=search_engine.factory(setting=item)
            elif action=='page':
                args=item['page']
            elif action=='click':
                args=item['a']
            # NOTE(review): an unrecognised action re-appends the
            # previous iteration's args (or raises NameError when it is
            # the first element) -- kept as-is to preserve behaviour.
            self.code.append([call,args])
        return self.code
            
class nav_engine():
    """Execute a parsed navigation script against a fresh browser().

    Dispatches each [action, args] pair from nav_script_parser to a
    handler method; self.result records overall success.
    """
    def __init__(self,script_xml,report_xml='test.txt',test=False):
        # test=True suppresses report() output.
        self.test=test
        self.nav=browser(debug=False,proxy=PROXY_ON)
        # Action name -> handler.  'log' is accepted but has no handler.
        self.handle={'open' :self.open_handle,
                     'search':self.search,
                     'click':self.click_handle,
                     'page':self.page_handle,
                     'log':None}
        self.script=nav_script_parser(script_xml)
        self.output=''
        self.report_xml=report_xml
           
    def search(self,search_engine):
        """Run the search_engine instance produced by the script parser."""
        self.search_engine=search_engine
        self.search_engine.run(self.nav)
        self.search_engine.parser()
#        self.output=self.output+self.nav.br.response()
#        time.sleep(5)
    def report(self,file):
        """Forward results to *file* (a report object) unless testing."""
        if self.test==False:
            self.search_engine.report(file)
    def run(self):
        """Execute every script instruction; stop on the first failure
        (self.result reflects success/failure)."""
        for h in self.script.parser_xml():
            try:           
                args=h[1]
                call=h[0]                                                             
                self.handle[call](args)
            except Exception,ex:
                tb(ex)
                self.result=False
                return
        self.result=True
        
        
    def open_handle(self,args):
        """Handler for <open>: navigate to the given URL."""
        self.nav.open(args)
    def click_handle(self,tag):
        """Handler for <click>: follow the link whose text is *tag*."""
        self.nav.click(tag)
    def page_handle(self,pages):
        """Handler for <page>: walk result pages by clicking page links.

        *pages* is "B-E" (E may be 'N' for unbounded) or a single
        number; returns the page number reached.
        NOTE(review): a single number sets begin==end, so the loop body
        never runs -- confirm whether that is intended.
        """
        if len(pages.split('-'))==2:
            begin=int(pages.split('-')[0])
            end=pages.split('-')[1]
            if end=='N':
                end=0xFFFF
            else:
                end=int(pages.split('-')[1])
            
        else:
            begin=end=int(pages)
        while begin<end:
            try:
                if self.nav.click(str(begin))==True:
                    self.search_engine.parser()
    #                self.output=self.output+self.nav.br.response()
                    begin=begin+1
                    import time
                    time.sleep(3)
                else:
                    # No such page link: stop paging.
                    return begin
            except Exception,ex:
                tb(ex)
                return begin
        return begin
    
def serach_google(key,report_handle=None):
    """Run one Google search for *key* using the 'google_script.xml'
    template and hand results to *report_handle*.

    (The function name keeps its historical typo; callers depend on it.
    An unused inline template string was removed.)
    """
    script_xml='script.tmp.xml'
    # Instantiate the script template with the utf-8 encoded keyword,
    # closing the file handles the original leaked.
    template_file=open('google_script.xml','rb')
    template=template_file.read()
    template_file.close()
    out=open(script_xml,'wb')
    out.write((template)%(key.encode('utf8')))
    out.close()
    s=nav_engine(script_xml)
    s.run()
    s.report(report_handle)


# Output-file buckets assigned by url_info.get_type(); each result host
# is classified into exactly one of these file names.
HOST_GOOD='url_good.txt'
HOST_3322='url_3322.txt'    # dynamic-DNS style x.NNNN.org hosts
HOST_IP='url_ip.txt'        # bare-IP hosts
HOST_UNK='url_unknown.txt'  # everything not matched below
HOST_CX_CC='url_cc.txt'     # .cc / .info TLDs
HOST_IN='url_in.txt'        # .in TLD
HOST_QQ='url_qq.txt'        # hostnames containing 'qq'
HOST_PORT='url_port.txt'    # non-standard (non-80) port

class url_info():
    """Parsed view of a result URL: domain, port and a coarse host-type
    classification (one of the HOST_* file names)."""
    # domain -> {'ip','city','country'} resolution cache shared by all
    # instances, so each domain is resolved at most once per run.
    DOMAIN_DICT={}
#    def get_location(self,ip):
#        locate=geoip.get_ip(ip)
#        return locate
    def ip(self):
        """Resolve this URL's domain, caching the result.

        Also fills self.city / self.country from the cache entry.
        Returns the IP string, or None when resolution failed.
        """
        domain=self.domain
        if domain not in self.DOMAIN_DICT:
            import socket
            try:
                import urllib
                ip=socket.gethostbyname(urllib.quote(domain))
                # BUG FIX: the original cached the bare string '0.0.0.0'
                # here, so the dict-style ['ip'] lookups below raised
                # TypeError on every *successful* resolution.
                self.DOMAIN_DICT[domain]={'ip':ip,'city':None,'country':None}
            except Exception:
                self.DOMAIN_DICT[domain]={'ip':None,'city':None,'country':None}
        entry=self.DOMAIN_DICT[domain]
        self.__ip=entry['ip']
        self.city=entry['city']
        self.country=entry['country']
        return self.__ip
    def __init__(self,url):
        """Parse *url* and classify its host (sets self.type)."""
        try:
            from urlparse import urlparse          # Python 2
        except ImportError:
            from urllib.parse import urlparse      # Python 3 fallback
        n = urlparse(url)
        self.domain=''
        self.port=80
        self.url=url
        self.city=None
        self.country=None
        self.__ip=None
        try:
            self.domain=n.hostname.encode('ascii','ignore')
        except Exception:
            pass    # no hostname in the URL
        try:
            self.port=int(n.port)
        except Exception:
            pass    # no explicit port: keep the default 80
        self.type=self.get_type()

    def get_type(self):
        """Map the parsed hostname/port onto one of the HOST_* buckets.

        Checks are ordered: first match wins.
        """
        hostname=self.domain
        port=self.port
        import re
        # Dynamic-DNS style x.NNNN.org hosts.
        if re.search('.*\.(\d+)\.org', hostname):
            return HOST_3322
        if re.search('(\d+)\.(\d+)\.(\d+)\.(\d+)',hostname):
            return HOST_IP
        if re.search('(.*)(\.cc|\.info)', hostname):
            return HOST_CX_CC
        if re.search('(.*)(\.in)', hostname):
            return HOST_IN
        if port!=80:
            return HOST_PORT
        if re.search('(.*).yahoo\.co\.jp',hostname):
            return HOST_GOOD
        if re.search('.*qq.*',hostname):
            return HOST_QQ
        return HOST_UNK
    
class report():   
    """Persist search results: writes them to the DB, a per-day XML
    report and flat url/domain text files."""
    # Domains seen across ALL report instances (class attribute), used
    # to keep the domain text file free of duplicates.
    domainlist=[] 
    def __init__(self,file): 
        # *file* is the base name; the derived files share its prefix.
        self.type=file
        self.xml=self.type+'_result.xml'
        self.domain_txt=self.type+'_domain.txt'
        self.url_txt=self.type+'_url.txt'
        


        pass
    def add2db(self,search_engine):
        """Insert every collected result into the DB.

        Writes an error row when the engine produced no results but did
        report an error.  Returns the number of newly seen domains.
        """
        import db_result
        import datetime
        today=datetime.date.today()
        time=today.strftime("%Y%m%d")        
        if len(search_engine._report)==0 and len(search_engine.error()):
            db_result.add_zq_daily(domain='',keyword=search_engine.key,time=time,title='',text='',option=search_engine.options,error=search_engine.error())
        new_domain=0
        for i in search_engine._report:
            domain=url_info(i.href).domain
            # Placeholder classifier -- currently always returns ''.
            def get_type(text):
                return ''
            type=get_type(text=i.text)
            if type=='':
                type=get_type(text=i.title)
            added=db_result.add_zq_daily(domain=domain,
                                   keyword=search_engine.key,
                                   time=time,title=i.title,
                                   text=i.text,
                                   option=search_engine.options,
                                   error=search_engine.error(),
                                   type=type,
                                   url=i.href,
                                   check_domain=True)
            if added==True:
                new_domain=new_domain+1
        print new_domain
        return new_domain
    def hanlde(self,search_engine):
        """Main entry point (the name keeps its historical typo; callers
        invoke it as 'hanlde').

        Writes results to the DB, appends the url/domain text files and
        merges a <key> element into today's section of the XML report.
        Returns the number of new domains.
        """
        new_domain=self.add2db(search_engine)
        import datetime
        today=datetime.date.today()
        date='date'+today.strftime("%Y%m%d")
        new_root=True
        url_file=open(self.url_txt,'ab+')
        domain_file=open(self.domain_txt,'ab+')
        try:
            # Re-open an existing report so today's data is appended.
            tree=ElementTree(file=self.xml)
            root=tree.getroot()
            Element_Date=root.find(date)
            new_root=False
        except Exception,ex:
            tb(ex)
            tree = ElementTree()            
            root = Element('report')
            tree._setroot(root)
            Element_Date=None
            
        if Element_Date==None:
            Element_Date=Element(date)
        Element_Key=Element('key',{'value':search_engine.key,'count':str(len(search_engine._report)),'options':search_engine.options,'error':search_engine.error()})
        Element_results = Element('results',)
        Element_domain_set= Element('domains')
        
        domain=None   
        # Append *new* to *list* only if absent; True when appended.
        def append2list(list,new):
            for i in list:
                if new==i:
                    return False
            list.append(new)
            return True
        domain_list_key=[]
        for i in search_engine._report:
            domain=url_info(i.href).domain
            url_file.write(i.href+'\r\n')
            url_file.flush()
            
            
            if append2list(self.domainlist,domain)==True:
                domain_file.write(domain+'\r\n')
                domain_file.flush()                
            
            append2list(domain_list_key,domain)
            Element_results.append(i.toXML())
        
        for i in domain_list_key:
            # NOTE(review): tag name 'doamin' is a typo kept for
            # compatibility with existing report consumers.
            Element_domain_set.append(Element('doamin',{'value':i}))
            
        Element_Key.append(Element_domain_set)
        Element_Key.append(Element_results)
        Element_Date.append(Element_Key)
        if new_root==True:
            root.append(Element_Date)
        import os
        if os.path.exists(self.xml)==False:
            open(self.xml,'wb').close()
        tree.write(file=self.xml,encoding='utf-8')
        return new_domain
    


def run_with_xml(xml,num):
    """Drive searches for every <script> listed in a config XML.

    xml -- a config .xml file name, or a keyword file name (then the
           default 'config/key.xml' config is used)
    num -- number of worker threads in the keyword pool

    For each script entry the keyword file is read line by line; each
    keyword gets a generated navigation script executed on a worker.
    """
    import os
    # BUG FIX: codecs.BOM_UTF8 is used in search() below, but the
    # original never imported codecs, raising NameError per keyword.
    import codecs
    is_xml=os.path.splitext(xml)[1]=='.xml'
    if is_xml:
        tree=ElementTree(file=FILE_PATH_EX(xml,''))
    else:
        tree=ElementTree(file=FILE_PATH_EX('key.xml','config'))
    scripts=tree.findall('script')

    # Shared report target; a script may override it with its own
    # 'output' attribute.
    report_file=tree.find('report').attrib['file']
    report_zq=report(report_file)

    for script in scripts:
        period=script.attrib['period']
        test=False
        try:
            test=(script.attrib['test']=='y')
        except Exception:
            pass    # 'test' attribute is optional
        script_template=open(FILE_PATH_EX(script.attrib['file'],'config'),'rb').read()
        private_output_file=None
        try:
            private_output_file=FILE_PATH_EX(script.attrib['output'])
        except Exception:
            pass    # 'output' attribute is optional
        if private_output_file==None:
            report_handle=report_zq
        else:
            report_handle=report(private_output_file)
        if is_xml:
            keyword_file=script.attrib['keyword']
        else:
            keyword_file=xml

        def search(key):
            # Strip a UTF-8 BOM left by Windows editors, then decode.
            if key[:3] == codecs.BOM_UTF8:
                key = key[3:]
            unicode_key=unicode(key,CODE_PAGE)
            key=search_key_convert(key=unicode_key).convert()
            print (key)
            import random
            # Unique temp script per keyword so workers don't collide.
            script_xml='temp2\script.tmp%d.xml'%( random.randint(1, 100000))
            open(script_xml,'wb').write((script_template)%(key.encode('utf8'),period))
            try:
                s=nav_engine(script_xml=script_xml,test=test)
                if s!=None:
                    s.run()
                    s.report(report_handle)
            except KeyboardInterrupt:
                raise
            except Exception as ex:
                tb(ex)
        from t import ThreadPool
        pool=ThreadPool(num,300)
        # (Renamed from ``list``, which shadowed the builtin.)
        keys=[]
        keyword_file=FILE_PATH_EX(keyword_file,'config')
        for key in open(keyword_file,'rb').readlines():
            keys.append(key)
        pool.run(list=keys, func=search)
import os
import os
import getopt, sys
class Config(object):
    """Runtime options with their defaults; CommandLine mutates an
    instance of this according to the command-line arguments."""
    def __init__(self):
        self.proxy = False      # route traffic through the proxy pool?
        self.key = 'all.xml'    # config / keyword file to process
        self.num = 5            # worker-thread count
        self.db = None          # optional database target

class CommandLine(object):
    """Parse sys.argv with getopt and apply the recognised options to a
    Config instance."""
    def __init__(self,config):
        # Parse immediately; unknown options make getopt raise.
        short_opts = self.short_option()
        long_opts = self.long_option()
        self.opts, self.args = getopt.getopt(sys.argv[1:], short_opts, long_opts)
        self.config=config
        self.ProcessCommand()
    def ProcessCommand(self):
        """Dispatch every parsed option/value pair to the handler."""
        for option, value in self.opts:
            self.ProcessCommandOther(option, value)
    def short_option(self):
        return "s:d:m:"
    def long_option(self):
        return ["key=","proxy","t=","db="]
    def ProcessCommandOther(self,o, a):
        """Apply one option to the config (options are distinct, so the
        chain is mutually exclusive)."""
        if o=="--db":
            self.config.db=a
        elif o=='--key':
            self.config.key=a
        elif o=='--proxy':
            self.config.proxy=True
        elif o=='--t':
            self.config.num=int(a)
def try_mkdir(dir):
    """Create directory *dir*, silently ignoring failure (typically
    because it already exists).

    The except is narrowed from a bare ``except``: mkdir failures are
    OSError subclasses, and anything else (e.g. KeyboardInterrupt)
    should not be swallowed.
    """
    try:
        os.mkdir(dir)
    except OSError:
        pass
if __name__ == '__main__':
    # Working directories for generated per-keyword script files.
    try_mkdir('temp')
    try_mkdir('temp2')
    setting =Config()
    # CommandLine mutates `setting` from sys.argv options.
    cmd     =CommandLine(setting)
    if setting.proxy:
        # Load the proxy pool before any searches start.
        init_PROXY_STATUS()
    if setting.db:
        import db_result
        db_result.db_init(setting.db)
    run_with_xml(setting.key,setting.num)

    
