#!/usr/bin/python
from twisted.internet import reactor
from scrapy.crawler import Crawler
from scrapy import  signals
from itemextract.spiders.my import MySpider
from scrapy.utils.project import get_project_settings
from itemextract.commandline import *
from info_collect.collect import InfoGather
from misconfiguration.configtest import MisConfigurationTest
from session_manage.cookie import SessionManageTest
from splinter import Browser
from data_validation.vu_scan import ScanScheduler
import sys
import sqlite3
import os

# Entry point: runs the full scan pipeline against the host supplied on the
# command line (gethost()/contodict()/Usage() come from itemextract.commandline).
# Pipeline: info gathering -> session-management test -> crawl -> misconfiguration
# test -> data-validation scan.
if gethost():
    host = gethost()
    base_url = "http://" + host

    # Spider configuration: restrict the crawl to the target host only.
    domains = [host]
    urls = [base_url]

    # Remove any stale link/state database left over from a previous run.
    path = "webcrawler"
    if os.path.exists(path):
        os.remove(path)

    # Passive information gathering on the target.
    i = InfoGather(base_url)
    i.gather()

    print("[--Vulnerability Scan--]")
    args = sys.argv
    browser = Browser("phantomjs")

    if '-L' in args:
        # Authenticated scan: expects "-L -u <login_url> -d <form_data>".
        ind = args.index('-L')
        try:
            if args[ind + 1] == "-u" and args[ind + 3] == "-d" and args[ind + 4]:
                is_login = True
                login_url = args[ind + 2]
                form_data = contodict(args[ind + 4])
                # Create the crawl database file up front, then close the
                # handle immediately (the connection was previously leaked).
                conn = sqlite3.connect("webcrawler")
                conn.close()
                session_test = SessionManageTest(browser, "webcrawler", host,
                                                 is_login, login_url, form_data)
            else:
                # Malformed "-L" argument list: show usage and stop.
                # (Previously this fell through and crashed with a NameError
                # on session_test below.)
                Usage()
                sys.exit()
        except Exception:
            # Too few arguments after -L (IndexError) or bad form data from
            # contodict(). Narrowed from a bare "except:" so SystemExit and
            # KeyboardInterrupt propagate normally.
            Usage()
            sys.exit()
    else:
        # Unauthenticated scan.
        is_login = False
        session_test = SessionManageTest(browser, "webcrawler", host, is_login)
    session_test.check()

    # Initialize the crawler; the spider_closed signal stops the reactor so
    # reactor.run() blocks only until the crawl finishes.
    spider = MySpider(allowed_domains=domains, start_urls=urls)
    settings = get_project_settings()
    crawler = Crawler(settings)
    crawler.signals.connect(reactor.stop, signal=signals.spider_closed)
    crawler.configure()
    crawler.crawl(spider)

    # Start the crawl and block here until spider_closed is sent.
    crawler.start()
    # log.start()
    reactor.run()

    # Misconfiguration test over the crawled link database.
    config = MisConfigurationTest(host, "webcrawler")
    config.check()

    # Data-validation (vulnerability) scan.
    scheduler = ScanScheduler()
    scheduler.schedule()

    # NOTE(review): the "webcrawler" database is intentionally left on disk
    # after the run; earlier revisions deleted it here.
