#!/usr/bin/python
'''
todo:
sessions/login
check JS for AJAX POST/GET requests
command line flags
formatting.py
fix imports
queue unique
links to avoid - logout
'''
import sys
import urllib2
from urllib2 import urlparse
from bs4 import BeautifulSoup
from sets import Set
import Queue

# initialise flags - require <url> and <depth>, exiting with usage text otherwise
usage = "[+] Usage: python vectorviper.py <url> <depth>\n"
usage += "[+] Example: python vectorviper.py http://www.google.com/ 2"
if len(sys.argv) < 3:
    exit(usage)
seed_uri = sys.argv[1]
try:
    max_depth = int(sys.argv[2])
except ValueError:
    # a non-numeric depth previously crashed with a raw traceback
    exit(usage)

# initialise variables
hostname = urlparse.urlparse(seed_uri).hostname  # seed site's hostname, used by same_domain()
queue = Queue.Queue()        # BFS frontier of (url, depth) tuples
queue.put((seed_uri, 0))
seen = Set()                 # urls already crawled

# link contains absolute domain - if not add it
def check_absolute(url, a):
  """Return `a` as an absolute URL, resolving against `url` when relative.

  A link is considered relative when it has no hostname of its own
  (e.g. '/about', 'page.html'); urljoin handles '..' and fragments.
  Already-absolute links are returned unchanged.
  """
  if urlparse.urlparse(a).hostname is None:
    # `url` is already a full URL string; the original round-tripped it
    # through urlparse().geturl(), which is a no-op
    return urlparse.urljoin(url, a)
  return a

# link from same domain
def same_domain(link, base_hostname=None):
  """Return True if `link`'s hostname belongs to the crawled site.

  The seed hostname is reduced by dropping its first label
  (www.example.com -> example.com) and tested as a substring of the
  link's hostname, so sibling subdomains also match.

  `base_hostname` defaults to the module-level `hostname`, keeping
  existing callers unchanged; passing it explicitly aids testing.
  """
  if base_hostname is None:
    base_hostname = hostname
  link_host = urlparse.urlparse(link).hostname
  if link_host is None:
    # mailto:, javascript: and fragment-only links have no hostname;
    # the original raised TypeError ("in None") for these
    return False
  return base_hostname[base_hostname.find(".")+1:] in link_host
  
# link already crawled
def new_link(url):
  """Return True when `url` has not been crawled yet (absent from `seen`)."""
  if url in seen:
    return False
  return True

# link ready to be crawled
def not_crawled(url):
  """Return True when `url` is not already waiting in the crawl queue.

  Queue entries are (url, depth) tuples, so compare against the first
  element of each pending item.  Peeking at Queue.Queue's internal
  `queue` deque is acceptable in this single-threaded script.

  Fixes in this revision: removed the leftover debug `print queue`;
  `Queue.queue` referenced the *module* (AttributeError) instead of the
  instance; membership was tested against whole tuples so it never
  matched; and the result was inverted relative to the function's name
  and its use as an "is ready to crawl" filter.
  """
  return url not in [item[0] for item in queue.queue]
  
  
# extract links, forms, scripts from url
def parse(url):
  """Fetch `url` and extract crawlable links, form actions and script sources.

  Returns a tuple (links, forms, scripts):
    links   - absolute, same-domain URLs not yet seen or queued
    forms   - each form's `action` attribute, or "Unknown Form"
    scripts - each script's absolute `src`, or "Inline Script"
  """
  soup = BeautifulSoup(urllib2.urlopen(url).read())
  links = []
  for anchor in soup.find_all('a'):
    href = anchor.get('href')
    if href is None:
      continue  # anchors without href (named anchors) crashed urljoin before
    absolute = check_absolute(url, href)
    # the original nested the predicates as calls --
    # not_crawled(new_link(same_domain(...))) -- feeding booleans into
    # URL-expecting functions; each filter must receive the URL itself
    if same_domain(absolute) and new_link(absolute) and not_crawled(absolute):
      links.append(absolute)
  forms = [form.get('action') if form.get('action') is not None else "Unknown Form"
           for form in soup.find_all('form')]
  scripts = ["Inline Script" if script.get('src') is None
             else check_absolute(url, script.get('src'))
             for script in soup.find_all('script')]
  return (links, forms, scripts)
  
# add link to queue
def add_to_queue(link, current_depth):
  """Enqueue `link` one level deeper, unless already seen or past max_depth."""
  if not new_link(link):
    return
  next_depth = current_depth + 1
  if next_depth <= max_depth:
    queue.put((link, next_depth))

# display depth 'graphically'
def depth_indent(size):
  """Return a "---> "-style prefix with `size` dashes.

  Non-positive sizes yield just "> ", matching the original loop
  (string repetition with a non-positive count is empty).
  """
  return "-" * size + "> "

# extract stats
def stats(url, depth, info):
  """Format a report for `info` = (links, forms, scripts) found at `url`.

  Side effect: each reported link is also pushed onto the crawl queue
  one level deeper via add_to_queue(), exactly as the caller expects.
  """
  links, forms, scripts = info
  prefix = depth_indent(depth)
  lines = [prefix + "New Links: %d \n" % len(links)]
  for link in links:
    lines.append("-" + prefix + link + "\n")
    add_to_queue(link, depth)
  lines.append(prefix + "Forms: %d \n" % len(forms))
  for form in forms:
    lines.append("-" + prefix + form + "\n")
  lines.append(prefix + "Scripts: %d \n" % len(scripts))
  for script in scripts:
    lines.append("-" + prefix + script + "\n")
  return "".join(lines)

# check all links in queue
while not queue.empty():
  pop = queue.get()
  url = pop[0]
  depth = pop[1]
  seen.add(url)
  print "Parsing: " + url + " at depth %d" % depth
  print stats(url, depth, parse(url))