import sys
import re
import urllib2
import urlparse
import robotparser

# Import pygraph
from pygraph.classes.graph import graph
from pygraph.classes.digraph import digraph
from pygraph.algorithms.searching import breadth_first_search
from pygraph.readwrite.dot import write
from collections import deque
from BeautifulSoup import BeautifulSoup as bs
import urllib2

def getlinks(curl,filterstr):

    url = urlparse.urlparse(curl)

    try:

        request = urllib2.Request(curl)
    
        # Add user-agent header to the request
        request.add_header("User-Agent", "WebLinkGraph")
    
        # Build the url opener, open the link and read it into msg
        opener = urllib2.build_opener()
        msg = opener.open(request).read()
        soup = bs(msg)        

        links=soup.findAll('a')  
#        print links
    except:
        print "Can't open ",curl#, " linked from: ", curl
        return []

    resultlinks=[]

    for a in links:
        link=a.get('href')
        if link is None:
            continue
        link=link.strip()
        if link.startswith('/'):
            link = 'http://' + url[1] + link
        elif link.startswith('#'):
            continue
        elif not link.startswith('http'):
            link = urlparse.urljoin(url.geturl(),link)

        o=urlparse.urlparse(link)
        link=(o.scheme+"://"+o.netloc+o.path)

        if not re.match(filterstr,link) is None and curl!=link:
            resultlinks.append(link)

    return resultlinks


import sys
starturl=sys.argv[1]

linkqueue=deque([starturl])
linkvisited=[]
linkgraph=digraph()

import time

while len(linkqueue)>0:

    time.sleep(0.3)
    #get first one in the queue
    curl=linkqueue.popleft()
    linkvisited.append(curl)

    #get links
    print "Getting Links for ",curl

    links=getlinks(curl,"^"+starturl)

    #insert non-duplicated links into queue and graph

    if not linkgraph.has_node(curl):
        linkgraph.add_node(curl)
    

    for link in links:
        #Adding the edge for all cases
        if not linkgraph.has_node(link):
            linkgraph.add_node(link)
        if not linkgraph.has_edge((curl,link)):
            linkgraph.add_edge((curl,link))

        if not link in linkqueue and not link in linkvisited:
            print "Adding link ",link, "Queue size=",len(linkqueue)
            linkqueue.append(link)


import pickle

# Persist the crawl graph.  Open in binary mode ("wb", not "w"): pickle
# output is binary data, and text mode would corrupt it on platforms that
# translate newlines.  The with-statement guarantees the file is closed
# (and flushed) even if pickling raises.
with open("linkgraph.pickle", "wb") as ouf:
    pickle.dump(linkgraph, ouf)

