from random import randint, choice
from sys import setrecursionlimit, exit
import urllib

from xgoogle.BeautifulSoup import BeautifulSoup
from xgoogle.search import GoogleSearch, SearchError

import urllister

# Extracts urls from the webpage
def extract_urls(page):
    """Return a list of absolute http:// links found on the page at `page`.

    Only absolute links are kept so the crawl moves on to other sites
    rather than following relative, same-site paths.
    """
    usock = urllib.urlopen(page)
    parser = urllister.URLLister()
    try:
        # Ensure the socket is closed even if feed()/parsing raises,
        # otherwise each failed page leaks a connection.
        parser.feed(usock.read())
    finally:
        usock.close()
    try:
        # startswith() is clearer and safer than slicing the first 7 chars.
        return [url for url in parser.urls if url.startswith('http://')]
    finally:
        parser.close()

# Returns a random url result from the given search query
def get_rand_google_result(search_query):
    try:
        gs = GoogleSearch(search_query)
        gs.results_per_page = 25
        results = gs.get_results() # returns a list of SearchResult instances
        random_result = randint(0,len(results)) # is second arg to randint correct?
        return results[random_result].url
    except SearchError, e:
        print "Search failed: %s" % e
        sys.exit(0)

# Goes to the next site in the web crawl
def web_crawl(url_list):
    global visited
    if num_sites is not visited and len(url_list) != 0:
        rand_url = choice(url_list)
        url_list.remove(rand_url)
        visited += 1
        print visited, rand_url
        url_list = extract_urls(rand_url)
        web_crawl(url_list)

print '''This program written by Chris Lemire crawls the web. It starts by 
taking a word or phrase from the user, choosing a random google search result
on that word, and then randomly choosing a link from each website it visits to
go on to the next website where it then randomly picks another website to go
to. Each new website causes one call of recursion to happen. If a dead end is
reached, where a new website can\'t be obtained from the current website,
then the recursion stack unwinds until a different website can be obtained
from one of the previously visited websites. This program is just for fun to
see where you will end up and for practice.\n'''

search_query = raw_input('Enter a word or phrase to begin the web crawl: ')
num_sites = raw_input('Give a number of sites to crawl: ')
print '\nBeginning...'
rand_url = get_rand_google_result(search_query)
url_list = extract_urls(rand_url)
visited = 1
print visited, rand_url
web_crawl(url_list) # Start recursion

