"""
DOWNLOADS ESPN NCAA FOOTBALL SCORES FROM 2003-2011
STORES THE SCORES IN A NEO4J GRAPH DATABASE

CREATED BY B.J.BUTER, [BJBUTER [AT] GMAIL [DOT] NL]

CREATED FOR THE COURSERA SNA COURSE
AND TO PLAY WITH
THREADING
LOGGING
NEO4J
IGRAPH

BASED PARTIALLY ON:
 "ESPN SCOREBOARD PARSER V1
  ROBERT WOLTERMAN (xtacocorex) - 2011"

REQUIRES:
    NEO4J GRAPH DATABASE TO STORE GAME AND TEAM DATA
    PY2NEO
    IGRAPH

DATASTRUCTURES:
- OPTS: options as created by cmdparser, added some personal options
- URLOPT: {'confId': 80, 'seasonYear': 2003-current, 'seasonType': 2-3, 'weekNumber': 1-20}
- GAMES: {gameID: {'status': 'Final', 'seasonYear': 2003-current, 'seasonWeek': 4, 'seasonType': 2, 'date': datestruct,
    'away': {'conference': 'SEC2012', 'Q1': int, 'Q2': int, 'Q3': int, 'Q4': int, 'Q5': int, 'final': int, 'name': 'UCF'},
    'home': {home team dict}}}
"""

# MODULE IMPORTS
import urllib2, sys, os, optparse, time, cProfile, pstats, Queue, re, threading, logging, igraph
from HTMLParser import HTMLParser
from neo4jrestclient.client import GraphDatabase
from py2neo import neo4j, cypher
from Queue import Queue
from igraph import Graph

# GLOBAL VARIABLES
URL          = "http://scores.espn.go.com/ncf/scoreboard"       # base scoreboard URL, query params are appended in gamesGetter.getPage
CONFID       = 80                                               # ESPN conference id for NCAA Division I-A football
# 0 is a sentinel meaning "all years" / "all weeks" (see initOpts)
YEARS        = [0] + list(range(2003, 2012+1))
# BUG FIX: range(0, 20) excluded week 20 even though the --week help text
# and the module docstring both say weeks run 1..20
WEEKS        = list(range(0, 20+1))
NEO4JPATH    = r'C:\Neo4j\neo4j-community-1.8\bin\neo4j.bat'    # local Neo4j server launcher
NEO4JCONNECT = "http://localhost:7474/db/data/"                 # Neo4j REST endpoint

class urlParser(HTMLParser):
    """
    urlParser
    CLASS THAT PARSES ESPN PAGES TO EXTRACT OPTIONS FROM WHICH URLS CAN BE CONSTRUCTED
    THE RESULT IS self.urlOpts, A LIST OF DICTS SHAPED LIKE
        {'confId': int, 'seasonYear': int, 'seasonType': int, 'weekNumber': int}
    FOR MORE INFO ON THE CLASS OR FUNCTIONS SEE HTMLPARSER CLASS
    """
    def __init__(self, *args, **kwargs): 
        # extra positional/keyword args are accepted but deliberately ignored;
        # the (py2) HTMLParser base takes no constructor arguments
        HTMLParser.__init__(self)

    def feed(self, data):
        """
        feed(self, data)
         - FUNCTION THAT INITIALIZES VARIABLES AND STARTS PARSING
         - INPUT:
             - DATA, HTML TO BE PARSED
         - OUTPUTS: NONE, RESULTS END UP IN self.urlOpts
        """
        self.stack = []    # stack of open tags; stays empty until the 'selectdates' div is seen
        self.urlOpts = []  # list of url option dicts, the parse result
        HTMLParser.feed(self, data)
     
    def handle_starttag(self, tag, attrs):
        """
        handle_starttag()
         - FUNCTION THAT HANDLES THE START TAGS IN THE HTML
         - START PUSHING TAGS ON THE STACK ONCE THE PROPER START TAG HAS BEEN FOUND
         - STORE INFORMATION FROM WHICH URLS CAN BE CONSTRUCTED
        """
        if len(self.stack) > 0:
            self.stack.append((tag, set(attrs)))
            
            # an <a> directly inside a class="week" element carries a scoreboard link
            if tag == 'a' and len(self.stack) > 1:
                if ('class', 'week') in self.stack[-2][1]:
                    # assumes href is the LAST attribute of the anchor -- TODO confirm against the live page
                    (href, url) = attrs[-1]
                    # Hack to skip links to local files
                    if len(url) < 100:
                        # Throw away all before ?; kv pairs are separated with & then k and v are separated with =
                        self.urlOpts.append({kv[0]:int(kv[1]) for kv in [kvs.split('=') for kvs in url.split('?')[-1].split('&')]})
                    
        # start building the stack from this tag: <div class="selectdates"> is the root of interest
        if tag =='div' and [('class', 'selectdates')] == attrs:
            self.stack = [(tag, set(attrs))]

    def handle_endtag(self, tag):
        """
        handle_endtag()
         - FUNCTION THAT HANDLES THE END TAGS IN THE HTML
         - START POPPING TAGS FROM THE STACK ONCE THE PROPER START TAG HAS BEEN FOUND
        """
        if len(self.stack) > 0 :
            stackTag = ''
            # tags for valid html are either opened and closed or only started such as <img> <br>
            # if tags are not closed they are popped off the stack until an opening tag matches the current end tag
            while stackTag != tag:
                (stackTag, attrs) = self.stack.pop()

class gameParser(HTMLParser):
    """
    gameParser
    CLASS THAT PARSES ESPN SCOREBOARD PAGES TO EXTRACT GAME SCORES
    RESULTS ARE COLLECTED IN self.games (SEE MODULE DOCSTRING FOR THE GAMES DICT LAYOUT)
    FOR MORE INFO ON THE CLASS OR FUNCTIONS SEE HTMLPARSER CLASS
    """
    def __init__(self, opts):
        """
        init()
         - FUNCTION THAT INITIALIZES THE GAMEPARSER
         - INPUT:
             - OPTS, OPTIONS OF THE PROGRAM (stored, not otherwise used in this class)
        """
        self.opts = opts
        HTMLParser.__init__(self)
        
    def feed(self, data, urlOpt):
        """
        feed()
         - FUNCTION THAT RESETS THE PARSER STATE AND PARSES ONE PAGE OF HTML
         - INPUT:
             - DATA, HTML TO BE PARSED
             - URLOPT, URL OPTIONS WITH WHICH THE DATA WAS RETRIEVED
         - OUTPUTS: NONE, RESULTS END UP IN self.games
        """
        self.curGameID = ''      # id of the game currently being parsed
        self.Q = 1               # quarter counter, 1-4 plus 5 for overtime
        self.stack = []          # stack of open tags; stays empty until the scoreboard div is seen
        self.attrCache = {}      # per-game attribute values / regex patterns to recognize tags by id
        self.date = None         # date of the games currently being parsed
        self.games = {}          # gameID -> game info dict, the parse result
        self.inTeamName = False  # True while inside a team-name <span>
        self.team = ''           # 'away' or 'home': which team the current data belongs to
        self.urlOpt = urlOpt
        HTMLParser.feed(self, data)
    
    def handle_starttag(self, tag, attrs):
        """
        handle_starttag()
         - FUNCTION THAT HANDLES THE START TAGS IN THE HTML
         - START PUSHING TAGS ON THE STACK ONCE THE PROPER START TAG HAS BEEN FOUND
         - SIGNALS THAT WE ARE IN THE TEAMNAMES PORTION OF THE HTML
        """
        if len(self.stack) > 0:
            self.stack.append((tag, attrs))
            # <span id="<gameID>-aTeamName"> / <span id="<gameID>-hTeamName"> open a team name
            if tag == 'span' and self.curGameID != '' and len(attrs) > 0:
                if self.attrCache['aTeamName'] in set(attrs):
                    self.inTeamName = True
                    self.team = 'away'
                if self.attrCache['hTeamName'] in set(attrs):
                    self.inTeamName = True
                    self.team = 'home'

        # start building the stack from this tag
        if tag == 'div' and [('class', 'span-4')] == attrs:
            self.stack.append((tag, attrs))

    def handle_endtag(self, tag):
        """
        handle_endtag()
         - FUNCTION THAT HANDLES THE END TAGS IN THE HTML
         - START POPPING TAGS FROM THE STACK ONCE THE PROPER START TAG HAS BEEN FOUND
        """
        if len(self.stack) > 0:
            stackTag = ''
            # tags for valid html are either opened and closed or only started such as <img> <br>
            # if tags are not closed they are popped off the stack until an opening tag matches the current end tag
            while stackTag != tag:
                (stackTag, attrs) = self.stack.pop()

            # if it was False it stays False, if it was True the team name has been processed
            self.inTeamName = False

    def handle_entityref(self, name):
        """
        handle_entityref()
        - FUNCTION THAT IS USED TO HANDLE DATA PORTION WITH AN & SUCH AS "TEXAS A&M"
        """
        if len(self.stack) > 0:
            if self.inTeamName and not name == 'nbsp':
                self.games[self.curGameID][self.team]['name'] += '&' + name

    def handle_data(self, data):
        """
        handle_data()
         - FUNCTION THAT HANDLES THE DATA IN BETWEEN TAGS IN THE HTML
         - EXTRACTS:
             - TEAM NAMES
             - GAME DATES
             - GAME IDs
             - GAME STATUS : FINAL etc.
             - TEAM CONFERENCE
             - SCORES QUARTER1,2,3,4 OT(Q5) FINAL
        """
        # TEAM NAME (may arrive in several chunks, see handle_entityref)
        if self.inTeamName:
            self.games[self.curGameID][self.team]['name'] += data
        if len(self.stack) > 0:
            # DATE
            if self.stack[-1][0] == 'h4' and ('class', 'games-date') in set(self.stack[-1][1]):
                if data == "Today's Games":
                    self.date = time.localtime() # this should be EST but is ok for now
                else:
                    self.date = time.strptime(data, '%A, %B %d %Y')
            # GAME ID, prepare strings that use this game ID and store info that is known at this point
            elif self.stack[-1][0] == 'span' and ('class', 'sort') in set(self.stack[-1][1]):
                # BUG FIX: use lazy %-style logging args; the original
                # logging.debug("Game id :", data) passed an argument with no
                # placeholder, which raises a formatting error once DEBUG is enabled
                logging.debug("Game id : %s", data)
                self.curGameID = int(data)
                self.attrCache['TeamName'] = str(self.curGameID) + '-[ah]TeamName'  # NOTE(review): never read, kept for parity
                self.attrCache['aTeamName'] = ('id', str(self.curGameID) + '-aTeamName')
                self.attrCache['hTeamName'] = ('id', str(self.curGameID) + '-hTeamName')
                self.attrCache['Record'] = str(self.curGameID) + '-[ah]Record'
                self.attrCache['status'] = ('id', str(self.curGameID) + '-statusText')
                self.attrCache['Total'] = str(self.curGameID) + '-[ah]Total'
                self.attrCache['Score'] = str(self.curGameID) + '-[ah]Score'
                self.games[self.curGameID] = {'date': self.date, 'away': {'name': ''}, 'home': {'name': ''}}
                self.games[self.curGameID]['seasonYear'] = self.urlOpt['seasonYear']
                self.games[self.curGameID]['seasonType'] = self.urlOpt['seasonType']
                self.games[self.curGameID]['seasonWeek'] = self.urlOpt['weekNumber']
            elif self.stack[-1][0] == 'p' and len(self.stack[-1][1]) > 0:
                # GAME STATUS
                if self.attrCache['status'] in set(self.stack[-1][1]):
                    self.games[self.curGameID]['status'] = data
                # TEAM CONFERENCE, strip the leading record and the trailing ')'
                elif re.match(self.attrCache['Record'], self.stack[-1][1][0][1]):
                    self.games[self.curGameID][self.team]['conference'] = data.split(' ', 2)[-1][:-1]
            elif self.stack[-1][0] == 'li' and len(self.stack[-1][1]) > 0:
                # SCORE QUARTERS + OT
                # NOTE(review): quarter scores are stored as strings (module docstring says int) -- kept as-is
                if re.match(self.attrCache['Score'] + str(self.Q), self.stack[-1][1][0][1]):
                    self.games[self.curGameID][self.team]['Q' + str(self.Q)] = data
                    self.Q += 1
                # SCORE FINAL
                elif re.match(self.attrCache['Total'], self.stack[-1][1][-1][1]):
                    self.games[self.curGameID][self.team]['final'] = int(data)
                    self.Q = 1

class putter(threading.Thread):
    """
    putter
     - CLASS THAT PROVIDES A THREAD TO PUT EXTRACTED GAME INFO INTO A CHOSEN STRUCTURE
    """
    def __init__(self, opts, q, lock):
        """
        init()
        - FUNCTION THAT INITIALIZES THE PUTTER
        - INPUT:
            - OPTS, OPTIONS OF THE PROGRAM
            - Q, QUEUE THAT IS FILLED WITH GAMEINFO EXTRACTED FROM HTML
            - LOCK, LOCK HELD UNTIL halt() HAS FLUSHED ALL OUTPUT QUEUES
        """
        self.opts = opts
        self.q = q
        self.qNeo = None
        self.qIgraph = None
        threading.Thread.__init__(self)
        self.name = 'putter'
        self.lock = lock
        self.lock.acquire()  # released in halt() once all output queues are drained

        if opts.neo:
            self.qNeo = Queue()
            # START NEO PUTTER THREAD (only 1 because else deadlocks can occur in Neo4j)
            self.neoThread = neoPutter(opts, self.qNeo)
            self.neoThread.setDaemon(True)
            self.neoThread.start()

        if opts.igraph:
            self.qIgraph = Queue()
            # START IGRAPH PUTTER THREAD
            self.igraphThread = igraphPutter(opts, self.qIgraph)
            self.igraphThread.setDaemon(True)
            self.igraphThread.start()

    def run(self):
        """
        run()
         - FUNCTION THAT TAKES GAME INFO AND FANS IT OUT TO THE ENABLED OUTPUT QUEUES
        """
        logging.info("*** Starting putter thread ***")

        # Thread Loop
        while True:

            # Get item to process from queue
            games = self.q.get()

            # BUG FIX: hand each consumer its own (shallow) copy -- both
            # neoPutter and igraphPutter destructively popitem() the dict,
            # so sharing one object corrupted the data when both were enabled

            # Store on queue, to be put into Neo4j
            if self.opts.neo:
                self.qNeo.put(games.copy())

            # Store on queue, to be put into iGraph
            if self.opts.igraph:
                self.qIgraph.put(games.copy())

            self.q.task_done()

    def halt(self):
        """
        halt()
         - BLOCK UNTIL THE OUTPUT QUEUES ARE DRAINED, WRITE THE GRAPHML FILE,
           THEN RELEASE THE LOCK SO THE MAIN THREAD CAN CONTINUE
        """
        try:
            # block until qNeo is empty and the neoPutter thread is done
            if self.opts.neo:
                self.qNeo.join()
            if self.opts.igraph:
                self.qIgraph.join()
                # remove the initialization relation and node
                self.igraphThread.g.delete_edges([(0,0)])
                self.igraphThread.g.delete_vertices([0])
                try:
                    logging.info("*** Writing graphml file ***")
                    self.igraphThread.g.write_graphml('ncaafb.graphml')
                except Exception as e:
                    logging.warning("### Writing files failed ###")
                    logging.warning("Exception: " + str(type(e)) + " " + str(e))
                finally:
                    time.sleep(5) # wait for the iGraph to be written to file
        finally:
            # BUG FIX: always release the lock; the original released it only in
            # the igraph branch, leaving the main thread blocked in neo-only runs
            self.lock.release()
                    


class igraphPutter(threading.Thread):
    """
    igraphPutter
     - CLASS THAT PROVIDES A THREAD TO PUT EXTRACTED GAME INFO INTO AN IGRAPH STRUCTURE AND SAVE AS GRAPHML
       (docstring previously said "neoPutter" -- fixed)
    """
    def __init__(self, opts, q):
        """
        init()
        - FUNCTION THAT INITIALIZES THE IGRAPHPUTTER
        - INPUT:
            - OPTS, OPTIONS OF THE PROGRAM
            - Q, QUEUE THAT IS FILLED WITH GAMEINFO EXTRACTED FROM HTML
        """
        self.opts = opts
        self.q = q
        # initialize graph with 1 node and 1 relation so every attribute column
        # exists up front; this bootstrap node/edge is removed again in putter.halt()
        self.g = Graph(n = 1,
            edges = [(0,0)],
            directed = True,
            vertex_attrs = {'type': [None],
                            'name': [None]
            },
            edge_attrs = {'type': [None],
                'name': [None],
                'year': [None],
                'gameID': [None],
                'away-Score': [None],
                'home-Score': [None],
                'winner': [None],
                'delta-Score': [None],
                'seasonWeek': [None],
                'seasonType': [None],
                'weight': [None]
            }
        )
        self.names = {} # We won't delete vertices therefore we can keep this name to idx mapping
        threading.Thread.__init__(self)
        self.name = 'igraphPut'

    def run(self):
        """
        run()
         - FUNCTION THAT TAKES GAME INFO AND PUTS IT INTO THE IGRAPH STRUCTURE
        """
        logging.info("*** Starting igraph putter thread ***")

        # Thread Loop
        while True:

            # Get item to process from queue
            games = self.q.get()
            numGames = len(games)-1

            # Process all games from a html page
            while len(games) > 0:
                (game, gameInfos) = games.popitem()
                if len(games) == numGames:
                    # log once per page, on the first game popped
                    logging.info('*** Storing games, year: '+str(gameInfos['seasonYear'])+', week: '+str(gameInfos['seasonWeek'])+', type: '+str(gameInfos['seasonType'])+' ***'  )
                logging.debug("*** Storing game in iGraph ***")
                logging.debug("Game: " + str(game))
                # Only process finished games of type 2, 3. 1=pre-season, 2=regular season, 3=bowls, 4=pro-games (1 and 4 usually give wrong results)
                if gameInfos['status'] == 'Final' and gameInfos['seasonType'] <= 3:

                    # AWAY AND HOME TEAM
                    for team in [gameInfos['away'], gameInfos['home']]:

                        # TEAM NODE (create once, then reuse via the name -> index cache)
                        if team['name'] in self.names:
                            nt = self.names[team['name']]
                        else:
                            nt = self.g.vcount()
                            self.g.add_vertex(name=team['name'])
                            self.names[team['name']] = nt
                            self.g.vs[nt]['type'] = 'team'

                        if 'conference' in team:
                            # sometimes the conferences are filled with home or away, skip these
                            if 'away' != team['conference'] and 'home' != team['conference']:

                                # CONFERENCE NODE
                                if team['conference'] in self.names:
                                    nc = self.names[team['conference']]
                                else:
                                    nc = self.g.vcount()
                                    self.g.add_vertex(name=team['conference'])
                                    self.names[team['conference']] = nc
                                    self.g.vs[nc]['type'] = 'conference'

                                # TEAM - CONFERENCE RELATION (one per team per season)
                                relName = str(gameInfos['seasonYear']) +'-'+ team['name'] +'-'+ team['conference']
                                if relName in self.names:
                                    rel_tc = self.names[relName]
                                else:
                                    rel_tc = self.g.ecount()
                                    self.g.add_edge(nt, nc)
                                    self.names[relName] = rel_tc
                                    self.g.es[rel_tc]['type'] = 'memberOf'
                                    self.g.es[rel_tc]['year'] = gameInfos['seasonYear']
                                    self.g.es[rel_tc]['name'] = relName

                    # GAME RELATION, edges point from loser to winner;
                    # a tie satisfies both conditions and yields an edge in each direction
                    winlose = []
                    if gameInfos['away']['final'] >= gameInfos['home']['final']:
                        winlose.append((gameInfos['away']['name'], gameInfos['home']['name']))
                    if gameInfos['away']['final'] <= gameInfos['home']['final']:
                        winlose.append((gameInfos['home']['name'], gameInfos['away']['name']))

                    for (win, lose) in winlose:
                        rel_lose_win = self.g.ecount()  # dropped the stray, unused 'nch' alias
                        self.g.add_edge(lose, win)      # igraph resolves vertices by their 'name' attribute
                        self.g.es[rel_lose_win]['gameID'] = game
                        self.g.es[rel_lose_win]['away'] = gameInfos['away']['name']
                        self.g.es[rel_lose_win]['home'] = gameInfos['home']['name']
                        self.g.es[rel_lose_win]['name'] = gameInfos['away']['name'] + '@' + gameInfos['home']['name']
                        self.g.es[rel_lose_win]['type'] = 'game'
                        self.g.es[rel_lose_win]['away-Score'] = gameInfos['away']['final']
                        self.g.es[rel_lose_win]['home-Score'] = gameInfos['home']['final']
                        self.g.es[rel_lose_win]['winner'] = win
                        self.g.es[rel_lose_win]['delta-Score'] = abs(gameInfos['home']['final']-gameInfos['away']['final'])
                        self.g.es[rel_lose_win]['year'] = gameInfos['seasonYear']
                        self.g.es[rel_lose_win]['seasonWeek'] = gameInfos['seasonWeek']
                        self.g.es[rel_lose_win]['seasonType'] = gameInfos['seasonType']
                        self.g.es[rel_lose_win]['weight'] = self.g.es[rel_lose_win]['delta-Score']

            self.q.task_done()
    
    
## CAN BE IMPROVED WITH NEOBATCHES SOMETIME
class neoPutter(threading.Thread):
    """
    neoPutter
     - CLASS THAT PROVIDES A THREAD TO PUT EXTRACTED GAME INFO INTO A NEO4J GRAPH DATABASE
         USING MULTIPLE OF THESE THREADS CAN CAUSE NEO TO GIVE DEADLOCK WARNINGS USE ONLY 1
    """
    def __init__(self, opts, q):
        """
        init()
        - FUNCTION THAT INITIALIZES THE NEOPUTTER
        - INPUT:
            - OPTS, OPTIONS OF THE PROGRAM (opts.gdb is assumed to hold the py2neo
              graph database handle -- set up outside this view, TODO confirm)
            - Q, QUEUE THAT IS FILLED WITH GAMEINFO EXTRACTED FROM HTML
        """
        self.opts = opts
        self.q = q
        threading.Thread.__init__(self)
        self.name = 'NeoPut'

    def run(self):
        """
        run()
         - FUNCTION THAT TAKES GAME INFO AND PUTS IT INTO A NEO4J DATABASE
        """
        logging.info("*** Starting neo putter thread ***")

        # Get (or lazily create) the Neo4j indexes used to de-duplicate nodes/relations
        teamsIdx = self.opts.gdb.get_or_create_index(neo4j.Node, "Teams")
        conferencesIdx = self.opts.gdb.get_or_create_index(neo4j.Node, "Conferences")
        gamesIdx = self.opts.gdb.get_or_create_index(neo4j.Relationship, "Games")
        membersIdx = self.opts.gdb.get_or_create_index(neo4j.Relationship, "Members")

        # Thread Loop
        while True:

            # Get item to process from queue
            games = self.q.get()
            numGames = len(games)-1

            # Process all games from a html page
            while len(games) > 0:
                (game, gameInfos) = games.popitem()
                if len(games) == numGames:
                    # log once per page, on the first game popped
                    # (added ', type:' for consistency with igraphPutter's log line)
                    logging.info('*** Storing games, year: '+str(gameInfos['seasonYear'])+', week: '+str(gameInfos['seasonWeek'])+', type: '+str(gameInfos['seasonType'])+' ***'  )
                logging.debug("*** Storing game in Neo4J Graph database ***")
                logging.debug("Game: " + str(game))
                # Only process finished games
                if gameInfos['status'] == 'Final':
                    ta = gameInfos['away']
                    th = gameInfos['home']

                    # AWAY TEAM NODE + RELATION
                    nta = teamsIdx.get_or_create('Teams', ta['name'], {'type': 'team', 'name': ta['name']})
                    if 'conference' in ta:
                        # sometimes the conferences are filled with home or away, skip these
                        if 'away' != ta['conference'] and 'home' != ta['conference']:
                            nca = conferencesIdx.get_or_create('Conferences', ta['conference'], {'type': 'conference', 'name': ta['conference']})
                            rel_ta_ca = membersIdx.get_or_create('Members', str(gameInfos['seasonYear']) +'-'+ ta['name'] +'-'+ ta['conference'], (nta, 'memberOf', nca))
                            rel_ta_ca.set_properties({'year': gameInfos['seasonYear'],
                                'name': str(gameInfos['seasonYear']) +'-'+ ta['name'] +'-'+ ta['conference']})

                    # HOME TEAM NODE + RELATION
                    nth = teamsIdx.get_or_create('Teams', th['name'], {'type': 'team', 'name': th['name']})
                    if 'conference' in th:
                        # sometimes the conferences are filled with home or away, skip these
                        if 'away' != th['conference'] and 'home' != th['conference']:
                            nch = conferencesIdx.get_or_create('Conferences', th['conference'], {'type': 'conference', 'name': th['conference']})
                            rel_th_ch = membersIdx.get_or_create('Members', str(gameInfos['seasonYear']) +'-'+ th['name'] +'-'+ th['conference'], (nth, 'memberOf', nch))
                            # BUG FIX: use seasonYear like the away-team relation above; the
                            # original derived 'year' from the page date, which disagreed with
                            # the seasonYear embedded in both the index key and 'name'
                            rel_th_ch.set_properties({'year': gameInfos['seasonYear'],
                                'name': str(gameInfos['seasonYear']) +'-'+ th['name'] +'-'+ th['conference']})

                    # NOTE(review): on a tie both branches below run; since the index key
                    # ('Games', game) is identical, the second get_or_create returns the
                    # first relation and overwrites its properties -- confirm this is intended

                    # HOME TEAM WON GAME
                    if th['final'] >= ta['final']:
                        rel_ta_th = gamesIdx.get_or_create('Games', game, (nta, 'Played', nth))
                        rel_ta_th.set_properties({'gameID': game,
                            'name': ta['name'] + '@' + th['name'],
                            'type': 'game',
                            'year': int(time.strftime('%Y',gameInfos['date'])),   # calendar year of the game date
                            'week': int(time.strftime('%W',gameInfos['date'])),   # calendar week, distinct from seasonWeek
                            'away-Score': ta['final'],
                            'home-Score': th['final'],
                            'winner': th['name'],
                            'delta-Score': th['final']-ta['final'],
                            'seasonYear': gameInfos['seasonYear'],
                            'seasonWeek': gameInfos['seasonWeek'],
                            'seasonType': gameInfos['seasonType']}
                        )

                    # AWAY TEAM WON GAME
                    if ta['final'] >= th['final']:
                        rel_th_ta = gamesIdx.get_or_create('Games', game, (nth, 'Played', nta))
                        rel_th_ta.set_properties({'gameID': game,
                            'name': ta['name'] + '@' + th['name'],
                            'type': 'game',
                            'year': int(time.strftime('%Y',gameInfos['date'])),
                            'week': int(time.strftime('%W',gameInfos['date'])),
                            'away-Score': ta['final'],
                            'home-Score': th['final'],
                            'winner': ta['name'],
                            'delta-Score': ta['final']-th['final'],
                            'seasonYear': gameInfos['seasonYear'],
                            'seasonWeek': gameInfos['seasonWeek'],
                            'seasonType': gameInfos['seasonType']}
                        )
            self.q.task_done()
            
class gamesGetter(threading.Thread):
    """
    gamesGetter
     - CLASS THAT PROVIDES A THREAD TO GET HTML FROM FILE OR URL
         AND PARSES THIS HTML TO EXTRACT GAME INFORMATION
         AND PARSES THIS HTML TO EXTRACT URL INFORMATION
    """
    def __init__(self, opts, qIn, qOut):
        """
        init()
        - FUNCTION THAT INITIALIZES THE HTMLGETTER
        - INPUT:
            - OPTS, OPTIONS OF THE PROGRAM
            - QIN, QUEUE WITH URLOPTS USED TO CREATE THE URL TO DOWNLOAD
            - QOUT, QUEUE THAT GETS FILLED WITH GAMEINFO EXTRACTED FROM HTML
        """
        self.opts = opts
        self.qIn = qIn
        self.qOut = qOut
        self.gameP = gameParser(opts)  # each thread owns its parsers: parser state is not thread-safe
        self.urlP = urlParser()
        threading.Thread.__init__(self)

    def run(self):
        """
        run()
         - FUNCTION THAT GETS HTML, PARSES HTML FOR GAME INFORMATION AND URL INFORMATION
        """
        logging.info("*** Starting html getter thread ***")
        while True:
            urlOpt = self.qIn.get()
            try:
                # GET HTML, PARSE HTML, PUT GAMES INFO ON QOUT
                html = self.getPage(urlOpt)
                self.gameP.feed(html, urlOpt)
                self.qOut.put(self.gameP.games)

                # IF ALL WEEKS NEED TO BE PARSED, AND THIS IS THE 1ST WEEK PARSE FOR URL INFO
                if self.opts.week == 0 and urlOpt['weekNumber'] == 1 and urlOpt['seasonType'] == 2:
                    self.urlP.feed(html)
                    # skip the first extracted option: it is the page we just downloaded
                    newUrlOpts = self.urlP.urlOpts[1:]
                    for newUrlOpt in newUrlOpts:
                        self.qIn.put(newUrlOpt)

            except Exception as e:
                logging.warning("### Downloading failed, retrying later ###")
                logging.warning("Exception: " + str(type(e)) + " " + str(e))
                # put the failed opts back on the queue (try downloading again)
                self.qIn.put(urlOpt)
            finally:
                self.qIn.task_done()

    def getPage(self, urlOpt):
        """
        getPage()
         - FUNCTION THAT GETS HTML FROM FILE OR URL
         - INPUT:
             - URLOPT, DICT WITH OPTIONS TO BUILD URL STRING
         - OUTPUTS: html, THE PAGE CONTENTS AS A STRING
        """
        if self.opts.file is not None:
            # GET HTML FROM FILE
            logging.info("*** Trying to read html from file ***")
            logging.info("File: " + self.opts.file)

            # with-statement guarantees the handle is closed even if read() raises
            with open(self.opts.file, 'r') as f:
                html = f.read()
        else:
            # BUILD URL STRING FROM URL OPTIONS
            url = URL + '?confId=' + str(urlOpt['confId']) + '&seasonYear=' + str(urlOpt['seasonYear']) + '&seasonType=' + str(urlOpt['seasonType']) + '&weekNumber=' + str(urlOpt['weekNumber'])
            logging.info("*** Trying to read html from url ***")
            logging.info("Url: " + url)

            # GET THE URL DATA
            headers = {'User-Agent': 'Mozilla/5.0'} # need to spoof user agent or get different html
            req = urllib2.Request(url, None, headers)
            f = urllib2.urlopen(req)
            try:
                html = f.read()
            finally:
                # close the response even if read() raises
                f.close()
            logging.info('Downloaded: %s bytes', len(html))

        return html
    
def cmdLineOptionParser():
    """
        cmdLineOptionParser()
         - BUILDS AND RETURNS THE COMMAND LINE ARGUMENT PARSER
         - INPUT:  NONE
         - OUTPUTS: cmdparser - THE CONFIGURED optparse.OptionParser
    """
    # CREATE OUR USAGE RESPONSE (guard against a missing module docstring)
    usage = ("%prog [options]", __doc__ or "")

    usage = "\n".join(usage)

    # CREATE OUR COMMAND LINE PARSER
    cmdparser = optparse.OptionParser(usage)

    # ADD OPTIONS
    cmdparser.add_option('-f', '--file',
        dest='file',
        help='load html file for processing',
        metavar='FILE',
        default=None
    )
    cmdparser.add_option('-t', '--threads',
        dest='numThreads',
        help="Number of downloading threads",
        metavar='NUMTHREADS',
        type='int',
        default=5
    )
    cmdparser.add_option('-v', '--verbosity',
        dest='verbosity',
        help="Set verbosity level, 1=Critical, 2=Error, 3=Warning, 4=Info, 5=Debug",
        metavar='LEVEL',
        type='int',
        # BUG FIX: optparse does NOT type-convert default values, so the original
        # default="3" left opts.verbosity as the string "3" and made the
        # verbosity check in initOpts fail on every run without -v
        default=3
    )
    cmdparser.add_option('-i', '--igraph',
        action='store_true',
        dest='igraph',
        help="Store games in an iGraph graphml file",
        default=True
    )
    cmdparser.add_option('-n', '--neo4j',
        action='store_true',
        dest='neo',
        help="Store games in a Neo4j database",
        default=False
    )
    cmdparser.add_option('-c', '--clear',
        action='store_true',
        dest='clear',
        help="Clear Neo4j database",
        default=False
    )
    cmdparser.add_option('-y', '--years',
        help="Specify the year to process: 2003, 2004,...,current",
        dest='years',
        type='int',
        # BUG FIX: int default (was the string "0"), see '--verbosity'
        default=0
    )
    cmdparser.add_option('-w', '--week',
        help="Specify the Week to process: 1,2,...,20",
        dest='week',
        type='int',
        # BUG FIX: int default (was the string "0"), see '--verbosity'
        default=0
    )

    # RETURN THE PARSER
    return cmdparser

def initOpts(opts):
    """
        initOpts(opts)
         - VERIFIES THE COMMAND LINE ARGUMENTS FOR VALIDITY AND SETS UP
           LOGGING AND (OPTIONALLY) THE NEO4J CONNECTION
         - INPUT:  opts - COMMAND LINE OPTIONS (AS RETURNED BY optparse)
         - OUPUTS: True IF GOOD, False IF A VALIDITY CHECK FAILED
    """
    # CHECK VERBOSITY: MAP THE 1-5 COMMAND LINE LEVEL ONTO logging LEVELS
    verbosityLevels = {
        1: logging.CRITICAL,
        2: logging.ERROR,
        3: logging.WARNING,
        4: logging.INFO,
        5: logging.DEBUG,
    }
    if opts.verbosity not in verbosityLevels:
        logging.error("*** INVALID VERBOSITY LEVEL SPECIFIED ***")
        # BUGFIX: logging USES LAZY %-FORMATTING; THE ORIGINAL PASSED A
        # SECOND POSITIONAL ARG WITHOUT A %s PLACEHOLDER, WHICH TRIGGERS
        # A "not all arguments converted" FORMATTING ERROR
        logging.error("Level: %s", opts.verbosity)
        return False
    opts.level = verbosityLevels[opts.verbosity]

    # START LOGGING
    # CREATE LOGGER (THE ROOT LOGGER, SO MODULE-LEVEL logging.info() ETC.
    # GO THROUGH IT)
    logger = logging.getLogger()
    logger.setLevel(opts.level)
    # CREATE CONSOLE HANDLER WRITING TO STDOUT
    ch = logging.StreamHandler(sys.stdout)
    ch.setLevel(opts.level)
    # ADD FORMATTER TO HANDLER
    ch.setFormatter(logging.Formatter('%(threadName)-10s %(levelname)-8s %(message)s'))
    # ADD HANDLER TO LOGGER -- ONLY ONCE: Main() CAN RUN SEVERAL TIMES IN
    # ONE PROCESS (SEE MyMain) AND ADDING A HANDLER PER CALL WOULD
    # DUPLICATE EVERY SUBSEQUENT LOG LINE
    if not any(isinstance(h, logging.StreamHandler) for h in logger.handlers):
        logger.addHandler(ch)

    logging.info("*** Initializing command line arguments ***")
    logging.info("*** Logging Level: %s", logging.getLevelName(logger.getEffectiveLevel()))

    # CHECK WEEK (0 = "NOT SPECIFIED"; Main TREATS IT AS WEEK 1)
    if opts.week not in WEEKS:
        logging.error("*** INVALID WEEK SPECIFIED ***")
        logging.error("Week: %s", opts.week)
        return False
    logging.info("Week: %s", opts.week)

    # CHECK YEAR: 0 EXPANDS TO EVERY AVAILABLE SEASON, OTHERWISE A
    # SINGLE-SEASON LIST
    if opts.years not in YEARS:
        logging.error("*** INVALID YEAR SPECIFIED ***")
        logging.error("Year: %s", opts.years)
        return False
    elif opts.years == 0:
        opts.years = YEARS[1:]
    else:
        opts.years = [opts.years]
    logging.info("Years: %s", opts.years)

    # CHECK FILE: WHEN A LOCAL HTML FILE IS GIVEN, WEEK/YEARS ARE
    # OVERRIDDEN SO ONLY THAT FILE IS PROCESSED
    if opts.file is not None:
        if not os.path.exists(opts.file):
            logging.debug("*** FILE DOES NOT EXIST ***")
            logging.debug("File: %s", opts.file)
            return False
        opts.week = -1
        opts.years = [0]
        logging.info("File: %s", opts.file)

    # CONNECT NEO4J (ONLY WHEN -n/--neo4j WAS REQUESTED)
    if opts.neo:
        logging.info("*** Connecting to Neo4j ***")
        try:
            opts.gdb = neo4j.GraphDatabaseService(NEO4JCONNECT)
        except Exception as e:
            logging.info(e)
            logging.info("*** Error connecting to Neo4j ***")
            return False

        # CLEARING NEO4J: WIPES THE WHOLE DATABASE BEFORE SCRAPING
        if opts.clear:
            logging.info("*** Clearing Neo4j Database ***")
            opts.gdb.clear()

    # RETURN TO MAIN
    return True

def Main(argv):
    """
        Main()
         - MAIN SCRIPT FUNCTION: PARSES THE COMMAND LINE, THEN SCRAPES
           ESPN NCAA FOOTBALL SCORES WITH A POOL OF DOWNLOADER THREADS
           AND A SINGLE STORING ("PUTTER") THREAD
         - INPUT:  argv - COMMAND LINE ARGUMENTS
         - OUPUTS: NONE (EXITS WITH STATUS 1 ON INITIALIZATION FAILURE)
    """  
    # FIGURE OUT COMMAND LINE ARGUMENTS
    cmdparser = cmdLineOptionParser()
    opts, args = cmdparser.parse_args(argv)

    # VERIFY THE OPTIONS (ALSO CONFIGURES LOGGING AND, IF REQUESTED,
    # THE NEO4J CONNECTION)
    if not initOpts(opts):
        logging.info("*** SCRIPT INITIALIZATION ERROR EXITING ***")
        sys.exit(1)
        
    # START DOING THE WORK
    logging.info("*** start scraping ***")

    # INITIALIZE: ONE JOB PER SEASON; week 0 ON THE COMMAND LINE MEANS
    # "START AT WEEK 1". seasonType 2 AND confId 80 ARE ESPN URL
    # PARAMETERS (presumably regular season / all FBS -- confirm against
    # the URLOPT notes in the module docstring)
    week = 1 if opts.week == 0 else opts.week
    qJobs = Queue()  # Queue with url options to process
    qGames = Queue() # Queue with game info downloaded from url
    for y in opts.years:
        qJobs.put({'weekNumber': week, 'seasonType': 2, 'seasonYear': y, 'confId': 80} )

    # START HTML GETTER THREADS (gamesGetter, defined elsewhere in this
    # file, consumes qJobs and feeds parsed games into qGames)
    for i in range(opts.numThreads):
        t = gamesGetter(opts, qJobs, qGames)
        t.setDaemon(True)
        t.start()

    # CREATE A LOCK SO WE CAN WAIT FOR THE PUTTER THREAD TO FINISH
    lock = threading.Lock()
    # START PUTTER THREAD (stores games from qGames; defined elsewhere)
    putterThread = putter(opts, qGames, lock)
    putterThread.setDaemon(True)
    putterThread.start()    

    # JOIN THEN EXIT
    logging.info("*** Waiting until threads are finished ***")
    qJobs.join()  # block until qJobs is empty and therefore the htmlGetter tasks are done
    qGames.join() # block until qGames is empty and the NeoPutter thread is done
    putterThread.halt() # the putter thread may finish now
    lock.acquire() # wait until the putter thread finishes, thus releasing its lock
                   # (NOTE(review): presumably putter holds this lock while
                   # running -- confirm in the putter class)
    #time.sleep(5) # to make sure the write has time to finish properly

    # NO NEED TO CLEAN UP THREADS BECAUSE THEY ARE DAEMONIZED
        
def MyMain(argv):
    """
        MyMain()
         - RUNS A PAIR OF CANNED SCRAPING SCENARIOS; USED WHEN THE SCRIPT
           IS STARTED WITHOUT ANY COMMAND LINE ARGUMENTS
         - INPUT:  argv - ACCEPTED FOR PARITY WITH Main() BUT UNUSED
         - OUPUTS: NONE
    """
    # SCENARIO 1: CLEAR THE DATABASE AND SCRAPE ONLY THE 2006 SEASON;
    # SCENARIO 2: CLEAR AGAIN AND SCRAPE EVERY SEASON (YEARS DEFAULT)
    scenarios = (
        ['-c', '-v', '4', '-y', '2006'],
        ['-v', '4', '-c'],
    )
    for scenario in scenarios:
        Main(scenario)
    
if __name__ == "__main__":
    if len(sys.argv[1:]) == 0:
        MyMain(sys.argv[1:])
    else:
        Main(sys.argv[1:])
        
