from BeautifulSoup import BeautifulSoup
import urllib2
import sys, traceback
import time

# Root of the site being scraped; all team/roster paths from the pages are
# relative and get appended to this.
BASE_URL = 'http://www.nfl.com'

# All 32 NFL team names as they appear in the site's link text, sorted
# alphabetically.  NOTE(review): names reflect the era this was written
# (St. Louis Rams, San Diego Chargers, Oakland Raiders, Washington Redskins)
# and must match the site's current text exactly for getTeamUrls to find them.
TEAMS = sorted([
    "Baltimore Ravens",
    "Cincinnati Bengals",
    "Cleveland Browns",
    "Pittsburgh Steelers",
    "Buffalo Bills",
    "Miami Dolphins",
    "New England Patriots",
    "New York Jets",
    "Chicago Bears",
    "Detroit Lions",
    "Green Bay Packers",
    "Minnesota Vikings",
    "Dallas Cowboys",
    "New York Giants",
    "Philadelphia Eagles",
    "Washington Redskins",
    "Houston Texans",
    "Indianapolis Colts",
    "Jacksonville Jaguars",
    "Tennessee Titans",
    "Denver Broncos",
    "Kansas City Chiefs",
    "Oakland Raiders",
    "San Diego Chargers",
    "Atlanta Falcons",
    "Carolina Panthers",
    "New Orleans Saints",
    "Tampa Bay Buccaneers",
    "Arizona Cardinals",
    "San Francisco 49ers",
    "Seattle Seahawks",
    "St. Louis Rams"
        ])

def p(x):
    """Print helper. Parenthesized form behaves identically under Python 2
    (prints the single expression) and is valid Python 3."""
    print(x)

class Player:
    """One roster entry: name, position, and jersey number (all strings,
    taken straight from the scraped table cells)."""

    def __init__(self, _lastName, _firstName, _position, _number):
        # Class-level '' defaults removed: they were always overwritten
        # here and only obscured the real instance attributes.
        self.lastName = _lastName
        self.firstName = _firstName
        self.position = _position
        self.number = _number

    def __str__(self):
        # e.g. "QB 12 Smith, John"
        return "%s %s %s, %s" % (self.position, self.number, self.lastName, self.firstName)

class Roster:
    """Mutable list of Player objects for one team."""

    def __init__(self):
        # BUG FIX: 'players' was a class attribute, so every Roster (and
        # therefore every Team) appended into one shared list.  Make it
        # per-instance.
        self.players = []

    def __str__(self):
        return ",\n".join("   %s" % str(player) for player in self.players)

    def addPlayer(self, lastName, firstName, position, number):
        """Append a new Player built from one parsed table row."""
        self.players.append(Player(lastName, firstName, position, number))

class Team:
    """A single NFL team: display name, roster-page URL path, and roster."""

    def __init__(self, _name):
        self.name = _name
        self.url = ''
        # BUG FIX: 'roster' was a class attribute, so a single Roster()
        # instance was shared by all 32 teams.  Make it per-instance.
        self.roster = Roster()

    def __str__(self):
        string = "%s - %s\n" % (self.name, self.url)
        string += "Roster:"
        string += str(self.roster)
        return string

    def parseRoster(self, soup):
        """Extract players from one roster page's result table and add them
        to self.roster.  Any parse failure is logged and swallowed so one
        bad page does not kill the scrape."""
        try:
            table = soup.find('table', id='result')

            rows = table.findAll('tr')
            rows.pop(0)  # drop the header row

            for row in rows:
                cols = row.findAll('td')

                # Skip spacer/section rows with fewer than 3 cells.
                if len(cols) > 2:
                    # NOTE(review): .contents is a list (e.g. [u'QB']);
                    # this probably wants .contents[0] — confirm against
                    # the page markup before changing.
                    position = cols[0].contents
                    number = cols[1].contents
                    # Name cell is shaped <td><a>Last, First</a></td>.
                    (lastName, firstName) = cols[2].contents[0].contents[0].split(',')

                    # strip() removes the space left after the comma split.
                    self.roster.addPlayer(lastName.strip(), firstName.strip(), position, number)

        except Exception:
            print "Unexpected error:", sys.exc_info()[0]
            traceback.print_exc(file=sys.stdout)

    def getRosterWithUrl(self, url):
        """Fetch BASE_URL + url, parse the roster table on it, and recurse
        through any 'next' pagination links."""
        # BUG FIX: was 'BASE_URL + self.url', which ignored the passed-in
        # page URL and re-fetched page 1 on every pagination step.
        full_url = BASE_URL + url

        # Sleep to avoid hitting the server too hard
        time.sleep(3)

        try:
            response = urllib2.urlopen(full_url)
            html = response.read()
            soup = BeautifulSoup(html)

            self.parseRoster(soup)

            # Go to the next roster page if there is one.
            next_text = soup.find(text='next')
            if next_text:
                next_url = next_text.parent['href']
                # BUG FIX: was self.getRosterWithUrl(self, next_url),
                # which passed self twice and raised TypeError.
                self.getRosterWithUrl(next_url)

        except urllib2.URLError as e:
            # BUG FIX: URLError does not unpack as (errno, strerror), and
            # '"..." % errno, strerror' built a tuple instead of formatting.
            print "URL error: %s" % e.reason
        except Exception:
            print "Unexpected error:", sys.exc_info()[0]

    def getRoster(self):
        """Kick off the roster scrape starting from this team's page."""
        self.getRosterWithUrl(self.url)

class TeamList:
    teams = []

    def __init__(self, teamList):
        self.teams = [Team(team) for team in teamList]

    def __str__(self):
        string = "Team List:\n"
        string += ",\n".join(["   %s" % str(team) for team in self.teams])
        return string

    def getTeamUrls(self):
        url = BASE_URL + '/players'

        try:
            response = urllib2.urlopen(url)

            html = response.read()

            soup = BeautifulSoup(html)

            for team in self.teams:
                # Finding all text elements with the team name, there will be 3, only one of which we want
                links_text = soup.findAll(text=team.name)
                links = [text.parent for text in links_text]

                # Find the element with an href that starts with '/players'
                team_url = [tag['href'] for tag in links 
                        if tag.has_key('href') and tag['href'].startswith('/players')][0] 

                team.url = team_url


        except urllib2.URLError as (errno, strerror):
            print "URL error(%d): %d" % errno, strerror
        except:
            print "Unexpected error:", sys.exc_info()[0]

    def getTeamRosters(self):
        for team in self.teams:
            team.getRoster()
            print team

class TeamScraper:
    """Top-level driver: builds the team list, resolves each team's roster
    URL, then scrapes every roster."""

    def __init__(self):
        # Class-level 'teamList = None' removed: it was always overwritten
        # here and only obscured the real instance attribute.
        self.teamList = TeamList(TEAMS)

    def scrape(self):
        """Run the full scrape: URLs first, then all rosters."""
        self.teamList.getTeamUrls()
        self.teamList.getTeamRosters()


def main():
    """Entry point: scrape and print every NFL team roster."""
    TeamScraper().scrape()


if __name__ == "__main__":
    main()


