#
# This module defines and registers Pyramid application event handlers.
#
import cPickle as pickle 
from functools import partial
import json
import logging
import os
import tempfile
import types
from pyramid.events import ApplicationCreated
from pyramid.events import subscriber
from bbkb import mturk
from bbkb.KnowledgeBase import KnowledgeBase, Item, _id_generator
from bbkb.KnowledgeBase import _id_generator

# NOTE(review): this binds the KnowledgeBase *class* itself, not an instance;
# every call below (kb.add, kb.load_state, kb.dump_state) therefore goes
# through the class — confirm KnowledgeBase is designed for class-level use,
# otherwise this should probably be KnowledgeBase().
kb = KnowledgeBase

log = logging.getLogger(__name__)
# Data files live in a 'data' directory next to this module.
here = os.path.dirname(os.path.abspath(__file__))
data_root = os.path.join(here, 'data')
log.debug('load_data_on_startup from data directory: %s', data_root)

# Guard flag so load_data_on_startup only runs once, even if Pyramid fires
# the ApplicationCreated event more than once.
loaded_data = False

#temp lookup table only used on import for finding player KB id from playerid
pID_LT_temp = {}

# Years of Data to Load for player stats (newest first)
loadYears = ['2010','2009','2008','2007','2006','2005','2004','2003','2002','2001','2000']
# JSON keys skipped when converting player/stat records into KB items:
# identifier and bookkeeping columns, not actual stats.
ignoreStats = ['playerid','yearid','lgid','teamid','gidp','g_old','lahmanid','lahman40id','lahman45id','retroid','holtzid','bbrefid']

@subscriber(ApplicationCreated)
def load_data_on_startup(event):
    """Loads data from JSON files into knowledge base"""
    global loaded_data
    print '************************* LOADING INITIAL STARTUP DATA *****************************'
    if loaded_data is True:
        #raise "ALREADY LOADED DATA"  -- Not sure why we are getting called twice?
        return
    else:
        loaded_data = True
    
    # Check whether we can load from serialized form or not
    #ser_data = os.path.join(tempfile.gettempdir(), 'bbkb.dat')
    ser_data = 'bbkb.dat'
    log.info("BBKB pickle file is %s", ser_data)
    if os.path.exists(ser_data):
        log.info('Loading BBKB data from serialized data file')
        with open(ser_data,'rb') as data_file:
            data = pickle.load(data_file)
            kb.load_state(data)
    else:
        log.info('Loading BBKB data then creating serialized data file')
        _load_all_leagues()
        _persist_kb(kb, ser_data)
    
    # Load MTurk
    for f in mturk.list_files():
        mturk.load_surveys(f)
    print '************************* FINISHED INITIAL STARTUP DATA *****************************'

def _persist_kb(kb, ser_data='bbkb.dat'):
    with open(ser_data,'wb') as data_file:
        data = kb.dump_state()
        pickle.dump(data, data_file)

def _load_all_leagues():
    """Load both league JSON files from data_root, then the player data.

    Was written with map()-for-side-effects and a partial() curry; a plain
    loop is clearer and does not rely on map's eager Python 2 behavior.
    """
    league_files = [os.path.join(data_root, name)
                    for name in ('AmericanLeague.js', 'NationalLeague.js')]
    for league_file in league_files:
        _load_league(league_file)
    # TODO: Handle Players
    _load_players()
    

def _load_league(league_filename):
    """Load one league JSON file into the KB.

    Creates a 'league' fact with its name, a 'conference' fact per
    conference linked to the league, and team facts linked to their
    conference via _load_team.
    """
    # shortcode -> team fact; built but not currently consumed by callers
    teams = {}
    with open(league_filename, 'r') as league_file:
        league_json = json.loads(league_file.read())
        league = kb.add(Item('league'), Item('name', league_json['league']))[0]
        for conf_json in league_json['conferences']:
            conference = kb.add(Item('conference'), Item('name', conf_json['conference']))[0]
            kb.add(league, conference)
            for team_json in conf_json['teams']:
                team, shortcode = _load_team(conference, team_json)
                teams[shortcode] = team


def _load_team(conference, team_json):
    """Create a team fact under *conference* from a team JSON object.

    Returns a (team_fact, shortcode) pair.  The 'league' and 'division'
    keys are skipped because those relations are modeled as explicit KB
    links instead of attribute items.
    """
    team_fact = kb.add(Item('team'))[0]
    kb.add(conference, team_fact)
    skip_keys = ['league', 'division']
    _convert_jsonmap_to_items(team_fact, team_json, skip_keys)
    return (team_fact, team_json['shortcode'])


def _load_players():
    """Load core player records into the KB, then the per-year stat files."""
    log.info('Loading Players')
    core_path = os.path.join(data_root, 'playersCore.js')
    with open(core_path, 'r') as core_file:
        players_json = json.loads(core_file.read())
        for player_json in players_json:
            # TODO store playeritem/id in lookup map
            player_fact = kb.add(Item('player'))[0]
            # Remember this player's KB fact so the stat loaders below can
            # attach their items to the same player by playerid.
            pID_LT_temp[player_json['playerid']] = player_fact
            _convert_jsonmap_to_items(player_fact, player_json, ignoreStats)
            # TODO hookup team with player?

    # Per-year stats, resolved through pID_LT_temp.
    _load_batting_stats()
    _load_pitching_stats()
    _load_salary_stats()
    _load_fielding_stats()
    
	
def _load_stat_file_set(category, prefix):
    """Load <category>/<category><year>.js for every year in loadYears.

    Each record's stats are attached to the player fact recorded in
    pID_LT_temp during _load_players, tagged with the year and with the
    given key prefix (e.g. 'b_' for batting).

    category -- data subdirectory and file-name stem (e.g. 'batting')
    prefix   -- stat-key prefix passed to _convert_jsonmap_to_items_multilevel
    """
    for yearFile in loadYears:
        fileName = category + '/' + category + yearFile + '.js'
        log.info('Loading:' + fileName)
        stat_filename = os.path.join(data_root, fileName)
        with open(stat_filename, 'r') as stat_file:
            core_json = json.loads(stat_file.read())
            for stat_json in core_json:
                # Get player's kb_id from when they were added
                player_kb_id = pID_LT_temp[stat_json['playerid']]
                _convert_jsonmap_to_items_multilevel(player_kb_id, stat_json, yearFile, prefix, ignoreStats)


def _load_batting_stats():
    """Load per-year batting stats (keys prefixed 'b_')."""
    log.info('Loading batting stats')
    _load_stat_file_set('batting', 'b_')


def _load_pitching_stats():
    """Load per-year pitching stats (keys prefixed 'p_')."""
    log.info('Loading pitching stats')
    _load_stat_file_set('pitching', 'p_')


def _load_salary_stats():
    """Load per-year salary stats (keys prefixed 's_')."""
    log.info('Loading salary stats')
    _load_stat_file_set('salary', 's_')


def _load_fielding_stats():
    """Load per-year fielding stats (keys prefixed 'f_')."""
    log.info('Loading fielding stats')
    _load_stat_file_set('fielding', 'f_')
    
def _convert_jsonmap_to_items(root, json_data, ignores=()):
    """Converts a JSON map to Item objects associated with root.

    root - base fact to associate to
    json_data - root JSON object containing key-values to convert
    ignores - collection of keys to skip

    Fixes: mutable default argument replaced with a tuple, and the
    side-effecting map() replaced with a plain loop (map()'s eagerness is
    Python 2-only behavior).
    """
    items = []
    for key in json_data:
        # skip ignored keys (e.g. division and league)
        if key in ignores:
            continue
        for value in _convert_value(json_data[key]):
            if value is not None:
                items.append(Item(key, value))
    # Associate every converted item with the root fact.
    for item in items:
        kb.add(root, item)
    
    
def _convert_jsonmap_to_items_multilevel(root, json_data, year_input, stat_cat, ignores=()):
    """Converts a JSON map to Item objects tied to root plus a year fact.

    root - base fact (a player) to associate to
    json_data - JSON object containing key-values to convert
    year_input - season year; stored as Item('year', ...) on each association
    stat_cat - key prefix identifying the stat category (e.g. 'b_' = batting)
    ignores - collection of keys to skip

    Fixes: mutable default argument replaced with a tuple, and the
    side-effecting map() replaced with a plain loop.
    """
    items = []
    for key in json_data:
        # skip ignored keys (ids and bookkeeping columns)
        if key in ignores:
            continue
        for value in _convert_value(json_data[key]):
            if value is not None:
                items.append(Item(stat_cat + key, value))

    # One shared year fact links every stat item to its season.
    newYear = Item('year', year_input)
    for item in items:
        kb.add(root, newYear, item)


def _convert_value(value):
    """Utility method to convert collections and integers to appropriate types when loading data"""
    # Disregard empty associationts
    if value is None or len(value) == 0:
        return [None]
    # Handle typechecking
    if type(value) is types.ListType:
        value_list = []
        for v in value:
            value_list.extend(_convert_value(v))
        return value_list
    try:
        return [int(value)]
    except ValueError:
        return [value]
        


    
