"""Routines to handle data fetching + upload.
"""

import csv
import logging
import os
import re
import urllib
import StringIO

from django.utils import simplejson as json
from google.appengine.api import urlfetch
from google.appengine.api import users
from google.appengine.ext import webapp
from google.appengine.ext.webapp import template
from google.appengine.ext.webapp.util import run_wsgi_app

from google.appengine.ext import db

import data
import util

# replaces an entire user chunk with the new data.
def LoadChunk(bgguser, contentblock):
  """Replaces a user's game collection chunk entirely.

  The passed-in contentblock is chewed on to make sure it's in the
  required format and then stored as a chunk of csv lines into the
  data store.

  Any previous chunk for the user is deleted.

  Args:
    bgguser: BGG username the chunk belongs to.
    contentblock: raw csv text (str or unicode) as exported from BGG.

  Returns:
    The number of game rows stored, or -1 when an existing user's chunk
    was left untouched because the incoming data was empty.
  """

  def Listify(list_string):
    """Converts the named field to a search list

    This has the form "1 ,2,3" => ":1:2:3:" where numbers can be easily found
    by substring searches for ":n:"
    """
    if not list_string or not list_string[0].isdigit():
      return ""

    numbers = map(util.PlayerCountInt, list_string.split(","))
    return ":" + ":".join(map(str, numbers)) + ":"

  datablob = StringIO.StringIO()
  # count of stored games; stays 0 for the special case of a blank input
  # chunk, which can indicate a real user with no game records or a
  # nonexistent BGG user.
  count = 0

  if len(contentblock) > 0:
    # convert the incoming csvfile into canonical form.
    csv_out = csv.writer(datablob)

    # (input heading, output heading) pairs we extract and keep from the
    # input csv file; every other input column is dropped.
    useful_columns = [ ("objectname", "name"),
                       ("objectid", "id"),
                       ("rating", "rating"),
                       ("baverage", "avg_rating"),
                       ("avgweight", "weight"),
                       ("numplays", "num_plays"),
                       ("bggbestplayers", "best_with"),
                       ("bggrecplayers", "rec_with"),
                       ("minplayers", "min_players"),
                       ("maxplayers", "max_players"),
                       ("playingtime", "playing_time"),
                       ("rank", "rank"), ]

    in_columns = [pair[0] for pair in useful_columns]
    out_columns = [pair[1] for pair in useful_columns]

    csv_out.writerow(out_columns)

    headings = None
    # The py2 csv module chokes on unicode input; work on utf-8 bytes.
    if type(contentblock) is unicode:
      contentblock = contentblock.encode('utf-8')
    csv_reader = csv.reader(contentblock.split("\n"))

    for row in csv_reader:
      if not row or len(row) < 2 or row[0] == "#":
        continue
      if not headings:
        # First real row is the heading row.
        headings = row
        continue

      count += 1
      game_object = dict(zip(headings, row))
      out_row = []

      # skip unowned games.
      if "own" in game_object and game_object["own"] != "1":
        continue

      for col in in_columns:
        if col in [ "objectid", "numplays", "rank" ]:
          element = util.SafeInt(game_object[col])
        elif col in [ "rating", "baverage", "avgweight" ]:
          element = util.SafeFloat(game_object[col])
        elif col in [ "minplayers", "maxplayers", "playingtime" ]:
          element = util.PlayerCountInt(game_object[col])
        elif col in [ "objectname" ]:
          element = game_object[col]
          if type(element) is unicode:
            element = element.encode("utf8")
        elif col in [ "bggbestplayers", "bggrecplayers" ]:
          element = Listify(game_object[col])
        else:
          # Shouldn't happen: every in_column is handled above. Log and
          # emit a blank field instead of raising KeyError (the previous
          # code indexed game_object with a bogus key here).
          logging.error("Column not handled: %s" % col)
          element = ""

        out_row.append(element)

      csv_out.writerow(out_row)

  # write the new chunk or update the old one.
  chunk_query = data.GameChunk.all()
  chunk_query.filter("bgguser =", bgguser)
  res = chunk_query.fetch(1)

  if not res:
    chunk = data.GameChunk()
    chunk.bgguser = bgguser
  else:
    chunk = res[0]

  if chunk.csvdata and len(datablob.getvalue()) == 0:
    # Don't clobber a known user's data with an empty fetch result.
    # (Fixed: this previously called the undefined name "log".)
    logging.error("Not updating known user %s with empty data" % bgguser)
    count = -1
  else:
    chunk.csvdata = unicode(datablob.getvalue(), 'utf-8')
  chunk.put()

  util.ClearMissingRecord(bgguser)
  datablob.close()

  # we don't do anything further on the error case.
  if count < 0:
    return count

  # update metadata.
  coll_query = data.CollectionEntry.all()
  coll_query.filter("bgguser = ", bgguser)
  res = coll_query.fetch(1)
  if not res:
    coll = data.CollectionEntry()
    coll.bgguser = bgguser
    coll.num_fetches = 1
    coll.size = count
  else:
    coll = res[0]
    coll.size = count
    if not coll.num_fetches:
      coll.num_fetches = 0
    coll.num_fetches = coll.num_fetches + 1
  coll.put()

  return count

# If called, loads a chunk of data (with heading 1st line) into the user's
# database.
def LoadGames(bgguser, contentblock):
  """Parses csv game rows and stores one GameEntry per owned row.

  Args:
    bgguser: BGG username the rows belong to.
    contentblock: raw csv text (str or unicode) with a heading first line.

  Returns:
    The number of data rows seen (including rows that failed to parse).
  """

  def Listify(list_string):
    "Converts the named field to a list of numbers from csv"
    if not list_string or not list_string[0].isdigit():
      return []
    # Fixed: PlayerCountInt was referenced unqualified; it lives in util.
    return map(util.PlayerCountInt, list_string.split(","))

  count = 0
  headings = None
  # The py2 csv module chokes on unicode input; work on utf-8 bytes.
  if type(contentblock) is unicode:
    contentblock = contentblock.encode('utf-8')
  csv_reader = csv.reader(contentblock.split("\n"))
  for row in csv_reader:
    if not row or len(row) < 2:
      continue
    if not headings:
      # First usable row is the heading row.
      headings = row
      continue

    count += 1
    game_object = dict(zip(headings, row))

    try:
      game = data.GameEntry()
      game.bgguser = bgguser
      game.name = unicode(game_object["objectname"], "utf-8")
      game.bggid = int(game_object["objectid"])
      game.my_rating = float(game_object["rating"])
      game.bgg_average = float(game_object["baverage"])
      game.weight = float(game_object["avgweight"])
      game.num_plays = util.SafeInt(game_object["numplays"])
      game.best_with = Listify(game_object["bggbestplayers"])
      game.rec_with = Listify(game_object["bggrecplayers"])
      # Fixed: these three also called the unqualified PlayerCountInt,
      # which raised NameError at runtime.
      game.min_players = util.PlayerCountInt(game_object["minplayers"])
      game.max_players = util.PlayerCountInt(game_object["maxplayers"])
      game.playing_time = util.PlayerCountInt(game_object["playingtime"])
      game.put()
    except (KeyError, ValueError):
      # A missing column or an unparseable int/float shouldn't abort the
      # whole load; log the bad row and keep going.
      logging.error("Skipping bad game row for %s: %r" % (bgguser, row))

  # update fetch metadata for this user.
  coll_query = data.CollectionEntry.all()
  coll_query.filter("bgguser = ", bgguser)
  res = coll_query.fetch(1)
  if not res:
    coll = data.CollectionEntry()
    coll.bgguser = bgguser
    coll.num_fetches = 1
    coll.size = count
  else:
    coll = res[0]
    if not coll.size:
      coll.size = count
    else:
      coll.size += count
    if not coll.num_fetches:
      coll.num_fetches = 1
    else:
      coll.num_fetches = coll.num_fetches + 1

  coll.put()

  return count

class FetchHandler(webapp.RequestHandler):
  """This is the external interface to a fetch.

  Currently a placeholder that just responds with a fixed string.
  """

  def get(self):
    response = self.response
    response.headers["Content-Type"] = "text/plain"
    response.out.write("fish!")

class QueueFetch(webapp.RequestHandler):
  """Fetch + store the games information for one BGG user.

  This handler really only works in the TaskQueue where long deadlines
  are available for it.

  """
  def CheckAndLockFetchLog(self, bgguser):
    """Records that a fetch for bgguser has started.

    Returns True if the fetch may proceed.
    TODO: add locking to check that we're not already doing one.
    """
    log_entry = data.FetchLogEntry()
    log_entry.bgguser = bgguser
    log_entry.status = "fetching"
    log_entry.put()
    return True

  def UpdateFetchLog(self, bgguser, status):
    """Appends a log entry recording the current fetch status."""
    log_entry = data.FetchLogEntry()
    log_entry.bgguser = bgguser
    log_entry.status = status
    log_entry.put()

  def UnlockFetchLog(self, bgguser):
    """Marks the fetch for bgguser as finished."""
    log_entry = data.FetchLogEntry()
    log_entry.bgguser = bgguser
    log_entry.status = "done"
    log_entry.put()


  def get(self):

    bgguser = self.request.get("bgguser")
    if not bgguser or not re.match("^[a-zA-Z0-9 ]*$", bgguser):
      self.response.headers["Content-Type"] = "text/plain"
      self.response.out.write("Invalid BGG user")
      return

    # Avoid spamming BGG while this is being set up.
    if bgguser == "thepackrat":
      url = "http://www.rattus.net/~packrat/games/files/thepackrat.gamecsv"
    else:
      params = urllib.urlencode({ "action": "exportcsv",
                                  "subtype": "boardgame",
                                  "username": bgguser })
      url = "http://www.boardgamegeek.com/geekcollection.php?" + params

    if not self.CheckAndLockFetchLog(bgguser):
      return

    webfile = urlfetch.fetch(url, deadline = 500)
    if webfile.status_code != 200:
      # Record the failure so the log doesn't show this user as stuck
      # in "fetching" forever (previously we returned without updating).
      self.UpdateFetchLog(bgguser, "error")
      self.response.headers["Content-Type"] = "text/plain"
      self.response.out.write("Error fetching user data for %s" % bgguser)
      return

    collection_csv = webfile.content
    self.UpdateFetchLog(bgguser, "updating")
    count = LoadGames(bgguser, collection_csv)
    self.UnlockFetchLog(bgguser)

    self.response.headers["Content-Type"] = "text/plain"
    self.response.out.write("Loaded %d games for %s\n" % (count, bgguser))

class StoreChunk(webapp.RequestHandler):
  "Access point for storing data chunk with an external tool"
  def post(self):
    bgguser = self.request.get('bgguser')
    # Renamed from "data": that local shadowed the imported data module.
    chunk_data = self.request.get('data')
    token = self.request.get('token')

    # Only accept uploads that carry a valid shared-secret token.
    if not util.CheckToken("StoreChunk", token):
      self.response.headers["Content-Type"] = "text/plain"
      self.response.out.write("Invalid token provided, no data stored")
      logging.error("Invalid token provided when uploading games for %s" %
                    bgguser)

    else:
      count = LoadChunk(bgguser, chunk_data)
      self.response.headers["Content-Type"] = "text/plain"
      self.response.out.write("%d games stored as chunk" % count)


# make this prettier.
class DataStatus(webapp.RequestHandler):
  """Dumps a plain-text summary of collections and recent fetches."""

  def get(self):
    out = self.response.out
    self.response.headers["Content-Type"] = "text/plain"

    # One line per collection, largest first.
    collections = data.CollectionEntry.all()
    collections.order("-size")
    for entry in collections:
      if not entry.num_fetches:
        entry.num_fetches = 1
      out.write(
        ("%-5d %-30s    created:%s  last:%s  %d total\n") % (
        entry.size, entry.bgguser,
        entry.created.date().isoformat(),
        entry.last_updated.date().isoformat(),
        entry.num_fetches))

    # Follow with the ten most recent fetch log entries.
    recent_query = data.FetchLogEntry.all()
    recent_query.order("-date")
    out.write("\n")
    for entry in recent_query.fetch(10):
      out.write("%-30s  %-10s  %s\n" % (
        entry.date.isoformat(), entry.status, entry.bgguser))

class ListMissing(webapp.RequestHandler):
  """Display requested but unloaded bgg users

  This provides a bare list of what users need to be loaded, suitable for
  use with external data scripts
  """
  def get(self):
    self.response.headers["Content-Type"] = "text/plain"
    # Emit up to ten pending usernames, one per line.
    for missing in data.MissingBGGUser.all().fetch(10):
      self.response.out.write("%s\n" % missing.bgguser)


# TODO. move the queue fetch into a separate namespace.

# URL routing table for the /data/ handlers.
_URL_MAP = [
    ('/data/fetch', FetchHandler),
    ('/data/queuefetch', QueueFetch),
    ('/data/missing', ListMissing),
    ('/data/storechunkcsv', StoreChunk),
    ('/data/status', DataStatus),
]

application = webapp.WSGIApplication(_URL_MAP, debug=True)


def main():
  run_wsgi_app(application)


if __name__ == "__main__":
  main()
