import hashlib
import logging

from google.appengine.ext import blobstore
from google.appengine.ext import deferred
from google.appengine.ext import db

from lib import cache
from lib import models


class Error(Exception):
  """Base class for replay-parsing errors raised by this module."""


class BlobNotFoundException(Error):
  """Raised when a replay's backing blob is missing from blobstore."""


class BadHashException(Error):
  """Raised when a blob's SHA-256 digest differs from the recorded hash."""


def ParseReplay(replay):
  """Verify a replay blob's integrity against its recorded SHA-256 digest.

  Args:
    replay: entity with a `blob` property (blobstore key) and a `sha256`
        property (expected hex digest) — presumably a models.Replay.

  Raises:
    BlobNotFoundException: the replay's blob no longer exists in blobstore.
    BadHashException: the blob's contents do not hash to replay.sha256.
  """
  blob_reader = blobstore.BlobReader(replay.blob)
  digest = hashlib.sha256()
  try:
    # Hash in fixed-size chunks instead of a single read() of the whole
    # blob, so arbitrarily large replays stay within instance memory.
    while True:
      chunk = blob_reader.read(1024 * 1024)
      if not chunk:
        break
      digest.update(chunk)
  except blobstore.BlobNotFoundError:
    # Include the blob key so the failure is attributable from logs alone.
    raise BlobNotFoundException('blob not found: %s' % replay.blob)
  sha256 = digest.hexdigest()
  if sha256 != replay.sha256:
    raise BadHashException('%s != %s' % (sha256, replay.sha256))


def ParseReplays(batch_task_key, replays):
  """Validate each replay, recording every failure as a TaskResult.

  A replay that raises a module Error gets a TaskResult entity tied to
  batch_task_key; replays that validate cleanly produce no output.
  """
  for current in replays:
    try:
      ParseReplay(current)
    except Error as err:
      failure = models.TaskResult(
          batch_task=batch_task_key,
          target=current,
          result=str(err))
      failure.put()


def FetchAndParseReplays(batch_task_key, start_cursor, limit):
  """Process one batch of Replay entities and chain the next batch.

  Fetches up to `limit` replays starting at `start_cursor`, defers the
  next batch if this one was full, then parses this batch and updates
  the BatchTask's progress counters in a datastore transaction.

  Args:
    batch_task_key: key of the models.BatchTask tracking this job.
    start_cursor: datastore query cursor, or None to start from the top.
    limit: maximum number of replays to handle per task invocation.

  Raises:
    deferred.PermanentTaskFailure: parsing or the progress transaction
        failed after possibly committing partial results; the task must
        not be retried, since a rerun could double-apply the counters.
  """
  cache.DisableCache()
  query = (models.Replay.all()
                        .with_cursor(start_cursor))
  replays = query.fetch(limit)
  replay_count = len(replays)

  # NOTE: the next batch is deferred before this batch is parsed, so the
  # chain keeps advancing even if parsing below fails permanently.
  if replay_count == limit:
    deferred.defer(FetchAndParseReplays, batch_task_key, query.cursor(), limit)

  try:
    ParseReplays(batch_task_key, replays)

    def MarkReplaysProcessed():
      # Transactional read-modify-write of the shared progress counters.
      batch_task = models.BatchTask.get(batch_task_key)
      batch_task.entities_processed += replay_count
      # A short batch means the query ran out of entities.
      batch_task.complete = batch_task.complete or (replay_count < limit)
      batch_task.put()
    db.run_in_transaction_custom_retries(100, MarkReplaysProcessed)
  except Exception:
    # Deliberately broad: any failure here is unsafe to retry because
    # entities_processed could be double-counted on a rerun. Log the full
    # traceback and convert the error into a permanent failure.
    logging.exception('Exception in critical section')
    raise deferred.PermanentTaskFailure('unsafe to restart')
