#!/usr/bin/python2.7
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import datetime
import logging
import os
import sys
import time

from google.appengine.api import app_identity
from google.appengine.api import channel
from google.appengine.api import memcache
from google.appengine.ext import ndb

from log2gs import FIELDS, Log2Gs

import httplib2
import webapp2
from apiclient.discovery import build
from apiclient.errors import HttpError
from oauth2client.appengine import AppAssertionCredentials
from mapreduce import base_handler
from mapreduce.lib import pipeline


import jinja2
# Jinja environment with templates loaded relative to this file.
# NOTE(review): not referenced elsewhere in this module's visible code
# (status messages use jinja2.Template directly) — confirm before removing.
jinja_environment = jinja2.Environment(
    loader=jinja2.FileSystemLoader(os.path.dirname(__file__)))


# BigQuery destination: numeric project id, dataset name, and per-run table
# name pattern — filled with the application id and a YYYYMMDD date in Gs2Bq.
bqproject = '56968918038'
bqdataset = 'logs'
bqtable = 'requestlogs_%s_%s'
# Google Storage bucket where the Log2Gs child pipelines write CSV files.
gsbucketname = 'applibotlog2bq'

# Set True to silence the Channel-API status messages sent by message().
_MESSAGES_DISABLED = False
_CLEAN_UP_AFTER_DAYS = 30 # Set to -1 to not clean up.
# Seconds between BqCheck polls of a running BigQuery load job.
_PIPELINE_DELAY_S = 10

# Module-level cache, built lazily by get_bigquery_service().
bigquery_service = None

def get_bigquery_service():
  """Return the shared BigQuery API client, building it on first use.

  Authenticates as the app's service account; the memcache module is
  handed to httplib2 as its response cache.
  """
  global bigquery_service
  if bigquery_service is None:
    scope = 'https://www.googleapis.com/auth/bigquery'
    credentials = AppAssertionCredentials(scope=scope)
    authorized_http = credentials.authorize(httplib2.Http(memcache))
    bigquery_service = build('bigquery', 'v2', http=authorized_http)
  return bigquery_service


def get_bigquery_service_jobs():
  """Shortcut for the jobs() collection of the shared BigQuery client."""
  service = get_bigquery_service()
  return service.jobs()


def message(root_pipeline_id, template, *args, **kwargs):
  """Render a status line and push it to the browser via the Channel API.

  Best effort only: does nothing when _MESSAGES_DISABLED is set, or when
  no status page has registered a channel client id in memcache.

  Args:
    root_pipeline_id: id of the root pipeline, prefixed onto the message
        so the status page can group messages per run.
    template: Jinja2 template string rendered with root_pipeline_id plus
        any keyword arguments.
  """
  if _MESSAGES_DISABLED:
    return

  # Renamed from 'message' to avoid shadowing this function's own name.
  text = jinja2.Template(template).render(root_pipeline_id=root_pipeline_id, **kwargs)
  logging.debug(text)
  client_id = memcache.get('client_id')
  if not client_id:
    # Memcache miss (eviction, or no listener ever registered): sending to
    # a None client id would raise and fail the pipeline over a purely
    # cosmetic status message, so just drop it.
    return
  channel.send_message(client_id, "%s,%s" % (root_pipeline_id, text))


class VersionId(ndb.Model):
    """Datastore record for an app version known to the log exporter.

    The entity id (set by new_instance) is the version id string itself.
    """
    # Set automatically on first put.
    create_gmt = ndb.DateTimeProperty(indexed=True, auto_now_add=True)
    # Refreshed automatically on every put.
    modification_gmt = ndb.DateTimeProperty(indexed=False, auto_now=True)
    instance = ndb.StringProperty(indexed=False, required=False)
    datacenter = ndb.StringProperty(indexed=False, required=False)

    @staticmethod
    def parent_key():
        # Single shared ancestor so all VersionId entities live in one
        # entity group (enables strongly-consistent ancestor queries).
        return ndb.Key('VersionIdParent', 1)

    @classmethod
    def new_instance(cls, id, **kwargs):
        """Create a VersionId entity keyed by the given version id string."""
        return cls(id=id, parent=VersionId.parent_key(), **kwargs)


class LogsRun(ndb.Model):
  """One log-export run: its time window, output files and BigQuery table."""
  # When this run record was created (also the sort key for "latest run").
  create_gmt = ndb.DateTimeProperty(auto_now_add=True, indexed=True, required=True)
  # The [start, end) window of request logs covered by this run (naive UTC).
  logs_start_gmt = ndb.DateTimeProperty(required=True, indexed=False)
  logs_end_gmt = ndb.DateTimeProperty(required=True, indexed=False)
  # '/gs/<bucket>/...' CSV files written by Log2Gs; filled in by Gs2Bq.
  files = ndb.StringProperty(repeated=True, indexed=False)
  # Destination BigQuery table id; filled in by Gs2Bq.
  table = ndb.StringProperty(indexed=False)
  # Set once the BigQuery load job has settled (BqCheck / dev-server path).
  finish_gmt = ndb.DateTimeProperty(required=False, indexed=False)

  @classmethod
  def new_instance(cls, logs_start_time, logs_end_time):
      """Build an unsaved LogsRun covering the given unix-timestamp window.

      Timestamps are stored truncated to whole seconds as naive UTC.
      """
      logs_run = cls()
      # Bug fix: time.gmtime() returns a struct_time whose index 6 is
      # tm_wday (the weekday), so the old [:7] slice passed the weekday as
      # the datetime constructor's microsecond argument. Only the first six
      # fields (year, month, day, hour, minute, second) belong here.
      logs_run.logs_start_gmt = datetime.datetime(*time.gmtime(logs_start_time)[:6])
      logs_run.logs_end_gmt = datetime.datetime(*time.gmtime(logs_end_time)[:6])
      return logs_run


def get_last_logs_run_end_time():
  """Return, as a unix timestamp, where the previous export run stopped.

  Falls back to 24 hours ago when no LogsRun has ever been recorded.
  """
  last_run = LogsRun.query().order(-LogsRun.create_gmt).get()
  if last_run is None:
      return time.time() - 3600 * 24
  epoch = datetime.datetime.utcfromtimestamp(0)
  return (last_run.logs_end_gmt - epoch).total_seconds()


class Log2Bq(base_handler.PipelineBase):
  """A pipeline to ingest log as CSV in Google Big Query

  Fans out one Log2Gs child pipeline per version id (request logs -> CSV
  files in Google Storage), hands every resulting file-list future to a
  single Gs2Bq load pipeline, then schedules cleanup of old runs.
  """

  def run(self, start_time, end_time, version_ids, logs_run_key=None):
    # Announce the start of the run on the status page channel.
    message(self.root_pipeline_id, '<span class="label label-info">started</span> ae://logs <i class="icon-arrow-right"></i> bq://{{ dataset }} <a href="{{ base_path }}/status?root={{ root_pipeline_id }}#pipeline-{{ pipeline_id }}">pipeline</a>',
            dataset=bqdataset,
            base_path=self.base_path,
            pipeline_id=self.pipeline_id)
    futures = []  # Futures that each resolve to a list of CSV file names.
    for version_id in version_ids:
      # Each yield returns a future; passing them to Gs2Bq below makes the
      # pipeline framework wait until every Log2Gs child has finished.
      files = yield Log2Gs(gsbucketname, start_time, end_time, [version_id])
      futures.append(files)

    # The destination table is named after the window's start day (YYYYMMDD).
    date = Log2Gs.start_date_from_time(start_time)
    yield Gs2Bq(date.strftime('%Y%m%d'), logs_run_key, *futures)
    if _CLEAN_UP_AFTER_DAYS > 0:
      yield CleanUpOldRuns()


class CleanUpOldRuns(base_handler.PipelineBase):
  """Delete the artifacts of export runs older than _CLEAN_UP_AFTER_DAYS.

  Processes up to 100 old LogsRun entities per run: deletes their Google
  Storage CSV files and BigQuery table, then the datastore record itself.
  """

  def __init__(self, *args, **kwargs):
      super(CleanUpOldRuns, self).__init__(*args, **kwargs)
      # Service-account credentials for deleting objects in Google Storage;
      # the memcache module backs httplib2's response cache.
      self.storage_credentials = AppAssertionCredentials(
          'https://www.googleapis.com/auth/devstorage.read_write')
      self.storage_http = self.storage_credentials.authorize(httplib2.Http(memcache))
      # Length of the '/gs/<bucket>/' prefix stored in LogsRun.files entries.
      self.gs_prefix_len = len('/gs/%s/' % gsbucketname)

  def delete_bigquery_table(self, table):
      """Delete the run's BigQuery table; treat 404 as already deleted."""
      try:
          get_bigquery_service().tables().delete(
              projectId=bqproject, datasetId=bqdataset, tableId=table).execute()
      except HttpError as e:
          if e.resp['status'] == '404':
              logging.info('CleanUpOldRuns.delete_bigquery_table - %s is already deleted', table)
          else:
              raise e

  def clean_up_files(self, files):
      """Delete each '/gs/...' CSV file via the Google Storage REST API.

      Raises HttpError for any response other than 204 (deleted) or
      404 (already gone).
      """
      for csv_file in files:
          # Strip the '/gs/<bucket>/' prefix to get the bare object name.
          csv_file = csv_file[self.gs_prefix_len:]
          resp, content = self.storage_http.request('https://%s.storage.googleapis.com/%s' % (
              gsbucketname, csv_file), method='DELETE')
          if resp['status'] == '404':
              logging.info('CleanUpOldRuns.clean_up_files - %s is already deleted.', csv_file)
          elif resp['status'] == '204':
              logging.info('CleanUpOldRuns.clean_up_files - %s deleted.', csv_file)
          else:
              logging.info('CleanUpOldRuns.clean_up_files - unknown status (%s) for %s.\n\t%s',
                           resp['status'], csv_file, content)
              raise HttpError(resp, content, uri=csv_file)

  def run(self):
    days_30_ago = datetime.datetime.utcnow() - datetime.timedelta(days=_CLEAN_UP_AFTER_DAYS)
    query = LogsRun.query(LogsRun.create_gmt < days_30_ago)
    query = query.order(LogsRun.create_gmt)
    log_runs = query.fetch(100)
    for log_run in log_runs:
      logging.info('Deleting files %s & bigquery tablelog %s for %s',
                   log_run.files[0][:60], log_run.table, log_run.create_gmt)
      try:
        self.clean_up_files(log_run.files)
        self.delete_bigquery_table(log_run.table)
        log_run.key.delete()
      except HttpError:
        # Best effort: keep the record so a later run can retry the deletion.
        logging.error('CleanUpOldRuns.run - Not deleting run from %s', log_run.create_gmt)

  def run_test(self):
    # Pipeline test-mode hook: same behavior as a real run.
    self.run()

  def finalized_test(self):
    pass


class Gs2Bq(base_handler.PipelineBase):
  """A pipeline to ingest log csv from Google Storage to Google BigQuery.

  Flattens the file lists produced by the Log2Gs children, records them on
  the LogsRun entity, then starts a BigQuery load job and a BqCheck
  pipeline to poll it.
  """

  def run(self, date, logs_run_key, *files):
    # One table per app per day, e.g. requestlogs_<appid>_20130101.
    table = (bqtable % (app_identity.get_application_id(), date)).replace('-', '_')

    # Each positional argument is either a single file name or a list of
    # them; flatten everything into one list.
    all_files = []
    for entry in files:
        if isinstance(entry, list):
            all_files.extend(entry)
        else:
            all_files.append(entry)

    logs_run = None
    if logs_run_key:
      logs_run = ndb.Key(urlsafe=logs_run_key).get()
      logs_run.files = all_files
      logs_run.table = table
      logs_run.put()

    # BigQuery expects gs:// URIs, not the App Engine '/gs/' form.
    gspaths = [name.replace('/gs/', 'gs://') for name in all_files]
    message(self.root_pipeline_id, '<span class="label label-info">started</span> {{ gs }} <i class="icon-arrow-right"></i> bq://{{ dataset }}/{{ table }} <a href="{{ base_path }}/status?root={{ root_pipeline_id }}#pipeline-{{ pipeline_id }}">pipeline</a>',
            gs=gspaths[0],
            dataset=bqdataset,
            table=table,
            base_path=self.base_path,
            pipeline_id=self.pipeline_id)

    # The dev server has no BigQuery; mark the run finished and stop here.
    if os.environ.get('SERVER_SOFTWARE','').startswith('Dev'):
        if logs_run:
          logs_run.finish_gmt = datetime.datetime.utcnow()
          logs_run.put()
        return

    result = get_bigquery_service_jobs().insert(
        projectId=bqproject, body=jobData(table, gspaths)).execute()
    yield BqCheck(result['jobReference']['jobId'], logs_run_key)


class BqCheck(base_handler.PipelineBase):
  """Poll a BigQuery job until it leaves the PENDING/RUNNING states.

  Re-schedules itself after a fixed delay while the job is in flight; once
  the job settles, stamps the LogsRun entity with a finish time and
  returns the final job status.
  """

  def run(self, job, logs_run_key):
    jobs = get_bigquery_service_jobs()
    status = jobs.get(projectId=bqproject, jobId=job).execute()
    state = status['status']['state']

    if state in ('PENDING', 'RUNNING'):
      # Still in flight: post a progress message and poll again later.
      message(self.root_pipeline_id, '<span class="label label-warning">{{ status }}</span> bq://jobs/{{ job }}',
              job=job,
              status=state.lower())
      delay = yield pipeline.common.Delay(seconds=_PIPELINE_DELAY_S)
      with pipeline.After(delay):
        yield BqCheck(job, logs_run_key)
    else:
      message(self.root_pipeline_id, '<span class="label label-success">{{ status }}</span> bq://jobs/{{ job }} <a href="{{ base_path }}/status?root={{ root_pipeline_id }}#pipeline-{{ pipeline_id }}">pipeline</a>',
              job=job,
              status=state.lower(),
              base_path=self.base_path,
              pipeline_id=self.pipeline_id)
      if logs_run_key:
        run_record = ndb.Key(urlsafe=logs_run_key).get()
        run_record.finish_gmt = datetime.datetime.utcnow()
        run_record.put()
      yield pipeline.common.Return(status)


# Below is a sample handler that could be used to start the log2bq pipeline
# from a cronjob.
class BaseHandler(webapp2.RequestHandler):
    """Request handler that starts a pipeline from cron or a form POST."""

    def pipeline(self):
        """Subclasses must return the pipeline instance to run."""
        raise NotImplementedError('Must return a pipeline to run')

    def allow_cron(self):
        """Returns true if the pipeline should start when run from cron."""
        return False

    def get(self):
        # App Engine sets X-Appengine-Cron: true on genuine cron requests.
        is_cron = self.request.headers.get('X-Appengine-Cron') == 'true'
        if is_cron and self.allow_cron():
            self._start_job()
            return

        # Not a cron hit: show a manual start button instead.
        self.response.out.write("""
<html><body>

<form method="POST">
    <button>Start Job</button>
</form>

</body></html>
""")

    def post(self):
        self._start_job()

    def _start_job(self):
        job = self.pipeline()
        job.start(queue_name='mapreduce')
        self.redirect(job.base_path + "/status?root=" + job.pipeline_id)


class Log2BqHandler(BaseHandler):
    """Cron/HTTP entry point that starts the Log2Bq export pipeline."""

    def allow_cron(self):
        return True

    @staticmethod
    def get_version_ids(versions, start_gmt):
        """Pick which app versions to export logs for.

        Keeps every version modified since the run started, within the last
        three days, or as recently as the newest version (which always
        matches itself, so the current version is always included).
        'beta' is always exported.

        Args:
            versions: VersionId entities (may be empty).
            start_gmt: naive-UTC datetime, start of the log window.

        Returns:
            List of version id strings.
        """
        if not versions:
            return ['beta']

        versions = sorted(versions, key=lambda v: v.modification_gmt, reverse=True)
        current_version = versions[0]
        # Bug fix: modification_gmt is written by ndb's auto_now, which
        # stores naive UTC. Comparing it against datetime.now() (local
        # server time) is wrong on any non-UTC clock; use utcnow().
        three_days_ago = datetime.datetime.utcnow() - datetime.timedelta(days=3)
        version_ids = [version.key.id() for version in versions
                       if (version.modification_gmt >= start_gmt or
                           version.modification_gmt >= three_days_ago or
                           version.modification_gmt >= current_version.modification_gmt)]
        # Always export 'beta', but don't list it twice if a VersionId
        # entity with that id already matched (a duplicate would make the
        # pipeline export its logs twice).
        if 'beta' not in version_ids:
            version_ids.append('beta')
        return version_ids

    def pipeline(self):
        start_time = get_last_logs_run_end_time()
        end_time = time.time() - 60  # The logs service can be a bit behind.
        assert start_time < end_time, 'Start time must be more than 1 minute ago %s < %s' % (
            start_time, end_time)
        query = VersionId.query()
        query = query.order(-VersionId.create_gmt)  # Ideally this should be modification_gmt
        versions = query.fetch(100)

        # Record the run first so its key can travel through the pipeline.
        logs_run = LogsRun.new_instance(logs_start_time=start_time, logs_end_time=end_time)
        logs_run.put()
        version_ids = Log2BqHandler.get_version_ids(versions, logs_run.logs_start_gmt)

        return Log2Bq(start_time, end_time, version_ids, logs_run_key=logs_run.key.urlsafe())


def jobData(tableId, sourceUris):
  """Build the request body for a BigQuery load job.

  The job appends the given Google Storage CSV files into the destination
  table, creating it with the shared FIELDS schema if it does not exist.
  """
  load_config = {
      'sourceUris': sourceUris,
      'schema': {
          'fields': FIELDS
          },
      'destinationTable': {
          'projectId': bqproject,
          'datasetId': bqdataset,
          'tableId': tableId
          },
      'createDisposition': 'CREATE_IF_NEEDED',
      'writeDisposition': 'WRITE_APPEND',
      'encoding': 'UTF-8'
      }
  return {'projectId': bqproject, 'configuration': {'load': load_config}}
