"""
Pipelines for log dump to Cloud Storage and then ingestion to BigQuery.
"""
import time
import json
import datetime

from google.appengine.api import memcache
import httplib2
from apiclient.discovery import build
from apiclient.errors import HttpError
from oauth2client.appengine import AppAssertionCredentials
from mapreduce import base_handler, mapreduce_pipeline
from mapreduce.lib import pipeline
from mapreduce.lib.pipeline.common import Log
from mapreduce import context
import webapp2

from log2bq.config import config

class HourlyCron(webapp2.RequestHandler):
    """
    Cron entry point: kicks off the hourly log-export pipeline.
    """
    def get(self):
        """
        Handles the cron GET request. An optional 'debug' query parameter
        (integer number of hours, default 0) is forwarded to run().
        """
        debug_hours = int(self.request.GET.get('debug', 0))
        run(debug=debug_hours)

def run(debug=None):
    """
    Starts the overall pipeline.

    Assumes that we are running once an hour. So if we run at 01:02, then we pull logs for
    [12:00, 01:00).

    If debug is provided, we will upload the logs for [debug] hours, and get the logs
    from the immediate now (as opposed to normalizing back to the previous hour).
    This makes it easier to ensure that there are some logs to harvest (e.g., on a
    test app that gets no traffic). Note that multiple uploads with a non-zero debug
    period will probably lead to duplicate rows in BigQuery.
    """
    now = datetime.datetime.utcnow()
    if debug:
        end_time = now
        start_time = end_time - datetime.timedelta(hours=debug)
    else:
        # normalize to be on the hour, in case cron fired late (can be up to 59 minutes late)
        end_time = datetime.datetime(now.year, now.month, now.day, now.hour)
        start_time = end_time - datetime.timedelta(hours=1)
    # convert to time.time() format
    start_time = float(start_time.strftime('%s.%f'))
    end_time = float(end_time.strftime('%s.%f'))
    versions = config.get_versions()
    p = Log2Bq(start_time, end_time, versions)
    p.start(queue_name=config.QUEUE)

class Log2Bq(base_handler.PipelineBase):
    """
    A pipeline to ingest log as CSV in Google Big Query

    Orchestrates the whole hourly run: scrub expired tables, dump logs to
    Cloud Storage, load them into BigQuery, then delete the staging files.
    """
    def run(self, start_time, end_time, version_ids):
        # start_time / end_time: epoch seconds bounding the log window.
        # version_ids: app versions whose logs are harvested.
        num_dropped = yield DropOldBqTables(start_time)
        yield Log.info('Dropped %s old table(s).', num_dropped)
        yield Log.info('Dumping logs for period (%s, %s) for versions %s.', start_time, end_time, version_ids)
        files = yield Log2Gs(start_time, end_time, version_ids)
        yield Log.info('Logs dumped to these Cloud Storage files: %s', files)
        gs2bq = yield Gs2Bq(start_time, files)
        # Delete the staging files only once the BigQuery ingest (and the file
        # dump it depends on) has completed.
        with pipeline.After(gs2bq, files):
            yield Log.info('Files have been ingested into BigQuery.')
            delete = yield DeleteGsFiles(files)
            with pipeline.After(delete):
                yield Log.info('Files have been deleted from Cloud Storage: %s', files)

class Log2Gs(base_handler.PipelineBase):
    """
    A pipeline that dumps request logs to Cloud Storage via a mapper job.
    """
    def run(self, start_time, end_time, version_ids):
        # Hoist the reader/writer configuration for readability; the mapper
        # reads logs in [start_time, end_time) and writes files to the
        # configured Cloud Storage bucket.
        reader_params = {
            "start_time": start_time,
            "end_time": end_time,
            "version_ids": version_ids,
            "include_app_logs": config.INCLUDE_APP_LOGS,
        }
        writer_params = {
            "filesystem": "gs",
            "gs_bucket_name": config.GS_BUCKET_NAME,
        }
        yield mapreduce_pipeline.MapperPipeline(
            config.GS_FILE_PREFIX,
            "log2bq.bq.log2json",
            "mapreduce.input_readers.LogInputReader",
            output_writer_spec="mapreduce.output_writers.FileOutputWriter",
            params={
                "input_reader": reader_params,
                "output_writer": writer_params,
                "root_pipeline_id": self.root_pipeline_id,
            },
            shards=16
        )

def getBqService():
    """Builds an authorized BigQuery v2 client using the app's service account."""
    credentials = AppAssertionCredentials(
        scope='https://www.googleapis.com/auth/bigquery')
    # memcache acts as httplib2's response cache.
    authorized_http = credentials.authorize(httplib2.Http(memcache))
    return build('bigquery', 'v2', http=authorized_http)

class Gs2Bq(base_handler.PipelineBase):
    """
    A pipeline that loads the dumped log files from Cloud Storage into BigQuery.
    """
    def run(self, start_time, files):
        # Daily destination table: prefix + YYYYMMDD derived from the window start.
        day_suffix = datetime.datetime.fromtimestamp(start_time).strftime('%Y%m%d')
        tableId = config.BQ_TABLE_PREFIX + day_suffix
        # The mapper emits /gs/bucket/object paths; BigQuery wants gs:// URIs.
        sourceUris = [path.replace('/gs/', 'gs://') for path in files]
        body = jobData(config.BQ_PROJECT_ID, config.BQ_DATASET, sourceUris, tableId)
        yield Log.info('Sending BigQuery job %s', body)
        result = getBqService().jobs().insert(
            projectId=config.BQ_PROJECT_ID, body=body).execute()
        # Hand off to BqCheck to poll the load job until completion.
        yield BqCheck(result['jobReference']['jobId'], tableId)

class BqCheck(base_handler.PipelineBase):
    """
    Polls a BigQuery job, re-spawning itself after a short delay while the
    job is still pending or running.
    """
    def run(self, jobId, tableId):
        job_status = getBqService().jobs().get(
            projectId=config.BQ_PROJECT_ID, jobId=jobId).execute()
        state = job_status['status']['state']
        if state in ('PENDING', 'RUNNING'):
            # Not done yet: wait a little, then spawn another check.
            yield Log.info('BigQuery job "%s" still running. Sleeping.', jobId)
            delay = yield pipeline.common.Delay(seconds=5)
            with pipeline.After(delay):
                yield BqCheck(jobId, tableId)
        else:
            if 'errors' in job_status['status']:
                for error in job_status['status']['errors']:
                    yield Log.error('Import error "%s" (%s).', error.get('message'), error.get('location'))
            else:
                yield Log.info('Logs have been imported to [%s.%s].' % (config.BQ_DATASET, tableId))
            yield pipeline.common.Return(job_status)

def getGsService():
    """Builds an authorized Cloud Storage (v1beta1 JSON API) client using the app's service account."""
    # read_write is sufficient for deleting staging objects; the broader
    # devstorage.full_control scope is not needed.
    credentials = AppAssertionCredentials(
        scope='https://www.googleapis.com/auth/devstorage.read_write')
    authorized_http = credentials.authorize(httplib2.Http(memcache))
    return build('storage', 'v1beta1', http=authorized_http)

class DeleteGsFiles(base_handler.PipelineBase):
    """
    Fans out one DeleteGsFile child pipeline per Cloud Storage file.
    """
    def run(self, files):
        # files: list of /gs/[bucket]/[object] specifiers from the mapper.
        for gs_path in files:
            yield DeleteGsFile(gs_path)

class DeleteGsFile(base_handler.PipelineBase):
    """
    Deletes a single file from Google Storage.

    See https://google-api-client-libraries.appspot.com/documentation/storage/v1beta1/python/latest/index.html
    """
    def run(self, file_):
        """
        file_ is assumed to be a full /gs/[bucket_name]/[object] specifier.
        """
        # The object id is the final path component (any trailing slash stripped).
        # DELETE https://www.googleapis.com/storage/v1beta1/b/{bucket}/o/{object}
        objectId = file_.rstrip('/').rpartition('/')[2]
        service = getGsService()
        # NOTE: this call needs the Cloud Storage JSON API, which is currently
        # invitation-only. Without it enabled on the Services tab in the API
        # Console, this part of the pipeline fails and the staging files are
        # not automatically deleted from Cloud Storage.
        service.objects().delete(
            bucket=config.GS_BUCKET_NAME, object=objectId).execute()

class DropOldBqTables(base_handler.PipelineBase):
    """
    Drops tables from BigQuery that are due to be scrubbed.

    Returns (via pipeline.common.Return) the number of tables dropped.

    See https://google-api-client-libraries.appspot.com/documentation/bigquery/v2/python/latest/bigquery_v2.tables.html#list
    """
    def run(self, start_time):
        # start_time: epoch seconds of the current harvest window; tables
        # older than config.SCRUB_AGE days before it are dropped.
        if config.SCRUB_AGE is None:
            # Scrubbing is disabled by configuration; nothing to drop.
            yield pipeline.common.Return(0)
        else:
            dt = datetime.datetime.fromtimestamp(start_time)
            olddt = dt - datetime.timedelta(days=config.SCRUB_AGE)
            # Cutoff name: any matching table that sorts before this is stale.
            oldTableName = config.BQ_TABLE_PREFIX + olddt.strftime('%Y%m%d')
            # get all the tables, sort them, and spin a pipeline for each
            tablesService = getBqService().tables()
            tables = tablesService.list(projectId=config.BQ_PROJECT_ID, datasetId=config.BQ_DATASET).execute()
            # yield Log.critical('tables response from BigQuery: %s', tables)
            # """
            # {u'totalItems': 3,
            #  u'tables': [
            #     {u'kind': u'bigquery#table',
            #      u'id': u'426491053653:fantasm_hr.Review',
            #      u'tableReference': {
            #         u'projectId': u'426491053653',
            #         u'tableId': u'Review',
            #         u'datasetId': u'fantasm_hr'
            #      }
            #     },
            #     {u'kind': u'bigquery#table',
            #      u'id': u'426491053653:fantasm_hr.log_20130322',
            #      u'tableReference': {
            #         u'projectId': u'426491053653',
            #         u'tableId': u'log_20130322',
            #         u'datasetId': u'fantasm_hr'}
            #     },
            #     {u'kind': u'bigquery#table',
            #         u'id': u'426491053653:fantasm_hr.log_20130323',
            #         u'tableReference':
            #         {u'projectId': u'426491053653',
            #          u'tableId': u'log_20130323', u'datasetId': u'fantasm_hr'}}
            # ], u'kind':
            # u'bigquery#tableList',
            # u'etag': u'"C5cqm8DTB59L7otLfkDVRQxNBP0/MG6450Hs8M-jxpU4C3di6ffn3pY"'}
            # """
            if 'tables' not in tables or not tables['tables']:
                yield pipeline.common.Return(0)
            else:
                # Lexicographic comparison is valid because the names end in a
                # fixed-width YYYYMMDD suffix.
                tableIds = [table['tableReference']['tableId'] for table in tables['tables']
                            if table['tableReference']['tableId'].startswith(config.BQ_TABLE_PREFIX) and
                               table['tableReference']['tableId'] < oldTableName]
                drops = []
                for tableId in tableIds:
                    drop = yield DropBqTable(tableId)
                    drops.append(drop)
                # Report the count only after every drop pipeline has finished.
                with pipeline.After(*drops):
                    yield pipeline.common.Return(len(tableIds))

class DropBqTable(base_handler.PipelineBase):
    """
    Drops a single BigQuery table from the configured dataset.

    See https://google-api-client-libraries.appspot.com/documentation/bigquery/v2/python/latest/bigquery_v2.tables.html#delete
    """
    def run(self, tableId):
        getBqService().tables().delete(
            projectId=config.BQ_PROJECT_ID,
            datasetId=config.BQ_DATASET,
            tableId=tableId).execute()
        yield Log.info('Dropped tableId "%s"', tableId)

def log2json(l):
    """
    Convert the log API RequestLog object to JSON.

    Yields exactly one newline-terminated JSON string (mapper convention)
    describing the request. Keys whose value is None are dropped because
    the BigQuery import rejects them.
    """
    # Split the resource into path and query string ('' when there is no '?').
    path, _, query_string = l.resource.partition('?')
    tags = config.generate_tags(l)
    data = {
        'timestamp'     : '%.6f' % l.start_time,
        'method'        : l.method,
        'path'          : path,
        'query'         : query_string or None,
        'status'        : l.status,
        'response_ms'   : int(l.latency*1000),
        'pending_ms'    : int(l.pending_time*1000),
        'loading'       : l.was_loading_request,
        'host'          : l.host,
        'ip'            : l.ip,
        'cost'          : '%.12f' % l.cost,
        'request_id'    : str(l.request_id),
        'task_name'     : l.task_name if l.task_name else None,
        'queue_name'    : l.task_queue_name if l.task_queue_name else None,
        'version_id'    : l.version_id,
        'response_size' : l.response_size,
        'tag'           : tags,
    }
    # add the app logs
    if config.INCLUDE_APP_LOGS:
        app_logs = [{
            'timestamp' : '%.6f' % al.time,
            'level'     : al.level,
            'message'   : al.message,
        } for al in l.app_logs]
        if app_logs:
            data['app_log'] = app_logs
    # Remove the keys with None as value; BigQuery import doesn't like them.
    # Build a fresh dict rather than popping while iterating over data.keys():
    # the original only worked because Python 2 keys() returns a list, and it
    # raises RuntimeError under Python 3.
    data = dict((key, value) for key, value in data.items() if value is not None)
    yield json.dumps(data) + '\n'

def jobData(bqproject, bqdataset, sourceUris, table):
    """
    Builds the body of a BigQuery load job for the dumped log files.

    Args:
        bqproject: BigQuery project id.
        bqdataset: destination dataset id.
        sourceUris: list of gs:// URIs holding newline-delimited JSON.
        table: destination table id.

    Returns:
        A dict suitable for passing as jobs().insert(body=...).
    """
    def field(name, field_type, mode=None, fields=None):
        # Builds one schema field entry, omitting optional keys left unset.
        entry = {'name': name, 'type': field_type}
        if mode is not None:
            entry['mode'] = mode
        if fields is not None:
            entry['fields'] = fields
        return entry

    # Nested record holding the application log lines of a request.
    app_log_fields = [
        field('timestamp', 'TIMESTAMP'),
        field('level', 'INTEGER'),
        field('message', 'STRING'),
    ]
    schema_fields = [
        field('timestamp', 'TIMESTAMP', 'REQUIRED'),
        field('method', 'STRING', 'REQUIRED'),
        field('path', 'STRING', 'REQUIRED'),
        field('query', 'STRING'),
        field('status', 'INTEGER', 'REQUIRED'),
        field('response_ms', 'INTEGER', 'REQUIRED'),
        field('pending_ms', 'INTEGER', 'REQUIRED'),
        field('loading', 'BOOLEAN', 'REQUIRED'),
        field('host', 'STRING', 'REQUIRED'),
        field('ip', 'STRING', 'REQUIRED'),
        field('cost', 'FLOAT', 'REQUIRED'),
        field('request_id', 'STRING', 'REQUIRED'),
        field('task_name', 'STRING'),
        field('queue_name', 'STRING'),
        field('version_id', 'STRING', 'REQUIRED'),
        field('response_size', 'INTEGER', 'REQUIRED'),
        field('tag', 'STRING', 'REPEATED'),
        field('app_log', 'RECORD', 'REPEATED', app_log_fields),
    ]
    return {
        'projectId': bqproject,
        'configuration': {
            'load': {
                'sourceUris': sourceUris,
                'sourceFormat': 'NEWLINE_DELIMITED_JSON',
                'schema': {'fields': schema_fields},
                'destinationTable': {
                    'projectId': bqproject,
                    'datasetId': bqdataset,
                    'tableId': table
                },
                'createDisposition': 'CREATE_IF_NEEDED',
                'writeDisposition': 'WRITE_APPEND',  # 'WRITE_TRUNCATE' would replace the day's table
                'encoding': 'UTF-8'
            }
        }
    }
