"""
Functions for interacting with BigQuery.

Thanks to Braden Bassingthwaite and VendAsta Technologies.
Provided without license.
"""
import logging
import datetime

try:
    import httplib2
    from oauth2client.appengine import AppAssertionCredentials
    from apiclient.discovery import build
    from apiclient.errors import HttpError
except ImportError:
    print """
    apiclient is here: https://code.google.com/p/google-api-python-client/
    oauth2client is here: https://code.google.com/p/google-api-python-client/wiki/OAuth2Client
    httplib2 is here: https://code.google.com/p/httplib2/
    uritemplate is here: https://pypi.python.org/pypi/uritemplate/0.5.2
    gflags (and gflags_validators) is here: https://code.google.com/p/python-gflags/
    """
    raise

from dsbq.config import config

# Public API of this module.
__all__ = ['submit_query_job', 'submit_backup_ingestion_job', 'submit_table_transfer_job',
           'submit_delete_table_job', 'is_job_complete', 'get_results_page']

# OAuth2 scope required for BigQuery API access.
SCOPE               = 'https://www.googleapis.com/auth/bigquery'
# Default number of result rows fetched per page in get_results_page().
PAGE_SIZE           = 5000
# Dictionary keys used in BigQuery API response bodies.
BQ_ROWS             = 'rows'
BQ_TOTAL_ROWS       = 'totalRows'
BQ_STATUS           = 'status'
BQ_STATE            = 'state'
BQ_ERRORS           = 'errors'
BQ_ERROR_RESULT     = 'errorResult'
BQ_JOB_REFERENCE    = 'jobReference'
BQ_JOB_ID           = 'jobId'
BQ_MESSAGE          = 'message'
BQ_LOCATION         = 'location'
BQ_REASON           = 'reason'
BQ_SCHEMA           = 'schema'
BQ_FIELDS           = 'fields'
BQ_NAME             = 'name'
# Job states reported under response[BQ_STATUS][BQ_STATE].
BQ_PENDING          = 'PENDING'
BQ_RUNNING          = 'RUNNING'
BQ_DONE             = 'DONE'
# Query priorities accepted by submit_query_job().
BQ_BATCH_MODE       = 'BATCH'
BQ_INTERACTIVE_MODE = 'INTERACTIVE'

def submit_query_job(query, query_mode=BQ_INTERACTIVE_MODE):
    """
    Submits a query to BigQuery for processing, returning a job_id.

    Args:
        query: the SQL query string to run.
        query_mode: query priority, BQ_INTERACTIVE_MODE or BQ_BATCH_MODE.

    Returns:
        The BigQuery job id of the queued query job.

    Raises:
        ValueError: if BigQuery reports errors while queueing the job.
    """
    jobs = get_jobs()
    body = build_query_job_data(query, query_mode)
    response = jobs.insert(projectId=config.API_PROJECT_ID, body=body).execute()
    if response[BQ_STATUS].get(BQ_ERRORS):
        # Raise ValueError for consistency with the other submit_* helpers in
        # this module; ValueError is an Exception subclass, so callers that
        # previously caught Exception are unaffected.
        raise ValueError('Error in queueing job "%r".' % response)
    return response[BQ_JOB_REFERENCE][BQ_JOB_ID]

def submit_backup_ingestion_job(entity_kind, backup_id, table_id=None):
    """
    Kick off the BigQuery load job that ingests a datastore backup file,
    returning the BigQuery job id.

    When no explicit table_id is given, the destination table name defaults
    to the entity kind itself.

    Raises:
        ValueError: if BigQuery reports errors while queueing the job.
    """
    destination_table = table_id or entity_kind
    job_body = build_load_job_data(config.DATASET, entity_kind, backup_id, destination_table)
    response = get_jobs().insert(projectId=config.API_PROJECT_ID, body=job_body).execute()
    if response[BQ_STATUS].get(BQ_ERRORS):
        raise ValueError('Error in queueing job "%r".' % response)
    return response[BQ_JOB_REFERENCE][BQ_JOB_ID]

def submit_table_transfer_job(entity_kind, temporary_table_id):
    """
    Queue the copy job that moves the temporary table over to the permanent
    per-kind table, returning the BigQuery job id.

    Raises:
        ValueError: if BigQuery reports errors while queueing the job.
    """
    copy_body = build_copy_job_data(config.DATASET, temporary_table_id, entity_kind)
    response = get_jobs().insert(projectId=config.API_PROJECT_ID, body=copy_body).execute()
    if response[BQ_STATUS].get(BQ_ERRORS):
        raise ValueError('Error in queueing job "%r".' % response)
    return response[BQ_JOB_REFERENCE][BQ_JOB_ID]

def submit_delete_table_job(table_id):
    """
    Delete the given table from the configured BigQuery dataset.
    """
    delete_request = get_tables().delete(
        projectId=config.API_PROJECT_ID,
        datasetId=build_dataset_id_from_dataset(config.DATASET),
        tableId=table_id)
    delete_request.execute()

def is_job_complete(job_id):
    """
    Check whether the BigQuery job with the given job_id has completed.

    Returns:
        True when the job state is DONE with no errors; False while the job
        is still pending or running.

    Raises:
        Exception: if the job reported an errorResult, or the job_id is
            unknown to BigQuery (HTTP 404).
    """
    try:
        jobs = get_jobs()
        result = jobs.get(projectId=config.API_PROJECT_ID, jobId=job_id).execute()
        status = result[BQ_STATUS]
        if status[BQ_STATE] == BQ_DONE and not status.get(BQ_ERRORS):
            return True
        if status.get(BQ_ERROR_RESULT):
            error = status[BQ_ERROR_RESULT]
            # Bug fix: the original formatted the message twice and never
            # included the error location, despite building a three-part
            # message. Report message, location and reason as intended.
            message = '%s\n%s\n%s' % (error.get(BQ_MESSAGE),
                                      error.get(BQ_LOCATION),
                                      error.get(BQ_REASON))
            raise Exception(message)
    except HttpError as e:
        if e.resp.status == 404:  # job_id not found
            raise Exception('job_id "%s" not found.' % job_id)
        raise
    return False

def get_results_page(job_id, start_index=None, page_size=PAGE_SIZE):
    """
    Fetch one page of results for a completed query job as a BigQueryPage.

    Response format:
    https://developers.google.com/bigquery/docs/reference/v2/jobs/getQueryResults#response
    """
    raw_results = get_jobs().getQueryResults(
        jobId=job_id,
        projectId=config.API_PROJECT_ID,
        startIndex=start_index or 0,
        maxResults=page_size).execute()
    return BigQueryPage(raw_results)

def get_bigquery_service():
    """
    Build an authorized BigQuery v2 service object using the App Engine
    service-account credentials.
    """
    authorized_http = AppAssertionCredentials(scope=SCOPE).authorize(httplib2.Http())
    return build('bigquery', 'v2', http=authorized_http)

def get_jobs():
    """
    Return the BigQuery jobs() API collection object.
    """
    service = get_bigquery_service()
    return service.jobs()

def get_tables():
    """
    Return the BigQuery tables() API collection object.
    """
    service = get_bigquery_service()
    return service.tables()

def build_query_job_data(query, query_mode):
    """
    Build the request body for a BigQuery query job.

    Args:
        query: the SQL query string.
        query_mode: query priority (BQ_INTERACTIVE_MODE or BQ_BATCH_MODE).
    """
    query_config = {
        'query': query,
        'priority': query_mode,
        'preserveNulls': True,
    }
    return {'configuration': {'query': query_config}}

def build_load_job_data(dataset, entity_kind, backup_id, table_id):
    """
    Build the request body for the BigQuery ingest (load) job that reads a
    datastore backup file out of Cloud Storage.
    """
    backup_uri = 'gs://%s/%s/%s.%s.backup_info' % (
        config.BUCKET_NAME, dataset, backup_id, entity_kind)
    destination = {
        'projectId': config.API_PROJECT_ID,
        'datasetId': build_dataset_id_from_dataset(dataset),
        'tableId': table_id,
    }
    load_config = {
        'sourceFormat': 'DATASTORE_BACKUP',
        # Overwrite any previous contents of the destination table.
        'writeDisposition': 'WRITE_TRUNCATE',
        'sourceUris': [backup_uri],
        'destinationTable': destination,
    }
    return {'configuration': {'load': load_config}}

def build_copy_job_data(dataset, temporary_table_id, entity_kind):
    """
    Build the job configuration that copies the temporary table over to the
    permanent per-kind table.
    """
    dataset_id = build_dataset_id_from_dataset(dataset)

    def _table_ref(table_id):
        # Both tables live in the same project and dataset.
        return {
            'projectId': config.API_PROJECT_ID,
            'datasetId': dataset_id,
            'tableId': table_id,
        }

    copy_config = {
        'sourceTable': _table_ref(temporary_table_id),
        'destinationTable': _table_ref(entity_kind),
        # Overwrite any previous contents of the destination table.
        'writeDisposition': 'WRITE_TRUNCATE',
    }
    return {'configuration': {'copy': copy_config}}

def build_dataset_id_from_dataset(dataset):
    """
    Generate a BigQuery-friendly dataset id by swapping hyphens for
    underscores (BigQuery dataset ids may not contain '-').
    """
    return '_'.join(dataset.split('-'))

def utc_datetime_to_usec(dt):
    """
    Convert a naive UTC datetime to microseconds since the Unix epoch.
    """
    # datetime(1970, 1, 1) is the Unix epoch in naive-UTC terms.
    seconds_since_epoch = (dt - datetime.datetime(1970, 1, 1)).total_seconds()
    return int(seconds_since_epoch * 1000000)

class BigQueryPage(object):
    """
    A page of BigQuery query results.

    Wraps the raw dict returned by jobs.getQueryResults().
    """
    def __init__(self, raw_result):
        """
        Initialize with the raw BigQuery getQueryResults response dict.
        """
        self.raw = raw_result

    @property
    def column_names(self):
        """
        Returns a list of column names taken from the result schema.
        """
        return [field[BQ_NAME] for field in self.raw[BQ_SCHEMA][BQ_FIELDS]]

    def get_index_for_column_name(self, requested_name):
        """
        Returns the index (0-based) of the column with the given name. None if not found.
        """
        for index, name in enumerate(self.column_names):
            if name == requested_name:
                return index
        return None

    @property
    def total_rows(self):
        """
        Returns the total rows in the entire result set (all pages, not just
        this one).
        """
        return int(self.raw[BQ_TOTAL_ROWS])

    def __len__(self):
        """
        Returns the number of rows on this page.
        """
        # Consistency fix: use the BQ_ROWS constant rather than the literal
        # 'rows' used previously, matching the rest of the module.
        if BQ_ROWS not in self.raw:
            return 0
        return len(self.raw[BQ_ROWS])

    def __iter__(self):
        """
        Yields each row on this page wrapped as a BigQueryRow.
        """
        # A page with no 'rows' key is treated as empty, matching __len__.
        for raw_row in self.raw.get(BQ_ROWS, []):
            yield BigQueryRow(raw_row)

class BigQueryRow(object):
    """
    A single BigQuery result row.

    BigQuery encodes a row as {'f': [{'v': value}, ...]}, one entry per
    column.
    """
    def __init__(self, raw_row):
        """
        Wrap a raw BigQuery row dict.
        """
        self.raw = raw_row

    def __getitem__(self, index):
        """
        Returns the value stored in the index-th column of this row.
        """
        cells = self.raw['f']
        return cells[index]['v']