"""
Ingest listing and reviews from cloud storage into big query.
"""
import os
import time
import logging

from google.appengine.ext import deferred

from dsbq.config import config
from dsbq.pipeline import bigquery
from dsbq.pipeline.models import BackupInformation

__all__ = ['start_upload']

COUNTDOWN = 5*60

def start_upload(entity_kind, iteration=1):
    """Kick off the BigQuery ingestion pipeline for `entity_kind`.

    Looks for today's datastore backup id; if the backup is not finished
    yet, re-enqueues itself to check again after COUNTDOWN seconds
    (incrementing `iteration` for traceability in the task URL). Once a
    backup id is found, defers `upload_to_temp_table` to load it.
    """
    backup_id = BackupInformation.get_todays_backup_id(config.BUCKET_NAME)
    if not backup_id:
        # Backup not ready yet: re-enqueue this task and poll again later.
        logging.info('Backup not yet complete. Will check again in a bit.')
        deferred.defer(start_upload, entity_kind,
                       iteration=iteration+1,
                       _queue=config.DEFERRED_QUEUE,
                       # (iteration + 1) must be parenthesized: '%' binds
                       # tighter than '+', so the unparenthesized form
                       # formatted first and then raised TypeError (str + int).
                       _url=config.DEFERRED_URL_PREFIX + '/start_upload/%s' % (iteration + 1),
                       _countdown=COUNTDOWN)
        return
    logging.info('Found backup_id "%s" to upload.', backup_id)
    deferred.defer(upload_to_temp_table, entity_kind, backup_id,
                   _queue=config.DEFERRED_QUEUE,
                   _url=config.DEFERRED_URL_PREFIX + '/upload_to_temp_table')

def upload_to_temp_table(entity_kind, backup_id):
    """Load backup `backup_id` for `entity_kind` into a temporary BigQuery table.

    The temporary table name is suffixed with the current Unix timestamp so
    repeated runs do not collide. After submitting the ingestion job, defers
    `transfer_to_final_table` with a one-minute delay to give the job a head
    start before the first completion check.
    """
    # Timestamp suffix makes the temporary table name unique per run.
    now = int(time.time())
    temp_table = '%s_%d' % (entity_kind, now)
    logging.info('Uploading backup_id "%s", entity_kind "%s" to temporary table "%s".',
                 backup_id, entity_kind, temp_table)
    ingestion_job = bigquery.submit_backup_ingestion_job(
        entity_kind, backup_id, table_id=temp_table)
    logging.info('Upload job "%s" submitted.', ingestion_job)
    # One-minute countdown before the first is-it-done check downstream.
    deferred.defer(transfer_to_final_table, entity_kind, temp_table, ingestion_job,
                   _queue=config.DEFERRED_QUEUE,
                   _url=config.DEFERRED_URL_PREFIX + '/transfer_to_final_table',
                   _countdown=60)

def transfer_to_final_table(entity_kind, temporary_table_id, job_id, iteration=1):
    """Move uploaded data from the temporary table to the final table.

    Polls the upload job `job_id`; while it is incomplete, re-enqueues
    itself to check again after COUNTDOWN seconds. Once complete, submits
    a table-transfer job from `temporary_table_id` into the `entity_kind`
    table and defers `delete_temporary_table` for cleanup.
    """
    logging.info('Checking if upload job "%s" is complete.', job_id)
    if not bigquery.is_job_complete(job_id):
        # Not done yet: re-enqueue and poll again after COUNTDOWN seconds.
        logging.info('Upload job "%s" is not complete. Will check again in a bit.', job_id)
        deferred.defer(transfer_to_final_table, entity_kind, temporary_table_id, job_id,
                       iteration=iteration+1,
                       _queue=config.DEFERRED_QUEUE,
                       # (iteration + 1) must be parenthesized: '%' binds
                       # tighter than '+', so the unparenthesized form
                       # formatted first and then raised TypeError (str + int).
                       _url=config.DEFERRED_URL_PREFIX + '/transfer_to_final_table/%s' % (iteration + 1),
                       _countdown=COUNTDOWN)
        return
    logging.info('Upload job "%s" complete.', job_id)
    logging.info('Transferring data from "%s" to "%s".', temporary_table_id, entity_kind)
    transfer_job_id = bigquery.submit_table_transfer_job(entity_kind, temporary_table_id)
    logging.info('Transfer job "%s" submitted.', transfer_job_id)
    # Give the transfer job a minute before the first completion check.
    deferred.defer(delete_temporary_table, temporary_table_id, transfer_job_id,
                   _queue=config.DEFERRED_QUEUE,
                   _url=config.DEFERRED_URL_PREFIX + '/delete_temporary_table',
                   _countdown=60)

def delete_temporary_table(temporary_table_id, transfer_job_id, iteration=1):
    """Delete the temporary table once the transfer job has finished.

    Polls the transfer job `transfer_job_id`; while it is incomplete,
    re-enqueues itself to check again after COUNTDOWN seconds. Once
    complete, submits a delete-table job for `temporary_table_id`.
    This is the final stage of the pipeline.
    """
    logging.info('Checking if transfer job "%s" is complete.', transfer_job_id)
    if not bigquery.is_job_complete(transfer_job_id):
        # Not done yet: re-enqueue and poll again after COUNTDOWN seconds.
        logging.info('Transfer job "%s" is not complete. Will check again in a bit.', transfer_job_id)
        deferred.defer(delete_temporary_table, temporary_table_id, transfer_job_id,
                       iteration=iteration+1,
                       _queue=config.DEFERRED_QUEUE,
                       # (iteration + 1) must be parenthesized: '%' binds
                       # tighter than '+', so the unparenthesized form
                       # formatted first and then raised TypeError (str + int).
                       _url=config.DEFERRED_URL_PREFIX + '/delete_temporary_table/%s' % (iteration + 1),
                       _countdown=COUNTDOWN)
        return
    logging.info('Transfer job "%s" complete.', transfer_job_id)
    logging.info('Deleting temporary table "%s".', temporary_table_id)
    bigquery.submit_delete_table_job(temporary_table_id)
